gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2017 by Hitachi Vantara : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.trans.steps.dynamicsqlrow; import java.util.List; import org.pentaho.di.core.CheckResult; import org.pentaho.di.core.CheckResultInterface; import org.pentaho.di.core.Const; import org.pentaho.di.core.util.Utils; import org.pentaho.di.core.database.Database; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleDatabaseException; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleStepException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.di.trans.DatabaseImpact; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepDataInterface; import 
org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.step.StepMetaInterface; import org.pentaho.metastore.api.IMetaStore; import org.w3c.dom.Node; public class DynamicSQLRowMeta extends BaseStepMeta implements StepMetaInterface { private static Class<?> PKG = DynamicSQLRowMeta.class; // for i18n purposes, needed by Translator2!! /** database connection */ private DatabaseMeta databaseMeta; /** SQL Statement */ private String sql; private String sqlfieldname; /** Number of rows to return (0=ALL) */ private int rowLimit; /** * false: don't return rows where nothing is found true: at least return one source row, the rest is NULL */ private boolean outerJoin; private boolean replacevars; public boolean queryonlyonchange; public DynamicSQLRowMeta() { super(); // allocate BaseStepMeta } /** * @return Returns the database. */ public DatabaseMeta getDatabaseMeta() { return databaseMeta; } /** * @param database * The database to set. */ public void setDatabaseMeta( DatabaseMeta database ) { this.databaseMeta = database; } /** * @return Returns the outerJoin. */ public boolean isOuterJoin() { return outerJoin; } /** * @param outerJoin * The outerJoin to set. */ public void setOuterJoin( boolean outerJoin ) { this.outerJoin = outerJoin; } /** * @return Returns the replacevars. */ public boolean isVariableReplace() { return replacevars; } /** * @param replacevars * The replacevars to set. */ public void setVariableReplace( boolean replacevars ) { this.replacevars = replacevars; } /** * @return Returns the queryonlyonchange. */ public boolean isQueryOnlyOnChange() { return queryonlyonchange; } /** * @param queryonlyonchange * The queryonlyonchange to set. */ public void setQueryOnlyOnChange( boolean queryonlyonchange ) { this.queryonlyonchange = queryonlyonchange; } /** * @return Returns the rowLimit. */ public int getRowLimit() { return rowLimit; } /** * @param rowLimit * The rowLimit to set. 
*/ public void setRowLimit( int rowLimit ) { this.rowLimit = rowLimit; } /** * @return Returns the sql. */ public String getSql() { return sql; } /** * @param sql * The sql to set. */ public void setSql( String sql ) { this.sql = sql; } /** * @return Returns the sqlfieldname. */ public String getSQLFieldName() { return sqlfieldname; } /** * @param sql * The sqlfieldname to set. */ public void setSQLFieldName( String sqlfieldname ) { this.sqlfieldname = sqlfieldname; } public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException { readData( stepnode, databases ); } public Object clone() { DynamicSQLRowMeta retval = (DynamicSQLRowMeta) super.clone(); return retval; } private void readData( Node stepnode, List<DatabaseMeta> databases ) throws KettleXMLException { try { String con = XMLHandler.getTagValue( stepnode, "connection" ); databaseMeta = DatabaseMeta.findDatabase( databases, con ); sql = XMLHandler.getTagValue( stepnode, "sql" ); outerJoin = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "outer_join" ) ); replacevars = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "replace_vars" ) ); queryonlyonchange = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "query_only_on_change" ) ); rowLimit = Const.toInt( XMLHandler.getTagValue( stepnode, "rowlimit" ), 0 ); sqlfieldname = XMLHandler.getTagValue( stepnode, "sql_fieldname" ); } catch ( Exception e ) { throw new KettleXMLException( BaseMessages.getString( PKG, "DynamicSQLRowMeta.Exception.UnableToLoadStepInfo" ), e ); } } public void setDefault() { databaseMeta = null; rowLimit = 0; sql = ""; outerJoin = false; replacevars = false; sqlfieldname = null; queryonlyonchange = false; } public void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { if ( databaseMeta == null ) { return; } Database db = new 
Database( loggingObject, databaseMeta ); databases = new Database[] { db }; // Keep track of this one for cancelQuery // First try without connecting to the database... (can be S L O W) // See if it's in the cache... RowMetaInterface add = null; String realSQL = sql; if ( replacevars ) { realSQL = space.environmentSubstitute( realSQL ); } try { add = db.getQueryFields( realSQL, false ); } catch ( KettleDatabaseException dbe ) { throw new KettleStepException( BaseMessages.getString( PKG, "DynamicSQLRowMeta.Exception.UnableToDetermineQueryFields" ) + Const.CR + sql, dbe ); } if ( add != null ) { // Cache hit, just return it this... for ( int i = 0; i < add.size(); i++ ) { ValueMetaInterface v = add.getValueMeta( i ); v.setOrigin( name ); } row.addRowMeta( add ); } else { // No cache hit, connect to the database, do it the hard way... try { db.connect(); add = db.getQueryFields( realSQL, false ); for ( int i = 0; i < add.size(); i++ ) { ValueMetaInterface v = add.getValueMeta( i ); v.setOrigin( name ); } row.addRowMeta( add ); db.disconnect(); } catch ( KettleDatabaseException dbe ) { throw new KettleStepException( BaseMessages.getString( PKG, "DynamicSQLRowMeta.Exception.ErrorObtainingFields" ), dbe ); } } } public String getXML() { StringBuilder retval = new StringBuilder(); retval .append( " " + XMLHandler.addTagValue( "connection", databaseMeta == null ? 
"" : databaseMeta.getName() ) ); retval.append( " " + XMLHandler.addTagValue( "rowlimit", rowLimit ) ); retval.append( " " + XMLHandler.addTagValue( "sql", sql ) ); retval.append( " " + XMLHandler.addTagValue( "outer_join", outerJoin ) ); retval.append( " " + XMLHandler.addTagValue( "replace_vars", replacevars ) ); retval.append( " " + XMLHandler.addTagValue( "sql_fieldname", sqlfieldname ) ); retval.append( " " + XMLHandler.addTagValue( "query_only_on_change", queryonlyonchange ) ); return retval.toString(); } public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException { try { databaseMeta = rep.loadDatabaseMetaFromStepAttribute( id_step, "id_connection", databases ); rowLimit = (int) rep.getStepAttributeInteger( id_step, "rowlimit" ); sql = rep.getStepAttributeString( id_step, "sql" ); outerJoin = rep.getStepAttributeBoolean( id_step, "outer_join" ); replacevars = rep.getStepAttributeBoolean( id_step, "replace_vars" ); sqlfieldname = rep.getStepAttributeString( id_step, "sql_fieldname" ); queryonlyonchange = rep.getStepAttributeBoolean( id_step, "query_only_on_change" ); } catch ( Exception e ) { throw new KettleException( BaseMessages.getString( PKG, "DynamicSQLRowMeta.Exception.UnexpectedErrorReadingStepInfo" ), e ); } } public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException { try { rep.saveDatabaseMetaStepAttribute( id_transformation, id_step, "id_connection", databaseMeta ); rep.saveStepAttribute( id_transformation, id_step, "rowlimit", rowLimit ); rep.saveStepAttribute( id_transformation, id_step, "sql", sql ); rep.saveStepAttribute( id_transformation, id_step, "outer_join", outerJoin ); rep.saveStepAttribute( id_transformation, id_step, "replace_vars", replacevars ); rep.saveStepAttribute( id_transformation, id_step, "sql_fieldname", sqlfieldname ); rep.saveStepAttribute( id_transformation, id_step, 
"query_only_on_change", queryonlyonchange ); // Also, save the step-database relationship! if ( databaseMeta != null ) { rep.insertStepDatabase( id_transformation, id_step, databaseMeta.getObjectId() ); } } catch ( Exception e ) { throw new KettleException( BaseMessages.getString( PKG, "DynamicSQLRowMeta.Exception.UnableToSaveStepInfo" ) + id_step, e ); } } public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) { CheckResult cr; String error_message = ""; // See if we have input streams leading to this step! if ( input.length > 0 ) { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "DynamicSQLRowMeta.CheckResult.ReceivingInfo" ), stepMeta ); remarks.add( cr ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "DynamicSQLRowMeta.CheckResult.NoInputReceived" ), stepMeta ); remarks.add( cr ); } // Check for SQL field if ( Utils.isEmpty( sqlfieldname ) ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "DynamicSQLRowMeta.CheckResult.SQLFieldNameMissing" ), stepMeta ); remarks.add( cr ); } else { ValueMetaInterface vfield = prev.searchValueMeta( sqlfieldname ); if ( vfield == null ) { cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "DynamicSQLRowMeta.CheckResult.SQLFieldNotFound", sqlfieldname ), stepMeta ); } else { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "DynamicSQLRowMeta.CheckResult.SQLFieldFound", sqlfieldname, vfield.getOrigin() ), stepMeta ); } remarks.add( cr ); } if ( databaseMeta != null ) { Database db = new Database( loggingObject, databaseMeta ); databases = new Database[] { db }; // Keep track of this one for cancelQuery try { db.connect(); if ( sql != null && sql.length() != 0 ) { error_message = 
""; RowMetaInterface r = db.getQueryFields( sql, true ); if ( r != null ) { cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString( PKG, "DynamicSQLRowMeta.CheckResult.QueryOK" ), stepMeta ); remarks.add( cr ); } else { error_message = BaseMessages.getString( PKG, "DynamicSQLRowMeta.CheckResult.InvalidDBQuery" ); cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, error_message, stepMeta ); remarks.add( cr ); } } } catch ( KettleException e ) { error_message = BaseMessages.getString( PKG, "DynamicSQLRowMeta.CheckResult.ErrorOccurred" ) + e.getMessage(); cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, error_message, stepMeta ); remarks.add( cr ); } finally { db.disconnect(); } } else { error_message = BaseMessages.getString( PKG, "DynamicSQLRowMeta.CheckResult.InvalidConnection" ); cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, error_message, stepMeta ); remarks.add( cr ); } } public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta tr, Trans trans ) { return new DynamicSQLRow( stepMeta, stepDataInterface, cnr, tr, trans ); } public StepDataInterface getStepData() { return new DynamicSQLRowData(); } public void analyseImpact( List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, Repository repository, IMetaStore metaStore ) throws KettleStepException { RowMetaInterface out = prev.clone(); getFields( out, stepMeta.getName(), new RowMetaInterface[] { info, }, null, transMeta, repository, metaStore ); if ( out != null ) { for ( int i = 0; i < out.size(); i++ ) { ValueMetaInterface outvalue = out.getValueMeta( i ); DatabaseImpact di = new DatabaseImpact( DatabaseImpact.TYPE_IMPACT_READ, transMeta.getName(), stepMeta.getName(), databaseMeta .getDatabaseName(), "", outvalue.getName(), outvalue.getName(), stepMeta.getName(), sql, BaseMessages.getString( PKG, "DynamicSQLRowMeta.DatabaseImpact.Title" ) ); 
impact.add( di ); } } } public DatabaseMeta[] getUsedDatabaseConnections() { if ( databaseMeta != null ) { return new DatabaseMeta[] { databaseMeta }; } else { return super.getUsedDatabaseConnections(); } } public boolean supportsErrorHandling() { return true; } }
package cs516.gabrielGheorghian.subViews;

import java.awt.Color;
import java.awt.Dimension;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.event.FocusEvent;
import java.awt.event.FocusListener;
import java.util.ArrayList;
import java.util.Date;

import javax.swing.JEditorPane;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTextField;

import cs516.gabrielGheorghian.JEEPapp.JEEPapp;
import cs516.gabrielGheorghian.data.Contact;
import cs516.gabrielGheorghian.data.Mail;

/**
 * Panel for composing an outgoing email: To/Cc/Bcc/Subject fields plus a
 * plain-text body editor. Recipient fields are validated on focus loss and
 * cleared when they contain an invalid address.
 */
public class MailSendDisplay extends JPanel implements FocusListener {

	private static final long serialVersionUID = 1L;

	private JTextField toRecipientsField;
	private JTextField ccRecipientsField;
	private JTextField bccRecipientsField;
	private JTextField subjectField;
	private JEditorPane contentField;
	private JLabel toRecipients;
	private JLabel ccRecipients;
	private JLabel bccRecipients;
	private JLabel subjectLabel;
	private GridBagLayout gridBagLayout;
	// Last recipient field that gained focus; addContact() appends there.
	private JTextField currentFocus;
	private final String EMAILREGEX =
			"^[a-zA-Z][\\w\\.-]*[a-zA-Z0-9]@[a-zA-Z0-9][\\w\\.-]*[a-zA-Z0-9]\\.[a-zA-Z][a-zA-Z\\.]*[a-zA-Z]$";

	/**
	 * Constructor: builds the compose form using a GridBagLayout.
	 */
	public MailSendDisplay() {
		super();
		gridBagLayout = new GridBagLayout();
		setLayout(gridBagLayout);
		initialize();
	}

	/**
	 * Create the GUI: labels in column 0, input fields in columns 1-4, and the
	 * message body editor spanning the bottom row.
	 */
	private void initialize() {
		toRecipients = new JLabel("To:");
		add(toRecipients, getConstraints(0, 0, 1, 1, GridBagConstraints.EAST, 0, 0));

		ccRecipients = new JLabel("Cc:");
		add(ccRecipients, getConstraints(0, 1, 1, 1, GridBagConstraints.EAST, 0, 0));

		bccRecipients = new JLabel("Bcc:");
		add(bccRecipients, getConstraints(0, 2, 1, 1, GridBagConstraints.EAST, 0, 0));

		subjectLabel = new JLabel("Subject:");
		add(subjectLabel, getConstraints(0, 3, 1, 1, GridBagConstraints.EAST, 0, 0));

		// Recipient fields register a FocusListener so only valid addresses remain.
		toRecipientsField = createRecipientField(true);
		add(toRecipientsField, getConstraints(1, 0, 4, 1, GridBagConstraints.WEST, 1, 0));

		ccRecipientsField = createRecipientField(true);
		add(ccRecipientsField, getConstraints(1, 1, 4, 1, GridBagConstraints.WEST, 1, 0));

		bccRecipientsField = createRecipientField(true);
		add(bccRecipientsField, getConstraints(1, 2, 4, 1, GridBagConstraints.WEST, 1, 0));

		// Subject is free text — no validation listener.
		subjectField = createRecipientField(false);
		add(subjectField, getConstraints(1, 3, 4, 1, GridBagConstraints.WEST, 1, 0));

		contentField = new JEditorPane();
		contentField.setContentType("text/plain;");
		contentField.setEditable(true);
		contentField.setPreferredSize(new Dimension(500, 200));
		JScrollPane jb = new JScrollPane(contentField);
		jb.setVerticalScrollBarPolicy(JScrollPane.VERTICAL_SCROLLBAR_ALWAYS);
		add(jb, getConstraints(0, 4, 5, 1, GridBagConstraints.CENTER, 1, 1));

		currentFocus = toRecipientsField;
	}

	/**
	 * Builds a standard single-line text field for the form.
	 *
	 * @param validated whether this panel validates the field's content on focus loss
	 * @return the configured field
	 */
	private JTextField createRecipientField(boolean validated) {
		JTextField field = new JTextField();
		field.setColumns(40);
		field.setEditable(true);
		field.setMinimumSize(new Dimension(400, 20));
		field.setBackground(Color.WHITE);
		if (validated) {
			field.addFocusListener(this);
		}
		return field;
	}

	/**
	 * A method for setting grid bag constraints.
	 *
	 * @param gridx      grid column
	 * @param gridy      grid row
	 * @param gridwidth  columns spanned
	 * @param gridheight rows spanned
	 * @param anchor     component anchor
	 * @param weightx    horizontal weight
	 * @param weighty    vertical weight
	 * @return the populated constraints object
	 */
	private GridBagConstraints getConstraints(int gridx, int gridy, int gridwidth, int gridheight, int anchor,
			float weightx, float weighty) {
		GridBagConstraints c = new GridBagConstraints();
		c.insets = new Insets(5, 5, 5, 5);
		c.ipadx = 0;
		c.ipady = 0;
		c.gridx = gridx;
		c.gridy = gridy;
		c.gridwidth = gridwidth;
		c.gridheight = gridheight;
		c.anchor = anchor;
		// FIX: the parameter was previously ignored (hard-coded 1.0), so labels and
		// fields all stretched equally. Honor the caller-supplied weight.
		c.weightx = weightx;
		c.weighty = weighty;
		c.fill = GridBagConstraints.BOTH;
		return c;
	}

	/**
	 * Builds a Mail object from the form contents.
	 *
	 * @return the mail, or null when To, Subject, or body is empty
	 */
	public Mail getEmail() {
		Mail mail = null;
		if (!toRecipientsField.getText().equals("") && !subjectField.getText().equals("")
				&& !contentField.getText().equals("")) {
			ArrayList<String> toRecipients = splitAddresses(toRecipientsField.getText());
			ArrayList<String> ccRecipients = splitAddresses(ccRecipientsField.getText());
			// FIX: previously read the Cc field here, so Bcc recipients were silently
			// replaced by a copy of the Cc list.
			ArrayList<String> bccRecipients = splitAddresses(bccRecipientsField.getText());

			mail = new Mail(-1, JEEPapp.mailConfig.getUserEmail(), toRecipients, ccRecipients, bccRecipients,
					subjectField.getText(), contentField.getText(), new Date(), 3);
		}
		return mail;
	}

	/**
	 * Splits a semicolon-separated address string into a list.
	 *
	 * @param text raw field text
	 * @return list of address tokens (unvalidated)
	 */
	private ArrayList<String> splitAddresses(String text) {
		ArrayList<String> addresses = new ArrayList<String>();
		for (String address : text.split(";")) {
			addresses.add(address);
		}
		return addresses;
	}

	/**
	 * clears the fields
	 */
	public void clear() {
		toRecipientsField.setText("");
		ccRecipientsField.setText("");
		bccRecipientsField.setText("");
		subjectField.setText("");
		contentField.setText("");
	}

	/**
	 * Appends the contact's email address to whichever recipient field last had
	 * focus, separating entries with a semicolon.
	 *
	 * @param contact the contact to add; ignored when null
	 */
	public void addContact(Contact contact) {
		if (contact == null) {
			return;
		}
		if (currentFocus == toRecipientsField) {
			appendAddress(toRecipientsField, contact.getEmail());
		} else if (currentFocus == ccRecipientsField) {
			appendAddress(ccRecipientsField, contact.getEmail());
		} else if (currentFocus == bccRecipientsField) {
			appendAddress(bccRecipientsField, contact.getEmail());
		}
	}

	/**
	 * Appends an email address to a field, prefixing a ';' separator when the
	 * field already has content.
	 *
	 * @param field target recipient field
	 * @param email address to append
	 */
	private void appendAddress(JTextField field, String email) {
		if (field.getText().length() == 0) {
			field.setText(field.getText() + email);
		} else {
			field.setText(field.getText() + ";" + email);
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see java.awt.event.FocusListener#focusGained(java.awt.event.FocusEvent)
	 */
	@Override
	public void focusGained(FocusEvent e) {
		currentFocus = (JTextField) e.getSource();
	}

	/*
	 * (non-Javadoc)
	 *
	 * Validates every ';'-separated address in the field that lost focus and
	 * clears the field when any address is invalid.
	 *
	 * @see java.awt.event.FocusListener#focusLost(java.awt.event.FocusEvent)
	 */
	@Override
	public void focusLost(FocusEvent e) {
		JTextField field = (JTextField) e.getSource();
		// FIX: the original `if (field.getText().equals(""));` ended in a stray
		// semicolon, making the empty-field check a no-op. Skip validation for an
		// empty field explicitly (net effect on an empty field is unchanged: it
		// stays empty).
		if (field.getText().equals("")) {
			return;
		}
		boolean acceptable = true;
		for (String email : field.getText().split(";")) {
			if (!email.matches(EMAILREGEX)) {
				acceptable = false;
				break;
			}
		}
		if (!acceptable) {
			field.setText("");
		}
	}
}
/* * Copyright 2012-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.cli; import com.facebook.buck.command.Build; import com.facebook.buck.event.ConsoleEvent; import com.facebook.buck.json.BuildFileParseException; import com.facebook.buck.log.Logger; import com.facebook.buck.model.BuildTarget; import com.facebook.buck.model.BuildTargetException; import com.facebook.buck.parser.BuildFileSpec; import com.facebook.buck.parser.ParserConfig; import com.facebook.buck.parser.TargetNodePredicateSpec; import com.facebook.buck.rules.ActionGraphAndResolver; import com.facebook.buck.rules.BuildEngine; import com.facebook.buck.rules.BuildEvent; import com.facebook.buck.rules.CachingBuildEngine; import com.facebook.buck.rules.ExternalTestRunnerRule; import com.facebook.buck.rules.ExternalTestRunnerTestSpec; import com.facebook.buck.rules.Label; import com.facebook.buck.rules.LocalCachingBuildEngineDelegate; import com.facebook.buck.rules.TargetGraph; import com.facebook.buck.rules.TargetGraphAndBuildTargets; import com.facebook.buck.rules.TargetNode; import com.facebook.buck.rules.TargetNodes; import com.facebook.buck.rules.TestRule; import com.facebook.buck.step.AdbOptions; import com.facebook.buck.step.DefaultStepRunner; import com.facebook.buck.step.TargetDevice; import com.facebook.buck.step.TargetDeviceOptions; import com.facebook.buck.test.CoverageReportFormat; import com.facebook.buck.test.TestRunningOptions; import 
com.facebook.buck.util.Console; import com.facebook.buck.util.ForwardingProcessListener; import com.facebook.buck.util.ListeningProcessExecutor; import com.facebook.buck.util.MoreExceptions; import com.facebook.buck.util.ProcessExecutorParams; import com.facebook.buck.util.concurrent.ConcurrencyLimit; import com.facebook.infer.annotation.SuppressFieldNotInitialized; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Optional; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedSet; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import org.kohsuke.args4j.Option; import java.io.IOException; import java.io.PrintStream; import java.nio.channels.Channels; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutionException; import javax.annotation.Nullable; public class TestCommand extends BuildCommand { public static final String USE_RESULTS_CACHE = "use_results_cache"; private static final Logger LOG = Logger.get(TestCommand.class); @Option(name = "--all", usage = "Whether all of the tests should be run. 
" + "If no targets are given, --all is implied") private boolean all = false; @Option(name = "--code-coverage", usage = "Whether code coverage information will be generated.") private boolean isCodeCoverageEnabled = false; @Option(name = "--code-coverage-format", usage = "Format to be used for coverage") private CoverageReportFormat coverageReportFormat = CoverageReportFormat.HTML; @Option(name = "--code-coverage-title", usage = "Title used for coverage") private String coverageReportTitle = "Code-Coverage Analysis"; @Option(name = "--debug", usage = "Whether the test will start suspended with a JDWP debug port of 5005") private boolean isDebugEnabled = false; @Option(name = "--xml", usage = "Where to write test output as XML.") @Nullable private String pathToXmlTestOutput = null; @Option(name = "--run-with-java-agent", usage = "Whether the test will start a java profiling agent") @Nullable private String pathToJavaAgent = null; @Option(name = "--no-results-cache", usage = "Whether to use cached test results.") @Nullable private Boolean isResultsCacheDisabled = null; @Option(name = "--build-filtered", usage = "Whether to build filtered out tests.") @Nullable private Boolean isBuildFiltered = null; // TODO(#9061229): See if we can remove this option entirely. For now, the // underlying code has been removed, and this option is ignored. @Option( name = "--ignore-when-dependencies-fail", aliases = {"-i"}, usage = "Deprecated option (ignored).", hidden = true) @SuppressWarnings("PMD.UnusedPrivateField") private boolean isIgnoreFailingDependencies; @Option( name = "--dry-run", usage = "Print tests that match the given command line options, but don't run them.") private boolean isDryRun; @Option( name = "--shuffle", usage = "Randomize the order in which test classes are executed." 
+ "WARNING: only works for Java tests!") private boolean isShufflingTests; @Option( name = "--exclude-transitive-tests", usage = "Only run the tests targets that were specified on the command line (without adding " + "more tests by following dependencies).") private boolean shouldExcludeTransitiveTests; @Option( name = "--test-runner-env", usage = "Add or override an environment variable passed to the test runner. Later occurrences " + "override earlier occurrences. Currently this only support Apple(ios/osx) tests.", handler = EnvironmentOverrideOptionHandler.class) private Map<String, String> environmentOverrides = new HashMap<>(); @AdditionalOptions @SuppressFieldNotInitialized private AdbCommandLineOptions adbOptions; @AdditionalOptions @SuppressFieldNotInitialized private TargetDeviceCommandLineOptions targetDeviceOptions; @AdditionalOptions @SuppressFieldNotInitialized private TestSelectorOptions testSelectorOptions; @AdditionalOptions @SuppressFieldNotInitialized private TestLabelOptions testLabelOptions; @Option(name = "--", handler = ConsumeAllOptionsHandler.class) private List<String> withDashArguments = Lists.newArrayList(); public boolean isRunAllTests() { return all || getArguments().isEmpty(); } @Override public boolean isCodeCoverageEnabled() { return isCodeCoverageEnabled; } public boolean isResultsCacheEnabled(BuckConfig buckConfig) { // The option is negative (--no-X) but we prefer to reason about positives, in the code. 
if (isResultsCacheDisabled == null) { boolean isUseResultsCache = buckConfig.getBooleanValue("test", USE_RESULTS_CACHE, true); isResultsCacheDisabled = !isUseResultsCache; } return !isResultsCacheDisabled; } @Override public boolean isDebugEnabled() { return isDebugEnabled; } public Optional<TargetDevice> getTargetDeviceOptional() { return targetDeviceOptions.getTargetDeviceOptional(); } public AdbOptions getAdbOptions(BuckConfig buckConfig) { return adbOptions.getAdbOptions(buckConfig); } public TargetDeviceOptions getTargetDeviceOptions() { return targetDeviceOptions.getTargetDeviceOptions(); } public boolean isDryRun() { return isDryRun; } public boolean isMatchedByLabelOptions(BuckConfig buckConfig, Set<Label> labels) { return testLabelOptions.isMatchedByLabelOptions(buckConfig, labels); } public boolean shouldExcludeTransitiveTests() { return shouldExcludeTransitiveTests; } public boolean shouldExcludeWin() { return testLabelOptions.shouldExcludeWin(); } public boolean isBuildFiltered(BuckConfig buckConfig) { return isBuildFiltered != null ? 
isBuildFiltered : buckConfig.getBooleanValue("test", "build_filtered_tests", false); } public int getNumTestThreads(BuckConfig buckConfig) { if (isDebugEnabled()) { return 1; } return buckConfig.getNumThreads(); } private TestRunningOptions getTestRunningOptions(CommandRunnerParams params) { return TestRunningOptions.builder() .setCodeCoverageEnabled(isCodeCoverageEnabled) .setRunAllTests(isRunAllTests()) .setTestSelectorList(testSelectorOptions.getTestSelectorList()) .setShouldExplainTestSelectorList(testSelectorOptions.shouldExplain()) .setResultsCacheEnabled(isResultsCacheEnabled(params.getBuckConfig())) .setDryRun(isDryRun) .setShufflingTests(isShufflingTests) .setPathToXmlTestOutput(Optional.fromNullable(pathToXmlTestOutput)) .setPathToJavaAgent(Optional.fromNullable(pathToJavaAgent)) .setCoverageReportFormat(coverageReportFormat) .setCoverageReportTitle(coverageReportTitle) .setEnvironmentOverrides(environmentOverrides) .build(); } private int runTestsInternal( CommandRunnerParams params, BuildEngine buildEngine, Build build, Iterable<TestRule> testRules) throws InterruptedException, IOException { if (!withDashArguments.isEmpty()) { params.getBuckEventBus().post(ConsoleEvent.severe( "Unexpected arguments after \"--\" when using internal runner")); return 1; } ConcurrencyLimit concurrencyLimit = new ConcurrencyLimit( getNumTestThreads(params.getBuckConfig()), params.getBuckConfig().getLoadLimit()); try ( CommandThreadManager testPool = new CommandThreadManager( "Test-Run", concurrencyLimit)) { return TestRunning.runTests( params, testRules, build.getExecutionContext(), getTestRunningOptions(params), testPool.getExecutor(), buildEngine, new DefaultStepRunner(build.getExecutionContext())); } catch (ExecutionException e) { params.getBuckEventBus().post(ConsoleEvent.severe( MoreExceptions.getHumanReadableOrLocalizedMessage(e))); return 1; } } private int runTestsExternal( final CommandRunnerParams params, Build build, Iterable<String> command, Iterable<TestRule> 
testRules) throws InterruptedException, IOException {
    // NOTE(review): the signature of this method starts above this view; the body below is
    // unchanged — only comments were added/improved.
    TestRunningOptions options = getTestRunningOptions(params);

    // Walk the test rules, collecting all the specs. Every rule must opt in to external
    // running; bail out with a console error otherwise.
    List<ExternalTestRunnerTestSpec> specs = Lists.newArrayList();
    for (TestRule testRule : testRules) {
      if (!(testRule instanceof ExternalTestRunnerRule)) {
        params.getBuckEventBus().post(ConsoleEvent.severe(String.format(
            "Test %s does not support external test running",
            testRule.getBuildTarget())));
        return 1;
      }
      ExternalTestRunnerRule rule = (ExternalTestRunnerRule) testRule;
      specs.add(rule.getExternalTestRunnerSpec(build.getExecutionContext(), options));
    }

    // Serialize the specs to a file to pass into the test runner.
    Path infoFile = params.getCell().getFilesystem()
        .resolve(params.getCell().getFilesystem().getBuckPaths().getScratchDir())
        .resolve("external_runner_specs.json");
    Files.createDirectories(infoFile.getParent());
    Files.deleteIfExists(infoFile);
    params.getObjectMapper().writerWithDefaultPrettyPrinter().writeValue(infoFile.toFile(), specs);

    // Launch and run the external test runner, forwarding its stdout/stderr to the console.
    // We wait for it to complete, then return its exit code.
    ListeningProcessExecutor processExecutor = new ListeningProcessExecutor();
    ProcessExecutorParams processExecutorParams = ProcessExecutorParams.builder()
        .addAllCommand(command)
        .addAllCommand(withDashArguments)
        .setEnvironment(params.getEnvironment())
        .addCommand("--buck-test-info", infoFile.toString())
        .addCommand(
            "--jobs",
            String.valueOf(getConcurrencyLimit(params.getBuckConfig()).threadLimit))
        .setDirectory(params.getCell().getFilesystem().getRootPath().toFile())
        .build();
    ForwardingProcessListener processListener = new ForwardingProcessListener(
        Channels.newChannel(params.getConsole().getStdOut()),
        Channels.newChannel(params.getConsole().getStdErr()));
    ListeningProcessExecutor.LaunchedProcess process =
        processExecutor.launchProcess(processExecutorParams, processListener);
    try {
      return processExecutor.waitForProcess(process);
    } finally {
      // Always tear the child process down; destroy is a no-op if it already exited, and the
      // second wait reaps it so no zombie is left behind.
      processExecutor.destroyProcess(process, /* force */ false);
      processExecutor.waitForProcess(process);
    }
  }

  /**
   * Entry point for {@code buck test}: parses build files, builds the requested test rules
   * (plus any related tests), then runs them either internally or via an external runner.
   *
   * @param params command-wide services (parser, event bus, console, caches, config)
   * @return exit code; 0 on success, non-zero on parse/build/test failure
   */
  @Override
  public int runWithoutHelp(CommandRunnerParams params) throws IOException, InterruptedException {
    LOG.debug("Running with arguments %s", getArguments());

    try (CommandThreadManager pool = new CommandThreadManager(
        "Test",
        getConcurrencyLimit(params.getBuckConfig()))) {
      // Post the build started event, setting it to the Parser recorded start time if appropriate.
      BuildEvent.Started started = BuildEvent.started(getArguments());
      if (params.getParser().getParseStartTime().isPresent()) {
        params.getBuckEventBus().post(
            started,
            params.getParser().getParseStartTime().get());
      } else {
        params.getBuckEventBus().post(started);
      }

      // The first step is to parse all of the build files. This will populate the parser and find
      // all of the test rules.
      TargetGraph targetGraph;
      ImmutableSet<BuildTarget> explicitBuildTargets;
      ParserConfig parserConfig = new ParserConfig(params.getBuckConfig());
      try {
        // If the user asked to run all of the tests, parse all of the build files looking for any
        // test rules.
        boolean ignoreBuckAutodepsFiles = false;
        if (isRunAllTests()) {
          targetGraph = params.getParser().buildTargetGraphForTargetNodeSpecs(
              params.getBuckEventBus(),
              params.getCell(),
              getEnableParserProfiling(),
              pool.getExecutor(),
              ImmutableList.of(
                  TargetNodePredicateSpec.of(
                      new Predicate<TargetNode<?>>() {
                        @Override
                        public boolean apply(TargetNode<?> input) {
                          return input.getType().isTestRule();
                        }
                      },
                      BuildFileSpec.fromRecursivePath(Paths.get(""), params.getCell().getRoot()))),
              ignoreBuckAutodepsFiles,
              parserConfig.getDefaultFlavorsMode()).getTargetGraph();
          explicitBuildTargets = ImmutableSet.of();

          // Otherwise, the user specified specific test targets to build and run, so build a graph
          // around these.
        } else {
          LOG.debug("Parsing graph for arguments %s", getArguments());
          TargetGraphAndBuildTargets result = params.getParser()
              .buildTargetGraphForTargetNodeSpecs(
                  params.getBuckEventBus(),
                  params.getCell(),
                  getEnableParserProfiling(),
                  pool.getExecutor(),
                  parseArgumentsAsTargetNodeSpecs(
                      params.getBuckConfig(),
                      getArguments()),
                  ignoreBuckAutodepsFiles,
                  parserConfig.getDefaultFlavorsMode());
          targetGraph = result.getTargetGraph();
          explicitBuildTargets = result.getBuildTargets();
          LOG.debug("Got explicit build targets %s", explicitBuildTargets);

          // Collect the tests declared by the explicitly requested targets (e.g. via a `tests`
          // attribute) so they get built and run too.
          ImmutableSet.Builder<BuildTarget> testTargetsBuilder = ImmutableSet.builder();
          for (TargetNode<?> node : targetGraph.getAll(explicitBuildTargets)) {
            ImmutableSortedSet<BuildTarget> nodeTests = TargetNodes.getTestTargetsForNode(node);
            if (!nodeTests.isEmpty()) {
              LOG.debug("Got tests for target %s: %s", node.getBuildTarget(), nodeTests);
              testTargetsBuilder.addAll(nodeTests);
            }
          }
          ImmutableSet<BuildTarget> testTargets = testTargetsBuilder.build();
          if (!testTargets.isEmpty()) {
            // Related tests found: re-parse so the graph also covers them.
            LOG.debug("Got related test targets %s, building new target graph...", testTargets);
            targetGraph = params.getParser().buildTargetGraph(
                params.getBuckEventBus(),
                params.getCell(),
                getEnableParserProfiling(),
                pool.getExecutor(),
                Iterables.concat(
                    explicitBuildTargets,
                    testTargets));
            LOG.debug("Finished building new target graph with tests.");
          }
        }
      } catch (BuildTargetException | BuildFileParseException e) {
        params.getBuckEventBus().post(ConsoleEvent.severe(
            MoreExceptions.getHumanReadableOrLocalizedMessage(e)));
        return 1;
      }

      ActionGraphAndResolver actionGraphAndResolver = Preconditions.checkNotNull(
          params.getActionGraphCache().getActionGraph(
              params.getBuckEventBus(),
              params.getBuckConfig().isActionGraphCheckingEnabled(),
              targetGraph,
              params.getBuckConfig().getKeySeed()));
      // Look up all of the test rules in the action graph.
      Iterable<TestRule> testRules = Iterables.filter(
          actionGraphAndResolver.getActionGraph().getNodes(),
          TestRule.class);

      // Unless the user requests that we build filtered tests, filter them out here, before
      // the build.
      if (!isBuildFiltered(params.getBuckConfig())) {
        testRules = filterTestRules(params.getBuckConfig(), explicitBuildTargets, testRules);
      }

      if (isDryRun()) {
        printMatchingTestRules(params.getConsole(), testRules);
      }

      CachingBuildEngine cachingBuildEngine = new CachingBuildEngine(
          new LocalCachingBuildEngineDelegate(params.getFileHashCache()),
          pool.getExecutor(),
          getBuildEngineMode().or(params.getBuckConfig().getBuildEngineMode()),
          params.getBuckConfig().getBuildDepFiles(),
          params.getBuckConfig().getBuildMaxDepFileCacheEntries(),
          params.getBuckConfig().getBuildArtifactCacheSizeLimit(),
          params.getBuckConfig().getBuildInputRuleKeyFileSizeLimit(),
          params.getObjectMapper(),
          actionGraphAndResolver.getResolver(),
          params.getBuckConfig().getKeySeed());
      try (Build build = createBuild(
          params.getBuckConfig(),
          actionGraphAndResolver.getActionGraph(),
          actionGraphAndResolver.getResolver(),
          params.getCell(),
          params.getAndroidPlatformTargetSupplier(),
          cachingBuildEngine,
          params.getArtifactCache(),
          params.getConsole(),
          params.getBuckEventBus(),
          getTargetDeviceOptional(),
          params.getPlatform(),
          params.getEnvironment(),
          params.getObjectMapper(),
          params.getClock(),
          Optional.of(getAdbOptions(params.getBuckConfig())),
          Optional.of(getTargetDeviceOptions()),
          params.getExecutors())) {
        // Build all of the test rules.
        int exitCode = build.executeAndPrintFailuresToEventBus(
            testRules,
            isKeepGoing(),
            params.getBuckEventBus(),
            params.getConsole(),
            getPathToBuildReport(params.getBuckConfig()));
        params.getBuckEventBus().post(BuildEvent.finished(started, exitCode));
        if (exitCode != 0) {
          return exitCode;
        }

        // If the user requests that we build tests that we filter out, then we perform
        // the filtering here, after we've done the build but before we run the tests.
        if (isBuildFiltered(params.getBuckConfig())) {
          testRules = filterTestRules(params.getBuckConfig(), explicitBuildTargets, testRules);
        }

        // Once all of the rules are built, then run the tests.
        Optional<ImmutableList<String>> externalTestRunner =
            params.getBuckConfig().getExternalTestRunner();
        if (externalTestRunner.isPresent()) {
          return runTestsExternal(
              params,
              build,
              externalTestRunner.get(),
              testRules);
        }
        return runTestsInternal(params, cachingBuildEngine, build, testRules);
      }
    }
  }

  /** {@inheritDoc} — `buck test` builds and executes rules, so it is not read-only. */
  @Override
  public boolean isReadOnly() {
    return false;
  }

  /**
   * Prints the test rules that survived filtering; used by dry runs to show what would run.
   *
   * @param console console whose stdout receives the listing
   * @param testRules rules to list (may be empty)
   */
  private void printMatchingTestRules(Console console, Iterable<TestRule> testRules) {
    PrintStream out = console.getStdOut();
    ImmutableList<TestRule> list = ImmutableList.copyOf(testRules);
    out.println(String.format("MATCHING TEST RULES (%d):", list.size()));
    out.println("");
    if (list.isEmpty()) {
      out.println(" (none)");
    } else {
      for (TestRule testRule : testRules) {
        out.println(" " + testRule.getBuildTarget());
      }
    }
    out.println("");
  }

  /**
   * Filters test rules by label options and by whether they were explicitly requested,
   * returning them sorted by fully-qualified build target name.
   *
   * @param buckConfig configuration backing the label-matching options
   * @param explicitBuildTargets targets the user named on the command line
   * @param testRules candidate rules (typically includes transitive deps' tests)
   * @return the filtered, deterministically ordered set of rules to run
   */
  @VisibleForTesting
  Iterable<TestRule> filterTestRules(
      BuckConfig buckConfig,
      ImmutableSet<BuildTarget> explicitBuildTargets,
      Iterable<TestRule> testRules) {
    ImmutableSortedSet.Builder<TestRule> builder = ImmutableSortedSet.orderedBy(
        new Comparator<TestRule>() {
          @Override
          public int compare(TestRule o1, TestRule o2) {
            return o1.getBuildTarget().getFullyQualifiedName().compareTo(
                o2.getBuildTarget().getFullyQualifiedName());
          }
        });

    for (TestRule rule : testRules) {
      boolean explicitArgument = explicitBuildTargets.contains(rule.getBuildTarget());
      boolean matchesLabel = isMatchedByLabelOptions(buckConfig, rule.getLabels());

      // We always want to run the rules that are given on the command line. Always. Unless we don't
      // want to.
      if (shouldExcludeWin() && !matchesLabel) {
        continue;
      }

      // The testRules Iterable contains transitive deps of the arguments given on the command line,
      // filter those out if such is the user's will.
      if (shouldExcludeTransitiveTests() && !explicitArgument) {
        continue;
      }

      // Normal behavior is to include all rules that match the given label as well as any that
      // were explicitly specified by the user.
      if (explicitArgument || matchesLabel) {
        builder.add(rule);
      }
    }

    return builder.build();
  }

  @Override
  public String getShortDescription() {
    return "builds and runs the tests for the specified target";
  }
}
/* * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0 * (the "License"). You may not use this work except in compliance with the License, which is * available at www.apache.org/licenses/LICENSE-2.0 * * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied, as more fully set forth in the License. * * See the NOTICE file distributed with this work for information regarding copyright ownership. */ package alluxio.wire; import alluxio.Constants; import com.google.common.base.Objects; import com.google.common.base.Preconditions; import java.io.Serializable; import java.util.ArrayList; import java.util.List; import javax.annotation.concurrent.NotThreadSafe; /** * The file information. */ @NotThreadSafe // TODO(jiri): Consolidate with URIStatus. public final class FileInfo implements Serializable { private static final long serialVersionUID = 7119966306934831779L; private long mFileId; private String mName = ""; private String mPath = ""; private String mUfsPath = ""; private long mLength; private long mBlockSizeBytes; private long mCreationTimeMs; private boolean mCompleted; private boolean mFolder; private boolean mPinned; private boolean mCacheable; private boolean mPersisted; private ArrayList<Long> mBlockIds = new ArrayList<>(); private int mInMemoryPercentage; private long mLastModificationTimeMs; private long mTtl; private TtlAction mTtlAction; private String mOwner = ""; private String mGroup = ""; private int mMode; private String mPersistenceState = ""; private boolean mMountPoint; private ArrayList<FileBlockInfo> mFileBlockInfos = new ArrayList<>(); private long mMountId; private int mInAlluxioPercentage; private String mUfsFingerprint = Constants.INVALID_UFS_FINGERPRINT; /** * Creates a new instance of {@link FileInfo}. */ public FileInfo() {} /** * Creates a new instance of {@link FileInfo} from thrift representation. 
* * @param fileInfo the thrift representation of a file information */ protected FileInfo(alluxio.thrift.FileInfo fileInfo) { mFileId = fileInfo.getFileId(); mName = fileInfo.getName(); mPath = fileInfo.getPath(); mUfsPath = fileInfo.getUfsPath(); mLength = fileInfo.getLength(); mBlockSizeBytes = fileInfo.getBlockSizeBytes(); mCreationTimeMs = fileInfo.getCreationTimeMs(); mCompleted = fileInfo.isCompleted(); mFolder = fileInfo.isFolder(); mPinned = fileInfo.isPinned(); mCacheable = fileInfo.isCacheable(); mPersisted = fileInfo.isPersisted(); mBlockIds = new ArrayList<>(fileInfo.getBlockIds()); mInMemoryPercentage = fileInfo.getInMemoryPercentage(); mLastModificationTimeMs = fileInfo.getLastModificationTimeMs(); mTtl = fileInfo.getTtl(); mTtlAction = ThriftUtils.fromThrift(fileInfo.getTtlAction()); mOwner = fileInfo.getOwner(); mGroup = fileInfo.getGroup(); mMode = fileInfo.getMode(); mPersistenceState = fileInfo.getPersistenceState(); mMountPoint = fileInfo.isMountPoint(); mFileBlockInfos = new ArrayList<>(); if (fileInfo.getFileBlockInfos() != null) { for (alluxio.thrift.FileBlockInfo fileBlockInfo : fileInfo.getFileBlockInfos()) { mFileBlockInfos.add(new FileBlockInfo(fileBlockInfo)); } } mMountId = fileInfo.getMountId(); mInAlluxioPercentage = fileInfo.getInAlluxioPercentage(); if (fileInfo.isSetUfsFingerprint()) { mUfsFingerprint = fileInfo.getUfsFingerprint(); } } /** * @return the file id */ public long getFileId() { return mFileId; } /** * @return the file name */ public String getName() { return mName; } /** * @return the file path */ public String getPath() { return mPath; } /** * @return the file UFS path */ public String getUfsPath() { return mUfsPath; } /** * @return the file length */ public long getLength() { return mLength; } /** * @return the file block size (in bytes) */ public long getBlockSizeBytes() { return mBlockSizeBytes; } /** * @return the file creation time (in milliseconds) */ public long getCreationTimeMs() { return mCreationTimeMs; } 
/** * @return whether the file is completed */ public boolean isCompleted() { return mCompleted; } /** * @return whether the file is a folder */ public boolean isFolder() { return mFolder; } /** * @return whether the file is pinned */ public boolean isPinned() { return mPinned; } /** * @return whether the file is cacheable */ public boolean isCacheable() { return mCacheable; } /** * @return whether the file is persisted */ public boolean isPersisted() { return mPersisted; } /** * @return the file block ids */ public List<Long> getBlockIds() { return mBlockIds; } /** * @return the file in memory percentage */ public int getInMemoryPercentage() { return mInMemoryPercentage; } /** * @return the file in alluxio percentage */ public int getInAlluxioPercentage() { return mInAlluxioPercentage; } /** * @return the file last modification time (in milliseconds) */ public long getLastModificationTimeMs() { return mLastModificationTimeMs; } /** * @return the file time-to-live (in seconds) */ public long getTtl() { return mTtl; } /** * @return the {@link TtlAction} */ public TtlAction getTtlAction() { return mTtlAction; } /** * @return the file owner */ public String getOwner() { return mOwner; } /** * @return the file owner group */ public String getGroup() { return mGroup; } /** * @return the file mode bits */ public int getMode() { return mMode; } /** * @return the file persistence state */ public String getPersistenceState() { return mPersistenceState; } /** * @return whether the file is a mount point */ public boolean isMountPoint() { return mMountPoint; } /** * @return the list of file block descriptors */ public List<FileBlockInfo> getFileBlockInfos() { return mFileBlockInfos; } /** * @return the id of the mount */ public long getMountId() { return mMountId; } /** * @return the ufs fingerprint for this file */ public String getUfsFingerprint() { return mUfsFingerprint; } /** * @param fileId the file id to use * @return the file information */ public FileInfo 
setFileId(long fileId) { mFileId = fileId; return this; } /** * @param name the file name to use * @return the file information */ public FileInfo setName(String name) { Preconditions.checkNotNull(name, "name"); mName = name; return this; } /** * @param path the file path to use * @return the file information */ public FileInfo setPath(String path) { Preconditions.checkNotNull(path, "path"); mPath = path; return this; } /** * @param ufsPath the file UFS path to use * @return the file information */ public FileInfo setUfsPath(String ufsPath) { Preconditions.checkNotNull(ufsPath, "ufsPath"); mUfsPath = ufsPath; return this; } /** * @param length the file length to use * @return the file information */ public FileInfo setLength(long length) { mLength = length; return this; } /** * @param blockSizeBytes the file block size (in bytes) to use * @return the file information */ public FileInfo setBlockSizeBytes(long blockSizeBytes) { mBlockSizeBytes = blockSizeBytes; return this; } /** * @param creationTimeMs the file creation time (in milliseconds) to use * @return the file information */ public FileInfo setCreationTimeMs(long creationTimeMs) { mCreationTimeMs = creationTimeMs; return this; } /** * @param completed the completed flag value to use * @return the file information */ public FileInfo setCompleted(boolean completed) { mCompleted = completed; return this; } /** * @param folder the folder flag value to use * @return the file information */ public FileInfo setFolder(boolean folder) { mFolder = folder; return this; } /** * @param pinned the pinned flag value to use * @return the file information */ public FileInfo setPinned(boolean pinned) { mPinned = pinned; return this; } /** * @param cacheable the cacheable flag value to use * @return the file information */ public FileInfo setCacheable(boolean cacheable) { mCacheable = cacheable; return this; } /** * @param persisted the persisted flag value to use * @return the file information */ public FileInfo 
setPersisted(boolean persisted) { mPersisted = persisted; return this; } /** * @param blockIds the file block ids to use * @return the file information */ public FileInfo setBlockIds(List<Long> blockIds) { Preconditions.checkNotNull(blockIds, "blockIds"); mBlockIds = new ArrayList<>(blockIds); return this; } /** * @param inMemoryPercentage the file in memory percentage to use * @return the file information */ public FileInfo setInMemoryPercentage(int inMemoryPercentage) { mInMemoryPercentage = inMemoryPercentage; return this; } /** * @param inAlluxioPercentage the file in alluxio percentage to use * @return the file information */ public FileInfo setInAlluxioPercentage(int inAlluxioPercentage) { mInAlluxioPercentage = inAlluxioPercentage; return this; } /** * @param lastModificationTimeMs the last modification time (in milliseconds) to use * @return the file information */ public FileInfo setLastModificationTimeMs(long lastModificationTimeMs) { mLastModificationTimeMs = lastModificationTimeMs; return this; } /** * @param ttl the file time-to-live (in seconds) to use * @return the file information */ public FileInfo setTtl(long ttl) { mTtl = ttl; return this; } /** * @param ttlAction the {@link TtlAction} to use * @return the updated options object */ public FileInfo setTtlAction(TtlAction ttlAction) { mTtlAction = ttlAction; return this; } /** * @param owner the file owner * @return the file information */ public FileInfo setOwner(String owner) { Preconditions.checkNotNull(owner, "owner"); mOwner = owner; return this; } /** * @param group the file group * @return the file information */ public FileInfo setGroup(String group) { Preconditions.checkNotNull(group, "group"); mGroup = group; return this; } /** * @param mode the file mode bits * @return the file information */ public FileInfo setMode(int mode) { mMode = mode; return this; } /** * @param persistenceState the file persistence state to use * @return the file information */ public FileInfo 
setPersistenceState(String persistenceState) { Preconditions.checkNotNull(persistenceState, "persistenceState"); mPersistenceState = persistenceState; return this; } /** * @param mountPoint the mount point flag value to use * @return the file information */ public FileInfo setMountPoint(boolean mountPoint) { mMountPoint = mountPoint; return this; } /** * @param fileBlockInfos the file block descriptors to use * @return the file information */ public FileInfo setFileBlockInfos(List<FileBlockInfo> fileBlockInfos) { mFileBlockInfos = new ArrayList<>(fileBlockInfos); return this; } /** * @param mountId the id of mount * @return the file information */ public FileInfo setMountId(long mountId) { mMountId = mountId; return this; } /** * @param ufsFingerprint the ufs fingerprint to use * @return the file information */ public FileInfo setUfsFingerprint(String ufsFingerprint) { mUfsFingerprint = ufsFingerprint; return this; } /** * @return thrift representation of the file information */ protected alluxio.thrift.FileInfo toThrift() { List<alluxio.thrift.FileBlockInfo> fileBlockInfos = new ArrayList<>(); for (FileBlockInfo fileBlockInfo : mFileBlockInfos) { fileBlockInfos.add(fileBlockInfo.toThrift()); } alluxio.thrift.FileInfo info = new alluxio.thrift.FileInfo(mFileId, mName, mPath, mUfsPath, mLength, mBlockSizeBytes, mCreationTimeMs, mCompleted, mFolder, mPinned, mCacheable, mPersisted, mBlockIds, mInMemoryPercentage, mLastModificationTimeMs, mTtl, mOwner, mGroup, mMode, mPersistenceState, mMountPoint, fileBlockInfos, ThriftUtils.toThrift(mTtlAction), mMountId, mInAlluxioPercentage, mUfsFingerprint); return info; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (!(o instanceof FileInfo)) { return false; } FileInfo that = (FileInfo) o; return mFileId == that.mFileId && mName.equals(that.mName) && mPath.equals(that.mPath) && mUfsPath.equals(that.mUfsPath) && mLength == that.mLength && mBlockSizeBytes == that.mBlockSizeBytes && mCreationTimeMs 
== that.mCreationTimeMs && mCompleted == that.mCompleted && mFolder == that.mFolder && mPinned == that.mPinned && mCacheable == that.mCacheable && mPersisted == that.mPersisted && mBlockIds.equals(that.mBlockIds) && mInMemoryPercentage == that.mInMemoryPercentage && mLastModificationTimeMs == that.mLastModificationTimeMs && mTtl == that.mTtl && mOwner.equals(that.mOwner) && mGroup.equals(that.mGroup) && mMode == that.mMode && mPersistenceState.equals(that.mPersistenceState) && mMountPoint == that.mMountPoint && mFileBlockInfos.equals(that.mFileBlockInfos) && mTtlAction == that.mTtlAction && mMountId == that.mMountId && mInAlluxioPercentage == that.mInAlluxioPercentage && mUfsFingerprint.equals(that.mUfsFingerprint); } @Override public int hashCode() { return Objects.hashCode(mFileId, mName, mPath, mUfsPath, mLength, mBlockSizeBytes, mCreationTimeMs, mCompleted, mFolder, mPinned, mCacheable, mPersisted, mBlockIds, mInMemoryPercentage, mLastModificationTimeMs, mTtl, mOwner, mGroup, mMode, mPersistenceState, mMountPoint, mFileBlockInfos, mTtlAction, mInAlluxioPercentage, mUfsFingerprint); } @Override public String toString() { return Objects.toStringHelper(this).add("fileId", mFileId).add("name", mName).add("path", mPath) .add("ufsPath", mUfsPath).add("length", mLength).add("blockSizeBytes", mBlockSizeBytes) .add("creationTimeMs", mCreationTimeMs).add("completed", mCompleted).add("folder", mFolder) .add("pinned", mPinned).add("cacheable", mCacheable).add("persisted", mPersisted) .add("blockIds", mBlockIds).add("inMemoryPercentage", mInMemoryPercentage) .add("lastModificationTimesMs", mLastModificationTimeMs).add("ttl", mTtl) .add("ttlAction", mTtlAction).add("owner", mOwner).add("group", mGroup).add("mode", mMode) .add("persistenceState", mPersistenceState).add("mountPoint", mMountPoint) .add("fileBlockInfos", mFileBlockInfos) .add("mountId", mMountId).add("inAlluxioPercentage", mInAlluxioPercentage) .add("ufsFingerprint", mUfsFingerprint) .toString(); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.spi.communication.tcp; import java.net.BindException; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.concurrent.atomic.AtomicInteger; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteException; import org.apache.ignite.cluster.ClusterNode; import org.apache.ignite.internal.managers.communication.GridIoMessageFactory; import org.apache.ignite.internal.processors.timeout.GridTimeoutProcessor; import org.apache.ignite.internal.util.GridConcurrentHashSet; import org.apache.ignite.internal.util.lang.GridAbsPredicate; import org.apache.ignite.internal.util.nio.GridNioRecoveryDescriptor; import org.apache.ignite.internal.util.nio.GridNioServer; import org.apache.ignite.internal.util.nio.GridNioSession; import org.apache.ignite.internal.util.typedef.CI1; import org.apache.ignite.internal.util.typedef.CO; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.lang.IgniteInClosure; import org.apache.ignite.lang.IgniteRunnable; import org.apache.ignite.plugin.extensions.communication.Message; import 
org.apache.ignite.spi.IgniteSpiAdapter;
import org.apache.ignite.spi.IgniteSpiException;
import org.apache.ignite.spi.communication.CommunicationListener;
import org.apache.ignite.spi.communication.CommunicationSpi;
import org.apache.ignite.spi.communication.GridTestMessage;
import org.apache.ignite.testframework.GridSpiTestContext;
import org.apache.ignite.testframework.GridTestNode;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.GridTestKernalContext;
import org.apache.ignite.testframework.junits.IgniteTestResources;
import org.apache.ignite.testframework.junits.spi.GridSpiAbstractTest;
import org.apache.ignite.testframework.junits.spi.GridSpiTest;
import org.junit.Test;

/**
 * Tests TCP communication SPI recovery acknowledgements delivered via the ack closure,
 * including ack-on-idle, ack-on-count and message queue overflow behavior.
 */
@GridSpiTest(spi = TcpCommunicationSpi.class, group = "Communication SPI")
public class IgniteTcpCommunicationRecoveryAckClosureSelfTest<T extends CommunicationSpi>
    extends GridSpiAbstractTest<T> {
    /** Test resources, one per started SPI; cleared and repopulated by startSpis(). */
    private static final Collection<IgniteTestResources> spiRsrcs = new ArrayList<>();

    /** Started SPI instances (index-aligned with {@link #nodes}). */
    protected static final List<TcpCommunicationSpi> spis = new ArrayList<>();

    /** Test cluster nodes (index-aligned with {@link #spis}). */
    protected static final List<ClusterNode> nodes = new ArrayList<>();

    /** Number of SPI/node pairs started per test. */
    private static final int SPI_CNT = 2;

    /** Shared timeout processor injected into each SPI test context. */
    private static GridTimeoutProcessor timeoutProcessor;

    /**
     * Registers the factory for the test message type used throughout this test.
     */
    static {
        GridIoMessageFactory.registerCustom(GridTestMessage.DIRECT_TYPE, new CO<Message>() {
            @Override public Message apply() {
                return new GridTestMessage();
            }
        });
    }

    /**
     * Disable SPI auto-start.
     */
    public IgniteTcpCommunicationRecoveryAckClosureSelfTest() {
        super(false);
    }

    /** Listener that counts received messages and rejects duplicates. */
    private class TestListener implements CommunicationListener<Message> {
        /** Ids of messages seen so far; used to detect duplicate delivery. */
        private GridConcurrentHashSet<Long> msgIds = new GridConcurrentHashSet<>();

        /** Total number of messages received. */
        private AtomicInteger rcvCnt = new AtomicInteger();

        /** {@inheritDoc} */
        @Override public void onMessage(UUID nodeId, Message msg, IgniteRunnable msgC) {
            assertTrue("Unexpected message: " + msg, msg instanceof GridTestMessage);

            GridTestMessage msg0 = (GridTestMessage)msg;

            assertTrue("Duplicated message received: " + msg0, msgIds.add(msg0.getMsgId()));

            rcvCnt.incrementAndGet();

            // Acknowledge message processing to the SPI.
            msgC.run();
        }

        /** {@inheritDoc} */
        @Override public void onDisconnected(UUID nodeId) {
            // No-op.
        }
    }

    /**
     * Acks must be sent when the connection goes idle (msgPerIter below the ack threshold).
     *
     * @throws Exception If failed.
     */
    @Test
    public void testAckOnIdle() throws Exception {
        checkAck(10, 2000, 9);
    }

    /**
     * Acks must be sent when the ack-count threshold is reached (long idle timeout).
     *
     * @throws Exception If failed.
     */
    @Test
    public void testAckOnCount() throws Exception {
        checkAck(10, 60_000, 10);
    }

    /**
     * @param ackCnt Recovery acknowledgement count.
     * @param idleTimeout Idle connection timeout.
     * @param msgPerIter Messages per iteration.
     * @throws Exception If failed.
     */
    private void checkAck(int ackCnt, int idleTimeout, int msgPerIter) throws Exception {
        createSpis(ackCnt, idleTimeout, TcpCommunicationSpi.DFLT_MSG_QUEUE_LIMIT);

        try {
            TcpCommunicationSpi spi0 = spis.get(0);
            TcpCommunicationSpi spi1 = spis.get(1);

            ClusterNode node0 = nodes.get(0);
            ClusterNode node1 = nodes.get(1);

            int msgId = 0;

            int expMsgs = 0;

            long totAcked = 0;

            for (int i = 0; i < 5; i++) {
                info("Iteration: " + i);

                final AtomicInteger ackMsgs = new AtomicInteger(0);

                // Closure invoked by the SPI once a sent message is acknowledged.
                IgniteInClosure<IgniteException> ackC = new CI1<IgniteException>() {
                    @Override public void apply(IgniteException o) {
                        assert o == null;

                        ackMsgs.incrementAndGet();
                    }
                };

                // Send messages in both directions so each side maintains recovery state.
                for (int j = 0; j < msgPerIter; j++) {
                    spi0.sendMessage(node1, new GridTestMessage(node0.id(), ++msgId, 0), ackC);

                    spi1.sendMessage(node0, new GridTestMessage(node1.id(), ++msgId, 0), ackC);

                    if (j == 0) {
                        // Wait for the first message so connections are established before flooding.
                        final TestListener lsnr0 = (TestListener)spi0.getListener();
                        final TestListener lsnr1 = (TestListener)spi1.getListener();

                        GridTestUtils.waitForCondition(new GridAbsPredicate() {
                            @Override public boolean apply() {
                                return lsnr0.rcvCnt.get() >= 1 && lsnr1.rcvCnt.get() >= 1;
                            }
                        }, 1000);
                    }
                }

                expMsgs += msgPerIter;

                final long totAcked0 = totAcked;

                // Inspect each SPI's recovery descriptor (reflective access to the private
                // "nioSrvr"/"sessions"/"acked" members) and wait until acks advance and the
                // outbound recovery queue drains.
                for (TcpCommunicationSpi spi : spis) {
                    GridNioServer srv = U.field(spi, "nioSrvr");

                    Collection<? extends GridNioSession> sessions =
                        GridTestUtils.getFieldValue(srv, "sessions");

                    assertFalse(sessions.isEmpty());

                    boolean found = false;

                    for (GridNioSession ses : sessions) {
                        final GridNioRecoveryDescriptor recoveryDesc = ses.outRecoveryDescriptor();

                        if (recoveryDesc != null) {
                            found = true;

                            GridTestUtils.waitForCondition(new GridAbsPredicate() {
                                @Override public boolean apply() {
                                    long acked = GridTestUtils.getFieldValue(recoveryDesc, "acked");

                                    return acked > totAcked0;
                                }
                            }, 5000);

                            GridTestUtils.waitForCondition(new GridAbsPredicate() {
                                @Override public boolean apply() {
                                    return recoveryDesc.messagesRequests().isEmpty();
                                }
                            }, 10_000);

                            assertEquals("Unexpected messages: " + recoveryDesc.messagesRequests(),
                                0,
                                recoveryDesc.messagesRequests().size());

                            break;
                        }
                    }

                    assertTrue(found);
                }

                final int expMsgs0 = expMsgs;

                // Both listeners must have received every message sent so far.
                for (TcpCommunicationSpi spi : spis) {
                    final TestListener lsnr = (TestListener)spi.getListener();

                    GridTestUtils.waitForCondition(new GridAbsPredicate() {
                        @Override public boolean apply() {
                            return lsnr.rcvCnt.get() >= expMsgs0;
                        }
                    }, 5000);

                    assertEquals(expMsgs, lsnr.rcvCnt.get());
                }

                assertEquals(msgPerIter * 2, ackMsgs.get());

                totAcked += msgPerIter;
            }
        }
        finally {
            stopSpis();
        }
    }

    /**
     * Retries up to three times on BindException (ports may be briefly unavailable).
     *
     * @throws Exception If failed.
     */
    @Test
    public void testQueueOverflow() throws Exception {
        for (int i = 0; i < 3; i++) {
            try {
                startSpis(5, 60_000, 10);

                checkOverflow();

                break;
            }
            catch (IgniteCheckedException e) {
                if (e.hasCause(BindException.class)) {
                    if (i < 2) {
                        info("Got exception caused by BindException, will retry after delay: " + e);

                        stopSpis();

                        U.sleep(10_000);
                    }
                    else
                        throw e;
                }
                else
                    throw e;
            }
            finally {
                stopSpis();
            }
        }
    }

    /**
     * @throws Exception If failed.
     */
    private void checkOverflow() throws Exception {
        TcpCommunicationSpi spi0 = spis.get(0);
        TcpCommunicationSpi spi1 = spis.get(1);

        ClusterNode node0 = nodes.get(0);
        ClusterNode node1 = nodes.get(1);

        // Await time to close the session by queue overflow.
        final int awaitTime = 5_000;

        // Check that session will not be closed by idle timeout because expected close by queue
        // overflow.
        assertTrue(spi0.getIdleConnectionTimeout() > awaitTime);

        final GridNioServer srv1 = U.field(spi1, "nioSrvr");

        // Prevent session close by write timeout.
        srv1.writeTimeout(60_000);

        final AtomicInteger ackMsgs = new AtomicInteger(0);

        IgniteInClosure<IgniteException> ackC = new CI1<IgniteException>() {
            @Override public void apply(IgniteException o) {
                assert o == null;

                ackMsgs.incrementAndGet();
            }
        };

        int msgId = 0;

        // Send message to establish connection.
        spi0.sendMessage(node1, new GridTestMessage(node0.id(), ++msgId, 0), ackC);

        int sentMsgs = 1;

        // Prevent node1 from sending (reflectively flips the server's "skipWrite" test hook),
        // so node0's outbound recovery queue fills up.
        GridTestUtils.setFieldValue(srv1, "skipWrite", true);

        final GridNioSession ses0 = communicationSession(spi0);

        int queueLimit = ses0.outRecoveryDescriptor().queueLimit();

        // Flood up to the queue limit; a send may fail once the limit is hit.
        for (int i = sentMsgs; i < queueLimit; i++) {
            try {
                spi0.sendMessage(node1, new GridTestMessage(node0.id(), ++msgId, 0), ackC);

                sentMsgs++;
            }
            catch (IgniteSpiException e) {
                log.info("Send error [err=" + e + ", sentMsgs=" + sentMsgs + ']');

                break;
            }
        }

        // Wait when session is closed because of queue overflow.
        GridTestUtils.waitForCondition(new GridAbsPredicate() {
            @Override public boolean apply() {
                return ses0.closeTime() != 0;
            }
        }, awaitTime);

        assertTrue("Failed to wait for session close", ses0.closeTime() != 0);

        GridTestUtils.setFieldValue(srv1, "skipWrite", false);

        // Send extra messages to force out all remaining acks, since acks are batched.
        int cnt = 100 - sentMsgs % spi0.getAckSendThreshold();

        for (int i = 0; i < cnt; i++)
            spi0.sendMessage(node1, new GridTestMessage(node0.id(), ++msgId, 0), ackC);

        final int expMsgs = sentMsgs + cnt;

        final TestListener lsnr = (TestListener)spi1.getListener();

        GridTestUtils.waitForCondition(new GridAbsPredicate() {
            @Override public boolean apply() {
                return lsnr.rcvCnt.get() >= expMsgs;
            }
        }, 5000);

        assertEquals(expMsgs, lsnr.rcvCnt.get());

        GridTestUtils.waitForCondition(new GridAbsPredicate() {
            @Override public boolean apply() {
                return expMsgs == ackMsgs.get();
            }
        }, 5000);

        assertEquals(expMsgs, ackMsgs.get());
    }

    /**
     * Waits for and returns the single NIO session of the given SPI (reflective access to the
     * private "sessions" field).
     *
     * @param spi SPI.
     * @return Session.
     * @throws Exception If failed.
     */
    private GridNioSession communicationSession(TcpCommunicationSpi spi) throws Exception {
        final GridNioServer srv = U.field(spi, "nioSrvr");

        GridTestUtils.waitForCondition(new GridAbsPredicate() {
            @Override public boolean apply() {
                Collection<? extends GridNioSession> sessions =
                    GridTestUtils.getFieldValue(srv, "sessions");

                return !sessions.isEmpty();
            }
        }, 5000);

        Collection<? extends GridNioSession> sessions = GridTestUtils.getFieldValue(srv, "sessions");

        assertEquals(1, sessions.size());

        return sessions.iterator().next();
    }

    /**
     * @param ackCnt Recovery acknowledgement count.
     * @param idleTimeout Idle connection timeout.
     * @param queueLimit Message queue limit.
     * @return SPI instance.
     */
    protected TcpCommunicationSpi getSpi(int ackCnt, int idleTimeout, int queueLimit) {
        TcpCommunicationSpi spi = new TcpCommunicationSpi();

        spi.setLocalPort(GridTestUtils.getNextCommPort(getClass()));
        spi.setIdleConnectionTimeout(idleTimeout);
        spi.setTcpNoDelay(true);
        spi.setAckSendThreshold(ackCnt);
        spi.setMessageQueueLimit(queueLimit);
        spi.setSharedMemoryPort(-1);
        spi.setConnectionsPerNode(1);

        return spi;
    }

    /**
     * Starts {@code SPI_CNT} SPI/node pairs, wires their test contexts together and registers
     * each node as remote for every other context.
     *
     * @param ackCnt Recovery acknowledgement count.
     * @param idleTimeout Idle connection timeout.
     * @param queueLimit Message queue limit.
     * @throws Exception If failed.
     */
    private void startSpis(int ackCnt, int idleTimeout, int queueLimit) throws Exception {
        spis.clear();
        nodes.clear();
        spiRsrcs.clear();

        Map<ClusterNode, GridSpiTestContext> ctxs = new HashMap<>();

        timeoutProcessor = new GridTimeoutProcessor(new GridTestKernalContext(log));

        timeoutProcessor.start();

        timeoutProcessor.onKernalStart(true);

        for (int i = 0; i < SPI_CNT; i++) {
            TcpCommunicationSpi spi = getSpi(ackCnt, idleTimeout, queueLimit);

            GridTestUtils.setFieldValue(spi, IgniteSpiAdapter.class, "igniteInstanceName",
                "grid-" + i);

            IgniteTestResources rsrcs = new IgniteTestResources();

            GridTestNode node = new GridTestNode(rsrcs.getNodeId());

            GridSpiTestContext ctx = initSpiContext();

            ctx.setLocalNode(node);

            ctx.timeoutProcessor(timeoutProcessor);

            spiRsrcs.add(rsrcs);

            rsrcs.inject(spi);

            spi.setListener(new TestListener());

            node.setAttributes(spi.getNodeAttributes());

            node.order(i);

            nodes.add(node);

            spi.spiStart(getTestIgniteInstanceName() + (i + 1));

            spis.add(spi);

            spi.onContextInitialized(ctx);

            ctxs.put(node, ctx);
        }

        // For each context set remote nodes.
        for (Map.Entry<ClusterNode, GridSpiTestContext> e : ctxs.entrySet()) {
            for (ClusterNode n : nodes) {
                if (!n.equals(e.getKey()))
                    e.getValue().remoteNodes().add(n);
            }
        }
    }

    /**
     * Like {@link #startSpis}, but retries up to three times on BindException.
     *
     * @param ackCnt Recovery acknowledgement count.
     * @param idleTimeout Idle connection timeout.
     * @param queueLimit Message queue limit.
     * @throws Exception If failed.
     */
    private void createSpis(int ackCnt, int idleTimeout, int queueLimit) throws Exception {
        for (int i = 0; i < 3; i++) {
            try {
                startSpis(ackCnt, idleTimeout, queueLimit);

                break;
            }
            catch (IgniteCheckedException e) {
                if (e.hasCause(BindException.class)) {
                    if (i < 2) {
                        info("Failed to start SPIs because of BindException, will retry after delay.");

                        stopSpis();

                        U.sleep(10_000);
                    }
                    else
                        throw e;
                }
                else
                    throw e;
            }
        }
    }

    /**
     * @throws Exception If failed.
*/ private void stopSpis() throws Exception { if (timeoutProcessor != null) { timeoutProcessor.onKernalStop(true); timeoutProcessor.stop(true); timeoutProcessor = null; } for (CommunicationSpi<Message> spi : spis) { spi.onContextDestroyed(); spi.setListener(null); spi.spiStop(); } for (IgniteTestResources rsrcs : spiRsrcs) rsrcs.stopThreads(); spis.clear(); nodes.clear(); spiRsrcs.clear(); } }
/*
 * Copyright 2010-2019 Amazon.com, Inc. or its affiliates. All Rights
 * Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is
 * distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either
 * express or implied. See the License for the specific language
 * governing
 * permissions and limitations under the License.
 */
package com.amazonaws.metrics;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;

import com.amazonaws.util.AWSRequestMetrics.Field;
import java.util.Arrays;
import java.util.Collections;
import java.util.Set;
import org.junit.Assert;
import org.junit.Test;

public class AwsSdkMetricsTest {

    /**
     * By default the AWS SDK metric collection is disabled. Enabling it should
     * fail unless the necessary CloudWatch related jars are on the classpath.
     * Therefore, this test is expected to fail in enabling the default metric
     * collection, but have absolutely no impact otherwise.
     */
    @Test
    public void enableDefaultMetrics() {
        Assert.assertFalse(AwsSdkMetrics.enableDefaultMetrics());
    }

    @Test
    public void test() {
        // by default, it's disabled
        assertFalse(AwsSdkMetrics.isDefaultMetricsEnabled());
        // won't be able to enable unless the default impl library is on the classpath
        assertFalse(AwsSdkMetrics.enableDefaultMetrics());
        assertFalse(AwsSdkMetrics.isDefaultMetricsEnabled());
        assertSame(RequestMetricCollector.NONE, AwsSdkMetrics.getRequestMetricCollector());
        assertFalse(AwsSdkMetrics.isDefaultMetricsEnabled());
        // effectively no effect
        AwsSdkMetrics.disableMetrics();
        assertFalse(AwsSdkMetrics.isDefaultMetricsEnabled());
    }

    @Test
    public void defaultMetricTypes() {
        // Default set of predefined metric types is not empty
        Set<MetricType> set = AwsSdkMetrics.getPredefinedMetrics();
        assertNotNull(set);
        assertTrue(set.size() > 0);
        // Clear out the default set of predefined metric types
        AwsSdkMetrics.set(Collections.<MetricType>emptyList());
        Set<MetricType> empty = AwsSdkMetrics.getPredefinedMetrics();
        assertNotNull(empty);
        assertTrue(empty.size() == 0);
        // Reconfigure the default set of predefined metric types back to the original
        AwsSdkMetrics.set(set);
        Set<MetricType> set2 = AwsSdkMetrics.getPredefinedMetrics();
        assertNotNull(set2);
        assertTrue(set2.size() > 0);
        // Not the same due to ensuring thread-safety
        assertNotSame(set, set2);
    }

    @Test
    public void setNullOrEmpty() {
        Set<MetricType> orig = AwsSdkMetrics.getPredefinedMetrics();
        assertTrue(orig.size() > 0);
        // Setting null clears the predefined metrics.
        AwsSdkMetrics.set(null);
        Set<MetricType> empty = AwsSdkMetrics.getPredefinedMetrics();
        assertTrue(empty.size() == 0);
        // Repeated null/empty sets return the same empty collection instance.
        AwsSdkMetrics.set(null);
        Set<MetricType> stillEmpty = AwsSdkMetrics.getPredefinedMetrics();
        assertSame(empty, stillEmpty);
        AwsSdkMetrics.set(Collections.<MetricType>emptySet());
        Set<MetricType> empty3 = AwsSdkMetrics.getPredefinedMetrics();
        assertSame(empty, empty3);
        // Restore the original set so other tests are unaffected.
        AwsSdkMetrics.set(orig);
    }

    @Test
    public void addNull() {
        assertFalse(AwsSdkMetrics.add(null));
    }

    @Test
    public void addAllNull() {
        assertFalse(AwsSdkMetrics.addAll(null));
        assertFalse(AwsSdkMetrics.addAll(Collections.<MetricType>emptyList()));
    }

    @Test
    public void removeNull() {
        assertFalse(AwsSdkMetrics.remove(null));
    }

    @Test
    public void addAndRemove() {
        Set<MetricType> orig = AwsSdkMetrics.getPredefinedMetrics();
        AwsSdkMetrics.set(null);
        // Test add and remove
        assertTrue(AwsSdkMetrics.getPredefinedMetrics().isEmpty());
        AwsSdkMetrics.add(Field.ClientExecuteTime);
        assertFalse(AwsSdkMetrics.getPredefinedMetrics().isEmpty());
        AwsSdkMetrics.remove(Field.ClientExecuteTime);
        assertTrue(AwsSdkMetrics.getPredefinedMetrics().isEmpty());
        // Test add more than one entry
        AwsSdkMetrics.add(Field.ClientExecuteTime);
        AwsSdkMetrics.add(Field.Exception);
        assertTrue(AwsSdkMetrics.getPredefinedMetrics().size() == 2);
        AwsSdkMetrics.set(null);
        assertTrue(AwsSdkMetrics.getPredefinedMetrics().isEmpty());
        // Test addAll
        AwsSdkMetrics.addAll(Arrays.asList(Field.Exception, Field.ClientExecuteTime));
        assertTrue(AwsSdkMetrics.getPredefinedMetrics().size() == 2);
        AwsSdkMetrics.set(orig);
        assertTrue(AwsSdkMetrics.getPredefinedMetrics().size() == orig.size());
    }

    @Test
    public void setJvmMetricsExcluded() {
        // Toggle the flag both ways and verify it round-trips.
        final boolean b = AwsSdkMetrics.isMachineMetricExcluded();
        AwsSdkMetrics.setMachineMetricsExcluded(b);
        assertTrue(b == AwsSdkMetrics.isMachineMetricExcluded());
        AwsSdkMetrics.setMachineMetricsExcluded(!b);
        assertFalse(b == AwsSdkMetrics.isMachineMetricExcluded());
        AwsSdkMetrics.setMachineMetricsExcluded(b);
        assertTrue(b == AwsSdkMetrics.isMachineMetricExcluded());
    }

    @Test
    public void setPerHostMetricsIncluded() {
        // Toggle the flag both ways and verify it round-trips.
        final boolean b = AwsSdkMetrics.isPerHostMetricIncluded();
        AwsSdkMetrics.setPerHostMetricsIncluded(b);
        assertTrue(b == AwsSdkMetrics.isPerHostMetricIncluded());
        AwsSdkMetrics.setPerHostMetricsIncluded(!b);
        assertFalse(b == AwsSdkMetrics.isPerHostMetricIncluded());
        AwsSdkMetrics.setPerHostMetricsIncluded(b);
        assertTrue(b == AwsSdkMetrics.isPerHostMetricIncluded());
    }

    @Test
    public void testEnableHttpSocketReadMetric() {
        assertFalse(AwsSdkMetrics.isHttpSocketReadMetricEnabled());
        // Enabling is one-way; no corresponding disable call is exercised here.
        AwsSdkMetrics.enableHttpSocketReadMetric();
        assertTrue(AwsSdkMetrics.isHttpSocketReadMetricEnabled());
    }

    @Test
    public void isMetricEnabled() {
        // originally disabled
        assertFalse(AwsSdkMetrics.isMetricsEnabled());
        // set to NONE, so still disabled
        AwsSdkMetrics.setMetricCollector(MetricCollector.NONE);
        assertFalse(AwsSdkMetrics.isMetricsEnabled());
        // set to a custom collector, so now considered enabled
        AwsSdkMetrics.setMetricCollector(new MetricCollector() {
            @Override
            public boolean start() {
                return true;
            }

            @Override
            public boolean stop() {
                return false;
            }

            @Override
            public boolean isEnabled() {
                return true;
            }

            @Override
            public RequestMetricCollector getRequestMetricCollector() {
                return RequestMetricCollector.NONE;
            }

            @Override
            public ServiceMetricCollector getServiceMetricCollector() {
                return ServiceMetricCollector.NONE;
            }
        });
        assertTrue(AwsSdkMetrics.isMetricsEnabled());
    }

    @Test
    public void setRegion_WithoutRegionsEnum() {
        AwsSdkMetrics.setRegion("us-east-1");
        assertTrue(AwsSdkMetrics.getRegionName().equals("us-east-1"));
    }

    @Test
    public void setRegion_WithNonStandardRegion() {
        AwsSdkMetrics.setRegion("us-east-9");
        assertTrue(AwsSdkMetrics.getRegionName().equals("us-east-9"));
    }

    @Test
    public void setRegions_WhenRegionDoesNotExist_DefaultsToAwsPartition() {
        AwsSdkMetrics.setRegion("non-existent-region");
        assertEquals("non-existent-region", AwsSdkMetrics.getRegionName());
    }
}
/* * Licensed to ElasticSearch and Shay Banon under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. ElasticSearch licenses this * file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.action.search; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.facet.Facets; import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.suggest.Suggest; import java.io.IOException; import static org.elasticsearch.action.search.ShardSearchFailure.readShardSearchFailure; import static org.elasticsearch.search.internal.InternalSearchResponse.readInternalSearchResponse; /** * A response of a search request. 
*/ public class SearchResponse extends ActionResponse implements ToXContent { private InternalSearchResponse internalResponse; private String scrollId; private int totalShards; private int successfulShards; private ShardSearchFailure[] shardFailures; private long tookInMillis; public SearchResponse() { } public SearchResponse(InternalSearchResponse internalResponse, String scrollId, int totalShards, int successfulShards, long tookInMillis, ShardSearchFailure[] shardFailures) { this.internalResponse = internalResponse; this.scrollId = scrollId; this.totalShards = totalShards; this.successfulShards = successfulShards; this.tookInMillis = tookInMillis; this.shardFailures = shardFailures; } public RestStatus status() { if (shardFailures.length == 0) { return RestStatus.OK; } if (successfulShards == 0 && totalShards > 0) { RestStatus status = shardFailures[0].status(); if (shardFailures.length > 1) { for (int i = 1; i < shardFailures.length; i++) { if (shardFailures[i].status().getStatus() >= 500) { status = shardFailures[i].status(); } } } return status; } return RestStatus.OK; } /** * The search hits. */ public SearchHits getHits() { return internalResponse.hits(); } /** * The search facets. */ public Facets getFacets() { return internalResponse.facets(); } public Suggest getSuggest() { return internalResponse.suggest(); } /** * Has the search operation timed out. */ public boolean isTimedOut() { return internalResponse.timedOut(); } /** * How long the search took. */ public TimeValue getTook() { return new TimeValue(tookInMillis); } /** * How long the search took in milliseconds. */ public long getTookInMillis() { return tookInMillis; } /** * The total number of shards the search was executed on. */ public int getTotalShards() { return totalShards; } /** * The successful number of shards the search was executed on. */ public int getSuccessfulShards() { return successfulShards; } /** * The failed number of shards the search was executed on. 
*/
    public int getFailedShards() {
        return totalShards - successfulShards;
    }

    /**
     * The failures that occurred during the search.
     */
    public ShardSearchFailure[] getShardFailures() {
        return this.shardFailures;
    }

    /**
     * If scrolling was enabled ({@link SearchRequest#scroll(org.elasticsearch.search.Scroll)}, the
     * scroll id that can be used to continue scrolling.
     */
    public String getScrollId() {
        return scrollId;
    }

    // XContent field names used by toXContent().
    static final class Fields {
        static final XContentBuilderString _SCROLL_ID = new XContentBuilderString("_scroll_id");
        static final XContentBuilderString _SHARDS = new XContentBuilderString("_shards");
        static final XContentBuilderString TOTAL = new XContentBuilderString("total");
        static final XContentBuilderString SUCCESSFUL = new XContentBuilderString("successful");
        static final XContentBuilderString FAILED = new XContentBuilderString("failed");
        static final XContentBuilderString FAILURES = new XContentBuilderString("failures");
        static final XContentBuilderString STATUS = new XContentBuilderString("status");
        static final XContentBuilderString INDEX = new XContentBuilderString("index");
        static final XContentBuilderString SHARD = new XContentBuilderString("shard");
        static final XContentBuilderString REASON = new XContentBuilderString("reason");
        static final XContentBuilderString TOOK = new XContentBuilderString("took");
        static final XContentBuilderString TIMED_OUT = new XContentBuilderString("timed_out");
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        // Writes fields into the caller's current object scope; no enclosing
        // startObject()/endObject() here (see toString() for how callers wrap it).
        if (scrollId != null) {
            builder.field(Fields._SCROLL_ID, scrollId);
        }
        builder.field(Fields.TOOK, tookInMillis);
        builder.field(Fields.TIMED_OUT, isTimedOut());
        builder.startObject(Fields._SHARDS);
        builder.field(Fields.TOTAL, getTotalShards());
        builder.field(Fields.SUCCESSFUL, getSuccessfulShards());
        builder.field(Fields.FAILED, getFailedShards());
        if (shardFailures.length > 0) {
            builder.startArray(Fields.FAILURES);
            for (ShardSearchFailure shardFailure : shardFailures) {
                builder.startObject();
                if (shardFailure.shard() != null) {
                    builder.field(Fields.INDEX, shardFailure.shard().index());
                    builder.field(Fields.SHARD, shardFailure.shard().shardId());
                }
                builder.field(Fields.STATUS, shardFailure.status().getStatus());
                builder.field(Fields.REASON, shardFailure.reason());
                builder.endObject();
            }
            builder.endArray();
        }
        builder.endObject();
        internalResponse.toXContent(builder, params);
        return builder;
    }

    public static SearchResponse readSearchResponse(StreamInput in) throws IOException {
        SearchResponse response = new SearchResponse();
        response.readFrom(in);
        return response;
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        // Read order must mirror writeTo().
        super.readFrom(in);
        internalResponse = readInternalSearchResponse(in);
        totalShards = in.readVInt();
        successfulShards = in.readVInt();
        int size = in.readVInt();
        if (size == 0) {
            shardFailures = ShardSearchFailure.EMPTY_ARRAY;
        } else {
            shardFailures = new ShardSearchFailure[size];
            for (int i = 0; i < shardFailures.length; i++) {
                shardFailures[i] = readShardSearchFailure(in);
            }
        }
        scrollId = in.readOptionalString();
        tookInMillis = in.readVLong();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        internalResponse.writeTo(out);
        out.writeVInt(totalShards);
        out.writeVInt(successfulShards);
        out.writeVInt(shardFailures.length);
        for (ShardSearchFailure shardSearchFailure : shardFailures) {
            shardSearchFailure.writeTo(out);
        }
        out.writeOptionalString(scrollId);
        out.writeVLong(tookInMillis);
    }

    @Override
    public String toString() {
        try {
            XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
            builder.startObject();
            toXContent(builder, EMPTY_PARAMS);
            builder.endObject();
            return builder.string();
        } catch (IOException e) {
            return "{ \"error\" : \"" + e.getMessage() + "\"}";
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.codehaus.groovy.reflection;

import groovy.lang.MetaClassImpl;
import groovy.lang.MetaMethod;
import groovy.lang.MissingMethodException;
import org.codehaus.groovy.classgen.asm.BytecodeHelper;
import org.codehaus.groovy.runtime.InvokerInvocationException;
import org.codehaus.groovy.runtime.callsite.CallSite;
import org.codehaus.groovy.runtime.callsite.CallSiteGenerator;
import org.codehaus.groovy.runtime.callsite.PogoMetaMethodSite;
import org.codehaus.groovy.runtime.callsite.PojoMetaMethodSite;
import org.codehaus.groovy.runtime.callsite.StaticMetaMethodSite;
import org.codehaus.groovy.runtime.metaclass.MethodHelper;

import java.lang.annotation.Annotation;
import java.lang.ref.SoftReference;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Arrays;

/**
 * A {@link MetaMethod} backed by a reflective {@link Method}, caching generated
 * call-site constructors (softly) and handling accessibility lazily.
 */
@SuppressWarnings("rawtypes")
public class CachedMethod extends MetaMethod implements Comparable {

    public static final CachedMethod[] EMPTY_ARRAY = new CachedMethod[0];

    /**
     * Looks up the CachedMethod wrapping {@code method} in the declaring class's
     * cached-method table, or returns null if not found.
     */
    public static CachedMethod find(final Method method) {
        CachedMethod[] methods = ReflectionCache.getCachedClass(method.getDeclaringClass()).getMethods();
        // The table is sorted consistently with CachedMethod#compareTo, so binary search applies.
        int i = Arrays.binarySearch(methods, method, (o1, o2) -> {
            if (o1 instanceof CachedMethod) {
                return ((CachedMethod) o1).compareTo(o2);
            } else if (o2 instanceof CachedMethod) {
                return -((CachedMethod) o2).compareTo(o1);
            }
            // really, this should never happen, it's evidence of corruption if it does
            throw new ClassCastException("One of the two comparables must be a CachedMethod");
        });
        return (i < 0 ? null : methods[i]);
    }

    //--------------------------------------------------------------------------

    public final CachedClass cachedClass;

    private final Method cachedMethod;
    // Lazily computed in hashCode(); 0 means "not computed yet".
    private int hashCode;

    // Once true, call-site compilation is never attempted again for this method.
    private boolean skipCompiled;
    private boolean accessAllowed;
    private boolean makeAccessibleDone;

    private CachedMethod transformedMethod;

    // Soft caches for generated call-site constructors, one per call-site flavor.
    private SoftReference<Constructor<CallSite>> pogoCallSiteConstructor, pojoCallSiteConstructor, staticCallSiteConstructor;

    public CachedMethod(final CachedClass clazz, final Method method) {
        this.cachedMethod = method;
        this.cachedClass = clazz;
    }

    public CachedMethod(final Method method) {
        this(ReflectionCache.getCachedClass(method.getDeclaringClass()), method);
    }

    /**
     * Orders against either another CachedMethod or a plain {@link Method}.
     * NOTE(review): returns -1 for a null argument instead of throwing, which
     * deviates from the {@link Comparable} contract.
     */
    @Override
    public int compareTo(final Object other) {
        if (other == this) return 0;
        if (other == null) return -1;
        return (other instanceof CachedMethod ? compareToCachedMethod((CachedMethod) other) : compareToMethod((Method) other));
    }

    // Orders by name, then return type, then parameter types, then declaring class.
    private int compareToCachedMethod(final CachedMethod other) {
        int strComp = getName().compareTo(other.getName());
        if (strComp != 0) return strComp;

        int retComp = getReturnType().getName().compareTo(other.getReturnType().getName());
        if (retComp != 0) return retComp;

        CachedClass[] params = getParameterTypes();
        CachedClass[] otherParams = other.getParameterTypes();
        int pd = params.length - otherParams.length;
        if (pd != 0) return pd;

        for (int i = 0, n = params.length; i < n; i += 1) {
            final int nameComp = params[i].getName().compareTo(otherParams[i].getName());
            if (nameComp != 0) return nameComp;
        }

        final int classComp = cachedClass.toString().compareTo(other.getDeclaringClass().toString());
        if (classComp != 0) return classComp;

        // Two distinct methods with identical signature and declaring class cannot exist.
        throw new RuntimeException("Should never happen");
    }

    // Same ordering as compareToCachedMethod, but without the declaring-class tiebreak.
    private int compareToMethod(final Method other) {
        int strComp = getName().compareTo(other.getName());
        if (strComp != 0) return strComp;

        int retComp = getReturnType().getName().compareTo(other.getReturnType().getName());
        if (retComp != 0) return retComp;

        CachedClass[] params = getParameterTypes();
        Class<?>[] mparams = other.getParameterTypes();
        int pd = params.length - mparams.length;
        if (pd != 0) return pd;

        for (int i = 0, n = params.length; i < n; i += 1) {
            final int nameComp = params[i].getName().compareTo(mparams[i].getName());
            if (nameComp != 0) return nameComp;
        }

        return 0;
    }

    @Override
    public boolean equals(final Object other) {
        // Equal to another CachedMethod wrapping the same Method, or to the Method itself.
        return (other instanceof CachedMethod && cachedMethod.equals(((CachedMethod) other).cachedMethod))
            || (other instanceof Method && cachedMethod.equals(other));
    }

    @Override
    public int hashCode() {
        if (hashCode == 0) {
            hashCode = cachedMethod.hashCode();
            // 0 is the "uncomputed" sentinel, so remap a genuine 0 hash to a constant.
            if (hashCode == 0) hashCode = 0xcafebebe;
        }
        return hashCode;
    }

    @Override
    public String toString() {
        return cachedMethod.toString();
    }

    //--------------------------------------------------------------------------

    public boolean canAccessLegally(final Class<?> callerClass) {
        return ReflectionUtils.checkAccessible(callerClass, cachedMethod.getDeclaringClass(), cachedMethod.getModifiers(), false);
    }

    public <T extends Annotation> T getAnnotation(final Class<T> annotationClass) {
        return cachedMethod.getAnnotation(annotationClass);
    }

    /**
     * Returns the underlying method after forcing accessibility and checking access
     * permission; the permission-check result is cached after the first success.
     */
    public Method getCachedMethod() {
        makeAccessibleIfNecessary();
        if (!accessAllowed) {
            AccessPermissionChecker.checkAccessPermission(cachedMethod);
            accessAllowed = true;
        }
        return cachedMethod;
    }

    @Override
    public CachedClass getDeclaringClass() {
        return cachedClass;
    }

    @Override
    public String getDescriptor() {
        return BytecodeHelper.getMethodDescriptor(getReturnType(), getNativeParameterTypes());
    }

    @Override
    public int getModifiers() {
        return cachedMethod.getModifiers();
    }

    @Override
    public String getName() {
        return cachedMethod.getName();
    }

    public int getParamsCount() {
        return getParameterTypes().length;
    }

    // NOTE(review): always returns null; callers apparently must not rely on it.
    public ParameterTypes getParamTypes() {
        return null;
    }

    @Override
    public Class[] getPT() {
        return cachedMethod.getParameterTypes();
    }

    @Override
    public Class getReturnType() {
        return cachedMethod.getReturnType();
    }

    @Override
    public String getSignature() {
        return getName() + getDescriptor();
    }

    public CachedMethod getTransformedMethod() {
        return transformedMethod;
    }

    public void setTransformedMethod(final CachedMethod transformedMethod) {
        this.transformedMethod = transformedMethod;
    }

    @Override
    public boolean isStatic() {
        return MethodHelper.isStatic(cachedMethod);
    }

    public boolean isSynthetic() {
        return cachedMethod.isSynthetic();
    }

    //--------------------------------------------------------------------------

    /**
     * Creates a Groovy-object call site, preferring a bytecode-generated one;
     * falls back to the reflective site when generation fails or is disabled.
     */
    public CallSite createPogoMetaMethodSite(final CallSite site, final MetaClassImpl metaClass, final Class[] params) {
        if (!skipCompiled) {
            Constructor<CallSite> ctor = deref(pogoCallSiteConstructor);
            if (ctor == null) {
                if (CallSiteGenerator.isCompilable(this)) {
                    ctor = CallSiteGenerator.compilePogoMethod(this);
                }
                if (ctor != null) {
                    pogoCallSiteConstructor = new SoftReference<>(ctor);
                } else {
                    // Not compilable: never try again for this method.
                    skipCompiled = true;
                }
            }
            if (ctor != null) {
                try {
                    return ctor.newInstance(site, metaClass, this, params, ctor);
                } catch (Error e) {
                    skipCompiled = true;
                    throw e;
                } catch (Throwable e) {
                    // Swallow and fall through to the reflective call site.
                    skipCompiled = true;
                }
            }
        }
        return new PogoMetaMethodSite.PogoCachedMethodSiteNoUnwrapNoCoerce(site, metaClass, this, params);
    }

    /**
     * Creates a plain-Java-object call site; same strategy as the POGO variant.
     */
    public CallSite createPojoMetaMethodSite(final CallSite site, final MetaClassImpl metaClass, final Class[] params) {
        if (!skipCompiled) {
            Constructor<CallSite> ctor = deref(pojoCallSiteConstructor);
            if (ctor == null) {
                if (CallSiteGenerator.isCompilable(this)) {
                    ctor = CallSiteGenerator.compilePojoMethod(this);
                }
                if (ctor != null) {
                    pojoCallSiteConstructor = new SoftReference<>(ctor);
                } else {
                    skipCompiled = true;
                }
            }
            if (ctor != null) {
                try {
                    return ctor.newInstance(site, metaClass, this, params, ctor);
                } catch (Error e) {
                    skipCompiled = true;
                    throw e;
                } catch (Throwable e) {
                    skipCompiled = true;
                }
            }
        }
        return new PojoMetaMethodSite.PojoCachedMethodSiteNoUnwrapNoCoerce(site, metaClass, this, params);
    }

    /**
     * Creates a static-method call site; same strategy as the POGO variant.
     */
    public CallSite createStaticMetaMethodSite(final CallSite site, final MetaClassImpl metaClass, final Class[] params) {
        if (!skipCompiled) {
            Constructor<CallSite> ctor = deref(staticCallSiteConstructor);
            if (ctor == null) {
                if (CallSiteGenerator.isCompilable(this)) {
                    ctor = CallSiteGenerator.compileStaticMethod(this);
                }
                if (ctor != null) {
                    staticCallSiteConstructor = new SoftReference<>(ctor);
                } else {
                    skipCompiled = true;
                }
            }
            if (ctor != null) {
                try {
                    return ctor.newInstance(site, metaClass, this, params, ctor);
                } catch (Error e) {
                    skipCompiled = true;
                    throw e;
                } catch (Throwable e) {
                    skipCompiled = true;
                }
            }
        }
        return new StaticMetaMethodSite.StaticMetaMethodSiteNoUnwrapNoCoerce(site, metaClass, this, params);
    }

    private static <T> Constructor<T> deref(final SoftReference<Constructor<T>> ref) {
        return (ref != null ? ref.get() : null);
    }

    /**
     * Invokes the wrapped method reflectively, translating reflection and access
     * failures into {@link InvokerInvocationException}; runtime exceptions thrown
     * by the target are rethrown as-is (except {@link MissingMethodException}).
     */
    @Override
    public final Object invoke(final Object object, final Object[] arguments) {
        makeAccessibleIfNecessary();

        if (!accessAllowed) {
            try {
                AccessPermissionChecker.checkAccessPermission(cachedMethod);
                accessAllowed = true;
            } catch (CacheAccessControlException ex) {
                throw new InvokerInvocationException(ex);
            }
        }

        try {
            return cachedMethod.invoke(object, arguments);
        } catch (IllegalArgumentException | IllegalAccessException e) {
            throw new InvokerInvocationException(e);
        } catch (InvocationTargetException e) {
            Throwable cause = e.getCause();
            throw (cause instanceof RuntimeException && !(cause instanceof MissingMethodException)) ? (RuntimeException) cause : new InvokerInvocationException(e);
        }
    }

    private void makeAccessibleIfNecessary() {
        if (!makeAccessibleDone) {
            ReflectionUtils.makeAccessibleInPrivilegedAction(cachedMethod);
            makeAccessibleDone = true;
        }
    }

    public final Method setAccessible() {
        return getCachedMethod();
    }
}
/* * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package software.amazon.awssdk.services.route53.internal; import java.util.stream.Collectors; import software.amazon.awssdk.annotations.SdkInternalApi; import software.amazon.awssdk.core.SdkResponse; import software.amazon.awssdk.core.interceptor.Context; import software.amazon.awssdk.core.interceptor.ExecutionAttributes; import software.amazon.awssdk.core.interceptor.ExecutionInterceptor; import software.amazon.awssdk.services.route53.model.ActivateKeySigningKeyResponse; import software.amazon.awssdk.services.route53.model.AliasTarget; import software.amazon.awssdk.services.route53.model.ChangeInfo; import software.amazon.awssdk.services.route53.model.ChangeResourceRecordSetsResponse; import software.amazon.awssdk.services.route53.model.CreateHealthCheckResponse; import software.amazon.awssdk.services.route53.model.CreateHostedZoneResponse; import software.amazon.awssdk.services.route53.model.CreateKeySigningKeyResponse; import software.amazon.awssdk.services.route53.model.CreateReusableDelegationSetResponse; import software.amazon.awssdk.services.route53.model.DeactivateKeySigningKeyResponse; import software.amazon.awssdk.services.route53.model.DelegationSet; import software.amazon.awssdk.services.route53.model.DeleteHostedZoneResponse; import software.amazon.awssdk.services.route53.model.DeleteKeySigningKeyResponse; import software.amazon.awssdk.services.route53.model.DisableHostedZoneDnssecResponse; import 
software.amazon.awssdk.services.route53.model.EnableHostedZoneDnssecResponse; import software.amazon.awssdk.services.route53.model.GetChangeResponse; import software.amazon.awssdk.services.route53.model.GetHealthCheckResponse; import software.amazon.awssdk.services.route53.model.GetHostedZoneResponse; import software.amazon.awssdk.services.route53.model.GetReusableDelegationSetResponse; import software.amazon.awssdk.services.route53.model.HealthCheck; import software.amazon.awssdk.services.route53.model.HostedZone; import software.amazon.awssdk.services.route53.model.ListHealthChecksResponse; import software.amazon.awssdk.services.route53.model.ListHostedZonesResponse; import software.amazon.awssdk.services.route53.model.ListResourceRecordSetsResponse; import software.amazon.awssdk.services.route53.model.ListReusableDelegationSetsResponse; import software.amazon.awssdk.services.route53.model.ResourceRecordSet; /** * Route 53 returns a portion of the URL resource path as the ID for a few * elements, but when the service accepts those IDs, the resource path portion * cannot be included, otherwise requests fail. This handler removes those * partial resource path elements from IDs returned by Route 53. 
 */
@SdkInternalApi
public final class Route53IdInterceptor implements ExecutionInterceptor {

    /**
     * Rebuilds known Route 53 response types so that the IDs they carry no
     * longer include the partial resource-path prefix (e.g. the
     * {@code /hostedzone/} or {@code /change/} portion). Responses of any
     * other type are returned unchanged.
     */
    @Override
    public SdkResponse modifyResponse(Context.ModifyResponse context, ExecutionAttributes executionAttributes) {
        SdkResponse response = context.response();
        // Each branch copies the response via its builder, replacing only the
        // ID-bearing members; all other members are preserved by toBuilder().
        if (response instanceof ChangeResourceRecordSetsResponse) {
            ChangeResourceRecordSetsResponse result = (ChangeResourceRecordSetsResponse) response;
            return result.toBuilder()
                         .changeInfo(removePrefix(result.changeInfo()))
                         .build();
        } else if (response instanceof CreateHostedZoneResponse) {
            CreateHostedZoneResponse result = (CreateHostedZoneResponse) response;
            return result.toBuilder()
                         .changeInfo(removePrefix(result.changeInfo()))
                         .hostedZone(removePrefix(result.hostedZone()))
                         .delegationSet(removePrefix(result.delegationSet()))
                         .build();
        } else if (response instanceof DeleteHostedZoneResponse) {
            DeleteHostedZoneResponse result = (DeleteHostedZoneResponse) response;
            return result.toBuilder()
                         .changeInfo(removePrefix(result.changeInfo()))
                         .build();
        } else if (response instanceof GetChangeResponse) {
            GetChangeResponse result = (GetChangeResponse) response;
            return result.toBuilder()
                         .changeInfo(removePrefix(result.changeInfo()))
                         .build();
        } else if (response instanceof GetHostedZoneResponse) {
            GetHostedZoneResponse result = (GetHostedZoneResponse) response;
            return result.toBuilder()
                         .hostedZone(removePrefix(result.hostedZone()))
                         .delegationSet(removePrefix(result.delegationSet()))
                         .build();
        } else if (response instanceof ListHostedZonesResponse) {
            // List responses strip the prefix from every element.
            ListHostedZonesResponse result = (ListHostedZonesResponse) response;
            return result.toBuilder()
                         .hostedZones(result.hostedZones().stream()
                                            .map(this::removePrefix)
                                            .collect(Collectors.toList()))
                         .build();
        } else if (response instanceof ListResourceRecordSetsResponse) {
            ListResourceRecordSetsResponse result = (ListResourceRecordSetsResponse) response;
            return result.toBuilder()
                         .resourceRecordSets(result.resourceRecordSets().stream()
                                                   .map(this::removePrefix)
                                                   .collect(Collectors.toList()))
                         .build();
        } else if (response instanceof CreateHealthCheckResponse) {
            CreateHealthCheckResponse result = (CreateHealthCheckResponse) response;
            return result.toBuilder()
                         .healthCheck(removePrefix(result.healthCheck()))
                         .build();
        } else if (response instanceof GetHealthCheckResponse) {
            GetHealthCheckResponse result = (GetHealthCheckResponse) response;
            return result.toBuilder()
                         .healthCheck(removePrefix(result.healthCheck()))
                         .build();
        } else if (response instanceof ListHealthChecksResponse) {
            ListHealthChecksResponse result = (ListHealthChecksResponse) response;
            return result.toBuilder()
                         .healthChecks(result.healthChecks().stream()
                                             .map(this::removePrefix)
                                             .collect(Collectors.toList()))
                         .build();
        } else if (response instanceof CreateReusableDelegationSetResponse) {
            CreateReusableDelegationSetResponse result = (CreateReusableDelegationSetResponse) response;
            return result.toBuilder()
                         .delegationSet(removePrefix(result.delegationSet()))
                         .build();
        } else if (response instanceof GetReusableDelegationSetResponse) {
            GetReusableDelegationSetResponse result = (GetReusableDelegationSetResponse) response;
            return result.toBuilder()
                         .delegationSet(removePrefix(result.delegationSet()))
                         .build();
        } else if (response instanceof ListReusableDelegationSetsResponse) {
            ListReusableDelegationSetsResponse result = (ListReusableDelegationSetsResponse) response;
            return result.toBuilder()
                         .delegationSets(result.delegationSets().stream()
                                               .map(this::removePrefix)
                                               .collect(Collectors.toList()))
                         .build();
        } else if (response instanceof CreateKeySigningKeyResponse) {
            CreateKeySigningKeyResponse result = (CreateKeySigningKeyResponse) response;
            return result.toBuilder()
                         .changeInfo(removePrefix(result.changeInfo()))
                         .build();
        } else if (response instanceof DeleteKeySigningKeyResponse) {
            DeleteKeySigningKeyResponse result = (DeleteKeySigningKeyResponse) response;
            return result.toBuilder()
                         .changeInfo(removePrefix(result.changeInfo()))
                         .build();
        } else if (response instanceof ActivateKeySigningKeyResponse) {
            ActivateKeySigningKeyResponse result = (ActivateKeySigningKeyResponse) response;
            return result.toBuilder()
                         .changeInfo(removePrefix(result.changeInfo()))
                         .build();
        } else if (response instanceof DeactivateKeySigningKeyResponse) {
            DeactivateKeySigningKeyResponse result = (DeactivateKeySigningKeyResponse) response;
            return result.toBuilder()
                         .changeInfo(removePrefix(result.changeInfo()))
                         .build();
        } else if (response instanceof EnableHostedZoneDnssecResponse) {
            EnableHostedZoneDnssecResponse result = (EnableHostedZoneDnssecResponse) response;
            return result.toBuilder()
                         .changeInfo(removePrefix(result.changeInfo()))
                         .build();
        } else if (response instanceof DisableHostedZoneDnssecResponse) {
            DisableHostedZoneDnssecResponse result = (DisableHostedZoneDnssecResponse) response;
            return result.toBuilder()
                         .changeInfo(removePrefix(result.changeInfo()))
                         .build();
        }
        // Not an ID-bearing response type we know about; pass through untouched.
        return response;
    }

    /** Strips the prefix from the alias target zone ID, health-check ID and set identifier. Null-safe. */
    private ResourceRecordSet removePrefix(ResourceRecordSet rrset) {
        if (rrset == null) {
            return null;
        }
        return rrset.toBuilder()
                    .aliasTarget(removePrefix(rrset.aliasTarget()))
                    .healthCheckId(removePrefix(rrset.healthCheckId()))
                    .setIdentifier(removePrefix(rrset.setIdentifier()))
                    .build();
    }

    /** Strips the prefix from the alias target's hosted-zone ID. Null-safe. */
    private AliasTarget removePrefix(AliasTarget aliasTarget) {
        if (aliasTarget == null) {
            return null;
        }
        return aliasTarget.toBuilder()
                          .hostedZoneId(removePrefix(aliasTarget.hostedZoneId()))
                          .build();
    }

    /** Strips the prefix from the change ID. Null-safe. */
    private ChangeInfo removePrefix(ChangeInfo changeInfo) {
        if (changeInfo == null) {
            return null;
        }
        return changeInfo.toBuilder()
                         .id(removePrefix(changeInfo.id()))
                         .build();
    }

    /** Strips the prefix from the hosted-zone ID. Null-safe. */
    private HostedZone removePrefix(HostedZone hostedZone) {
        if (hostedZone == null) {
            return null;
        }
        return hostedZone.toBuilder()
                         .id(removePrefix(hostedZone.id()))
                         .build();
    }

    /** Strips the prefix from the health-check ID. Null-safe. */
    private HealthCheck removePrefix(HealthCheck healthCheck) {
        if (healthCheck == null) {
            return null;
        }
        return healthCheck.toBuilder()
                          .id(removePrefix(healthCheck.id()))
                          .build();
    }

    /** Strips the prefix from the delegation-set ID. Null-safe. */
    private DelegationSet removePrefix(DelegationSet delegationSet) {
        if (delegationSet == null) {
            return null;
        }
        return delegationSet.toBuilder()
                            .id(removePrefix(delegationSet.id()))
                            .build();
    }

    /**
     * Returns everything after the last {@code '/'} in {@code s}.
     * A slash at index 0 is deliberately not treated as a separator
     * ({@code lastIndex > 0}), so a bare "/xyz" is returned unchanged.
     * Null-safe; strings with no slash are returned as-is.
     */
    private String removePrefix(String s) {
        if (s == null) {
            return null;
        }
        int lastIndex = s.lastIndexOf('/');
        if (lastIndex > 0) {
            return s.substring(lastIndex + 1);
        }
        return s;
    }
}
/*
 * ====================================================================
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ====================================================================
 *
 * This software consists of voluntary contributions made by many
 * individuals on behalf of the Apache Software Foundation.  For more
 * information on the Apache Software Foundation, please see
 * <http://www.apache.org/>.
 *
 */

package org.apach3.http.client.utils;

import java.net.URI;
import java.net.URISyntaxException;
import java.util.Stack;

import org.apach3.http.annotation.Immutable;
import org.apach3.http.HttpHost;

/**
 * A collection of utilities for {@link URI URIs}, to workaround
 * bugs within the class or for ease-of-use features.
 *
 * @since 4.0
 */
@Immutable
public class URIUtils {

    /**
     * Constructs a {@link URI} using all the parameters. This should be
     * used instead of
     * {@link URI#URI(String, String, String, int, String, String, String)}
     * or any of the other URI multi-argument URI constructors.
     *
     * @param scheme
     *            Scheme name
     * @param host
     *            Host name
     * @param port
     *            Port number
     * @param path
     *            Path
     * @param query
     *            Query
     * @param fragment
     *            Fragment
     *
     * @throws URISyntaxException
     *             If both a scheme and a path are given but the path is
     *             relative, if the URI string constructed from the given
     *             components violates RFC&nbsp;2396, or if the authority
     *             component of the string is present but cannot be parsed
     *             as a server-based authority
     *
     * @deprecated (4.2) use {@link URIBuilder}.
     */
    @Deprecated
    public static URI createURI(
            final String scheme,
            final String host,
            int port,
            final String path,
            final String query,
            final String fragment) throws URISyntaxException {
        StringBuilder buffer = new StringBuilder();
        if (host != null) {
            if (scheme != null) {
                buffer.append(scheme);
                buffer.append("://");
            }
            buffer.append(host);
            if (port > 0) {
                buffer.append(':');
                buffer.append(port);
            }
        }
        // Guarantee an absolute path: a null or relative path gets a leading '/'.
        if (path == null || !path.startsWith("/")) {
            buffer.append('/');
        }
        if (path != null) {
            buffer.append(path);
        }
        if (query != null) {
            buffer.append('?');
            buffer.append(query);
        }
        if (fragment != null) {
            buffer.append('#');
            buffer.append(fragment);
        }
        return new URI(buffer.toString());
    }

    /**
     * A convenience method for creating a new {@link URI} whose scheme, host
     * and port are taken from the target host, but whose path, query and
     * fragment are taken from the existing URI. The fragment is only used if
     * dropFragment is false.
     *
     * @param uri
     *            Contains the path, query and fragment to use.
     * @param target
     *            Contains the scheme, host and port to use.
     * @param dropFragment
     *            True if the fragment should not be copied.
     *
     * @throws URISyntaxException
     *             If the resulting URI is invalid.
     */
    public static URI rewriteURI(
            final URI uri,
            final HttpHost target,
            boolean dropFragment) throws URISyntaxException {
        if (uri == null) {
            throw new IllegalArgumentException("URI may not be null");
        }
        URIBuilder uribuilder = new URIBuilder(uri);
        if (target != null) {
            uribuilder.setScheme(target.getSchemeName());
            uribuilder.setHost(target.getHostName());
            uribuilder.setPort(target.getPort());
        } else {
            // No target: make the URI relative by clearing the authority parts.
            uribuilder.setScheme(null);
            uribuilder.setHost(null);
            uribuilder.setPort(-1);
        }
        if (dropFragment) {
            uribuilder.setFragment(null);
        }
        return uribuilder.build();
    }

    /**
     * A convenience method for
     * {@link URIUtils#rewriteURI(URI, HttpHost, boolean)} that always keeps the
     * fragment.
     */
    public static URI rewriteURI(
            final URI uri,
            final HttpHost target) throws URISyntaxException {
        return rewriteURI(uri, target, false);
    }

    /**
     * A convenience method that creates a new {@link URI} whose scheme, host, port, path,
     * query are taken from the existing URI, dropping any fragment or user-information.
     * The existing URI is returned unmodified if it has no fragment or user-information.
     *
     * @param uri
     *            original URI.
     * @throws URISyntaxException
     *             If the resulting URI is invalid.
     */
    public static URI rewriteURI(final URI uri) throws URISyntaxException {
        if (uri == null) {
            throw new IllegalArgumentException("URI may not be null");
        }
        if (uri.getFragment() != null || uri.getUserInfo() != null) {
            return new URIBuilder(uri).setFragment(null).setUserInfo(null).build();
        } else {
            return uri;
        }
    }

    /**
     * Resolves a URI reference against a base URI. Work-around for bug in
     * java.net.URI (<http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4708535>)
     *
     * @param baseURI the base URI
     * @param reference the URI reference
     * @return the resulting URI
     */
    public static URI resolve(final URI baseURI, final String reference) {
        return URIUtils.resolve(baseURI, URI.create(reference));
    }

    /**
     * Resolves a URI reference against a base URI. Work-around for bugs in
     * java.net.URI (e.g. <http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4708535>)
     *
     * @param baseURI the base URI
     * @param reference the URI reference
     * @return the resulting URI
     */
    public static URI resolve(final URI baseURI, URI reference){
        // NOTE(review): the "may nor" typo below is a runtime exception message
        // and is intentionally left byte-identical here.
        if (baseURI == null) {
            throw new IllegalArgumentException("Base URI may nor be null");
        }
        if (reference == null) {
            throw new IllegalArgumentException("Reference URI may nor be null");
        }
        String s = reference.toString();
        // A reference that is only a query string is mishandled by URI.resolve;
        // handle it explicitly.
        if (s.startsWith("?")) {
            return resolveReferenceStartingWithQueryString(baseURI, reference);
        }
        // URI.resolve("") is buggy (JDK-4708535): resolve "#" instead, then strip
        // the artificial fragment from the result.
        boolean emptyReference = s.length() == 0;
        if (emptyReference) {
            reference = URI.create("#");
        }
        URI resolved = baseURI.resolve(reference);
        if (emptyReference) {
            String resolvedString = resolved.toString();
            resolved = URI.create(resolvedString.substring(0,
                resolvedString.indexOf('#')));
        }
        return removeDotSegments(resolved);
    }

    /**
     * Resolves a reference starting with a query string.
     *
     * @param baseURI the base URI
     * @param reference the URI reference starting with a query string
     * @return the resulting URI
     */
    private static URI resolveReferenceStartingWithQueryString(
            final URI baseURI, final URI reference) {
        String baseUri = baseURI.toString();
        // Drop the base URI's own query (if any) before appending the reference.
        baseUri = baseUri.indexOf('?') > -1 ?
            baseUri.substring(0, baseUri.indexOf('?')) : baseUri;
        return URI.create(baseUri + reference.toString());
    }

    /**
     * Removes dot segments according to RFC 3986, section 5.2.4
     *
     * @param uri the original URI
     * @return the URI without dot segments
     */
    private static URI removeDotSegments(URI uri) {
        String path = uri.getPath();
        if ((path == null) || (path.indexOf("/.") == -1)) {
            // No dot segments to remove
            return uri;
        }
        String[] inputSegments = path.split("/");
        Stack<String> outputSegments = new Stack<String>();
        for (int i = 0; i < inputSegments.length; i++) {
            if ((inputSegments[i].length() == 0)
                || (".".equals(inputSegments[i]))) {
                // Do nothing
            } else if ("..".equals(inputSegments[i])) {
                // ".." pops the previous segment (never above the root).
                if (!outputSegments.isEmpty()) {
                    outputSegments.pop();
                }
            } else {
                outputSegments.push(inputSegments[i]);
            }
        }
        StringBuilder outputBuffer = new StringBuilder();
        for (String outputSegment : outputSegments) {
            outputBuffer.append('/').append(outputSegment);
        }
        try {
            return new URI(uri.getScheme(), uri.getAuthority(),
                    outputBuffer.toString(), uri.getQuery(), uri.getFragment());
        } catch (URISyntaxException e) {
            throw new IllegalArgumentException(e);
        }
    }

    /**
     * Extracts target host from the given {@link URI}.
     *
     * @param uri
     * @return the target host if the URI is absolute or {@code null} if the URI is
     * relative or does not contain a valid host name.
     *
     * @since 4.1
     */
    public static HttpHost extractHost(final URI uri) {
        if (uri == null) {
            return null;
        }
        HttpHost target = null;
        if (uri.isAbsolute()) {
            int port = uri.getPort(); // may be overridden later
            String host = uri.getHost();
            if (host == null) { // normal parse failed; let's do it ourselves
                // authority does not seem to care about the valid character-set for host names
                host = uri.getAuthority();
                if (host != null) {
                    // Strip off any leading user credentials
                    int at = host.indexOf('@');
                    if (at >= 0) {
                        if (host.length() > at+1 ) {
                            host = host.substring(at+1);
                        } else {
                            host = null; // @ on its own
                        }
                    }
                    // Extract the port suffix, if present
                    if (host != null) {
                        int colon = host.indexOf(':');
                        if (colon >= 0) {
                            // Only the leading digit run after ':' counts as the port.
                            int pos = colon + 1;
                            int len = 0;
                            for (int i = pos; i < host.length(); i++) {
                                if (Character.isDigit(host.charAt(i))) {
                                    len++;
                                } else {
                                    break;
                                }
                            }
                            if (len > 0) {
                                try {
                                    port = Integer.parseInt(host.substring(pos, pos + len));
                                } catch (NumberFormatException ex) {
                                    // Unparsable port: keep the port from uri.getPort().
                                }
                            }
                            host = host.substring(0, colon);
                        }
                    }
                }
            }
            String scheme = uri.getScheme();
            if (host != null) {
                target = new HttpHost(host, port, scheme);
            }
        }
        return target;
    }

    /**
     * This class should not be instantiated.
     */
    private URIUtils() {
    }

}
package com.jiit.minor2.shubhamjoshi.box.signup;

import android.app.AlertDialog;
import android.app.Dialog;
import android.app.FragmentTransaction;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Color;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.text.Html;
import android.util.Log;
import android.view.MotionEvent;
import android.view.View;
import android.widget.EditText;
import android.widget.TextView;

import com.facebook.AccessToken;
import com.facebook.CallbackManager;
import com.facebook.FacebookCallback;
import com.facebook.FacebookException;
import com.facebook.GraphRequest;
import com.facebook.GraphResponse;
import com.facebook.login.LoginResult;
import com.facebook.login.widget.LoginButton;
import com.firebase.client.AuthData;
import com.firebase.client.Firebase;
import com.firebase.client.FirebaseError;
import com.jiit.minor2.shubhamjoshi.box.R;
import com.jiit.minor2.shubhamjoshi.box.chooser.Chooser;
import com.jiit.minor2.shubhamjoshi.box.dialogs.DateDialogPicker;
import com.jiit.minor2.shubhamjoshi.box.model.User;
import com.jiit.minor2.shubhamjoshi.box.utils.Constants;

import org.json.JSONException;
import org.json.JSONObject;

import java.net.MalformedURLException;
import java.net.URL;
import java.util.Arrays;
import java.util.Map;

/**
 * Sign-up screen supporting two flows:
 * <ul>
 *   <li>email/password registration via Firebase ({@code createUser} + {@code authWithPassword})</li>
 *   <li>Facebook OAuth login via the Facebook SDK, then {@code authWithOAuthToken}</li>
 * </ul>
 * On successful authentication the {@link User} record is written under
 * {@code Constants.USER/<encoded email>}, the email is cached in shared
 * preferences, and the activity forwards to {@link Chooser}.
 */
public class SignUp extends AppCompatActivity {

    private TextView dateTextView;
    private EditText email;
    private EditText username;
    private TextView dob;
    private View facebookLoginButton;
    private EditText password;
    private TextView genderTextView;
    private ProgressDialog mProgress;
    private Firebase baseUrl;
    private LoginButton mLoginButton;
    private CallbackManager mCallbackManager;
    private String Email;
    private String Username;
    private String Dob;
    private String Gender;
    private URL profileImage;
    private final static String TAG = SignUp.class.getSimpleName();

    @Override
    protected void onStart() {
        super.onStart();
        //Boiler Plate Code
        // Tapping the date field opens a date-picker dialog fragment.
        dateTextView.setOnTouchListener(new View.OnTouchListener() {
            @Override
            public boolean onTouch(View v, MotionEvent event) {
                DateDialogPicker picker = new DateDialogPicker(v);
                FragmentTransaction ft = getFragmentManager().beginTransaction();
                picker.show(ft, getString(R.string.Date));
                return false;
            }
        });
        // Tapping the gender field opens a single-choice dialog.
        genderTextView.setOnTouchListener(new View.OnTouchListener() {
            @Override
            public boolean onTouch(View v, MotionEvent event) {
                final CharSequence[] gender = {getString(R.string.Male), getString(R.string.Female)};
                AlertDialog.Builder alert = new AlertDialog.Builder(SignUp.this, AlertDialog.THEME_HOLO_DARK);
                alert.setTitle(Html.fromHtml(getString(R.string.green_color_hack) + getString(R.string.Gender) + getString(R.string.font_tag)));
                alert.setItems(gender, new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        // FIX: compare string CONTENT, not identity. The original used
                        // '==' on Strings, which is not guaranteed for resource strings.
                        if (getString(R.string.Male).equals(gender[which].toString())) {
                            genderTextView.setText(getString(R.string.Male));
                        } else
                            genderTextView.setText(getString(R.string.Female));
                    }
                });
                /****HACK **/
                Dialog d = alert.show();
                int dividerId = d.getContext().getResources().getIdentifier(getString(R.string.hack), null, null);
                View divider = d.findViewById(dividerId);
                // FIX: findViewById may return null if the theme's divider id is
                // absent on this platform version; guard against an NPE.
                if (divider != null) {
                    divider.setBackgroundColor(Color.parseColor("#1db954"));
                }
                return false;
            }
        });
    }

    //For storing in db
    // Handles the result of email/password authentication: persists the user,
    // caches the email, and forwards to the Chooser screen.
    Firebase.AuthResultHandler authResultHandler = new Firebase.AuthResultHandler() {
        @Override
        public void onAuthenticated(AuthData authData) {
            // Authenticated successfully with payload authData
            User user = new User(Email, Username, Dob, Gender, "");
            Firebase child = baseUrl.child(Constants.USER).child(Constants.encodeEmail(Email));
            child.setValue(user);
            sharedPrefCreator(Email);
            Intent intent = new Intent(getBaseContext(), Chooser.class);
            intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
            startActivity(intent);
            finish();
        }

        @Override
        public void onAuthenticationError(FirebaseError firebaseError) {
            // Authenticated failed with error firebaseError
        }
    };

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_signup);
        mProgress = new ProgressDialog(SignUp.this, ProgressDialog.STYLE_HORIZONTAL);
        mProgress.setTitle(getString(R.string.processing));
        mProgress.setMessage(getString(R.string.please_wait));
        mProgress.setCancelable(false);
        mProgress.setIndeterminate(true);
        init();
        baseUrl = new Firebase(Constants.FIREBASE_URL);
        mCallbackManager = CallbackManager.Factory.create();
        //Crux of facebook Login
        fbLoginFunctionality();
        //Facebook Login Screen
        // The visible button is a styled proxy; it delegates to the SDK button.
        facebookLoginButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                mLoginButton.performClick();
            }
        });
        View S = findViewById(R.id.signUp);
        S.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                mProgress.show();
                Email = email.getText().toString();
                final String Password = password.getText().toString();
                Username = username.getText().toString();
                Dob = dob.getText().toString();
                Gender = genderTextView.getText().toString();
                //FireBase Normal Login
                baseUrl.createUser(Email, Password, new Firebase.ValueResultHandler<Map<String, Object>>() {
                    @Override
                    public void onSuccess(Map<String, Object> result) {
                        baseUrl.authWithPassword(Email, Password, authResultHandler);
                        mProgress.dismiss(); //dismiss the progress dialog
                    }

                    @Override
                    public void onError(FirebaseError firebaseError) {
                        // there was an error
                        mProgress.dismiss();
                        // Surface field-specific validation errors inline.
                        if (firebaseError.getCode() == FirebaseError.INVALID_EMAIL)
                            email.setError(firebaseError.getMessage());
                        if (firebaseError.getCode() == FirebaseError.INVALID_PASSWORD)
                            password.setError(firebaseError.getMessage());
                    }
                });
            }
        });
    }

    /**
     * Wires up the Facebook SDK login button: requests the profile permissions,
     * then on success fetches the "me" graph node and hands the token plus
     * profile fields to {@link #onFacebookAccessTokenChange}.
     */
    private void fbLoginFunctionality() {
        mLoginButton.setReadPermissions(Arrays.asList(Constants.USER_PHOTO, Constants.EMAIL, Constants.BIRTHDAY, Constants.PUBLIC_PROFILE,"user_friends"));
        mLoginButton.registerCallback(mCallbackManager, new FacebookCallback<LoginResult>() {
            @Override
            public void onSuccess(final LoginResult loginResult) {
                System.out.println("onSuccess");
                final String accessToken = loginResult.getAccessToken()
                        .getToken();
                Log.e(TAG, loginResult.toString());
                GraphRequest request = GraphRequest.newMeRequest(loginResult.getAccessToken(), new GraphRequest.GraphJSONObjectCallback() {
                    @Override
                    public void onCompleted(JSONObject object, GraphResponse response) {
                        try {
                            String id = object.getString("id");
                            try {
                                URL profile_pic = new URL(
                                        "https://graph.facebook.com/" + id + "/picture?type=large");
                                profileImage = profile_pic;
                            } catch (MalformedURLException e) {
                                e.printStackTrace();
                            }
                            String name = object.getString("name");
                            String email = object.getString("email");
                            String gender = object.getString("gender");
                            //String birthday = object.getString("birthday");
                            // NOTE(review): birthday is hard-coded rather than read
                            // from the graph response — looks like a stand-in; confirm.
                            String birthday = "20/05/1995";
                            onFacebookAccessTokenChange(loginResult.getAccessToken(), name, email, birthday, gender);
                        } catch (JSONException e) {
                            e.printStackTrace();
                        }
                    }
                });
                Bundle parameters = new Bundle();
                parameters.putString("fields", "id,name,email,gender,birthday");
                request.setParameters(parameters);
                request.executeAsync();
            }

            @Override
            public void onCancel() {
                System.out.println("onCancel");
            }

            @Override
            public void onError(FacebookException error) {
                System.out.println(error.getMessage());
            }
        });
    }

    /**
     * Exchanges the Facebook access token for a Firebase session. On success
     * the user record is stored and the activity forwards to {@link Chooser};
     * a null token logs the user out of Firebase.
     */
    private void onFacebookAccessTokenChange(AccessToken token, final String name, final String email, final String dob, final String gender) {
        if (token != null) {
            baseUrl.authWithOAuthToken("facebook", token.getToken(), new Firebase.AuthResultHandler() {
                @Override
                public void onAuthenticated(AuthData authData) {
                    //add to db
                    User user = new User(email, name, dob, gender, profileImage.toString());
                    Firebase child = baseUrl.child(Constants.USER).child(Constants.encodeEmail(email));
                    child.setValue(user);
                    sharedPrefCreator(email);
                    Intent intent = new Intent(getBaseContext(), Chooser.class);
                    intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
                    startActivity(intent);
                    finish();
                    // Log.e("SJSJ", profileImage.toString());
                }

                @Override
                public void onAuthenticationError(FirebaseError firebaseError) {
                }
            });
        } else {
            baseUrl.unauth();
        }
    }

    @Override
    protected void onActivityResult(int requestCode, int responseCode, Intent data) {
        //get response: forward the Facebook login result to the SDK callback manager
        super.onActivityResult(requestCode, responseCode, data);
        mCallbackManager.onActivityResult(requestCode, responseCode, data);
    }

    /** Binds all view fields from the layout. */
    public void init() {
        email = (EditText) findViewById(R.id.email);
        username = (EditText) findViewById(R.id.username);
        password = (EditText) findViewById(R.id.password);
        dateTextView = (TextView) findViewById(R.id.date);
        genderTextView = (TextView) findViewById(R.id.gender);
        dob = (TextView) findViewById(R.id.date);
        facebookLoginButton = findViewById(R.id.fbLogin);
        mLoginButton = (LoginButton) findViewById(R.id.login_button);
    }

    /** Caches the (encoded) email in shared preferences for later sessions. */
    private void sharedPrefCreator(String Email) {
        //key value pair
        SharedPreferences sp = getSharedPreferences(Constants.SHAREDPREF_EMAIL, Context.MODE_PRIVATE);
        SharedPreferences.Editor mEditor = sp.edit();
        mEditor.putString(Constants.SPEMAIL, Constants.encodeEmail(Email));
        mEditor.apply();
    }
}
package org.blockframe.core;

import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;

import org.blockframe.core.Block.PlacedBlock;

/**
 * Logs debugging messages.
 * <p>
 * The fields {@link Block#loggingVerbosity}, {@link Canvas#loggingVerbosity} and {@link #loggingVerbosity} are used to filter messages.
 * Conventional values for verbosity can be found in {@link Verbosity}.
 * This interface can be inherited by classes (eg, {@link Block}), injecting the constants into the class's name-space.
 * <p>
 * Calls to the {@link #add(double, Block, Canvas, String, boolean)} (or similar) method with verbosity less than {@link Verbosity#WARNING_4}
 * should be written specifically for the current bug, and be deleted when it is solved.
 * It would be surprising to have a call with such a low level in production code.
 * <p>
 * Nothing is written to the console until {@link #out()} or {@link #err()} are called.
 * This can be annoying when you've got an infinite loop, but it does make the {@link #makeCheckpoint()} functionality possible.
 */
public class DebugLog {

	/**
	 * Constants giving conventional debug-verbosity levels.
	 * <p>
	 * Each {@link Block} and {@link Canvas} has a messaging level. Also, {@link DebugLog} has a global level.
	 * Likewise, each message has a required verbosity level.
	 * If the current level of the block (or of the canvas, or the global level) is greater than the message's required level, the message is recorded.
	 * (Otherwise, it is discarded.)
	 * <p>
	 * So, setting the {@link Block#loggingVerbosity} high will log much detail, setting it to zero means nothing will be logged.
	 * <p>
	 * A message with a low required verbosity will usually be recorded.
	 * Messages with a high required verbosity will recorded in fewer blocks.
	 * <p>
	 * Messages with a verbosity under {@link #WARNING_4} should only be temporary, while debugging code.
	 * The permanent code should not have any messages with very low verbosity.
	 */
	public static interface Verbosity {
		/** No messages. */
		public static final double NONE_0 = 0 ;
		/** Temporary messages, while chasing a particular bug. */
		public static final double BUG_CONTEXT_2 = 2 ;
		/** Temporary messages, while chasing a particular bug. */
		public static final double BUG_DETAIL_3 = 3 ;
		/** Messages warning about unexpected conditions. For example, a child's measured area going outside its container's measured area. */
		public static final double WARNING_4 = 4 ;
		/** Records entering the major methods of the block, such as {@link Block#fill(Quill, Layout)} and {@link Block#draw(Canvas, double, double)}. */
		public static final double ENTERING_5 = 5 ;
		/** Records entering and leaving the major methods of the block, such as {@link Block#fill(Quill, Layout)} and {@link Block#draw(Canvas, double, double)}. */
		public static final double LEAVING_6 = 6 ;
		/** Messages about broad state of the block's operations. */
		public static final double GENERAL_7 = 7 ;
		/** Messages about detail of the block's operations. */
		public static final double DETAIL_8 = 8 ;
		/** All messages. */
		public static final double ALL_9 = 9 ;
	}

	/**
	 * List of unprinted messages.
	 * All reads and writes should be guarded by the internal lock.
	 */
	public static final ArrayList<String> messages = new ArrayList<String>();

	private static final Object lock = new Object();

	/**
	 * Global verbosity threshold.
	 * In this class, we should almost always leave the value at zero.
	 */
	public static double loggingVerbosity = 0 ;

	/**
	 * The thread that made the last call to <code>add</code> a message.
	 */
	private static Thread prevThread ;

	/**
	 * Clears the list of messages (@link {@link #messages}).
	 */
	public static void clear() {
		synchronized (lock) {
			messages.clear();
		}
	}

	/**
	 * Writes all messages to <code>System.out</code>, then clears the list.
	 */
	public static void out() {
		synchronized (lock) {
			for (String message : messages) System.out.println(message);
			messages.clear();
		}
	}

	/**
	 * Writes all messages to <code>System.err</code>, then clears the list.
	 */
	public static void err() {
		synchronized (lock) {
			for (String message : messages) System.err.println(message);
			messages.clear();
		}
	}

	/**
	 * Writes all messages to the given file, then clears the list.
	 */
	public static void file(File file) throws IOException {
		synchronized (lock) {
			PrintWriter writer = new PrintWriter(file);
			for (String message : messages) writer.println(message);
			writer.close();
			messages.clear();
		}
	}

	/**
	 * Notes any changes to which thread is logging messages.
	 * This should be called inside the <code>synchronized</code> code, prior to adding to {@link #messages}.
	 */
	private static void noteThread() {
		Thread thread = Thread.currentThread();
		if (thread!=prevThread) {
			messages.add("---------------- Thread is: "+thread.getName());
			prevThread = thread ;
		}
	}

	// TODO Code the missing addPlain() methods.

	/**
	 * Similar to {@link #add(double, Block, Canvas, String, boolean)}, except that the message is written as is, with no prefix or stack.
	 */
	public static void addPlain(double requiredVerbosity, Block block, Canvas canvas, String message) {
		if (requiredVerbosity<=0) return ;
		if ((block!=null&&requiredVerbosity<=block.loggingVerbosity) || (canvas!=null&&requiredVerbosity<=canvas.loggingVerbosity) || (requiredVerbosity<=loggingVerbosity)) {
			synchronized (lock) {
				noteThread();
				messages.add(message);
			}
		}
	}

	/**
	 * Similar to {@link #add(double, Block, Canvas, String, boolean)}, except that the message is written as returned from the {@linkplain StringGetter}, with no prefix or stack.
	 */
	public static void addPlain(double requiredVerbosity, PlacedBlock placedBlock, Canvas canvas, StringGetter messageGetter, Object messageArg0, Object messageArg1) {
		if (requiredVerbosity<=0) return ;
		Block block = placedBlock.getBlock();
		if ((block!=null&&requiredVerbosity<=block.loggingVerbosity) || (canvas!=null&&requiredVerbosity<=canvas.loggingVerbosity) || (requiredVerbosity<=loggingVerbosity)) {
			synchronized (lock) {
				noteThread();
				messages.add(messageGetter.getString(block, placedBlock, messageArg0, messageArg1));
			}
		}
	}

	/**
	 * Always adds the message to the debug-log.
	 */
	public static void add(Block block, String message, boolean wantStack) {
		synchronized (lock) {
			noteThread();
			String fullMessage = "" ;
			if (block!=null) fullMessage += block.getLogName()+" " ;
			if (message!=null) fullMessage += message+" " ;
			if (wantStack) fullMessage += "\t"+currentStack();
			messages.add(fullMessage);
		}
	}

	/**
	 * Always adds the message to the debug-log.
	 */
	private static void add(Block block, PlacedBlock placedBlock, StringGetter messageGetter, Object messageArg0, Object messageArg1, boolean wantStack) {
		add(block, messageGetter.getString(block, placedBlock, messageArg0, messageArg1), wantStack);
	}

	/**
	 * Always adds the message to the debug-log.
	 */
	public static void add(Block block, StringGetter messageGetter, Object messageArg0, Object messageArg1, boolean wantStack) {
		add(block, null, messageGetter, messageArg0, messageArg1, wantStack);
	}

	/**
	 * Always adds the message to the debug-log.
	 */
	public static void add(PlacedBlock placedBlock, StringGetter messageGetter, Object messageArg0, Object messageArg1, boolean wantStack) {
		add(placedBlock.getBlock(), placedBlock, messageGetter, messageArg0, messageArg1, wantStack);
	}

	// TODO Code the missing add()methods.

	/**
	 * Adds the message if its <code>requiredVerbosity</code> is equal or less any of the verbosities in {@link Block}, {@link Canvas} or globally.
	 * However, if the <code>level</code> is zero or less, the message is discarded.
	 * <p>
	 * The message is prefixed with the block's name, and optionally followed by a compact listing of the calling stack.
	 * @see Block#getLogName()
	 */
	public static void add(double requiredVerbosity, Block block, Canvas canvas, String message, boolean wantStack) {
		if (requiredVerbosity<=0) return ;
		if ((block!=null&&requiredVerbosity<=block.loggingVerbosity) || (canvas!=null&&requiredVerbosity<=canvas.loggingVerbosity) || (requiredVerbosity<=loggingVerbosity)) {
			add(block, message, wantStack);
		}
	}

	/**
	 * Adds the message if its <code>requiredVerbosity</code> is equal or less any of the verbosities in {@link Block}, {@link Canvas} or globally.
	 * However, if the <code>level</code> is zero or less, the message is discarded.
	 * <p>
	 * The message is prefixed with the block's name, and optionally followed by a compact listing of the calling stack.
	 * @see Block#getLogName()
	 */
	public static void add(double requiredVerbosity, Block block, Canvas canvas, StringGetter messageGetter, Object messageArg0, Object messageArg1, boolean wantStack) {
		if (requiredVerbosity<=0) return ;
		if ((block!=null&&requiredVerbosity<=block.loggingVerbosity) || (canvas!=null&&requiredVerbosity<=canvas.loggingVerbosity) || (requiredVerbosity<=loggingVerbosity)) {
			add(block, null, messageGetter, messageArg0, messageArg1, wantStack);
		}
	}

	/**
	 * Adds the message if its <code>requiredVerbosity</code> is equal or less any of the verbosities in {@link Block}, {@link Canvas} or globally.
	 * However, if the <code>level</code> is zero or less, the message is discarded.
	 * <p>
	 * The message is prefixed with the block's name, and optionally followed by a compact listing of the calling stack.
	 *
	 * @param placedBlock The {@linkplain Block} associated with this is used.
	 * @see Block#getLogName()
	 */
	public static void add(double requiredVerbosity, PlacedBlock placedBlock, Canvas canvas, StringGetter messageGetter, Object messageArg0, Object messageArg1, boolean wantStack) {
		if (requiredVerbosity<=0) return ;
		Block block = placedBlock.getBlock();
		if ((block!=null&&requiredVerbosity<=block.loggingVerbosity) || (canvas!=null&&requiredVerbosity<=canvas.loggingVerbosity) || (requiredVerbosity<=loggingVerbosity)) {
			add(placedBlock, messageGetter, messageArg0, messageArg1, wantStack);
		}
	}

	/**
	 * Makes a checkpoint after the most recent message.
	 * If {@link MessagesCheckpoint#revert()} is called, all messages between now and then are discarded.
	 */
	public static MessagesCheckpoint makeCheckpoint() {
		return new MessagesCheckpoint();
	}

	/**
	 * Holds a checkpoint.
	 * @see DebugLog#makeCheckpoint()
	 */
	public static class MessagesCheckpoint {
		private final int count;
		private MessagesCheckpoint() {
			// FIX: capture the size under the lock, like every other access to 'messages'.
			synchronized (lock) {
				this.count = messages.size();
			}
		}
		public void revert() {
			// FIX: the original truncated 'messages' without holding the lock,
			// racing with concurrent add()/out()/err() calls.
			synchronized (lock) {
				while (messages.size()>count) messages.remove(messages.size()-1);
			}
		}
	}

	// TODO StringGetter implementations should handle null messageArgs sensibly.

	public static interface StringGetter {
		String getString(Block block, PlacedBlock placedBlock, Object messageArg0, Object messageArg1);
	}

	public static final StringGetter stack = new StringGetter() {
		public String getString(Block block, PlacedBlock placedBlock, Object ignore0, Object ignore1) {
			return currentStack();
		}
	};

	/**
	 * Returns a single-line form of the current stack.
	 */
	public static String currentStack() {
		return theStack(Thread.currentThread().getStackTrace(), 0);
	}

	private static String theStack(StackTraceElement[] stackTrace, int skipCount) {
		StringBuilder sb = new StringBuilder();
		build(sb, stackTrace, skipCount);
		return sb.toString();
	}

	/**
	 * Appends the stack-trace to the string-builder.
	 * A compact single-line version of the stack is appended - most classes are omitted, including android library classes.
	 * <p>
	 * @param skipCount The number of items on the stack to be skipped - mostly calls within this class.
	 */
	private static void build(StringBuilder sb, StackTraceElement[] stackTrace, int skipCount) {
		String prevClassFullName = null ; // Prev-iteration value of 'classFullName'
		boolean wasInAppCode = true ; // Prev-iteration value of 'isInAppCode'
		for (int i=skipCount ; i<stackTrace.length ; i++) {
			StackTraceElement stackItem = stackTrace[i];
			String classFullName = stackItem.getClassName();
			if (classFullName.startsWith(DebugLog.class.getName())) continue ;
			boolean isInAppCode = true ;
			if (classFullName.startsWith("java.")) isInAppCode = false ;
			else if (classFullName.startsWith("javax.")) isInAppCode = false ;
			else if (classFullName.startsWith("com.android.")) isInAppCode = false ;
			else if (classFullName.startsWith("android.")) isInAppCode = false ;
			else if (classFullName.startsWith("dalvik.")) isInAppCode = false ;
			if (isInAppCode) {
				if (classFullName.equals(prevClassFullName)) {
					sb.append(",");
				} else {
					if (prevClassFullName!=null) sb.append(" ");
					String[] parts = classFullName.split("\\."); // 'split()' takes a regular-expression argument.
					try {
						sb.append(parts[parts.length-1]);
					} catch (Exception e) {
						sb.append("*"+classFullName);
					}
					sb.append(":");
				}
				sb.append(stackItem.getLineNumber());
			} else {
				// Mark each transition out of app code with a single '*'.
				if (wasInAppCode) sb.append("*");
			}
			prevClassFullName = classFullName ;
			wasInAppCode = isInAppCode ;
		}
	}

}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.hyracks.dataflow.std.intersect; import java.nio.ByteBuffer; import java.util.BitSet; import org.apache.hyracks.api.comm.IFrameWriter; import org.apache.hyracks.api.comm.VSizeFrame; import org.apache.hyracks.api.context.IHyracksTaskContext; import org.apache.hyracks.api.dataflow.ActivityId; import org.apache.hyracks.api.dataflow.IActivity; import org.apache.hyracks.api.dataflow.IActivityGraphBuilder; import org.apache.hyracks.api.dataflow.IOperatorNodePushable; import org.apache.hyracks.api.dataflow.value.IBinaryComparator; import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory; import org.apache.hyracks.api.dataflow.value.INormalizedKeyComputer; import org.apache.hyracks.api.dataflow.value.INormalizedKeyComputerFactory; import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider; import org.apache.hyracks.api.dataflow.value.RecordDescriptor; import org.apache.hyracks.api.exceptions.HyracksDataException; import org.apache.hyracks.api.exceptions.HyracksException; import org.apache.hyracks.api.job.IOperatorDescriptorRegistry; import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor; import 
org.apache.hyracks.dataflow.common.comm.io.FrameTupleAppender;
import org.apache.hyracks.dataflow.common.comm.util.FrameUtils;
import org.apache.hyracks.dataflow.std.base.AbstractActivityNode;
import org.apache.hyracks.dataflow.std.base.AbstractOperatorDescriptor;
import org.apache.hyracks.dataflow.std.base.AbstractUnaryOutputOperatorNodePushable;

/**
 * This intersection operator is to get the common elements from multiple way inputs.
 * It will only produce the projected fields which are used for comparison.
 * <p>
 * All inputs are assumed to be sorted on the comparison fields; the runtime performs a
 * multi-way merge-style intersection, emitting a tuple only when every input currently
 * holds an equal tuple (see {@code intersectAllInputs} below).
 */
public class IntersectOperatorDescriptor extends AbstractOperatorDescriptor {

    // Per-input list of fields that are both compared and projected into the output.
    private final int[][] projectFields;
    // Optional normalizer for cheap first-key comparison; may be null.
    private final INormalizedKeyComputerFactory firstKeyNormalizerFactory;
    // One comparator per comparison field, shared by all inputs.
    private final IBinaryComparatorFactory[] comparatorFactory;

    /**
     * @param spec the job's operator registry
     * @param nInputs Number of inputs
     * @param compareAndProjectFields The project field list of each input.
     *            All the fields order should be the same with the comparatorFactories
     * @param firstKeyNormalizerFactory Normalizer for the first comparison key; may be null.
     * @param comparatorFactories A list of comparators for each field
     * @param recordDescriptor descriptor of the (single) output record
     * @throws HyracksException if the field lists and comparator list are inconsistent
     */
    public IntersectOperatorDescriptor(IOperatorDescriptorRegistry spec, int nInputs,
            int[][] compareAndProjectFields, INormalizedKeyComputerFactory firstKeyNormalizerFactory,
            IBinaryComparatorFactory[] comparatorFactories, RecordDescriptor recordDescriptor)
            throws HyracksException {
        super(spec, nInputs, 1); // nInputs in, exactly one output
        recordDescriptors[0] = recordDescriptor;
        validateParameters(compareAndProjectFields, comparatorFactories);
        this.projectFields = compareAndProjectFields;
        this.firstKeyNormalizerFactory = firstKeyNormalizerFactory;
        this.comparatorFactory = comparatorFactories;
    }

    /**
     * Checks that every input projects the same number of fields, that all field indexes are
     * non-negative, and that the comparator count matches the field count.
     */
    private void validateParameters(int[][] compareAndProjectFields, IBinaryComparatorFactory[] comparatorFactories)
            throws HyracksException {
        int firstLength = compareAndProjectFields[0].length;
        for (int[] fields : compareAndProjectFields) {
            if (fields.length != firstLength) {
                throw new HyracksException("The given input comparison fields is not equal");
            }
            for (int fid : fields) {
                if (fid < 0) {
                    throw new HyracksException("Invalid field index in given comparison fields array");
                }
            }
        }
        if (firstLength != comparatorFactories.length) {
            throw new HyracksException("The size of given fields is not equal with the number of comparators");
        }
    }

    @Override
    public void contributeActivities(IActivityGraphBuilder builder) {
        // Single activity consuming all inputs and producing the one output edge.
        IActivity intersectActivity = new IntersectActivity(new ActivityId(getOperatorId(), 0));
        builder.addActivity(this, intersectActivity);
        for (int i = 0; i < getInputArity(); i++) {
            builder.addSourceEdge(i, intersectActivity, i);
        }
        builder.addTargetEdge(0, intersectActivity, 0);
    }

    /** The single activity of this operator; materializes the pushable runtime. */
    private class IntersectActivity extends AbstractActivityNode {

        public IntersectActivity(ActivityId activityId) {
            super(activityId);
        }

        @Override
        public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
                IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions)
                throws HyracksDataException {
            RecordDescriptor[] inputRecordDesc = new RecordDescriptor[inputArity];
            for (int i = 0; i < inputRecordDesc.length; i++) {
                inputRecordDesc[i] = recordDescProvider.getInputRecordDescriptor(getActivityId(), i);
            }
            return new IntersectOperatorNodePushable(ctx, inputArity, inputRecordDesc, projectFields,
                    firstKeyNormalizerFactory, comparatorFactory);
        }
    }

    /**
     * Runtime of the intersection. Each input's frames arrive on its own thread via
     * {@link #getInputFrameWriter(int)}; all threads synchronize on this object.
     * Input 0's thread is the driver: it waits until one frame from every input has
     * arrived, performs the merge-intersection, and wakes the other input threads.
     */
    public static class IntersectOperatorNodePushable extends AbstractUnaryOutputOperatorNodePushable {

        // Terminal actions propagated to the downstream writer (by input 0 only).
        private enum ACTION {FAILED, CLOSE}

        private final int inputArity;
        private final int[][] projectFields;
        // Bit i set <=> input i's current frame is fully consumed (a new frame is needed).
        private final BitSet consumed;
        // Per-input cursor into the current frame's tuples.
        private final int[] tupleIndexMarker;
        // Per-input accessor over the most recently delivered frame.
        private final FrameTupleAccessor[] refAccessor;
        private final FrameTupleAppender appender;
        // Optional cheap first-key pre-comparison; may be null.
        private final INormalizedKeyComputer firstKeyNormalizerComputer;
        private final IBinaryComparator[] comparators;
        // Set once any input closes or fails; guarded by 'this'.
        private boolean done = false;

        public IntersectOperatorNodePushable(IHyracksTaskContext ctx, int inputArity,
                RecordDescriptor[] inputRecordDescriptors, int[][] projectFields,
                INormalizedKeyComputerFactory firstKeyNormalizerFactory,
                IBinaryComparatorFactory[] comparatorFactory) throws HyracksDataException {
            this.inputArity = inputArity;
            this.projectFields = projectFields;
            this.firstKeyNormalizerComputer = firstKeyNormalizerFactory == null ?
                    null : firstKeyNormalizerFactory.createNormalizedKeyComputer();
            comparators = new IBinaryComparator[projectFields[0].length];
            for (int i = 0; i < comparators.length; i++) {
                comparators[i] = comparatorFactory[i].createBinaryComparator();
            }
            appender = new FrameTupleAppender(new VSizeFrame(ctx));
            refAccessor = new FrameTupleAccessor[inputArity];
            for (int i = 0; i < inputArity; i++) {
                refAccessor[i] = new FrameTupleAccessor(inputRecordDescriptors[i]);
            }
            // Start with every input marked consumed: each must deliver a frame before work begins.
            consumed = new BitSet(inputArity);
            consumed.set(0, inputArity);
            tupleIndexMarker = new int[inputArity];
        }

        @Override
        public int getInputArity() {
            return inputArity;
        }

        @Override
        public IFrameWriter getInputFrameWriter(final int index) {
            return new IFrameWriter() {

                @Override
                public void open() throws HyracksDataException {
                    // Only input 0 owns the downstream writer lifecycle.
                    if (index == 0) {
                        writer.open();
                    }
                }

                @Override
                public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
                    synchronized (IntersectOperatorNodePushable.this) {
                        if (done) {
                            return;
                        }
                        // Publish this input's new frame and mark it unconsumed.
                        refAccessor[index].reset(buffer);
                        tupleIndexMarker[index] = 0;
                        consumed.clear(index);
                        if (index != 0) {
                            // Non-driver inputs: wake the driver if it was the last arrival,
                            // then block until the driver has consumed this frame (or shutdown).
                            if (allInputArrived()) {
                                IntersectOperatorNodePushable.this.notifyAll();
                            }
                            while (!consumed.get(index) && !done) {
                                waitOrHyracksException();
                            }
                        } else { //(index == 0)
                            // Driver input: repeatedly wait for a full set of frames and intersect,
                            // until this frame (input 0's) is itself fully consumed.
                            while (!consumed.get(0)) {
                                while (!allInputArrived() && !done) {
                                    waitOrHyracksException();
                                }
                                if (done) {
                                    break;
                                }
                                intersectAllInputs();
                                // Release whichever input threads had their frames consumed.
                                IntersectOperatorNodePushable.this.notifyAll();
                            }
                        }
                    }
                }

                /** Waits on the shared monitor, rewrapping interrupts as HyracksDataException. */
                private void waitOrHyracksException() throws HyracksDataException {
                    try {
                        IntersectOperatorNodePushable.this.wait();
                    } catch (InterruptedException e) {
                        throw new HyracksDataException(e);
                    }
                }

                /** True when no input is still marked "consumed", i.e. every input holds a fresh frame. */
                private boolean allInputArrived() {
                    return consumed.cardinality() == 0;
                }

                /**
                 * Merge-intersects the current frames of all inputs until at least one frame is
                 * exhausted (some bit in 'consumed' becomes set). A tuple is emitted only when all
                 * inputs match the current maximum tuple. Caller must hold the monitor.
                 */
                private void intersectAllInputs() throws HyracksDataException {
                    do {
                        // The input whose current tuple is largest; all others are advanced toward it.
                        int maxInput = findMaxInput();
                        int match = 1;
                        boolean needToUpdateMax = false;
                        for (int i = 0; i < inputArity; i++) {
                            if (i == maxInput) {
                                continue;
                            }
                            while (tupleIndexMarker[i] < refAccessor[i].getTupleCount()) {
                                int cmp = compare(i, refAccessor[i], tupleIndexMarker[i],
                                        maxInput, refAccessor[maxInput], tupleIndexMarker[maxInput]);
                                if (cmp == 0) {
                                    match++;
                                    break;
                                } else if (cmp < 0) {
                                    // This input is behind the max: skip forward.
                                    tupleIndexMarker[i]++;
                                } else {
                                    // This input overtook the max: a new max must be found.
                                    needToUpdateMax = true;
                                    break;
                                }
                            }
                            if (tupleIndexMarker[i] >= refAccessor[i].getTupleCount()) {
                                consumed.set(i); // frame exhausted; needs a new one
                            }
                        }
                        if (match == inputArity) {
                            // All inputs agree on this tuple: emit the projection and advance everyone.
                            FrameUtils.appendProjectionToWriter(writer, appender, refAccessor[maxInput],
                                    tupleIndexMarker[maxInput], projectFields[maxInput]);
                            for (int i = 0; i < inputArity; i++) {
                                tupleIndexMarker[i]++;
                                if (tupleIndexMarker[i] >= refAccessor[i].getTupleCount()) {
                                    consumed.set(i);
                                }
                            }
                        } else if (needToUpdateMax) {
                            tupleIndexMarker[maxInput]++;
                            if (tupleIndexMarker[maxInput] >= refAccessor[maxInput].getTupleCount()) {
                                consumed.set(maxInput);
                            }
                        }
                    } while (consumed.nextSetBit(0) < 0);
                    // Flush whatever was appended for this round.
                    appender.write(writer, true);
                }

                /**
                 * Field-wise comparison of two tuples on the projected/comparison fields,
                 * using the normalized first key as a cheap pre-filter when available.
                 */
                private int compare(int input1, FrameTupleAccessor frameTupleAccessor1, int tid1,
                        int input2, FrameTupleAccessor frameTupleAccessor2, int tid2) throws HyracksDataException {
                    int firstNorm1 = getFirstNorm(input1, frameTupleAccessor1, tid1);
                    int firstNorm2 = getFirstNorm(input2, frameTupleAccessor2, tid2);
                    if (firstNorm1 < firstNorm2) {
                        return -1;
                    } else if (firstNorm1 > firstNorm2) {
                        return 1;
                    }
                    for (int i = 0; i < comparators.length; i++) {
                        int cmp = comparators[i].compare(frameTupleAccessor1.getBuffer().array(),
                                frameTupleAccessor1.getAbsoluteFieldStartOffset(tid1, projectFields[input1][i]),
                                frameTupleAccessor1.getFieldLength(tid1, projectFields[input1][i]),
                                frameTupleAccessor2.getBuffer().array(),
                                frameTupleAccessor2.getAbsoluteFieldStartOffset(tid2, projectFields[input2][i]),
                                frameTupleAccessor2.getFieldLength(tid2, projectFields[input2][i]));
                        if (cmp != 0) {
                            return cmp;
                        }
                    }
                    return 0;
                }

                /** Normalized key of a tuple's first comparison field; 0 when no normalizer is configured. */
                private int getFirstNorm(int inputId1, FrameTupleAccessor frameTupleAccessor1, int tid1) {
                    return firstKeyNormalizerComputer == null ?
                            0 : firstKeyNormalizerComputer.normalize(frameTupleAccessor1.getBuffer().array(),
                                    frameTupleAccessor1.getAbsoluteFieldStartOffset(tid1, projectFields[inputId1][0]),
                                    frameTupleAccessor1.getFieldLength(tid1, projectFields[inputId1][0]));
                }

                /** Index of the input whose current tuple compares largest. */
                private int findMaxInput() throws HyracksDataException {
                    int max = 0;
                    for (int i = 1; i < inputArity; i++) {
                        int cmp = compare(max, refAccessor[max], tupleIndexMarker[max],
                                i, refAccessor[i], tupleIndexMarker[i]);
                        if (cmp < 0) {
                            max = i;
                        }
                    }
                    return max;
                }

                @Override
                public void fail() throws HyracksDataException {
                    clearStateWith(ACTION.FAILED);
                }

                @Override
                public void close() throws HyracksDataException {
                    clearStateWith(ACTION.CLOSE);
                }

                /**
                 * Terminates this input: only input 0 propagates the action to the downstream
                 * writer; every input marks itself consumed, drops its frame reference, sets the
                 * global 'done' flag, and wakes all waiting threads so they can exit.
                 */
                private void clearStateWith(ACTION action) throws HyracksDataException {
                    synchronized (IntersectOperatorNodePushable.this) {
                        if (index == 0) {
                            doAction(action);
                        }
                        if (done) {
                            return;
                        }
                        consumed.set(index);
                        refAccessor[index] = null;
                        done = true;
                        IntersectOperatorNodePushable.this.notifyAll();
                    }
                }

                /** Forwards the terminal action to the downstream writer. */
                private void doAction(ACTION action) throws HyracksDataException {
                    switch (action) {
                        case CLOSE:
                            writer.close();
                            break;
                        case FAILED:
                            writer.fail();
                            break;
                    }
                }
            };
        }
    }
}
/* * Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.identity.oauth.endpoint.token; import org.apache.axiom.util.base64.Base64Utils; import org.apache.commons.collections.iterators.IteratorEnumeration; import org.apache.oltu.oauth2.as.validator.AuthorizationCodeValidator; import org.apache.oltu.oauth2.as.validator.ClientCredentialValidator; import org.apache.oltu.oauth2.as.validator.PasswordValidator; import org.apache.oltu.oauth2.as.validator.RefreshTokenValidator; import org.apache.oltu.oauth2.common.OAuth; import org.apache.oltu.oauth2.common.message.types.GrantType; import org.apache.oltu.oauth2.common.validators.OAuthValidator; import org.mockito.Matchers; import org.mockito.Mock; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import org.powermock.core.classloader.annotations.PrepareForTest; import org.testng.annotations.AfterTest; import org.testng.annotations.BeforeTest; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; import org.wso2.carbon.base.CarbonBaseConstants; import org.wso2.carbon.identity.central.log.mgt.utils.LoggerUtils; import org.wso2.carbon.identity.core.util.IdentityDatabaseUtil; import org.wso2.carbon.identity.core.util.IdentityTenantUtil; import org.wso2.carbon.identity.oauth.common.NTLMAuthenticationValidator; import 
org.wso2.carbon.identity.oauth.common.OAuth2ErrorCodes; import org.wso2.carbon.identity.oauth.common.OAuthConstants; import org.wso2.carbon.identity.oauth.common.SAML2GrantValidator; import org.wso2.carbon.identity.oauth.config.OAuthServerConfiguration; import org.wso2.carbon.identity.oauth.endpoint.exception.InvalidRequestParentException; import org.wso2.carbon.identity.oauth.endpoint.expmapper.InvalidRequestExceptionMapper; import org.wso2.carbon.identity.oauth.endpoint.util.EndpointUtil; import org.wso2.carbon.identity.oauth.endpoint.util.TestOAuthEndpointBase; import org.wso2.carbon.identity.oauth.tokenprocessor.TokenPersistenceProcessor; import org.wso2.carbon.identity.oauth2.OAuth2Service; import org.wso2.carbon.identity.oauth2.ResponseHeader; import org.wso2.carbon.identity.oauth2.dto.OAuth2AccessTokenReqDTO; import org.wso2.carbon.identity.oauth2.dto.OAuth2AccessTokenRespDTO; import org.wso2.carbon.identity.oauth2.model.CarbonOAuthTokenRequest; import java.lang.reflect.Method; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.Hashtable; import java.util.List; import java.util.Map; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequestWrapper; import javax.servlet.http.HttpServletResponse; import javax.ws.rs.HttpMethod; import javax.ws.rs.core.MultivaluedHashMap; import javax.ws.rs.core.MultivaluedMap; import javax.ws.rs.core.Response; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyString; import static org.powermock.api.mockito.PowerMockito.doAnswer; import static org.powermock.api.mockito.PowerMockito.doReturn; import static org.powermock.api.mockito.PowerMockito.mock; import static org.powermock.api.mockito.PowerMockito.mockStatic; import static org.powermock.api.mockito.PowerMockito.spy; import static org.powermock.api.mockito.PowerMockito.when; import static org.testng.Assert.assertEquals; import static 
org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertTrue;

/**
 * Unit tests for the OAuth2 /token endpoint. Static collaborators (EndpointUtil,
 * IdentityDatabaseUtil, OAuthServerConfiguration, ...) are replaced via PowerMock;
 * an in-memory H2 database backs the OAuth app registrations.
 */
@PrepareForTest({EndpointUtil.class, IdentityDatabaseUtil.class, OAuthServerConfiguration.class,
        CarbonOAuthTokenRequest.class, LoggerUtils.class, IdentityTenantUtil.class})
public class OAuth2TokenEndpointTest extends TestOAuthEndpointBase {

    @Mock
    OAuth2Service oAuth2Service;

    @Mock
    OAuthServerConfiguration oAuthServerConfiguration;

    @Mock
    TokenPersistenceProcessor tokenPersistenceProcessor;

    @Mock
    HttpServletResponse httpServletResponse;

    @Mock
    OAuth2AccessTokenRespDTO oAuth2AccessTokenRespDTO;

    @Mock
    CarbonOAuthTokenRequest carbonOAuthTokenRequest;

    // Sentinel error codes used by the error-response scenarios below.
    private static final String SQL_ERROR = "sql_error";
    private static final String TOKEN_ERROR = "token_error";
    // Fixture client/app credentials registered in setUp().
    private static final String CLIENT_ID_VALUE = "ca19a540f544777860e44e75f605d927";
    private static final String APP_NAME = "myApp";
    private static final String INACTIVE_CLIENT_ID_VALUE = "inactiveId";
    private static final String SECRET = "87n9a540f544777860e44e75f605d435";
    private static final String INACTIVE_APP_NAME = "inactiveApp";
    private static final String USERNAME = "user1";
    private static final String REALM = "Basic realm=is.com";
    private static final String APP_REDIRECT_URL = "http://localhost:8080/redirect";
    private static final String ACCESS_TOKEN = "1234-542230-45220-54245";
    private static final String REFRESH_TOKEN = "1234-542230-45220-54245";
    // Pre-built HTTP Basic header for the active test client.
    private static final String AUTHORIZATION_HEADER =
            "Basic " + Base64Utils.encode((CLIENT_ID_VALUE + ":" + SECRET).getBytes());

    private OAuth2TokenEndpoint oAuth2TokenEndpoint;

    /**
     * One-time fixture: points CARBON_HOME at the test resources, starts the in-memory H2
     * database, and registers one active and one inactive OAuth application.
     */
    @BeforeTest
    public void setUp() throws Exception {
        System.setProperty(
                CarbonBaseConstants.CARBON_HOME,
                Paths.get(System.getProperty("user.dir"), "src", "test", "resources").toString()
        );
        oAuth2TokenEndpoint = new OAuth2TokenEndpoint();
        initiateInMemoryH2();
        createOAuthApp(CLIENT_ID_VALUE, SECRET, USERNAME, APP_NAME, "ACTIVE");
        createOAuthApp(INACTIVE_CLIENT_ID_VALUE, "dummySecret", USERNAME, INACTIVE_APP_NAME, "INACTIVE");
    }

    /** Drops the fixture data created in setUp(). */
    @AfterTest
    public void clear() throws Exception {
        super.cleanData();
    }

    /**
     * Rows: clientId, authzHeader, paramMap, grantType, idToken, responseHeaders,
     * customResponseParams, (unused) exception, expectedStatus, expectedErrorCode.
     */
    @DataProvider(name = "testIssueAccessTokenDataProvider")
    public Object[][] testIssueAccessTokenDataProvider() {

        MultivaluedMap<String, String> mapWithCredentials = new MultivaluedHashMap<String, String>();
        List<String> clientId = new ArrayList<>();
        clientId.add(CLIENT_ID_VALUE);
        List<String> secret = new ArrayList<>();
        secret.add(SECRET);
        mapWithCredentials.put(OAuth.OAUTH_CLIENT_ID, clientId);
        mapWithCredentials.put(OAuth.OAUTH_CLIENT_SECRET, secret);

        MultivaluedMap<String, String> mapWithClientId = new MultivaluedHashMap<>();
        mapWithClientId.put(OAuth.OAUTH_CLIENT_ID, clientId);

        String inactiveClientHeader =
                "Basic " + Base64Utils.encode((INACTIVE_CLIENT_ID_VALUE + ":dummySecret").getBytes());
        String invalidClientHeader = "Basic " + Base64Utils.encode(("invalidId:dummySecret").getBytes());
        String inCorrectAuthzHeader = "Basic value1 value2";

        ResponseHeader contentType = new ResponseHeader();
        contentType.setKey(OAuth.HeaderType.CONTENT_TYPE);
        contentType.setValue(OAuth.ContentType.URL_ENCODED);
        ResponseHeader[] headers1 = new ResponseHeader[]{contentType};
        ResponseHeader[] headers2 = new ResponseHeader[]{null};
        ResponseHeader[] headers3 = new ResponseHeader[0];

        Map<String, String> customResponseParamMap = new HashMap<>();
        customResponseParamMap.put("param_key_1", "param_value_1");
        customResponseParamMap.put("param_key_2", "param_value_2");

        return new Object[][]{
                // Request with multivalued client_id parameter. Will return bad request error
                {CLIENT_ID_VALUE + ",clientId2", null, new MultivaluedHashMap<String, String>(), GrantType.PASSWORD
                        .toString(), null, null, null, null, HttpServletResponse.SC_BAD_REQUEST, OAuth2ErrorCodes
                        .INVALID_REQUEST},
                // Request with invalid authorization header. Will return bad request error
                {CLIENT_ID_VALUE, inCorrectAuthzHeader, mapWithClientId, GrantType.PASSWORD.toString(), null, null,
                        null, null, HttpServletResponse.SC_BAD_REQUEST, OAuth2ErrorCodes.INVALID_REQUEST},
                // Request from inactive client. Will give correct response, inactive client state should be handled
                // in access token issuer
                {INACTIVE_CLIENT_ID_VALUE, inactiveClientHeader, new MultivaluedHashMap<String, String>(), GrantType
                        .PASSWORD.toString(), null, null, null, null, HttpServletResponse.SC_OK, ""},
                // Request from invalid client. Will give correct response, invalid-id is handles in access token issuer
                {"invalidId", invalidClientHeader, new MultivaluedHashMap<String, String>(), GrantType.PASSWORD
                        .toString(), null, null, null, null, HttpServletResponse.SC_OK, ""},
                // Request without client id and authz header. Will give bad request error
                {null, null, new MultivaluedHashMap<String, String>(), GrantType.PASSWORD.toString(), null, null,
                        null, null, HttpServletResponse.SC_BAD_REQUEST, OAuth2ErrorCodes.INVALID_REQUEST},
                // Request with client id but no authz header. Will give bad request error
                {CLIENT_ID_VALUE, null, new MultivaluedHashMap<String, String>(), GrantType.PASSWORD.toString(),
                        null, null, null, null, HttpServletResponse.SC_BAD_REQUEST, null},
                // Request with unsupported grant type. Will give bad request error
                {CLIENT_ID_VALUE, AUTHORIZATION_HEADER, new MultivaluedHashMap<String, String>(), "dummyGrant",
                        null, null, null, null, HttpServletResponse.SC_BAD_REQUEST, null},
                // Successful request without id token request. No headers
                {CLIENT_ID_VALUE, AUTHORIZATION_HEADER, new MultivaluedHashMap<String, String>(), GrantType.PASSWORD
                        .toString(), null, null, null, null, HttpServletResponse.SC_OK, null},
                // Successful request with id token request. With header values
                {CLIENT_ID_VALUE, AUTHORIZATION_HEADER, new MultivaluedHashMap<String, String>(), GrantType.PASSWORD
                        .toString(), "idTokenValue", headers1, null, null, HttpServletResponse.SC_OK, null},
                // Successful request with id token request. With header which contains null values
                {CLIENT_ID_VALUE, AUTHORIZATION_HEADER, new MultivaluedHashMap<String, String>(), GrantType.PASSWORD
                        .toString(), "idTokenValue", headers2, null, null, HttpServletResponse.SC_OK, null},
                // Successful request with id token request. With empty header array
                {CLIENT_ID_VALUE, AUTHORIZATION_HEADER, new MultivaluedHashMap<String, String>(), GrantType.PASSWORD
                        .toString(), "idTokenValue", headers3, null, null, HttpServletResponse.SC_OK, null},
                // Successful token request that will return custom response parameters in response.
                {CLIENT_ID_VALUE, AUTHORIZATION_HEADER, new MultivaluedHashMap<String, String>(), GrantType.PASSWORD
                        .toString(), null, null, customResponseParamMap, null, HttpServletResponse.SC_OK, null}
        };
    }

    /**
     * Drives the /token endpoint with the scenarios above and checks HTTP status, error code
     * or access token in the body, and any expected custom response parameters.
     * The 'e' parameter is carried by the data-provider rows but not used here.
     */
    @Test(dataProvider = "testIssueAccessTokenDataProvider", groups = "testWithConnection")
    public void testIssueAccessToken(String clientId, String authzHeader, Object paramMapObj, String grantType,
                                     String idToken, Object headerObj, Object customResponseParamObj, Exception e,
                                     int expectedStatus, String expectedErrorCode) throws Exception {

        MultivaluedMap<String, String> paramMap = (MultivaluedMap<String, String>) paramMapObj;
        ResponseHeader[] responseHeaders = (ResponseHeader[]) headerObj;
        Map<String, String> customResponseParameters = (Map<String, String>) customResponseParamObj;

        Map<String, String[]> requestParams = new HashMap<>();
        if (clientId != null) {
            // A comma-separated value yields a multivalued client_id parameter.
            requestParams.put(OAuth.OAUTH_CLIENT_ID, clientId.split(","));
        }
        requestParams.put(OAuth.OAUTH_GRANT_TYPE, new String[]{grantType});
        requestParams.put(OAuth.OAUTH_SCOPE, new String[]{"scope1"});
        requestParams.put(OAuth.OAUTH_REDIRECT_URI, new String[]{APP_REDIRECT_URL});
        requestParams.put(OAuth.OAUTH_USERNAME, new String[]{USERNAME});
        requestParams.put(OAuth.OAUTH_PASSWORD, new String[]{"password"});

        // Static mocks must be installed before any stubbing on them.
        mockStatic(LoggerUtils.class);
        when(LoggerUtils.isDiagnosticLogsEnabled()).thenReturn(true);
        mockStatic(IdentityTenantUtil.class);
        when(IdentityTenantUtil.getTenantId(anyString())).thenReturn(-1234);

        // mockHttpRequest is a helper defined elsewhere in this class.
        HttpServletRequest request = mockHttpRequest(requestParams, new HashMap<String, Object>());
        when(request.getHeader(OAuthConstants.HTTP_REQ_HEADER_AUTHZ)).thenReturn(authzHeader);
        when(request.getHeaderNames()).thenReturn(
                Collections.enumeration(new ArrayList<String>() {{
                    add(OAuthConstants.HTTP_REQ_HEADER_AUTHZ);
                }}));

        spy(EndpointUtil.class);
        doReturn(REALM).when(EndpointUtil.class, "getRealmInfo");
        doReturn(oAuth2Service).when(EndpointUtil.class, "getOAuth2Service");

        // Stub the token issuance result returned to the endpoint.
        when(oAuth2Service.issueAccessToken(any(OAuth2AccessTokenReqDTO.class))).thenReturn(oAuth2AccessTokenRespDTO);
        when(oAuth2AccessTokenRespDTO.getAccessToken()).thenReturn(ACCESS_TOKEN);
        when(oAuth2AccessTokenRespDTO.getRefreshToken()).thenReturn(REFRESH_TOKEN);
        when(oAuth2AccessTokenRespDTO.getExpiresIn()).thenReturn(3600L);
        when(oAuth2AccessTokenRespDTO.getAuthorizedScopes()).thenReturn("scope1");
        when(oAuth2AccessTokenRespDTO.getIDToken()).thenReturn(idToken);
        when(oAuth2AccessTokenRespDTO.getResponseHeaders()).thenReturn(responseHeaders);
        when(oAuth2AccessTokenRespDTO.getParameters()).thenReturn(customResponseParameters);

        // mockOAuthServerConfiguration and 'connection' come from elsewhere in this class / its base.
        mockOAuthServerConfiguration();

        mockStatic(IdentityDatabaseUtil.class);
        when(IdentityDatabaseUtil.getDBConnection()).thenReturn(connection);

        Map<String, Class<? extends OAuthValidator<HttpServletRequest>>> grantTypeValidators = new Hashtable<>();
        grantTypeValidators.put(GrantType.PASSWORD.toString(), PasswordValidator.class);
        when(oAuthServerConfiguration.getSupportedGrantTypeValidators()).thenReturn(grantTypeValidators);
        when(oAuth2Service.getOauthApplicationState(CLIENT_ID_VALUE)).thenReturn("ACTIVE");

        Response response;
        try {
            response = oAuth2TokenEndpoint.issueAccessToken(request, paramMap);
        } catch (InvalidRequestParentException ire) {
            // Validation failures surface as exceptions; map them the way the runtime would.
            InvalidRequestExceptionMapper invalidRequestExceptionMapper = new InvalidRequestExceptionMapper();
            response = invalidRequestExceptionMapper.toResponse(ire);
        }

        assertNotNull(response, "Token response is null");
        assertEquals(response.getStatus(), expectedStatus, "Unexpected HTTP response status");
        assertNotNull(response.getEntity(), "Response entity is null");

        final String responseBody = response.getEntity().toString();
        if (customResponseParameters != null) {
            customResponseParameters.forEach((key, value) -> assertTrue(responseBody.contains(key) && responseBody
                    .contains(value), "Expected custom response parameter: " + key + " not found in token response."));
        }

        if (expectedErrorCode != null) {
            assertTrue(responseBody.contains(expectedErrorCode), "Expected error code not found");
        } else if (HttpServletResponse.SC_OK == expectedStatus) {
            assertTrue(responseBody.contains(ACCESS_TOKEN), "Successful response should contain access token");
        }
    }

    /** Rows: errorCode set on the token response, responseHeaders, expectedStatus, expectedErrorCode. */
    @DataProvider(name = "testTokenErrorResponseDataProvider")
    public Object[][] testTokenErrorResponseDataProvider() {

        ResponseHeader contentType = new ResponseHeader();
        contentType.setKey(OAuth.HeaderType.CONTENT_TYPE);
        contentType.setValue(OAuth.ContentType.URL_ENCODED);
        ResponseHeader[] headers1 = new ResponseHeader[]{contentType};
        ResponseHeader[] headers2 = new ResponseHeader[]{null};
        ResponseHeader[] headers3 = new ResponseHeader[0];

        // This object provides data to cover all the scenarios with token error response
        return new Object[][]{
                {OAuth2ErrorCodes.INVALID_CLIENT, null, HttpServletResponse.SC_UNAUTHORIZED,
                        OAuth2ErrorCodes.INVALID_CLIENT},
                {OAuth2ErrorCodes.SERVER_ERROR, null, HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                        OAuth2ErrorCodes.SERVER_ERROR},
                {SQL_ERROR, null, HttpServletResponse.SC_BAD_GATEWAY, OAuth2ErrorCodes.SERVER_ERROR},
                {TOKEN_ERROR, null, HttpServletResponse.SC_BAD_REQUEST, TOKEN_ERROR},
                {TOKEN_ERROR, headers1, HttpServletResponse.SC_BAD_REQUEST, TOKEN_ERROR},
                {TOKEN_ERROR, headers2, HttpServletResponse.SC_BAD_REQUEST, TOKEN_ERROR},
                {TOKEN_ERROR, headers3, HttpServletResponse.SC_BAD_REQUEST, TOKEN_ERROR},
        };
    }

    /**
     * Verifies that error codes returned by the token issuer are mapped to the expected
     * HTTP status and error code in the endpoint's response body.
     */
    @Test(dataProvider = "testTokenErrorResponseDataProvider", groups = "testWithConnection")
    public void testTokenErrorResponse(String errorCode, Object headerObj, int expectedStatus,
                                       String expectedErrorCode) throws Exception {

        ResponseHeader[] responseHeaders = (ResponseHeader[]) headerObj;

        Map<String, String[]> requestParams = new HashMap<>();
        requestParams.put(OAuth.OAUTH_GRANT_TYPE, new String[]{GrantType.PASSWORD.toString()});
        requestParams.put(OAuth.OAUTH_USERNAME, new String[]{USERNAME});
        requestParams.put(OAuth.OAUTH_PASSWORD, new String[]{"password"});

        mockStatic(LoggerUtils.class);
        when(LoggerUtils.isDiagnosticLogsEnabled()).thenReturn(true);
        mockStatic(IdentityTenantUtil.class);
        when(IdentityTenantUtil.getTenantId(anyString())).thenReturn(-1234);

        HttpServletRequest request = mockHttpRequest(requestParams, new HashMap<String, Object>());
        when(request.getHeader(OAuthConstants.HTTP_REQ_HEADER_AUTHZ)).thenReturn(AUTHORIZATION_HEADER);
        when(request.getHeaderNames()).thenReturn(
                Collections.enumeration(new ArrayList<String>() {{
                    add(OAuthConstants.HTTP_REQ_HEADER_AUTHZ);
                }}));

        spy(EndpointUtil.class);
        doReturn(REALM).when(EndpointUtil.class, "getRealmInfo");
        doReturn(oAuth2Service).when(EndpointUtil.class, "getOAuth2Service");

        // Make the issuer return an error DTO instead of a token.
        when(oAuth2Service.issueAccessToken(any(OAuth2AccessTokenReqDTO.class))).thenReturn(oAuth2AccessTokenRespDTO);
        when(oAuth2AccessTokenRespDTO.getErrorMsg()).thenReturn("Token Response error");
        when(oAuth2AccessTokenRespDTO.getErrorCode()).thenReturn(errorCode);
        when(oAuth2AccessTokenRespDTO.getResponseHeaders()).thenReturn(responseHeaders);

        mockOAuthServerConfiguration();

        mockStatic(IdentityDatabaseUtil.class);
        when(IdentityDatabaseUtil.getDBConnection()).thenReturn(connection);

        Map<String, Class<? extends OAuthValidator<HttpServletRequest>>> grantTypeValidators = new Hashtable<>();
        grantTypeValidators.put(GrantType.PASSWORD.toString(), PasswordValidator.class);
        when(oAuthServerConfiguration.getSupportedGrantTypeValidators()).thenReturn(grantTypeValidators);
        when(oAuth2Service.getOauthApplicationState(CLIENT_ID_VALUE)).thenReturn("ACTIVE");

        Response response;
        try {
            response = oAuth2TokenEndpoint.issueAccessToken(request, new MultivaluedHashMap<String, String>());
        } catch (InvalidRequestParentException ire) {
            InvalidRequestExceptionMapper invalidRequestExceptionMapper = new InvalidRequestExceptionMapper();
            response = invalidRequestExceptionMapper.toResponse(ire);
        }

        assertNotNull(response, "Token response is null");
        assertEquals(response.getStatus(), expectedStatus, "Unexpected HTTP response status");
        assertNotNull(response.getEntity(), "Response entity is null");
        assertTrue(response.getEntity().toString().contains(expectedErrorCode), "Expected error code not found");
    }

    /** Rows: grant type, comma-separated names of the request parameters that grant requires. */
    @DataProvider(name = "testGetAccessTokenDataProvider")
    public Object[][] testGetAccessTokenDataProvider() {

        return new Object[][]{
                {GrantType.AUTHORIZATION_CODE.toString(), OAuth.OAUTH_CODE},
                {GrantType.PASSWORD.toString(), OAuth.OAUTH_USERNAME + "," + OAuth.OAUTH_PASSWORD},
                {GrantType.REFRESH_TOKEN.toString(), OAuth.OAUTH_REFRESH_TOKEN},
                {org.wso2.carbon.identity.oauth.common.GrantType.SAML20_BEARER.toString(), OAuth.OAUTH_ASSERTION},
                {org.wso2.carbon.identity.oauth.common.GrantType.IWA_NTLM.toString(), OAuthConstants.WINDOWS_TOKEN},
                {GrantType.CLIENT_CREDENTIALS.toString(), OAuth.OAUTH_GRANT_TYPE},
        };
    }
    /**
     * Verifies that for each supported grant type the private {@code issueAccessToken(CarbonOAuthTokenRequest,
     * HttpServletRequestWrapper)} method copies the grant-specific request parameters onto the
     * {@code OAuth2AccessTokenReqDTO} passed to the OAuth2 service.
     *
     * @param grantType            grant type sent in the token request
     * @param additionalParameters comma-separated parameter names expected to be set on the request DTO
     */
    @Test(dataProvider = "testGetAccessTokenDataProvider")
    public void testGetAccessToken(String grantType, String additionalParameters) throws Exception {

        Map<String, String[]> requestParams = new HashMap<>();
        requestParams.put(OAuth.OAUTH_CLIENT_ID, new String[]{CLIENT_ID_VALUE});
        requestParams.put(OAuth.OAUTH_GRANT_TYPE, new String[]{grantType});
        requestParams.put(OAuth.OAUTH_SCOPE, new String[]{"scope1"});

        // Required params for authorization_code grant type
        requestParams.put(OAuth.OAUTH_REDIRECT_URI, new String[]{APP_REDIRECT_URL});
        requestParams.put(OAuth.OAUTH_CODE, new String[]{"auth_code"});

        // Required params for password grant type
        requestParams.put(OAuth.OAUTH_USERNAME, new String[]{USERNAME});
        requestParams.put(OAuth.OAUTH_PASSWORD, new String[]{"password"});

        // Required params for refresh token grant type
        requestParams.put(OAuth.OAUTH_REFRESH_TOKEN, new String[]{REFRESH_TOKEN});

        // Required params for saml2 bearer grant type
        requestParams.put(OAuth.OAUTH_ASSERTION, new String[]{"dummyAssertion"});

        // Required params for IWA_NLTM grant type
        requestParams.put(OAuthConstants.WINDOWS_TOKEN, new String[]{"dummyWindowsToken"});

        HttpServletRequest request = mockHttpRequest(requestParams, new HashMap<String, Object>());
        when(request.getHeader(OAuthConstants.HTTP_REQ_HEADER_AUTHZ)).thenReturn(AUTHORIZATION_HEADER);
        when(request.getHeaderNames()).thenReturn(
                Collections.enumeration(new ArrayList<String>() {{
                    add(OAuthConstants.HTTP_REQ_HEADER_AUTHZ);
                }}));

        // Register validators for every grant type exercised by the data provider.
        Map<String, Class<? extends OAuthValidator<HttpServletRequest>>> grantTypeValidators = new Hashtable<>();
        grantTypeValidators.put(GrantType.PASSWORD.toString(), PasswordValidator.class);
        grantTypeValidators.put(GrantType.CLIENT_CREDENTIALS.toString(), ClientCredentialValidator.class);
        grantTypeValidators.put(GrantType.AUTHORIZATION_CODE.toString(), AuthorizationCodeValidator.class);
        grantTypeValidators.put(GrantType.REFRESH_TOKEN.toString(), RefreshTokenValidator.class);
        grantTypeValidators.put(org.wso2.carbon.identity.oauth.common.GrantType.IWA_NTLM.toString(),
                NTLMAuthenticationValidator.class);
        grantTypeValidators.put(org.wso2.carbon.identity.oauth.common.GrantType.SAML20_BEARER.toString(),
                SAML2GrantValidator.class);

        mockOAuthServerConfiguration();
        when(oAuthServerConfiguration.getSupportedGrantTypeValidators()).thenReturn(grantTypeValidators);

        spy(EndpointUtil.class);
        doReturn(oAuth2Service).when(EndpointUtil.class, "getOAuth2Service");

        // Capture the values the endpoint sets on the request DTO so they can be asserted later.
        final Map<String, String> parametersSetToRequest = new HashMap<>();
        doAnswer(new Answer<Object>() {
            @Override
            public Object answer(InvocationOnMock invocation) throws Throwable {

                OAuth2AccessTokenReqDTO request = (OAuth2AccessTokenReqDTO) invocation.getArguments()[0];
                parametersSetToRequest.put(OAuth.OAUTH_CODE, request.getAuthorizationCode());
                parametersSetToRequest.put(OAuth.OAUTH_USERNAME, request.getResourceOwnerUsername());
                parametersSetToRequest.put(OAuth.OAUTH_PASSWORD, request.getResourceOwnerPassword());
                parametersSetToRequest.put(OAuth.OAUTH_REFRESH_TOKEN, request.getRefreshToken());
                parametersSetToRequest.put(OAuth.OAUTH_ASSERTION, request.getAssertion());
                parametersSetToRequest.put(OAuthConstants.WINDOWS_TOKEN, request.getWindowsToken());
                parametersSetToRequest.put(OAuth.OAUTH_GRANT_TYPE, request.getGrantType());
                OAuth2AccessTokenRespDTO tokenRespDTO = new OAuth2AccessTokenRespDTO();
                return tokenRespDTO;
            }
        }).when(oAuth2Service).issueAccessToken(any(OAuth2AccessTokenReqDTO.class));

        CarbonOAuthTokenRequest oauthRequest = new CarbonOAuthTokenRequest(request);
        HttpServletRequestWrapper httpServletRequestWrapper = new HttpServletRequestWrapper(request);

        // The method under test is private, so invoke it reflectively on a fresh endpoint instance.
        Class<?> clazz = OAuth2TokenEndpoint.class;
        Object tokenEndpointObj = clazz.newInstance();
        Method getAccessToken = tokenEndpointObj.getClass().
                getDeclaredMethod("issueAccessToken", CarbonOAuthTokenRequest.class, HttpServletRequestWrapper.class);
        getAccessToken.setAccessible(true);
        OAuth2AccessTokenRespDTO tokenRespDTO = (OAuth2AccessTokenRespDTO) getAccessToken.invoke(tokenEndpointObj,
                oauthRequest, httpServletRequestWrapper);

        assertNotNull(tokenRespDTO, "ResponseDTO is null");
        // Every grant-specific parameter captured from the DTO must be non-null.
        String[] paramsToCheck = additionalParameters.split(",");
        for (String param : paramsToCheck) {
            assertNotNull(parametersSetToRequest.get(param), "Required parameter " + param + " is not set for " +
                    grantType + "grant type");
        }
    }

    /**
     * Builds a Mockito-backed {@link HttpServletRequest} whose parameter and attribute accessors are
     * driven by the given maps. Attributes set through the mock are written back into
     * {@code requestAttributes} so the caller can observe them.
     *
     * @param requestParams     backing map for {@code getParameter}/{@code getParameterMap}/{@code getParameterNames}
     * @param requestAttributes backing map for {@code getAttribute}/{@code setAttribute}
     * @return the mocked POST request with a url-encoded content type
     */
    private HttpServletRequest mockHttpRequest(final Map<String, String[]> requestParams,
                                               final Map<String, Object> requestAttributes) {

        HttpServletRequest httpServletRequest = mock(HttpServletRequest.class);
        // getParameter returns the first value of the parameter array, or null when absent.
        doAnswer(new Answer<Object>() {
            @Override
            public Object answer(InvocationOnMock invocation) throws Throwable {

                String key = (String) invocation.getArguments()[0];
                return requestParams.get(key) != null ? requestParams.get(key)[0] : null;
            }
        }).when(httpServletRequest).getParameter(anyString());

        doAnswer(new Answer<Object>() {
            @Override
            public Object answer(InvocationOnMock invocation) throws Throwable {

                String key = (String) invocation.getArguments()[0];
                return requestAttributes.get(key);
            }
        }).when(httpServletRequest).getAttribute(anyString());

        doAnswer(new Answer<Object>() {
            @Override
            public Object answer(InvocationOnMock invocation) throws Throwable {

                String key = (String) invocation.getArguments()[0];
                Object value = invocation.getArguments()[1];
                requestAttributes.put(key, value);
                return null;
            }
        }).when(httpServletRequest).setAttribute(anyString(), Matchers.anyObject());

        when(httpServletRequest.getParameterMap()).thenReturn(requestParams);
        when(httpServletRequest.getParameterNames()).thenReturn(
                new IteratorEnumeration(requestParams.keySet().iterator()));
        when(httpServletRequest.getMethod()).thenReturn(HttpMethod.POST);
        when(httpServletRequest.getContentType()).thenReturn(OAuth.ContentType.URL_ENCODED);
        return httpServletRequest;
    }

    /**
     * Installs a static mock of {@link OAuthServerConfiguration} whose persistence processor echoes
     * back the client id it is asked to process (identity pass-through).
     */
    private void mockOAuthServerConfiguration() throws Exception {

        mockStatic(OAuthServerConfiguration.class);
        when(OAuthServerConfiguration.getInstance()).thenReturn(oAuthServerConfiguration);
        when(oAuthServerConfiguration.getPersistenceProcessor()).thenReturn(tokenPersistenceProcessor);
        when(tokenPersistenceProcessor.getProcessedClientId(anyString())).thenAnswer(new Answer<Object>() {
            @Override
            public Object answer(InvocationOnMock invocation) throws Throwable {

                return (String) invocation.getArguments()[0];
            }
        });
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.cxf.jaxrs; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import javax.net.ssl.HostnameVerifier; import org.apache.camel.Component; import org.apache.camel.Consumer; import org.apache.camel.Processor; import org.apache.camel.Producer; import org.apache.camel.RuntimeCamelException; import org.apache.camel.Service; import org.apache.camel.component.cxf.NullFaultListener; import org.apache.camel.http.common.cookie.CookieHandler; import org.apache.camel.spi.HeaderFilterStrategy; import org.apache.camel.spi.HeaderFilterStrategyAware; import org.apache.camel.spi.UriEndpoint; import org.apache.camel.spi.UriParam; import org.apache.camel.spi.UriPath; import org.apache.camel.support.DefaultEndpoint; import org.apache.camel.support.EndpointHelper; import org.apache.camel.support.SynchronousDelegateProducer; import org.apache.camel.support.jsse.SSLContextParameters; import org.apache.cxf.Bus; import org.apache.cxf.BusFactory; import org.apache.cxf.common.util.ModCountCopyOnWriteArrayList; import org.apache.cxf.common.util.StringUtils; import 
org.apache.cxf.ext.logging.LoggingFeature;
import org.apache.cxf.feature.Feature;
import org.apache.cxf.interceptor.AbstractBasicInterceptorProvider;
import org.apache.cxf.interceptor.Interceptor;
import org.apache.cxf.jaxrs.AbstractJAXRSFactoryBean;
import org.apache.cxf.jaxrs.JAXRSServerFactoryBean;
import org.apache.cxf.jaxrs.client.JAXRSClientFactoryBean;
import org.apache.cxf.jaxrs.model.ClassResourceInfo;
import org.apache.cxf.jaxrs.model.UserResource;
import org.apache.cxf.jaxrs.utils.ResourceUtils;
import org.apache.cxf.logging.FaultListener;
import org.apache.cxf.message.Message;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * The cxfrs component is used for JAX-RS REST services using Apache CXF.
 */
@UriEndpoint(firstVersion = "2.0.0", scheme = "cxfrs", title = "CXF-RS", syntax = "cxfrs:beanId:address", label = "rest", lenientProperties = true)
public class CxfRsEndpoint extends DefaultEndpoint implements HeaderFilterStrategyAware, Service {

    private static final Logger LOG = LoggerFactory.getLogger(CxfRsEndpoint.class);

    @UriParam(label = "advanced")
    protected Bus bus;

    // Holds in/out/fault interceptor lists; copied onto the CXF factory beans at setup time.
    private final InterceptorHolder interceptorHolder = new InterceptorHolder();
    private Map<String, String> parameters;
    private Map<String, Object> properties;

    @UriPath(description = "To lookup an existing configured CxfRsEndpoint. Must used bean: as prefix.")
    private String beanId;
    @UriPath
    private String address;
    @UriParam
    private List<Class<?>> resourceClasses;
    @UriParam(label = "consumer,advanced")
    private String serviceBeans;
    @UriParam
    private String modelRef;
    @UriParam(label = "consumer", defaultValue = "Default")
    private BindingStyle bindingStyle = BindingStyle.Default;
    @UriParam(label = "consumer")
    private String publishedEndpointUrl;
    @UriParam(label = "advanced")
    private HeaderFilterStrategy headerFilterStrategy;
    @UriParam(label = "advanced")
    private CxfRsBinding binding;
    @UriParam(javaType = "java.lang.String")
    private List<Object> providers = new LinkedList<>();
    // Comma-separated registry references to providers; resolved lazily in doStart().
    private String providersRef;
    @UriParam
    private List<String> schemaLocations;
    @UriParam
    private List<Feature> features = new ModCountCopyOnWriteArrayList<>();
    @UriParam(label = "producer,advanced", defaultValue = "true")
    private boolean httpClientAPI = true;
    @UriParam(label = "producer,advanced")
    private boolean ignoreDeleteMethodMessageBody;
    @UriParam(label = "producer", defaultValue = "true")
    private boolean throwExceptionOnFailure = true;
    @UriParam(label = "producer,advanced", defaultValue = "10")
    private int maxClientCacheSize = 10;
    @UriParam(label = "producer")
    private SSLContextParameters sslContextParameters;
    @UriParam(label = "producer")
    private HostnameVerifier hostnameVerifier;
    @UriParam
    private boolean loggingFeatureEnabled;
    @UriParam
    private int loggingSizeLimit;
    @UriParam
    private boolean skipFaultLogging;
    @UriParam(label = "advanced", defaultValue = "30000")
    private long continuationTimeout = 30000;
    @UriParam(label = "advanced")
    private boolean defaultBus;
    @UriParam(label = "advanced")
    private boolean performInvocation;
    @UriParam(label = "advanced")
    private boolean propagateContexts;
    @UriParam(label = "advanced")
    private CxfRsConfigurer cxfRsConfigurer;
    @UriParam(label = "producer")
    private CookieHandler cookieHandler;

    public CxfRsEndpoint() {
    }

    public CxfRsEndpoint(String endpointUri, Component component) {
        super(endpointUri, component);
        setAddress(endpointUri);
    }

    @Override
    public boolean isLenientProperties() {
        // Unknown URI options are tolerated (they may be CXF options rather than Camel ones).
        return true;
    }

    // This method is for CxfRsComponent setting the EndpointUri
    protected void updateEndpointUri(String endpointUri) {
        super.setEndpointUri(endpointUri);
    }

    public void setParameters(Map<String, String> param) {
        parameters = param;
    }

    public Map<String, String> getParameters() {
        return parameters;
    }

    /**
     * If it is true, the CxfRsProducer will use the HttpClientAPI to invoke the service.
     * If it is false, the CxfRsProducer will use the ProxyClientAPI to invoke the service
     */
    public void setHttpClientAPI(boolean clientAPI) {
        httpClientAPI = clientAPI;
    }

    public boolean isHttpClientAPI() {
        return httpClientAPI;
    }

    @Override
    public HeaderFilterStrategy getHeaderFilterStrategy() {
        return headerFilterStrategy;
    }

    /**
     * To use a custom HeaderFilterStrategy to filter header to and from Camel message.
     */
    @Override
    public void setHeaderFilterStrategy(HeaderFilterStrategy strategy) {
        headerFilterStrategy = strategy;
    }

    @Override
    public Consumer createConsumer(Processor processor) throws Exception {
        CxfRsConsumer answer = new CxfRsConsumer(this, processor);
        configureConsumer(answer);
        return answer;
    }

    @Override
    public Producer createProducer() throws Exception {
        // The SimpleConsumer binding style only makes sense on the consumer side.
        if (bindingStyle == BindingStyle.SimpleConsumer) {
            throw new IllegalArgumentException("The SimpleConsumer Binding Style cannot be used in a camel-cxfrs producer");
        }
        final CxfRsProducer cxfRsProducer = new CxfRsProducer(this);
        if (isSynchronous()) {
            return new SynchronousDelegateProducer(cxfRsProducer);
        } else {
            return cxfRsProducer;
        }
    }

    /**
     * To use a custom CxfBinding to control the binding between Camel Message and CXF Message.
     */
    public void setBinding(CxfRsBinding binding) {
        this.binding = binding;
    }

    public CxfRsBinding getBinding() {
        return binding;
    }

    public boolean isSkipFaultLogging() {
        return skipFaultLogging;
    }

    // Chains the user-supplied configurer with the SSL and hostname-verifier configurers.
    public CxfRsConfigurer getChainedCxfRsEndpointConfigurer() {
        return ChainedCxfRsConfigurer
                .create(getNullSafeCxfRsEndpointConfigurer(),
                        SslCxfRsConfigurer.create(sslContextParameters, getCamelContext()))
                .addChild(HostnameVerifierCxfRsConfigurer.create(hostnameVerifier));
    }

    /**
     * This option controls whether the PhaseInterceptorChain skips logging the Fault that it catches.
     */
    public void setSkipFaultLogging(boolean skipFaultLogging) {
        this.skipFaultLogging = skipFaultLogging;
    }

    // Fails fast when a configured bean is not of the expected type.
    protected void checkBeanType(Object object, Class<?> clazz) {
        if (!clazz.isAssignableFrom(object.getClass())) {
            throw new IllegalArgumentException("The configure bean is not the instance of " + clazz.getName());
        }
    }

    // Applies address, model, resource classes/beans and common properties to the server factory bean.
    protected void setupJAXRSServerFactoryBean(JAXRSServerFactoryBean sfb) {
        // address
        if (getAddress() != null) {
            sfb.setAddress(getAddress());
        }
        processResourceModel(sfb);
        if (getResourceClasses() != null) {
            sfb.setResourceClasses(getResourceClasses());
        }
        if (serviceBeans != null) {
            List<Object> beans = EndpointHelper.resolveReferenceListParameter(getCamelContext(), serviceBeans, Object.class);
            sfb.setServiceBeans(beans);
        }
        // setup the resource providers for interfaces
        List<ClassResourceInfo> cris = sfb.getServiceFactory().getClassResourceInfo();
        for (ClassResourceInfo cri : cris) {
            final Class<?> serviceClass = cri.getServiceClass();
            if (serviceClass.isInterface()) {
                cri.setResourceProvider(new CamelResourceProvider(serviceClass));
            }
        }
        setupCommonFactoryProperties(sfb);
        sfb.setStart(false);
        getNullSafeCxfRsEndpointConfigurer().configure(sfb);
    }

    // Returns the configured CxfRsConfigurer, or a no-op one when none is set.
    protected CxfRsConfigurer getNullSafeCxfRsEndpointConfigurer() {
        if (cxfRsConfigurer == null) {
            return new ChainedCxfRsConfigurer.NullCxfRsConfigurer();
        }
        return cxfRsConfigurer;
    }

    private void processResourceModel(JAXRSServerFactoryBean sfb) {
        // Currently a CXF model document is the only possible source
        // of the model. Other sources will be supported going forward
        if (modelRef != null) {
            List<UserResource> resources = ResourceUtils.getUserResources(modelRef, sfb.getBus());
            processUserResources(sfb, resources);
        }
    }

    /*
     * Prepare model beans and set them on the factory.
     * The model beans can be created from a variety of sources such as
     * CXF Model extensions but also other documents (to be supported in the future).
     */
    private void processUserResources(JAXRSServerFactoryBean sfb, List<UserResource> resources) {
        for (UserResource resource : resources) {
            if (StringUtils.isEmpty(resource.getName())) {
                resource.setName(DefaultModelResource.class.getName());
            }
        }
        // The CXF to Camel exchange binding may need to be customized
        // for the operation name, request, response types be derived from
        // the model info (when a given model does provide this info) as opposed
        // to a matched method which is of no real use with a default handler.
        sfb.setModelBeans(resources);
    }

    // Applies model, resource class and common properties to the client factory bean.
    protected void setupJAXRSClientFactoryBean(JAXRSClientFactoryBean cfb, String address) {
        if (modelRef != null) {
            cfb.setModelRef(modelRef);
        }
        if (getResourceClasses() != null && !getResourceClasses().isEmpty()) {
            cfb.setResourceClass(getResourceClasses().get(0));
            cfb.getServiceFactory().setResourceClasses(getResourceClasses());
        }
        setupCommonFactoryProperties(cfb);
        cfb.setThreadSafe(true);
        getNullSafeCxfRsEndpointConfigurer().configure(cfb);
        // Add the address could be override by message header
        if (address != null) {
            cfb.setAddress(address);
        }
    }

    // Copies providers, features, interceptors, properties and logging settings onto the factory.
    protected void setupCommonFactoryProperties(AbstractJAXRSFactoryBean factory) {
        // let customer to override the default setting of provider
        if (!getProviders().isEmpty()) {
            factory.setProviders(getProviders());
        }
        // setup the features
        if (!getFeatures().isEmpty()) {
            factory.getFeatures().addAll(getFeatures());
        }
        if (publishedEndpointUrl != null) {
            factory.setPublishedEndpointUrl(publishedEndpointUrl);
        }
        // we need to avoid flushing the setting from spring or blueprint
        if (!interceptorHolder.getInInterceptors().isEmpty()) {
            factory.setInInterceptors(interceptorHolder.getInInterceptors());
        }
        if (!interceptorHolder.getOutInterceptors().isEmpty()) {
            factory.setOutInterceptors(interceptorHolder.getOutInterceptors());
        }
        if (!interceptorHolder.getOutFaultInterceptors().isEmpty()) {
            factory.setOutFaultInterceptors(interceptorHolder.getOutFaultInterceptors());
        }
        if (!interceptorHolder.getInFaultInterceptors().isEmpty()) {
            factory.setInFaultInterceptors(interceptorHolder.getInFaultInterceptors());
        }
        if (getProperties() != null) {
            if (factory.getProperties() != null) {
                // add to existing properties
                factory.getProperties().putAll(getProperties());
            } else {
                factory.setProperties(getProperties());
            }
            LOG.debug("JAXRS FactoryBean: {} added properties: {}", factory, getProperties());
        }
        if (isLoggingFeatureEnabled()) {
            LoggingFeature loggingFeature = new LoggingFeature();
            if (getLoggingSizeLimit() > 0) {
                loggingFeature.setLimit(getLoggingSizeLimit());
            }
            factory.getFeatures().add(loggingFeature);
        }
        if (this.isSkipFaultLogging()) {
            if (factory.getProperties() == null) {
                factory.setProperties(new HashMap<String, Object>());
            }
            factory.getProperties().put(FaultListener.class.getName(), new NullFaultListener());
        }
    }

    protected JAXRSServerFactoryBean newJAXRSServerFactoryBean() {
        return new JAXRSServerFactoryBean() {
            protected boolean isValidClassResourceInfo(ClassResourceInfo cri) {
                // CXF will consider interfaces created for managing model resources
                // invalid - however it is fine with Camel processors if no service invocation
                // is requested.
                return !performInvocation || !cri.getServiceClass().isInterface();
            }
        };
    }

    protected JAXRSClientFactoryBean newJAXRSClientFactoryBean() {
        return new JAXRSClientFactoryBean();
    }

    // Resolves Camel property placeholders in the given string when a CamelContext is available.
    protected String resolvePropertyPlaceholders(String str) {
        try {
            if (getCamelContext() != null) {
                return getCamelContext().resolvePropertyPlaceholders(str);
            } else {
                return str;
            }
        } catch (Exception ex) {
            throw RuntimeCamelException.wrapRuntimeCamelException(ex);
        }
    }

    public JAXRSServerFactoryBean createJAXRSServerFactoryBean() {
        JAXRSServerFactoryBean answer = newJAXRSServerFactoryBean();
        setupJAXRSServerFactoryBean(answer);
        return answer;
    }

    public JAXRSClientFactoryBean createJAXRSClientFactoryBean() {
        return createJAXRSClientFactoryBean(getAddress());
    }

    public JAXRSClientFactoryBean createJAXRSClientFactoryBean(String address) {
        JAXRSClientFactoryBean answer = newJAXRSClientFactoryBean();
        setupJAXRSClientFactoryBean(answer, address);
        return answer;
    }

    public List<Class<?>> getResourceClasses() {
        return resourceClasses;
    }

    public void addResourceClass(Class<?> resourceClass) {
        if (resourceClasses == null) {
            resourceClasses = new ArrayList<>();
        }
        resourceClasses.add(resourceClass);
    }

    /**
     * The resource classes which you want to export as REST service. Multiple classes can be separated by comma.
     */
    public void setResourceClasses(List<Class<?>> resourceClasses) {
        this.resourceClasses = resourceClasses;
    }

    public void setResourceClasses(Class<?>... classes) {
        setResourceClasses(Arrays.asList(classes));
    }

    public String getServiceBeans() {
        return serviceBeans;
    }

    /**
     * The service beans (the bean ids to lookup in the registry) which you want to export as REST service.
     * Multiple beans can be separated by comma
     */
    public void setServiceBeans(String beans) {
        this.serviceBeans = beans;
    }

    /**
     * The service publish address.
     */
    public void setAddress(String address) {
        this.address = address;
    }

    public String getModelRef() {
        return modelRef;
    }

    /**
     * This option is used to specify the model file which is useful for the resource class without annotation.
     * When using this option, then the service class can be omitted, to emulate document-only endpoints
     */
    public void setModelRef(String ref) {
        this.modelRef = ref;
    }

    public String getAddress() {
        return resolvePropertyPlaceholders(address);
    }

    public String getPublishedEndpointUrl() {
        return publishedEndpointUrl;
    }

    /**
     * This option can override the endpointUrl that published from the WADL which can be accessed with resource address url plus ?_wadl
     */
    public void setPublishedEndpointUrl(String publishedEndpointUrl) {
        this.publishedEndpointUrl = publishedEndpointUrl;
    }

    /**
     * This option enables CXF Logging Feature which writes inbound and outbound REST messages to log.
     */
    public boolean isLoggingFeatureEnabled() {
        return loggingFeatureEnabled;
    }

    public void setLoggingFeatureEnabled(boolean loggingFeatureEnabled) {
        this.loggingFeatureEnabled = loggingFeatureEnabled;
    }

    public int getLoggingSizeLimit() {
        return loggingSizeLimit;
    }

    /**
     * To limit the total size of number of bytes the logger will output when logging feature has been enabled.
     */
    public void setLoggingSizeLimit(int loggingSizeLimit) {
        this.loggingSizeLimit = loggingSizeLimit;
    }

    public boolean isThrowExceptionOnFailure() {
        return throwExceptionOnFailure;
    }

    /**
     * This option tells the CxfRsProducer to inspect return codes and will generate an Exception if the return code is larger than 207.
     */
    public void setThrowExceptionOnFailure(boolean throwExceptionOnFailure) {
        this.throwExceptionOnFailure = throwExceptionOnFailure;
    }

    /**
     * This option allows you to configure the maximum size of the cache.
     * The implementation caches CXF clients or ClientFactoryBean in CxfProvider and CxfRsProvider.
     */
    public void setMaxClientCacheSize(int maxClientCacheSize) {
        this.maxClientCacheSize = maxClientCacheSize;
    }

    public int getMaxClientCacheSize() {
        return maxClientCacheSize;
    }

    /**
     * To use a custom configured CXF Bus.
     */
    public void setBus(Bus bus) {
        this.bus = bus;
        if (defaultBus) {
            BusFactory.setDefaultBus(bus);
            LOG.debug("Set bus {} as thread default bus", bus);
        }
    }

    public Bus getBus() {
        return bus;
    }

    /**
     * Will set the default bus when CXF endpoint create a bus by itself
     */
    public void setDefaultBus(boolean isSetDefaultBus) {
        this.defaultBus = isSetDefaultBus;
    }

    public boolean isDefaultBus() {
        return defaultBus;
    }

    public boolean isIgnoreDeleteMethodMessageBody() {
        return ignoreDeleteMethodMessageBody;
    }

    /**
     * This option is used to tell CxfRsProducer to ignore the message body of the DELETE method when using HTTP API.
     */
    public void setIgnoreDeleteMethodMessageBody(boolean ignoreDeleteMethodMessageBody) {
        this.ignoreDeleteMethodMessageBody = ignoreDeleteMethodMessageBody;
    }

    public BindingStyle getBindingStyle() {
        return bindingStyle;
    }

    public List<?> getProviders() {
        return providers;
    }

    /**
     * Set custom JAX-RS provider(s) list to the CxfRs endpoint.
     * You can specify a string with a list of providers to lookup in the registy separated by comma.
     */
    public void setProviders(List<?> providers) {
        this.providers.addAll(providers);
    }

    /**
     * Set custom JAX-RS provider(s) list which is looked up in the registry. Multiple entries can be separated by comma.
     */
    public void setProviders(String providers) {
        this.providersRef = providers;
    }

    /**
     * Set custom JAX-RS provider to the CxfRs endpoint.
     */
    public void setProvider(Object provider) {
        providers.add(provider);
    }

    /**
     * Sets the locations of the schema(s) which can be used to validate the incoming XML or JAXB-driven JSON.
     */
    public void setSchemaLocation(String schema) {
        setSchemaLocations(Collections.singletonList(schema));
    }

    /**
     * Sets the locations of the schema(s) which can be used to validate the incoming XML or JAXB-driven JSON.
     */
    public void setSchemaLocations(List<String> schemas) {
        this.schemaLocations = schemas;
    }

    public List<String> getSchemaLocations() {
        return schemaLocations;
    }

    public List<Interceptor<? extends Message>> getOutFaultInterceptors() {
        return interceptorHolder.getOutFaultInterceptors();
    }

    public List<Interceptor<? extends Message>> getInFaultInterceptors() {
        return interceptorHolder.getInFaultInterceptors();
    }

    public List<Interceptor<? extends Message>> getInInterceptors() {
        return interceptorHolder.getInInterceptors();
    }

    public List<Interceptor<? extends Message>> getOutInterceptors() {
        return interceptorHolder.getOutInterceptors();
    }

    /**
     * Set the inInterceptors to the CxfRs endpoint.
     */
    public void setInInterceptors(List<Interceptor<? extends Message>> interceptors) {
        interceptorHolder.setInInterceptors(interceptors);
    }

    /**
     * Set the inFaultInterceptors to the CxfRs endpoint.
     */
    public void setInFaultInterceptors(List<Interceptor<? extends Message>> interceptors) {
        interceptorHolder.setInFaultInterceptors(interceptors);
    }

    /**
     * Set the outInterceptor to the CxfRs endpoint.
     */
    public void setOutInterceptors(List<Interceptor<? extends Message>> interceptors) {
        interceptorHolder.setOutInterceptors(interceptors);
    }

    /**
     * Set the outFaultInterceptors to the CxfRs endpoint.
     */
    public void setOutFaultInterceptors(List<Interceptor<? extends Message>> interceptors) {
        interceptorHolder.setOutFaultInterceptors(interceptors);
    }

    public List<Feature> getFeatures() {
        return features;
    }

    /**
     * Set the feature list to the CxfRs endpoint.
     */
    public void setFeatures(List<Feature> features) {
        this.features = features;
    }

    public Map<String, Object> getProperties() {
        return properties;
    }

    // Merges into existing properties rather than replacing them when already set.
    public void setProperties(Map<String, Object> properties) {
        if (this.properties == null) {
            this.properties = properties;
        } else {
            this.properties.putAll(properties);
        }
    }

    /**
     * Sets how requests and responses will be mapped to/from Camel. Two values are possible:
     * <ul>
     * <li>SimpleConsumer: This binding style processes request parameters, multiparts, etc. and maps them to IN headers, IN attachments and to the message body.
     * It aims to eliminate low-level processing of {@link org.apache.cxf.message.MessageContentsList}.
     * It also adds more flexibility and simplicity to the response mapping.
     * Only available for consumers.
     * </li>
     * <li>Default: The default style. For consumers this passes on a MessageContentsList to the route, requiring low-level processing in the route.
     * This is the traditional binding style, which simply dumps the {@link org.apache.cxf.message.MessageContentsList} coming in from the CXF stack
     * onto the IN message body. The user is then responsible for processing it according to the contract defined by the JAX-RS method signature.
     * </li>
     * <li>Custom: allows you to specify a custom binding through the binding option.</li>
     * </ul>
     */
    public void setBindingStyle(BindingStyle bindingStyle) {
        this.bindingStyle = bindingStyle;
    }

    public String getBeanId() {
        return beanId;
    }

    public void setBeanId(String beanId) {
        this.beanId = beanId;
    }

    @Override
    protected void doStart() throws Exception {
        if (headerFilterStrategy == null) {
            headerFilterStrategy = new CxfRsHeaderFilterStrategy();
        }
        // if the user explicitly selected the Custom binding style, he must provide a binding
        if (bindingStyle == BindingStyle.Custom && binding == null) {
            throw new IllegalArgumentException("Custom binding style selected, but no binding was supplied");
        }
        // if the user has set a binding, do nothing, just make sure that BindingStyle = Custom for coherency purposes
        if (binding != null) {
            bindingStyle = BindingStyle.Custom;
        }
        // set the right binding based on the binding style
        if (bindingStyle == BindingStyle.SimpleConsumer) {
            binding = new SimpleCxfRsBinding();
        } else if (bindingStyle == BindingStyle.Custom) {
            // do nothing
        } else {
            binding = new DefaultCxfRsBinding();
        }
        if (binding instanceof HeaderFilterStrategyAware) {
            ((HeaderFilterStrategyAware) binding).setHeaderFilterStrategy(getHeaderFilterStrategy());
        }
        // resolve registry references configured via setProviders(String)
        if (providersRef != null) {
            String[] names = providersRef.split(",");
            for (String name : names) {
                Object provider = EndpointHelper.resolveReferenceParameter(getCamelContext(), name, Object.class, true);
                setProvider(provider);
            }
        }
    }

    @Override
    protected void doStop() throws Exception {
        // noop
    }

    public long getContinuationTimeout() {
        return continuationTimeout;
    }

    /**
     * This option is used to set the CXF continuation timeout which could be used in CxfConsumer by default when the CXF server is using Jetty or Servlet transport.
     */
    public void setContinuationTimeout(long continuationTimeout) {
        this.continuationTimeout = continuationTimeout;
    }

    public boolean isPerformInvocation() {
        return performInvocation;
    }

    /**
     * When the option is true, Camel will perform the invocation of the resource class instance and put the response object into the exchange for further processing.
     */
    public void setPerformInvocation(boolean performInvocation) {
        this.performInvocation = performInvocation;
    }

    public boolean isPropagateContexts() {
        return propagateContexts;
    }

    /**
     * When the option is true, JAXRS UriInfo, HttpHeaders, Request and SecurityContext contexts will be available to
     * custom CXFRS processors as typed Camel exchange properties.
     * These contexts can be used to analyze the current requests using JAX-RS API.
     */
    public void setPropagateContexts(boolean propagateContexts) {
        this.propagateContexts = propagateContexts;
    }

    // Simple concrete holder reusing CXF's interceptor-provider storage.
    private static class InterceptorHolder extends AbstractBasicInterceptorProvider {
    }

    public SSLContextParameters getSslContextParameters() {
        return sslContextParameters;
    }

    /**
     * The Camel SSL setting reference. Use the # notation to reference the SSL Context.
     */
    public void setSslContextParameters(SSLContextParameters sslContextParameters) {
        this.sslContextParameters = sslContextParameters;
    }

    public HostnameVerifier getHostnameVerifier() {
        return hostnameVerifier;
    }

    /**
     * The hostname verifier to be used. Use the # notation to reference a HostnameVerifier
     * from the registry.
     */
    public void setHostnameVerifier(HostnameVerifier hostnameVerifier) {
        this.hostnameVerifier = hostnameVerifier;
    }

    public CxfRsConfigurer getCxfRsConfigurer() {
        return cxfRsConfigurer;
    }

    /**
     * This option could apply the implementation of org.apache.camel.component.cxf.jaxrs.CxfRsEndpointConfigurer which supports to configure the CXF endpoint
     * in programmatic way. User can configure the CXF server and client by implementing configure{Server/Client} method of CxfEndpointConfigurer.
     */
    public void setCxfRsConfigurer(CxfRsConfigurer configurer) {
        this.cxfRsConfigurer = configurer;
    }

    public CookieHandler getCookieHandler() {
        return cookieHandler;
    }

    /**
     * Configure a cookie handler to maintain a HTTP session
     */
    public void setCookieHandler(CookieHandler cookieHandler) {
        this.cookieHandler = cookieHandler;
    }
}
package com.sakebook.android.template;

import android.app.Activity;
import android.app.ActionBar;
import android.app.Fragment;
import android.support.v4.app.ActionBarDrawerToggle;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.content.SharedPreferences;
import android.content.res.Configuration;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import android.widget.Toast;

/**
 * Fragment used for managing interactions for and presentation of a navigation drawer.
 * See the <a href="https://developer.android.com/design/patterns/navigation-drawer.html#Interaction">
 * design guidelines</a> for a complete explanation of the behaviors implemented here.
 */
public class NavigationDrawerFragment extends Fragment {

    /**
     * Remember the position of the selected item.
     */
    private static final String STATE_SELECTED_POSITION = "selected_navigation_drawer_position";

    /**
     * Per the design guidelines, you should show the drawer on launch until the user manually
     * expands it. This shared preference tracks this.
     */
    private static final String PREF_USER_LEARNED_DRAWER = "navigation_drawer_learned";

    /**
     * A pointer to the current callbacks instance (the Activity).
     */
    private NavigationDrawerCallbacks mCallbacks;

    /**
     * Helper component that ties the action bar to the navigation drawer.
     */
    private ActionBarDrawerToggle mDrawerToggle;

    // Views wired up in onCreateView()/setUp(); null until then.
    private DrawerLayout mDrawerLayout;
    private ListView mDrawerListView;
    private View mFragmentContainerView;

    // Currently checked drawer row; restored from saved state in onCreate().
    private int mCurrentSelectedPosition = 0;
    // True when mCurrentSelectedPosition came from savedInstanceState (suppresses auto-open).
    private boolean mFromSavedInstanceState;
    // Mirrors the PREF_USER_LEARNED_DRAWER shared preference.
    private boolean mUserLearnedDrawer;

    public NavigationDrawerFragment() {
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Read in the flag indicating whether or not the user has demonstrated awareness of the
        // drawer. See PREF_USER_LEARNED_DRAWER for details.
        SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(getActivity());
        mUserLearnedDrawer = sp.getBoolean(PREF_USER_LEARNED_DRAWER, false);

        if (savedInstanceState != null) {
            mCurrentSelectedPosition = savedInstanceState.getInt(STATE_SELECTED_POSITION);
            mFromSavedInstanceState = true;
        }

        // Select either the default item (0) or the last selected item.
        selectItem(mCurrentSelectedPosition);
    }

    @Override
    public void onActivityCreated (Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        // Indicate that this fragment would like to influence the set of actions in the action bar.
        setHasOptionsMenu(true);
    }

    /**
     * Inflates the drawer's list, installs the row-click handler, and populates the list with the
     * three static section titles. The ListView itself is the fragment's root view.
     */
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        mDrawerListView = (ListView) inflater.inflate(
                R.layout.fragment_navigation_drawer, container, false);
        mDrawerListView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                selectItem(position);
            }
        });
        // Themed context keeps the list styling consistent with the action bar.
        mDrawerListView.setAdapter(new ArrayAdapter<String>(
                getActionBar().getThemedContext(),
                android.R.layout.simple_list_item_activated_1,
                android.R.id.text1,
                new String[]{
                        getString(R.string.title_section1),
                        getString(R.string.title_section2),
                        getString(R.string.title_section3),
                }));
        mDrawerListView.setItemChecked(mCurrentSelectedPosition, true);
        return mDrawerListView;
    }

    /** Returns true once setUp() has run and the drawer pane is currently open. */
    public boolean isDrawerOpen() {
        return mDrawerLayout != null && mDrawerLayout.isDrawerOpen(mFragmentContainerView);
    }

    /**
     * Users of this fragment must call this method to set up the navigation drawer interactions.
     *
     * @param fragmentId   The android:id of this fragment in its activity's layout.
     * @param drawerLayout The DrawerLayout containing this fragment's UI.
     */
    public void setUp(int fragmentId, DrawerLayout drawerLayout) {
        mFragmentContainerView = getActivity().findViewById(fragmentId);
        mDrawerLayout = drawerLayout;

        // set a custom shadow that overlays the main content when the drawer opens
        mDrawerLayout.setDrawerShadow(R.drawable.drawer_shadow, GravityCompat.START);
        // set up the drawer's list view with items and click listener

        ActionBar actionBar = getActionBar();
        actionBar.setDisplayHomeAsUpEnabled(true);
        actionBar.setHomeButtonEnabled(true);

        // ActionBarDrawerToggle ties together the proper interactions
        // between the navigation drawer and the action bar app icon.
        mDrawerToggle = new ActionBarDrawerToggle(
                getActivity(),                    /* host Activity */
                mDrawerLayout,                    /* DrawerLayout object */
                R.drawable.ic_drawer,             /* nav drawer image to replace 'Up' caret */
                R.string.navigation_drawer_open,  /* "open drawer" description for accessibility */
                R.string.navigation_drawer_close  /* "close drawer" description for accessibility */
        ) {
            @Override
            public void onDrawerClosed(View drawerView) {
                super.onDrawerClosed(drawerView);
                // Fragment may have been detached while the drawer was animating.
                if (!isAdded()) {
                    return;
                }

                getActivity().invalidateOptionsMenu(); // calls onPrepareOptionsMenu()
            }

            @Override
            public void onDrawerOpened(View drawerView) {
                super.onDrawerOpened(drawerView);
                if (!isAdded()) {
                    return;
                }

                if (!mUserLearnedDrawer) {
                    // The user manually opened the drawer; store this flag to prevent auto-showing
                    // the navigation drawer automatically in the future.
                    mUserLearnedDrawer = true;
                    SharedPreferences sp = PreferenceManager
                            .getDefaultSharedPreferences(getActivity());
                    sp.edit().putBoolean(PREF_USER_LEARNED_DRAWER, true).apply();
                }

                getActivity().invalidateOptionsMenu(); // calls onPrepareOptionsMenu()
            }
        };

        // If the user hasn't 'learned' about the drawer, open it to introduce them to the drawer,
        // per the navigation drawer design guidelines.
        if (!mUserLearnedDrawer && !mFromSavedInstanceState) {
            mDrawerLayout.openDrawer(mFragmentContainerView);
        }

        // Defer code dependent on restoration of previous instance state.
        mDrawerLayout.post(new Runnable() {
            @Override
            public void run() {
                mDrawerToggle.syncState();
            }
        });

        mDrawerLayout.setDrawerListener(mDrawerToggle);
    }

    /**
     * Checks the given row, closes the drawer, and notifies the host Activity.
     * Safe to call before the views exist (each collaborator is null-checked).
     */
    private void selectItem(int position) {
        mCurrentSelectedPosition = position;
        if (mDrawerListView != null) {
            mDrawerListView.setItemChecked(position, true);
        }
        if (mDrawerLayout != null) {
            mDrawerLayout.closeDrawer(mFragmentContainerView);
        }
        if (mCallbacks != null) {
            mCallbacks.onNavigationDrawerItemSelected(position);
        }
    }

    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
        try {
            // Host Activity is required to implement the callbacks contract.
            mCallbacks = (NavigationDrawerCallbacks) activity;
        } catch (ClassCastException e) {
            throw new ClassCastException("Activity must implement NavigationDrawerCallbacks.");
        }
    }

    @Override
    public void onDetach() {
        super.onDetach();
        // Drop the Activity reference to avoid leaking it.
        mCallbacks = null;
    }

    @Override
    public void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        outState.putInt(STATE_SELECTED_POSITION, mCurrentSelectedPosition);
    }

    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        // Forward the new configuration to the drawer toggle component.
        mDrawerToggle.onConfigurationChanged(newConfig);
    }

    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        // If the drawer is open, show the global app actions in the action bar. See also
        // showGlobalContextActionBar, which controls the top-left area of the action bar.
        if (mDrawerLayout != null && isDrawerOpen()) {
            inflater.inflate(R.menu.global, menu);
            showGlobalContextActionBar();
        }
        super.onCreateOptionsMenu(menu, inflater);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Give the drawer toggle first crack at the event (handles the home/up affordance).
        if (mDrawerToggle.onOptionsItemSelected(item)) {
            return true;
        }
        if (item.getItemId() == R.id.action_example) {
            Toast.makeText(getActivity(), "Example action.", Toast.LENGTH_SHORT).show();
            return true;
        }

        return super.onOptionsItemSelected(item);
    }

    /**
     * Per the navigation drawer design guidelines, updates the action bar to show the global app
     * 'context', rather than just what's in the current screen.
     */
    private void showGlobalContextActionBar() {
        ActionBar actionBar = getActionBar();
        actionBar.setDisplayShowTitleEnabled(true);
        actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD);
        actionBar.setTitle(R.string.app_name);
    }

    private ActionBar getActionBar() {
        return getActivity().getActionBar();
    }

    /**
     * Callbacks interface that all activities using this fragment must implement.
     */
    public static interface NavigationDrawerCallbacks {
        /**
         * Called when an item in the navigation drawer is selected.
         */
        void onNavigationDrawerItemSelected(int position);
    }
}
/*
 * Copyright (C) 2008-2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.renderscript;

import android.util.Log;

/**
 * @deprecated in API 16
 * <p>ProgramFragmentFixedFunction is a helper class that provides
 * a way to make a simple fragment shader without writing any
 * GLSL code. This class allows for display of constant color, interpolated
 * color from the vertex shader, or combinations of the both
 * blended with results of up to two texture lookups.</p>
 *
 **/
public class ProgramFragmentFixedFunction extends ProgramFragment {
    ProgramFragmentFixedFunction(int id, RenderScript rs) {
        super(id, rs);
    }

    static class InternalBuilder extends BaseProgramBuilder {
        /**
         * @deprecated in API 16
         */
        public InternalBuilder(RenderScript rs) {
            super(rs);
        }

        /**
         * @deprecated in API 16
         * Creates ProgramFragmentFixedFunction from the current state
         * of the builder
         *
         * @return  ProgramFragmentFixedFunction
         */
        public ProgramFragmentFixedFunction create() {
            mRS.validate();
            // Flattened (param-kind, value) pairs for every input/output/constant/texture,
            // in the order the native side expects.
            int[] tmp = new int[(mInputCount + mOutputCount + mConstantCount + mTextureCount) * 2];
            String[] texNames = new String[mTextureCount];
            int idx = 0;

            for (int i=0; i < mInputCount; i++) {
                tmp[idx++] = ProgramParam.INPUT.mID;
                tmp[idx++] = mInputs[i].getID(mRS);
            }
            for (int i=0; i < mOutputCount; i++) {
                tmp[idx++] = ProgramParam.OUTPUT.mID;
                tmp[idx++] = mOutputs[i].getID(mRS);
            }
            for (int i=0; i < mConstantCount; i++) {
                tmp[idx++] = ProgramParam.CONSTANT.mID;
                tmp[idx++] = mConstants[i].getID(mRS);
            }
            for (int i=0; i < mTextureCount; i++) {
                tmp[idx++] = ProgramParam.TEXTURE_TYPE.mID;
                tmp[idx++] = mTextureTypes[i].mID;
                texNames[i] = mTextureNames[i];
            }

            // Hand the assembled shader and parameter table to the native runtime.
            int id = mRS.nProgramFragmentCreate(mShader, texNames, tmp);
            ProgramFragmentFixedFunction pf = new ProgramFragmentFixedFunction(id, mRS);
            initProgram(pf);
            return pf;
        }
    }

    /**
     * @deprecated in API 16
     */
    public static class Builder {
        /**
         * @deprecated in API 16
         */
        public static final int MAX_TEXTURE = 2;
        int mNumTextures;
        boolean mPointSpriteEnable;
        boolean mVaryingColorEnable;
        String mShader;       // GLSL source assembled by buildShaderString()
        RenderScript mRS;

        /**
         * @deprecated in API 16
         * EnvMode describes how textures are combined with the existing
         * color in the fixed function fragment shader
         *
         **/
        public enum EnvMode {
            /**
             * @deprecated in API 16
             **/
            REPLACE (1),
            /**
             * @deprecated in API 16
             **/
            MODULATE (2),
            /**
             * @deprecated in API 16
             **/
            DECAL (3);

            int mID;
            EnvMode(int id) {
                mID = id;
            }
        }

        /**
         * @deprecated in API 16
         * Format describes the pixel format of textures in the fixed
         * function fragment shader and how they are sampled
         *
         **/
        public enum Format {
            /**
             * @deprecated in API 16
             **/
            ALPHA (1),
            /**
             * @deprecated in API 16
             **/
            LUMINANCE_ALPHA (2),
            /**
             * @deprecated in API 16
             **/
            RGB (3),
            /**
             * @deprecated in API 16
             **/
            RGBA (4);

            int mID;
            Format(int id) {
                mID = id;
            }
        }

        // One configured texture stage: how it combines (env) and its pixel format.
        private class Slot {
            EnvMode env;
            Format format;
            Slot(EnvMode _env, Format _fmt) {
                env = _env;
                format = _fmt;
            }
        }
        Slot[] mSlots;

        // Generates the GLSL fragment shader source matching the configured slots.
        // Color comes either from the varying (vertex shader) or a UNI_Color constant;
        // each texture stage then REPLACEs, MODULATEs, or DECALs over it.
        private void buildShaderString() {
            mShader  = "//rs_shader_internal\n";
            mShader += "varying lowp vec4 varColor;\n";
            mShader += "varying vec2 varTex0;\n";
            mShader += "void main() {\n";
            if (mVaryingColorEnable) {
                mShader += " lowp vec4 col = varColor;\n";
            } else {
                mShader += " lowp vec4 col = UNI_Color;\n";
            }

            if (mNumTextures != 0) {
                if (mPointSpriteEnable) {
                    mShader += " vec2 t0 = gl_PointCoord;\n";
                } else {
                    mShader += " vec2 t0 = varTex0.xy;\n";
                }
            }

            for(int i = 0; i < mNumTextures; i ++) {
                switch(mSlots[i].env) {
                case REPLACE:
                    switch (mSlots[i].format) {
                    case ALPHA:
                        mShader += " col.a = texture2D(UNI_Tex0, t0).a;\n";
                        break;
                    case LUMINANCE_ALPHA:
                        mShader += " col.rgba = texture2D(UNI_Tex0, t0).rgba;\n";
                        break;
                    case RGB:
                        mShader += " col.rgb = texture2D(UNI_Tex0, t0).rgb;\n";
                        break;
                    case RGBA:
                        mShader += " col.rgba = texture2D(UNI_Tex0, t0).rgba;\n";
                        break;
                    }
                    break;
                case MODULATE:
                    switch (mSlots[i].format) {
                    case ALPHA:
                        mShader += " col.a *= texture2D(UNI_Tex0, t0).a;\n";
                        break;
                    case LUMINANCE_ALPHA:
                        mShader += " col.rgba *= texture2D(UNI_Tex0, t0).rgba;\n";
                        break;
                    case RGB:
                        mShader += " col.rgb *= texture2D(UNI_Tex0, t0).rgb;\n";
                        break;
                    case RGBA:
                        mShader += " col.rgba *= texture2D(UNI_Tex0, t0).rgba;\n";
                        break;
                    }
                    break;
                case DECAL:
                    mShader += " col = texture2D(UNI_Tex0, t0);\n";
                    break;
                }
            }

            mShader += " gl_FragColor = col;\n";
            mShader += "}\n";
        }

        /**
         * @deprecated
         * Creates a builder for fixed function fragment program
         *
         * @param rs Context to which the program will belong.
         */
        public Builder(RenderScript rs) {
            mRS = rs;
            mSlots = new Slot[MAX_TEXTURE];
            mPointSpriteEnable = false;
        }

        /**
         * @deprecated in API 16
         * Adds a texture to be fetched as part of the fixed function
         * fragment program
         *
         * @param env specifies how the texture is combined with the
         *            current color
         * @param fmt specifies the format of the texture and how its
         *            components will be used to combine with the
         *            current color
         * @param slot index of the texture to apply the operations on
         *
         * @return this
         */
        public Builder setTexture(EnvMode env, Format fmt, int slot)
            throws IllegalArgumentException {
            if((slot < 0) || (slot >= MAX_TEXTURE)) {
                throw new IllegalArgumentException("MAX_TEXTURE exceeded.");
            }
            mSlots[slot] = new Slot(env, fmt);
            return this;
        }

        /**
         * @deprecated in API 16
         * Specifies whether the texture coordinate passed from the
         * vertex program is replaced with an openGL internal point
         * sprite texture coordinate
         *
         **/
        public Builder setPointSpriteTexCoordinateReplacement(boolean enable) {
            mPointSpriteEnable = enable;
            return this;
        }

        /**
         * @deprecated in API 16
         * Specifies whether the varying color passed from the vertex
         * program or the constant color set on the fragment program is
         * used in the final color calculation in the fixed function
         * fragment shader
         *
         **/
        public Builder setVaryingColor(boolean enable) {
            mVaryingColorEnable = enable;
            return this;
        }

        /**
         * @deprecated in API 16
         * Creates the fixed function fragment program from the current
         * state of the builder.
         *
         */
        public ProgramFragmentFixedFunction create() {
            InternalBuilder sb = new InternalBuilder(mRS);
            // Count only the slots that were actually configured via setTexture().
            mNumTextures = 0;
            for(int i = 0; i < MAX_TEXTURE; i ++) {
                if(mSlots[i] != null) {
                    mNumTextures ++;
                }
            }
            buildShaderString();
            sb.setShader(mShader);
            Type constType = null;
            if (!mVaryingColorEnable) {
                // Constant-color path: declare a single F32_4 "Color" uniform.
                Element.Builder b = new Element.Builder(mRS);
                b.add(Element.F32_4(mRS), "Color");
                Type.Builder typeBuilder = new Type.Builder(mRS, b.create());
                typeBuilder.setX(1);
                constType = typeBuilder.create();
                sb.addConstant(constType);
            }
            for (int i = 0; i < mNumTextures; i ++) {
                sb.addTexture(TextureType.TEXTURE_2D);
            }

            ProgramFragmentFixedFunction pf = sb.create();
            // NOTE(review): mTextureCount is set to MAX_TEXTURE rather than mNumTextures;
            // this matches the historical AOSP source — confirm before changing.
            pf.mTextureCount = MAX_TEXTURE;
            if (!mVaryingColorEnable) {
                // Default the constant color uniform to opaque white.
                Allocation constantData = Allocation.createTyped(mRS,constType);
                FieldPacker fp = new FieldPacker(16);
                Float4 f4 = new Float4(1.f, 1.f, 1.f, 1.f);
                fp.addF32(f4);
                constantData.setFromFieldPacker(0, fp);
                pf.bindConstants(constantData, 0);
            }
            return pf;
        }
    }
}
package com.mgaetan89.showsrage.fragment;

import android.app.Application;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.net.Uri;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.design.widget.FloatingActionButton;
import android.support.v4.app.FragmentActivity;
import android.support.v4.view.MenuItemCompat;
import android.support.v7.app.ActionBar;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.app.MediaRouteActionProvider;
import android.support.v7.app.MediaRouteDiscoveryFragment;
import android.support.v7.media.MediaControlIntent;
import android.support.v7.media.MediaRouteSelector;
import android.support.v7.media.MediaRouter;
import android.support.v7.widget.CardView;
import android.text.TextUtils;
import android.text.format.DateUtils;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import android.widget.Toast;

import com.mgaetan89.showsrage.Constants;
import com.mgaetan89.showsrage.R;
import com.mgaetan89.showsrage.ShowsRageApplication;
import com.mgaetan89.showsrage.helper.DateTimeHelper;
import com.mgaetan89.showsrage.helper.GenericCallback;
import com.mgaetan89.showsrage.model.Episode;
import com.mgaetan89.showsrage.model.GenericResponse;
import com.mgaetan89.showsrage.model.PlayingVideoData;
import com.mgaetan89.showsrage.model.Show;
import com.mgaetan89.showsrage.model.SingleEpisode;
import com.mgaetan89.showsrage.network.SickRageApi;

import java.lang.ref.WeakReference;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;

import retrofit.Callback;
import retrofit.RetrofitError;
import retrofit.client.Response;

/**
 * Displays the details of a single episode (air date, file info, plot, quality, status),
 * lets the user trigger an episode search, change the episode status, play the video in VLC,
 * and cast it via MediaRouter. Episode data is (re)loaded from the SickRage API; this
 * fragment is itself the Retrofit callback for that request.
 */
public class EpisodeDetailFragment extends MediaRouteDiscoveryFragment implements Callback<SingleEpisode>, View.OnClickListener {
	// Views bound in onCreateView() and released in onDestroyView(); null outside that window.
	@Nullable
	private TextView airs = null;

	@Nullable
	private MenuItem castMenu = null;

	// Episode currently displayed; set by displayEpisode().
	@Nullable
	private Episode episode = null;

	private int episodeNumber = 0;

	@Nullable
	private TextView fileSize = null;

	@Nullable
	private TextView location = null;

	@Nullable
	private CardView moreInformationLayout = null;

	@Nullable
	private TextView name = null;

	@Nullable
	private MenuItem playVideoMenu = null;

	@Nullable
	private TextView plot = null;

	@Nullable
	private CardView plotLayout = null;

	@Nullable
	private TextView quality = null;

	private int seasonNumber = 0;

	@Nullable
	private Show show = null;

	@Nullable
	private TextView status = null;

	public EpisodeDetailFragment() {
		this.setHasOptionsMenu(true);

		// Discover remote-playback routes (e.g. Chromecast) for the cast menu.
		this.setRouteSelector(new MediaRouteSelector.Builder()
				.addControlCategory(MediaControlIntent.CATEGORY_REMOTE_PLAYBACK)
				.build());
	}

	@Override
	public void failure(RetrofitError error) {
		// Best-effort refresh: on API failure we keep showing the episode passed in arguments.
		error.printStackTrace();
	}

	@Override
	public void onActivityCreated(Bundle savedInstanceState) {
		super.onActivityCreated(savedInstanceState);

		ActionBar actionBar = ((AppCompatActivity) this.getActivity()).getSupportActionBar();
		Bundle arguments = this.getArguments();
		Episode episode = (Episode) arguments.getSerializable(Constants.Bundle.EPISODE_MODEL);
		this.episodeNumber = arguments.getInt(Constants.Bundle.EPISODE_NUMBER, 0);
		this.seasonNumber = arguments.getInt(Constants.Bundle.SEASON_NUMBER, 0);

		if (actionBar != null) {
			// Season 0 is the "specials" pseudo-season.
			if (this.seasonNumber <= 0) {
				actionBar.setTitle(R.string.specials);
			} else {
				actionBar.setTitle(this.getString(R.string.season_number, this.seasonNumber));
			}
		}

		this.show = (Show) arguments.getSerializable(Constants.Bundle.SHOW_MODEL);

		// Show the episode from the arguments immediately, then refresh from the API.
		this.displayEpisode(episode);

		if (this.show != null) {
			SickRageApi.getInstance().getServices().getEpisode(this.show.getIndexerId(), this.seasonNumber, this.episodeNumber, this);
		}
	}

	@Override
	public void onClick(View view) {
		// FloatingActionButton handler: ask SickRage to search for this episode.
		if (this.show == null) {
			return;
		}

		// NOTE(review): arguments are passed as (episodeNumber, seasonNumber) — confirm that
		// R.string.episode_search declares its placeholders in that order.
		Toast.makeText(this.getActivity(), this.getString(R.string.episode_search, this.episodeNumber, this.seasonNumber), Toast.LENGTH_SHORT).show();

		SickRageApi.getInstance().getServices().searchEpisode(this.show.getIndexerId(), this.seasonNumber, this.episodeNumber, new GenericCallback(this.getActivity()));
	}

	@Override
	public MediaRouter.Callback onCreateCallback() {
		return new MediaRouterCallback(this);
	}

	@Override
	public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
		inflater.inflate(R.menu.episode, menu);

		this.castMenu = menu.findItem(R.id.menu_cast);
		MediaRouteActionProvider mediaRouteActionProvider = (MediaRouteActionProvider) MenuItemCompat.getActionProvider(this.castMenu);
		mediaRouteActionProvider.setRouteSelector(this.getRouteSelector());

		this.playVideoMenu = menu.findItem(R.id.menu_play_video);

		// Menu visibility depends on the episode's status and a user preference.
		this.displayStreamingMenus(this.episode);
	}

	@Nullable
	@Override
	public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
		View view = inflater.inflate(R.layout.fragment_episode_detail, container, false);

		if (view != null) {
			this.airs = (TextView) view.findViewById(R.id.episode_airs);
			this.fileSize = (TextView) view.findViewById(R.id.episode_file_size);
			this.location = (TextView) view.findViewById(R.id.episode_location);
			this.moreInformationLayout = (CardView) view.findViewById(R.id.episode_more_information_layout);
			this.name = (TextView) view.findViewById(R.id.episode_name);
			this.plot = (TextView) view.findViewById(R.id.episode_plot);
			this.plotLayout = (CardView) view.findViewById(R.id.episode_plot_layout);
			this.quality = (TextView) view.findViewById(R.id.episode_quality);
			this.status = (TextView) view.findViewById(R.id.episode_status);

			FloatingActionButton searchEpisode = (FloatingActionButton) view.findViewById(R.id.search_episode);

			if (searchEpisode != null) {
				searchEpisode.setOnClickListener(this);
			}
		}

		return view;
	}

	@Override
	public void onDestroyView() {
		// Drop view references so the destroyed view hierarchy can be collected.
		this.airs = null;
		this.fileSize = null;
		this.location = null;
		this.moreInformationLayout = null;
		this.name = null;
		this.plot = null;
		this.plotLayout = null;
		this.quality = null;
		this.status = null;

		super.onDestroyView();
	}

	@Override
	public boolean onOptionsItemSelected(MenuItem item) {
		switch (item.getItemId()) {
			case R.id.menu_episode_set_status_archived:
			case R.id.menu_episode_set_status_failed:
			case R.id.menu_episode_set_status_ignored:
			case R.id.menu_episode_set_status_skipped:
			case R.id.menu_episode_set_status_wanted:
				this.setEpisodeStatus(this.seasonNumber, this.episodeNumber, Episode.getStatusForMenuId(item.getItemId()));
				break;

			case R.id.menu_play_video:
				this.clickPlayVideo();
				return true;
		}

		return super.onOptionsItemSelected(item);
	}

	@Override
	public void success(SingleEpisode singleEpisode, Response response) {
		// Fresh data from the API; re-render and re-evaluate the streaming menus.
		this.displayEpisode(singleEpisode.getData());
		this.displayStreamingMenus(singleEpisode.getData());
	}

	// Streaming menus (cast / play) are shown only for downloaded episodes
	// when the "view_in_vlc" preference is enabled.
	private boolean areStreamingMenusVisible(@Nullable Episode episode) {
		FragmentActivity activity = this.getActivity();

		if (activity == null || episode == null) {
			return false;
		}

		SharedPreferences preferences = PreferenceManager.getDefaultSharedPreferences(activity);
		boolean episodeDownloaded = "Downloaded".equalsIgnoreCase(episode.getStatus());
		boolean viewInVlc = preferences.getBoolean("view_in_vlc", false);

		return episodeDownloaded && viewInVlc;
	}

	// Launches the episode video directly in VLC's player activity.
	private void clickPlayVideo() {
		Intent intent = new Intent(Intent.ACTION_VIEW, this.getEpisodeVideoUrl());
		intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
		intent.setClassName("org.videolan.vlc", "org.videolan.vlc.gui.video.VideoPlayerActivity");

		this.startActivity(intent);
	}

	// Renders the given episode into the bound views; hides sections whose data is absent
	// (no file → no "more information" card, empty plot → no plot card, etc.).
	private void displayEpisode(@Nullable Episode episode) {
		if (episode == null) {
			return;
		}

		this.episode = episode;

		if (this.airs != null) {
			this.airs.setText(this.getString(R.string.airs, DateTimeHelper.getRelativeDate(episode.getAirDate(), "yyyy-MM-dd", DateUtils.DAY_IN_MILLIS)));
			this.airs.setVisibility(View.VISIBLE);
		}

		if (episode.getFileSize() == 0L) {
			if (this.moreInformationLayout != null) {
				this.moreInformationLayout.setVisibility(View.GONE);
			}
		} else {
			if (this.fileSize != null) {
				this.fileSize.setText(this.getString(R.string.file_size, episode.getFileSizeHuman()));
			}

			if (this.location != null) {
				this.location.setText(this.getString(R.string.location, episode.getLocation()));
			}

			if (this.moreInformationLayout != null) {
				this.moreInformationLayout.setVisibility(View.VISIBLE);
			}
		}

		if (this.name != null) {
			this.name.setText(episode.getName());
			this.name.setVisibility(View.VISIBLE);
		}

		if (this.plot != null) {
			String description = episode.getDescription();

			if (TextUtils.isEmpty(description)) {
				if (this.plotLayout != null) {
					this.plotLayout.setVisibility(View.GONE);
				}
			} else {
				this.plot.setText(description);

				if (this.plotLayout != null) {
					this.plotLayout.setVisibility(View.VISIBLE);
				}
			}
		}

		if (this.quality != null) {
			String quality = episode.getQuality();

			if ("N/A".equalsIgnoreCase(quality)) {
				this.quality.setVisibility(View.GONE);
			} else {
				this.quality.setText(this.getString(R.string.quality, quality));
				this.quality.setVisibility(View.VISIBLE);
			}
		}

		if (this.status != null) {
			// Prefer the localized status string when a translation resource exists.
			int status = episode.getStatusTranslationResource();
			String statusString = episode.getStatus();

			if (status != 0) {
				statusString = this.getString(status);
			}

			this.status.setText(this.getString(R.string.status_value, statusString));
			this.status.setVisibility(View.VISIBLE);
		}
	}

	private void displayStreamingMenus(Episode episode) {
		boolean displayStreamingMenu = this.areStreamingMenusVisible(episode);

		if (this.castMenu != null) {
			this.castMenu.setVisible(displayStreamingMenu);
		}

		if (this.playVideoMenu != null) {
			this.playVideoMenu.setVisible(displayStreamingMenu);
		}
	}

	// Builds the server URL of the episode's video file, percent-encoding it via java.net.URI.
	// Falls back to the raw, unencoded URL if encoding fails.
	@NonNull
	private Uri getEpisodeVideoUrl() {
		String episodeUrl = SickRageApi.getInstance().getVideosUrl();

		if (this.show != null) {
			episodeUrl += this.show.getShowName() + "/";
		}

		if (this.episode != null) {
			episodeUrl += this.episode.getLocation();
		}

		try {
			URL url = new URL(episodeUrl);
			URI uri = new URI(url.getProtocol(), url.getUserInfo(), url.getHost(), url.getPort(), url.getPath(), url.getQuery(), url.getRef());

			return Uri.parse(uri.toString());
		} catch (MalformedURLException | URISyntaxException exception) {
			exception.printStackTrace();
		}

		return Uri.parse(episodeUrl);
	}

	// Asks whether an existing download should be replaced (force=1) or kept (force=0)
	// before sending the status change to the server.
	private void setEpisodeStatus(final int seasonNumber, final int episodeNumber, final String status) {
		if (this.show == null) {
			return;
		}

		final Callback<GenericResponse> callback = new GenericCallback(this.getActivity());
		final int indexerId = this.show.getIndexerId();

		new AlertDialog.Builder(this.getActivity())
				.setMessage(R.string.replace_existing_episode)
				.setPositiveButton(R.string.replace, new DialogInterface.OnClickListener() {
					@Override
					public void onClick(DialogInterface dialog, int which) {
						SickRageApi.getInstance().getServices().setEpisodeStatus(indexerId, seasonNumber, episodeNumber, 1, status, callback);
					}
				})
				.setNegativeButton(R.string.keep, new DialogInterface.OnClickListener() {
					@Override
					public void onClick(DialogInterface dialog, int which) {
						SickRageApi.getInstance().getServices().setEpisodeStatus(indexerId, seasonNumber, episodeNumber, 0, status, callback);
					}
				})
				.show();
	}

	/**
	 * Pushes the selected route and the current episode/show to the Application object
	 * when a media route is (un)selected. Holds the fragment weakly to avoid leaking it
	 * through the long-lived MediaRouter.
	 */
	private static final class MediaRouterCallback extends MediaRouter.Callback {
		// NOTE(review): field initializer is immediately overwritten by the constructor;
		// the inline new WeakReference<>(null) is redundant but harmless.
		@NonNull
		private WeakReference<EpisodeDetailFragment> fragmentReference = new WeakReference<>(null);

		private MediaRouterCallback(EpisodeDetailFragment fragment) {
			this.fragmentReference = new WeakReference<>(fragment);
		}

		@Override
		public void onRouteSelected(MediaRouter router, MediaRouter.RouteInfo route) {
			this.updateRemotePlayer(route);
		}

		@Override
		public void onRouteUnselected(MediaRouter router, MediaRouter.RouteInfo route) {
			this.updateRemotePlayer(route);
		}

		private void updateRemotePlayer(MediaRouter.RouteInfo route) {
			EpisodeDetailFragment fragment = this.fragmentReference.get();

			if (fragment == null) {
				return;
			}

			Application application = fragment.getActivity().getApplication();

			if (application instanceof ShowsRageApplication) {
				PlayingVideoData playingVideo = new PlayingVideoData();
				playingVideo.setEpisode(fragment.episode);
				playingVideo.setRoute(route);
				playingVideo.setShow(fragment.show);
				playingVideo.setVideoUri(fragment.getEpisodeVideoUrl());

				((ShowsRageApplication) application).setPlayingVideo(playingVideo);
			}
		}
	}
}
/* * Copyright 2012-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.android; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.oneOf; import static org.junit.Assert.assertThat; import com.facebook.buck.android.apkmodule.APKModule; import com.facebook.buck.android.apkmodule.APKModuleGraph; import com.facebook.buck.io.filesystem.ProjectFilesystem; import com.facebook.buck.model.BuildTargetFactory; import com.facebook.buck.rules.PathSourcePath; import com.facebook.buck.rules.SourcePath; import com.facebook.buck.rules.TargetGraph; import com.facebook.buck.step.Step; import com.facebook.buck.testutil.FakeProjectFilesystem; import com.facebook.buck.util.HumanReadableException; import com.facebook.buck.util.sha1.Sha1HashCode; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.ImmutableSet; import java.io.IOException; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Optional; import org.hamcrest.Matchers; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; public class PreDexedFilesSorterTest { @Rule public TemporaryFolder tempDir = new TemporaryFolder(); private static final String PRIMARY_DEX_PATTERN = "primary"; private static final long DEX_WEIGHT_LIMIT = 10 * 1024 * 1024; private static final int 
STANDARD_DEX_FILE_ESTIMATE = (int) DEX_WEIGHT_LIMIT / 10 - 1; private APKModuleGraph moduleGraph; private APKModule extraModule; @Before public void setUp() throws IOException { moduleGraph = new APKModuleGraph( TargetGraph.EMPTY, BuildTargetFactory.newInstance("//fakeTarget:yes"), Optional.empty()); extraModule = APKModule.of("extra"); } @Test public void testPrimaryOnly() throws IOException { int numberOfPrimaryDexes = 10; int numberOfSecondaryDexes = 0; int numberOfExtraDexes = 0; ImmutableMap<String, PreDexedFilesSorter.Result> sortResults = generatePreDexSorterResults( numberOfPrimaryDexes, numberOfSecondaryDexes, numberOfExtraDexes); for (String store : sortResults.keySet()) { assertThat(store, is(moduleGraph.getRootAPKModule().getName())); } assertThat(sortResults.size(), is(1)); } @Test(expected = HumanReadableException.class) public void testPrimaryOverFlow() throws IOException { int numberOfPrimaryDexes = 15; int numberOfSecondaryDexes = 0; int numberOfExtraDexes = 0; generatePreDexSorterResults(numberOfPrimaryDexes, numberOfSecondaryDexes, numberOfExtraDexes); } @Test public void testPrimaryAndSecondary() throws IOException { int numberOfPrimaryDexes = 10; int numberOfSecondaryDexes = 10; int numberOfExtraDexes = 0; ImmutableMap<String, PreDexedFilesSorter.Result> sortResults = generatePreDexSorterResults( numberOfPrimaryDexes, numberOfSecondaryDexes, numberOfExtraDexes); PreDexedFilesSorter.Result rootResult = sortResults.get(APKModuleGraph.ROOT_APKMODULE_NAME); for (String store : sortResults.keySet()) { assertThat(store, is(moduleGraph.getRootAPKModule().getName())); } assertThat(rootResult.primaryDexInputs.size(), is(numberOfPrimaryDexes)); assertThat(rootResult.secondaryOutputToInputs.keySet().size(), is(1)); assertThat(rootResult.secondaryOutputToInputs.size(), is(numberOfSecondaryDexes + 1)); } @Test public void testPrimaryAndMultipleSecondary() throws IOException { int numberOfPrimaryDexes = 10; int numberOfSecondaryDexes = 15; int 
numberOfExtraDexes = 0; ImmutableMap<String, PreDexedFilesSorter.Result> sortResults = generatePreDexSorterResults( numberOfPrimaryDexes, numberOfSecondaryDexes, numberOfExtraDexes); for (String store : sortResults.keySet()) { assertThat(store, is(moduleGraph.getRootAPKModule().getName())); } PreDexedFilesSorter.Result rootResult = sortResults.get(APKModuleGraph.ROOT_APKMODULE_NAME); assertThat(rootResult.metadataTxtDexEntries.size(), is(2)); for (DexWithClasses dexWithClasses : rootResult.metadataTxtDexEntries.values()) { assertThat(dexWithClasses.getClassNames().asList().get(0), Matchers.endsWith("/Canary")); } assertThat(rootResult.primaryDexInputs.size(), is(numberOfPrimaryDexes)); // check that we have 2 secondary stores assertThat(rootResult.secondaryOutputToInputs.keySet().size(), is(2)); // check that we have 11 secondary inputs + 2 from canaries assertThat(rootResult.secondaryOutputToInputs.size(), is(numberOfSecondaryDexes + 2)); } @Test public void testPrimaryAndExtraModule() throws IOException { int numberOfPrimaryDexes = 10; int numberOfSecondaryDexes = 0; int numberOfExtraDexes = 10; ImmutableMap<String, PreDexedFilesSorter.Result> sortResults = generatePreDexSorterResults( numberOfPrimaryDexes, numberOfSecondaryDexes, numberOfExtraDexes); for (String store : sortResults.keySet()) { assertThat(store, oneOf(moduleGraph.getRootAPKModule().getName(), extraModule.getName())); } assertThat(sortResults.size(), is(2)); PreDexedFilesSorter.Result rootResult = sortResults.get(APKModuleGraph.ROOT_APKMODULE_NAME); assertThat(rootResult.primaryDexInputs.size(), is(numberOfPrimaryDexes)); PreDexedFilesSorter.Result extraResult = sortResults.get(extraModule.getName()); assertThat(extraResult.metadataTxtDexEntries.size(), is(1)); assertThat(extraResult.secondaryOutputToInputs.size(), is(numberOfExtraDexes + 1)); for (DexWithClasses dexWithClasses : extraResult.metadataTxtDexEntries.values()) { assertThat(dexWithClasses.getClassNames().asList().get(0), 
Matchers.endsWith("/Canary")); } } @Test public void testPrimarySecondaryAndExtraModule() throws IOException { int numberOfPrimaryDexes = 10; int numberOfSecondaryDexes = 15; int numberOfExtraDexes = 15; ImmutableMap<String, PreDexedFilesSorter.Result> sortResults = generatePreDexSorterResults( numberOfPrimaryDexes, numberOfSecondaryDexes, numberOfExtraDexes); for (String store : sortResults.keySet()) { assertThat(store, oneOf(moduleGraph.getRootAPKModule().getName(), extraModule.getName())); } PreDexedFilesSorter.Result rootResult = sortResults.get(APKModuleGraph.ROOT_APKMODULE_NAME); PreDexedFilesSorter.Result extraResult = sortResults.get(extraModule.getName()); assertThat(sortResults.size(), is(2)); assertThat(rootResult.primaryDexInputs.size(), is(numberOfPrimaryDexes)); assertThat(rootResult.metadataTxtDexEntries.size(), is(2)); assertThat(extraResult.metadataTxtDexEntries.size(), is(2)); for (DexWithClasses dexWithClasses : rootResult.metadataTxtDexEntries.values()) { assertThat(dexWithClasses.getClassNames().asList().get(0), Matchers.endsWith("/Canary")); } for (DexWithClasses dexWithClasses : extraResult.metadataTxtDexEntries.values()) { assertThat(dexWithClasses.getClassNames().asList().get(0), Matchers.endsWith("/Canary")); } } private ImmutableMap<String, PreDexedFilesSorter.Result> generatePreDexSorterResults( int numberOfPrimaryDexes, int numberOfSecondaryDexes, int numberOfExtraDexes) throws IOException { FakeProjectFilesystem filesystem = new FakeProjectFilesystem(); ImmutableMultimap.Builder<APKModule, DexWithClasses> inputDexes = ImmutableMultimap.builder(); for (int i = 0; i < numberOfPrimaryDexes; i++) { inputDexes.put( moduleGraph.getRootAPKModule(), createFakeDexWithClasses( filesystem, Paths.get("primary").resolve(String.format("/primary%d.dex", i)), ImmutableSet.of(String.format("primary.primary%d.class", i)), STANDARD_DEX_FILE_ESTIMATE)); } for (int i = 0; i < numberOfSecondaryDexes; i++) { inputDexes.put( moduleGraph.getRootAPKModule(), 
createFakeDexWithClasses( filesystem, Paths.get("secondary").resolve(String.format("secondary%d.dex", i)), ImmutableSet.of(String.format("secondary.secondary%d.class", i)), STANDARD_DEX_FILE_ESTIMATE)); } for (int i = 0; i < numberOfExtraDexes; i++) { inputDexes.put( extraModule, createFakeDexWithClasses( filesystem, Paths.get("extra").resolve(String.format("extra%d.dex", i)), ImmutableSet.of(String.format("extra.extra%d.class", i)), STANDARD_DEX_FILE_ESTIMATE)); } PreDexedFilesSorter sorter = new PreDexedFilesSorter( Optional.empty(), inputDexes.build(), ImmutableSet.of(PRIMARY_DEX_PATTERN), moduleGraph, tempDir.newFolder("scratch").toPath(), DEX_WEIGHT_LIMIT, DexStore.JAR, tempDir.newFolder("secondary").toPath(), tempDir.newFolder("additional").toPath()); ImmutableList.Builder<Step> steps = ImmutableList.builder(); return sorter.sortIntoPrimaryAndSecondaryDexes(filesystem, steps); } private DexWithClasses createFakeDexWithClasses( ProjectFilesystem filesystem, Path pathToDex, ImmutableSet<String> classNames, int weightEstimate) { return new DexWithClasses() { @Override public SourcePath getSourcePathToDexFile() { return PathSourcePath.of(filesystem, pathToDex); } @Override public ImmutableSet<String> getClassNames() { return classNames; } @Override public Sha1HashCode getClassesHash() { return Sha1HashCode.of(String.format("%040x", classNames.hashCode())); } @Override public int getWeightEstimate() { return weightEstimate; } }; } }
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.util.text;

import com.intellij.openapi.util.TextRange;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.Reader;
import java.nio.CharBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * Static utilities for copying, searching, and scanning {@link CharSequence}s and {@code char[]}
 * buffers without unnecessary intermediate allocations.
 */
public class CharArrayUtil {
  // Below this length a plain char-by-char copy is used; above it, type-specific bulk copies pay off.
  private static final int GET_CHARS_THRESHOLD = 10;

  private CharArrayUtil() {
  }

  /**
   * Copies all symbols from the given char sequence to the given array
   *
   * @param src        source data holder
   * @param dst        output data buffer
   * @param dstOffset  start offset to use within the given output data buffer
   */
  public static void getChars(@NotNull CharSequence src, @NotNull char[] dst, int dstOffset) {
    getChars(src, dst, dstOffset, src.length());
  }

  /**
   * Copies necessary number of symbols from the given char sequence start to the given array.
   *
   * @param src        source data holder
   * @param dst        output data buffer
   * @param dstOffset  start offset to use within the given output data buffer
   * @param len        number of source data symbols to copy to the given buffer
   */
  public static void getChars(@NotNull CharSequence src, @NotNull char[] dst, int dstOffset, int len) {
    getChars(src, dst, 0, dstOffset, len);
  }

  /**
   * Copies necessary number of symbols from the given char sequence to the given array.
   *
   * @param src        source data holder
   * @param dst        output data buffer
   * @param srcOffset  source text offset
   * @param dstOffset  start offset to use within the given output data buffer
   * @param len        number of source data symbols to copy to the given buffer
   */
  public static void getChars(@NotNull CharSequence src, @NotNull char[] dst, int srcOffset, int dstOffset, int len) {
    // Fast path: the sequence knows how to export its own chars in bulk.
    if (src instanceof CharArrayExternalizable) {
      ((CharArrayExternalizable)src).getChars(srcOffset, srcOffset + len, dst, dstOffset);
      return;
    }
    if (len >= GET_CHARS_THRESHOLD) {
      // Bulk copies for well-known concrete types; each avoids per-char charAt() calls.
      if (src instanceof String) {
        ((String)src).getChars(srcOffset, srcOffset + len, dst, dstOffset);
        return;
      }
      else if (src instanceof CharBuffer) {
        final CharBuffer buffer = (CharBuffer)src;
        // Temporarily move the buffer position for the bulk get, then restore it so the
        // buffer's externally visible state is unchanged.
        final int i = buffer.position();
        buffer.position(i + srcOffset);
        buffer.get(dst, dstOffset, len);
        buffer.position(i);
        return;
      }
      else if (src instanceof CharSequenceBackedByArray) {
        ((CharSequenceBackedByArray)src.subSequence(srcOffset, srcOffset + len)).getChars(dst, dstOffset);
        return;
      }
      else if (src instanceof StringBuffer) {
        ((StringBuffer)src).getChars(srcOffset, srcOffset + len, dst, dstOffset);
        return;
      }
      else if (src instanceof StringBuilder) {
        ((StringBuilder)src).getChars(srcOffset, srcOffset + len, dst, dstOffset);
        return;
      }
    }
    // Generic fallback. NOTE(review): the `i < dst.length` guard silently truncates rather than
    // throwing when dst is too small, and it does not account for dstOffset — presumably callers
    // always size dst adequately; verify before relying on the truncation behavior.
    for (int i = 0, j = srcOffset, max = srcOffset + len; j < max && i < dst.length; i++, j++) {
      dst[i + dstOffset] = src.charAt(j);
    }
  }

  /**
   * @deprecated use {@link #fromSequence(CharSequence)}
   */
  @NotNull
  public static char[] fromSequenceStrict(@NotNull CharSequence seq) {
    return fromSequence(seq);
  }

  /**
   * @return the sequence's backing {@code char[]} if it can be exposed without copying
   *         (array-backed sequences, or a writable heap {@link CharBuffer} at position 0);
   *         {@code null} otherwise. Callers must not mutate the returned array.
   */
  @Nullable
  public static char[] fromSequenceWithoutCopying(@Nullable CharSequence seq) {
    if (seq instanceof CharSequenceBackedByArray) {
      return ((CharSequenceBackedByArray)seq).getChars();
    }

    if (seq instanceof CharBuffer) {
      final CharBuffer buffer = (CharBuffer)seq;
      if (buffer.hasArray() && !buffer.isReadOnly() && buffer.arrayOffset() == 0 && buffer.position() == 0) {
        return buffer.array();
      }
    }

    return null;
  }

  /**
   * @return a copy of the underlying char[] array if any, or a new char array if not
   */
  @NotNull
  public static char[] fromSequence(@NotNull CharSequence seq) {
    char[] underlying = fromSequenceWithoutCopying(seq);
    return underlying != null ? Arrays.copyOf(underlying, underlying.length) : fromSequence(seq, 0, seq.length());
  }

  /**
   * @return a new char array containing the sub-sequence's chars
   */
  @NotNull
  public static char[] fromSequence(@NotNull CharSequence seq, int start, int end) {
    char[] result = new char[end - start];
    getChars(seq, result, start, 0, end - start);
    return result;
  }

  /** Same as {@link #shiftForward(CharSequence, int, int, String)} scanning up to the buffer end. */
  public static int shiftForward(@NotNull CharSequence buffer, int offset, @NotNull String chars) {
    return shiftForward(buffer, offset, buffer.length(), chars);
  }

  /**
   * Tries to find an offset from the {@code [startOffset; endOffset)} interval such that a char from the given buffer is
   * not contained at the given 'chars' string.
   * <p/>
   * Example:
   * {@code buffer="abc", startOffset=0, endOffset = 3, chars="ab". Result: 2}
   *
   * @param buffer       target buffer which symbols should be checked
   * @param startOffset  start offset to use within the given buffer (inclusive)
   * @param endOffset    end offset to use within the given buffer (exclusive)
   * @param chars        pass-through symbols
   * @return             offset from the {@code [startOffset; endOffset)} which points to a symbol at the given buffer such
   *                     as that that symbol is not contained at the given 'chars';
   *                     {@code endOffset} otherwise
   */
  public static int shiftForward(@NotNull CharSequence buffer, final int startOffset, final int endOffset, @NotNull String chars) {
    for (int offset = startOffset, limit = Math.min(endOffset, buffer.length()); offset < limit; offset++) {
      char c = buffer.charAt(offset);
      int i;
      for (i = 0; i < chars.length(); i++) {
        if (c == chars.charAt(i)) break;
      }
      // Inner loop ran to completion => c is not a pass-through symbol: stop here.
      if (i >= chars.length()) {
        return offset;
      }
    }
    return endOffset;
  }

  /**
   * Advances over a run of 'chars' symbols starting at {@code offset}, but stops on the LAST
   * symbol of the run rather than one past it; returns {@code offset} unchanged if the symbol
   * at {@code offset} is not one of 'chars' or the offset is at/near the buffer end.
   */
  public static int shiftForwardCarefully(@NotNull CharSequence buffer, int offset, @NotNull String chars) {
    if (offset + 1 >= buffer.length()) return offset;
    if (!isSuitable(chars, buffer.charAt(offset))) return offset;
    offset++;
    while (true) {
      if (offset >= buffer.length()) return offset - 1;
      char c = buffer.charAt(offset);
      if (!isSuitable(chars, c)) return offset - 1;
      offset++;
    }
  }

  // True if c occurs anywhere in 'chars'.
  private static boolean isSuitable(@NotNull String chars, final char c) {
    for (int i = 0; i < chars.length(); i++) {
      if (c == chars.charAt(i)) return true;
    }
    return false;
  }

  /** Array overload of {@link #shiftForward(CharSequence, int, String)}. */
  public static int shiftForward(@NotNull char[] buffer, int offset, @NotNull String chars) {
    return shiftForward(new CharArrayCharSequence(buffer), offset, chars);
  }

  /** Same as {@link #shiftBackward(CharSequence, int, int, String)} with a minimum offset of 0. */
  public static int shiftBackward(@NotNull CharSequence buffer, int offset, @NotNull String chars) {
    return shiftBackward(buffer, 0, offset, chars);
  }

  /**
   * Scans backward from {@code maxOffset} (inclusive) while the current symbol occurs in 'chars';
   * returns the first offset whose symbol is NOT in 'chars', {@code minOffset - 1} if all symbols
   * down to {@code minOffset} match, or {@code maxOffset} unchanged when it is out of bounds.
   */
  public static int shiftBackward(@NotNull CharSequence buffer, int minOffset, int maxOffset, @NotNull String chars) {
    if (maxOffset >= buffer.length()) return maxOffset;

    int offset = maxOffset;
    while (true) {
      if (offset < minOffset) break;
      char c = buffer.charAt(offset);
      int i;
      for (i = 0; i < chars.length(); i++) {
        if (c == chars.charAt(i)) break;
      }
      if (i == chars.length()) break;
      offset--;
    }
    return offset;
  }

  /** Array overload of {@link #shiftBackward(CharSequence, int, String)}. */
  public static int shiftBackward(@NotNull char[] buffer, int offset, @NotNull String chars) {
    return shiftBackward(new CharArrayCharSequence(buffer), offset, chars);
  }

  /**
   * Scans forward from {@code offset} until a symbol contained in 'chars' is found; returns
   * that symbol's offset, or {@code buffer.length()} if none is found.
   */
  public static int shiftForwardUntil(@NotNull CharSequence buffer, int offset, @NotNull String chars) {
    while (true) {
      if (offset >= buffer.length()) break;
      char c = buffer.charAt(offset);
      int i;
      for (i = 0; i < chars.length(); i++) {
        if (c == chars.charAt(i)) break;
      }
      if (i < chars.length()) break;
      offset++;
    }
    return offset;
  }

  //Commented in order to apply to the green code policy as the method is unused.
  //
  //public static int shiftBackwardUntil(char[] buffer, int offset, String chars) {
  //  return shiftBackwardUntil(new CharArrayCharSequence(buffer), offset, chars);
  //}

  /**
   * Calculates offset that points to the given buffer and has the following characteristics:
   * <p/>
   * <ul>
   *   <li>is less than or equal to the given offset;</li>
   *   <li>
   *      it's guaranteed that all symbols of the given buffer that are located at {@code (returned offset; given offset]}
   *      interval differ from the given symbols;
   *   </li>
   * </ul>
   * <p/>
   * Example: suppose that this method is called with buffer that holds {@code 'test data'} symbols, offset that points
   * to the last symbols and {@code 'sf'} as a chars to exclude. Offset that points to {@code 's'} symbol
   * is returned then, i.e. all symbols of the given buffer that are located after it and not after given offset
   * ({@code 't data'}) are guaranteed to not contain given chars ({@code 'sf'}).
   *
   * @param buffer      symbols buffer to check
   * @param offset      initial symbols buffer offset to use
   * @param chars       chars to exclude
   * @return            offset of the given buffer that guarantees that all symbols at {@code (returned offset; given offset]}
   *                    interval of the given buffer differ from symbols of given {@code 'chars'} arguments;
   *                    given offset is returned if it is outside of given buffer bounds;
   *                    {@code '-1'} is returned if all document symbols that precede given offset differ from symbols
   *                    of the given {@code 'chars to exclude'}
   */
  public static int shiftBackwardUntil(@NotNull CharSequence buffer, int offset, @NotNull String chars) {
    if (offset >= buffer.length()) return offset;
    while (true) {
      if (offset < 0) break;
      char c = buffer.charAt(offset);
      int i;
      for (i = 0; i < chars.length(); i++) {
        if (c == chars.charAt(i)) break;
      }
      if (i < chars.length()) break;
      offset--;
    }
    return offset;
  }

  /** True if {@code buffer[start..]} starts with {@code s} and {@code start + s.length() <= end}. */
  public static boolean regionMatches(@NotNull char[] buffer, int start, int end, @NotNull CharSequence s) {
    final int len = s.length();
    if (start + len > end) return false;
    if (start < 0) return false;
    for (int i = 0; i < len; i++) {
      if (buffer[start + i] != s.charAt(i)) return false;
    }
    return true;
  }

  /** {@link CharSequence} counterpart of {@link #regionMatches(char[], int, int, CharSequence)}. */
  public static boolean regionMatches(@NotNull CharSequence buffer, int start, int end, @NotNull CharSequence s) {
    final int len = s.length();
    if (start + len > end) return false;
    if (start < 0) return false;
    //if (buffer instanceof String && s instanceof String) {
    //  return ((String)buffer).regionMatches(offset, (String)s, 0, len);
    //}
    for (int i = 0; i < len; i++) {
      if (buffer.charAt(start + i) != s.charAt(i)) return false;
    }
    return true;
  }

  /** True if the regions {@code s1[start1, end1)} and {@code s2[start2, end2)} contain equal chars. */
  public static boolean regionMatches(@NotNull CharSequence s1, int start1, int end1, @NotNull CharSequence s2, int start2, int end2) {
    if (end1-start1 != end2-start2) return false;
    for (int i = start1,j=start2; i < end1; i++,j++) {
      if (s1.charAt(i) != s2.charAt(j)) return false;
    }
    return true;
  }

  /** True if {@code buffer} contains {@code s} starting exactly at {@code offset}. */
  public static boolean regionMatches(@NotNull CharSequence buffer, int offset, @NotNull CharSequence s) {
    if (offset + s.length() > buffer.length()) return false;
    if (offset < 0) return false;
    for (int i = 0; i < s.length(); i++) {
      if (buffer.charAt(offset + i) != s.charAt(i)) return false;
    }
    return true;
  }

  /** True if the two array regions have the same length and equal contents. */
  public static boolean equals(@NotNull char[] buffer1, int start1, int end1, @NotNull char[] buffer2, int start2, int end2) {
    if (end1 - start1 != end2 - start2) return false;
    for (int i = start1; i < end1; i++) {
      if (buffer1[i] != buffer2[i - start1 + start2]) return false;
    }
    return true;
  }

  /** Returns the first index >= fromIndex where {@code pattern} occurs in {@code buffer}, or -1. */
  public static int indexOf(@NotNull char[] buffer, @NotNull String pattern, int fromIndex) {
    char[] chars = pattern.toCharArray();
    int limit = buffer.length - chars.length + 1;
    if (fromIndex < 0) {
      fromIndex = 0;
    }
    SearchLoop:
    for (int i = fromIndex; i < limit; i++) {
      for (int j = 0; j < chars.length; j++) {
        if (chars[j] != buffer[i + j]) continue SearchLoop;
      }
      return i;
    }
    return -1;
  }

  /** Same as {@link #indexOf(CharSequence, CharSequence, int, int)} searching to the buffer end. */
  public static int indexOf(@NotNull CharSequence buffer, @NotNull CharSequence pattern, int fromIndex) {
    return indexOf(buffer, pattern, fromIndex, buffer.length());
  }

  /**
   * Tries to find index of given pattern at the given buffer.
   *
   * @param buffer       characters buffer which contents should be checked for the given pattern
   * @param pattern      target characters sequence to find at the given buffer
   * @param fromIndex    start index (inclusive). Zero is used if given index is negative
   * @param toIndex      end index (exclusive)
   * @return             index of the given pattern at the given buffer if the match is found; {@code -1} otherwise
   */
  public static int indexOf(@NotNull CharSequence buffer, @NotNull CharSequence pattern, int fromIndex, final int toIndex) {
    final int patternLength = pattern.length();
    if (fromIndex < 0) {
      fromIndex = 0;
    }
    int limit = toIndex - patternLength + 1;
    SearchLoop:
    for (int i = fromIndex; i < limit; i++) {
      for (int j = 0; j < patternLength; j++) {
        if (pattern.charAt(j) != buffer.charAt(i + j)) continue SearchLoop;
      }
      return i;
    }
    return -1;
  }

  /**
   * Tries to find index that points to the first location of the given symbol at the given char array at range {@code [from; to)}.
   *
   * @param buffer      target symbols holder to check
   * @param symbol      target symbol which offset should be found
   * @param fromIndex   start index to search (inclusive)
   * @param toIndex     end index to search (exclusive)
   * @return            index that points to the first location of the given symbol at the given char array at range
   *                    {@code [from; to)} if target symbol is found;
   *                    {@code -1} otherwise
   */
  public static int indexOf(@NotNull char[] buffer, final char symbol, int fromIndex, final int toIndex) {
    if (fromIndex < 0) {
      fromIndex = 0;
    }
    for (int i = fromIndex; i < toIndex; i++) {
      if (buffer[i] == symbol) {
        return i;
      }
    }
    return -1;
  }

  /**
   * Tries to find index that points to the last location of the given symbol at the given char array at range {@code [from; to)}.
   *
   * @param buffer      target symbols holder to check
   * @param symbol      target symbol which offset should be found
   * @param fromIndex   start index to search (inclusive)
   * @param toIndex     end index to search (exclusive)
   * @return            index that points to the last location of the given symbol at the given char array at range
   *                    {@code [from; to)} if target symbol is found;
   *                    {@code -1} otherwise
   */
  public static int lastIndexOf(@NotNull char[] buffer, final char symbol, int fromIndex, final int toIndex) {
    if (fromIndex < 0) {
      fromIndex = 0;
    }
    for (int i = toIndex - 1; i >= fromIndex; i--) {
      if (buffer[i] == symbol) {
        return i;
      }
    }
    return -1;
  }

  /** Returns the last index <= maxIndex where {@code pattern} occurs in {@code buffer}, or -1. */
  public static int lastIndexOf(@NotNull CharSequence buffer, @NotNull String pattern, int maxIndex) {
    char[] chars = pattern.toCharArray();
    int end = buffer.length() - chars.length;
    if (maxIndex > end) {
      maxIndex = end;
    }
    SearchLoop:
    for (int i = maxIndex; i >= 0; i--) {
      for (int j = 0; j < chars.length; j++) {
        if (chars[j] != buffer.charAt(i + j)) continue SearchLoop;
      }
      return i;
    }
    return -1;
  }

  /** Array overload of {@link #lastIndexOf(CharSequence, String, int)}. */
  public static int lastIndexOf(@NotNull char[] buffer, @NotNull String pattern, int maxIndex) {
    char[] chars = pattern.toCharArray();
    int end = buffer.length - chars.length;
    if (maxIndex > end) {
      maxIndex = end;
    }
    SearchLoop:
    for (int i = maxIndex; i >= 0; i--) {
      for (int j = 0; j < chars.length; j++) {
        if (chars[j] != buffer[i + j]) continue SearchLoop;
      }
      return i;
    }
    return -1;
  }

  /** True if {@code chars} is null or consists only of spaces, tabs, and line separators. */
  public static boolean containsOnlyWhiteSpaces(@Nullable CharSequence chars) {
    if (chars == null) return true;
    for (int i = 0; i < chars.length(); i++) {
      final char c = chars.charAt(i);
      if (c == ' ' || c == '\t' || c == '\n' || c == '\r') continue;
      return false;
    }
    return true;
  }

  /**
   * Collects ranges of leading-indent whitespace (whitespace runs that directly follow a line
   * break) in the sequence, shifted right by {@code shift}. Scans backward; trailing whitespace
   * runs that are not followed by text are dropped from the result.
   */
  @NotNull
  public static TextRange[] getIndents(@NotNull CharSequence charsSequence, int shift) {
    List<TextRange> result = new ArrayList<TextRange>();
    int whitespaceEnd = -1;
    int lastTextFound = 0;
    for(int i = charsSequence.length() - 1; i >= 0; i--){
      final char charAt = charsSequence.charAt(i);
      final boolean isWhitespace = Character.isWhitespace(charAt);
      if(charAt == '\n'){
        // Line break: emit the indent range accumulated since the last non-whitespace char.
        result.add(new TextRange(i, (whitespaceEnd >= 0 ? whitespaceEnd : i) + 1).shiftRight(shift));
        whitespaceEnd = -1;
      }
      else if(whitespaceEnd >= 0 ){
        if(isWhitespace){
          continue;
        }
        lastTextFound = result.size();
        whitespaceEnd = -1;
      }
      else if(isWhitespace){
        whitespaceEnd = i;
      }
      else {
        lastTextFound = result.size();
      }
    }
    if(whitespaceEnd > 0) result.add(new TextRange(0, whitespaceEnd + 1).shiftRight(shift));
    if (lastTextFound < result.size()) {
      result = result.subList(0, lastTextFound);
    }
    return result.toArray(new TextRange[result.size()]);
  }

  /** True if the sequence contains {@code '\n'} or {@code '\r'}. */
  public static boolean containLineBreaks(@NotNull CharSequence seq) {
    return containLineBreaks(seq, 0, seq.length());
  }

  /** True if {@code seq[fromOffset, endOffset)} contains {@code '\n'} or {@code '\r'}; false for null. */
  public static boolean containLineBreaks(@Nullable CharSequence seq, int fromOffset, int endOffset) {
    if (seq == null) return false;
    for (int i = fromOffset; i < endOffset; i++) {
      final char c = seq.charAt(i);
      if (c == '\n' || c == '\r') return true;
    }
    return false;
  }

  /**
   * Allows to answer if target region of the given text contains only white space symbols (tabulations, white spaces and line feeds).
   *
   * @param text      text to check
   * @param start     start offset within the given text to check (inclusive)
   * @param end       end offset within the given text to check (exclusive)
   * @return          {@code true} if target region of the given text contains white space symbols only; {@code false} otherwise
   */
  public static boolean isEmptyOrSpaces(@NotNull CharSequence text, int start, int end) {
    // NOTE(review): unlike containsOnlyWhiteSpaces, '\r' is NOT treated as whitespace here —
    // presumably intentional for documents with normalized line endings; confirm before unifying.
    for (int i = start; i < end; i++) {
      char c = text.charAt(i);
      if (c != ' ' && c != '\t' && c != '\n') {
        return false;
      }
    }
    return true;
  }

  /**
   * Wraps the text in a {@link Reader}, reading straight from the backing array when one can be
   * exposed without copying.
   */
  @NotNull
  public static Reader readerFromCharSequence(@NotNull CharSequence text) {
    char[] chars = fromSequenceWithoutCopying(text);
    //noinspection IOResourceOpenedButNotSafelyClosed
    return chars == null ? new CharSequenceReader(text.toString()) : new UnsyncCharArrayReader(chars, 0, text.length());
  }

  /** Returns an immutable snapshot of the given sequence. */
  @NotNull
  public static ImmutableCharSequence createImmutableCharSequence(@NotNull CharSequence sequence) {
    return ImmutableText.valueOf(sequence);
  }
}
/* * Copyright (c) 2013, Regents of the University of California * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package edu.uci.python.runtime.sequence; import java.util.*; import com.oracle.truffle.api.CompilerDirectives.SlowPath; import edu.uci.python.runtime.datatype.*; import edu.uci.python.runtime.exception.*; import edu.uci.python.runtime.iterator.*; import edu.uci.python.runtime.standardtype.*; public abstract class PBaseSet extends PythonBuiltinObject implements PIterable { protected final TreeSet<Object> set; public PBaseSet() { this.set = new TreeSet<>(); } public PBaseSet(TreeSet<Object> elements) { this.set = elements; } public PBaseSet(PIterator iter) { this(); try { while (true) { this.set.add(iter.__next__()); } } catch (StopIterationException e) { // fall through } } public PBaseSet(PBaseSet baseSet) { this(); addAll(baseSet); } @SlowPath private void addAll(PBaseSet baseSet) { this.set.addAll(baseSet.set); } public Set<Object> getSet() { return set; } public PIterator __iter__() { return new PBaseSetIterator(set.iterator()); } public final boolean contains(Object o) { return this.set.contains(o); } // disjoint public boolean isDisjoint(PBaseSet other) { return Collections.disjoint(this.set, other.set); } @SuppressWarnings("unused") public boolean isDisjoint(PIterator other) { throw new UnsupportedOperationException(); } // subset public boolean isSubset(PBaseSet other) { if (this.len() > other.len()) { return false; } for (Object p : this.set) { if (!other.set.contains(p)) { return false; } } return true; } public boolean isSubset(PIterator other) { return this.isSubset(new PSet(other)); // pack the iterable into a PSet } public boolean isProperSubset(PBaseSet other) { return this.len() < other.len() && this.isSubset(other); } // superset public boolean isSuperset(PBaseSet other) { return other.isSubset(this); // use subset comparison with this/other // order changed } public boolean isSuperset(PIterator other) { return this.isSuperset(new PSet(other)); } public boolean isProperSuperset(PBaseSet other) { // is proper superset return this.len() > 
other.len() && this.isSuperset(other); } // union public PBaseSet union(PBaseSet other) { PBaseSet newSet = cloneThisSet(); newSet.set.addAll(other.set); return newSet; } public PBaseSet union(PIterator other) { return this.union(new PSet(other)); } // intersection public PBaseSet intersection(PBaseSet other) { boolean set1IsLarger = set.size() > other.set.size(); PBaseSet cloneSet = (set1IsLarger ? other.cloneThisSet() : this.cloneThisSet()); cloneSet.set.retainAll(set1IsLarger ? this.set : other.set); return cloneSet; } public PBaseSet intersection(PIterator other) { return this.intersection(new PSet(other)); } // difference public PBaseSet difference(PBaseSet other) { PBaseSet newSet = cloneThisSet(); newSet.set.removeAll(other.set); return newSet; } public PBaseSet difference(PIterator other) { return this.intersection(new PSet(other)); } // symmetric_difference @SuppressWarnings("unused") public PBaseSet symmetricDifference(PBaseSet other) { throw new UnsupportedOperationException(); } // copy PBaseSet copy() { return cloneThisSet(); } // update public abstract void update(PBaseSet other); public abstract void update(PIterator iterator); // intersection_update @SuppressWarnings("unused") public void intersectionUpdate(PBaseSet other) { throw new UnsupportedOperationException(); } @SuppressWarnings("unused") public void intersectionUpdate(PIterator iterator) { throw new UnsupportedOperationException(); } // difference_update @SuppressWarnings("unused") public void differenceUpdate(PBaseSet other) { throw new UnsupportedOperationException(); } @SuppressWarnings("unused") public void differenceUpdate(PIterator iterator) { throw new UnsupportedOperationException(); } // symmetric_difference_update @SuppressWarnings("unused") public void symmetricDifferenceUpdate(PBaseSet other) { throw new UnsupportedOperationException(); } @SuppressWarnings("unused") public void symmetricDifferenceUpdate(PIterator iterator) { throw new UnsupportedOperationException(); } protected 
abstract PBaseSet cloneThisSet(); // update methods needed for updating both sets and frozen sets, internally // "Binary operations that mix set instances with frozenset return // the type of the first operand. // For example: frozenset('ab') | set('bc') returns an instance of // frozenset." protected void updateInternal(Object data) { if (data instanceof PBaseSet) { updateInternal((PBaseSet) data); } else if (data instanceof PIterator) { throw new UnsupportedOperationException(); } } protected void updateInternal(PBaseSet data) { // Skip the iteration if both are sets set.addAll(data.set); } protected void updateInternal(PIterator iterator) { try { while (true) { set.add(iterator.__next__()); } } catch (StopIterationException e) { // fall through } } @Override public Object getMax() { return this.set.last(); } @Override public Object getMin() { return this.set.first(); } @Override public int len() { return this.set.size(); } @Override public String toString() { if (set.size() == 0) { return "set()"; } StringBuilder buf = new StringBuilder().append("{"); for (Iterator<Object> i = set.iterator(); i.hasNext();) { String str = PSequence.toString(i.next()); buf.append(str); if (i.hasNext()) { buf.append(", "); } } buf.append("}"); return buf.toString(); } }
// Generated by the protocol buffer compiler. DO NOT EDIT!
package com.google.javascript.jscomp;

/**
 * Protocol-buffer message describing instrumentation configuration
 * (report hooks, declarations to remove, init snippets, app-name setter).
 * All fields are optional; descriptors come from InstrumentationTemplate.
 * Machine-generated (protoc 2.x style) — regenerate rather than hand-edit.
 */
public final class Instrumentation extends
    com.google.protobuf.GeneratedMessage {
  // Use Instrumentation.newBuilder() to construct.
  private Instrumentation() {
    initFields();
  }
  // Used only for the shared default instance; skips initFields().
  private Instrumentation(boolean noInit) {}

  private static final Instrumentation defaultInstance;
  public static Instrumentation getDefaultInstance() {
    return defaultInstance;
  }

  public Instrumentation getDefaultInstanceForType() {
    return defaultInstance;
  }

  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.javascript.jscomp.InstrumentationTemplate.internal_static_jscomp_Instrumentation_descriptor;
  }

  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.javascript.jscomp.InstrumentationTemplate.internal_static_jscomp_Instrumentation_fieldAccessorTable;
  }

  // optional string report_defined = 1;
  public static final int REPORT_DEFINED_FIELD_NUMBER = 1;
  private boolean hasReportDefined;
  private java.lang.String reportDefined_ = "";
  public boolean hasReportDefined() { return hasReportDefined; }
  public java.lang.String getReportDefined() { return reportDefined_; }

  // optional string report_call = 2;
  public static final int REPORT_CALL_FIELD_NUMBER = 2;
  private boolean hasReportCall;
  private java.lang.String reportCall_ = "";
  public boolean hasReportCall() { return hasReportCall; }
  public java.lang.String getReportCall() { return reportCall_; }

  // optional string report_exit = 6;
  public static final int REPORT_EXIT_FIELD_NUMBER = 6;
  private boolean hasReportExit;
  private java.lang.String reportExit_ = "";
  public boolean hasReportExit() { return hasReportExit; }
  public java.lang.String getReportExit() { return reportExit_; }

  // repeated string declaration_to_remove = 3;
  public static final int DECLARATION_TO_REMOVE_FIELD_NUMBER = 3;
  private java.util.List<java.lang.String> declarationToRemove_ =
      java.util.Collections.emptyList();
  public java.util.List<java.lang.String> getDeclarationToRemoveList() {
    return declarationToRemove_;
  }
  public int getDeclarationToRemoveCount() {
    return declarationToRemove_.size();
  }
  public java.lang.String getDeclarationToRemove(int index) {
    return declarationToRemove_.get(index);
  }

  // repeated string init = 4;
  public static final int INIT_FIELD_NUMBER = 4;
  private java.util.List<java.lang.String> init_ =
      java.util.Collections.emptyList();
  public java.util.List<java.lang.String> getInitList() {
    return init_;
  }
  public int getInitCount() {
    return init_.size();
  }
  public java.lang.String getInit(int index) {
    return init_.get(index);
  }

  // optional string app_name_setter = 5;
  public static final int APP_NAME_SETTER_FIELD_NUMBER = 5;
  private boolean hasAppNameSetter;
  private java.lang.String appNameSetter_ = "";
  public boolean hasAppNameSetter() { return hasAppNameSetter; }
  public java.lang.String getAppNameSetter() { return appNameSetter_; }

  private void initFields() {
  }

  // Every field is optional, so any instance is structurally valid.
  public final boolean isInitialized() {
    return true;
  }

  // Serializes set fields in ascending field-number order (1,2,3,4,5,6),
  // then any unknown fields carried over from parsing.
  public void writeTo(com.google.protobuf.CodedOutputStream output)
      throws java.io.IOException {
    getSerializedSize();
    if (hasReportDefined()) {
      output.writeString(1, getReportDefined());
    }
    if (hasReportCall()) {
      output.writeString(2, getReportCall());
    }
    for (java.lang.String element : getDeclarationToRemoveList()) {
      output.writeString(3, element);
    }
    for (java.lang.String element : getInitList()) {
      output.writeString(4, element);
    }
    if (hasAppNameSetter()) {
      output.writeString(5, getAppNameSetter());
    }
    if (hasReportExit()) {
      output.writeString(6, getReportExit());
    }
    getUnknownFields().writeTo(output);
  }

  // -1 means "not yet computed"; the size is memoized after first use.
  private int memoizedSerializedSize = -1;
  public int getSerializedSize() {
    int size = memoizedSerializedSize;
    if (size != -1) return size;

    size = 0;
    if (hasReportDefined()) {
      size += com.google.protobuf.CodedOutputStream
        .computeStringSize(1, getReportDefined());
    }
    if (hasReportCall()) {
      size += com.google.protobuf.CodedOutputStream
        .computeStringSize(2, getReportCall());
    }
    {
      int dataSize = 0;
      for (java.lang.String element : getDeclarationToRemoveList()) {
        dataSize += com.google.protobuf.CodedOutputStream
          .computeStringSizeNoTag(element);
      }
      size += dataSize;
      // one 1-byte tag per repeated element
      size += 1 * getDeclarationToRemoveList().size();
    }
    {
      int dataSize = 0;
      for (java.lang.String element : getInitList()) {
        dataSize += com.google.protobuf.CodedOutputStream
          .computeStringSizeNoTag(element);
      }
      size += dataSize;
      // one 1-byte tag per repeated element
      size += 1 * getInitList().size();
    }
    if (hasAppNameSetter()) {
      size += com.google.protobuf.CodedOutputStream
        .computeStringSize(5, getAppNameSetter());
    }
    if (hasReportExit()) {
      size += com.google.protobuf.CodedOutputStream
        .computeStringSize(6, getReportExit());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSerializedSize = size;
    return size;
  }

  // ---- Static parse entry points: each builds via a fresh Builder. ----

  public static com.google.javascript.jscomp.Instrumentation parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return newBuilder().mergeFrom(data).buildParsed();
  }
  public static com.google.javascript.jscomp.Instrumentation parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return newBuilder().mergeFrom(data, extensionRegistry)
             .buildParsed();
  }
  public static com.google.javascript.jscomp.Instrumentation parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return newBuilder().mergeFrom(data).buildParsed();
  }
  public static com.google.javascript.jscomp.Instrumentation parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return newBuilder().mergeFrom(data, extensionRegistry)
             .buildParsed();
  }
  public static com.google.javascript.jscomp.Instrumentation parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return newBuilder().mergeFrom(input).buildParsed();
  }
  public static com.google.javascript.jscomp.Instrumentation parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return newBuilder().mergeFrom(input, extensionRegistry)
             .buildParsed();
  }
  // Delimited variants return null when the stream is already at EOF.
  public static com.google.javascript.jscomp.Instrumentation parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    Builder builder = newBuilder();
    if (builder.mergeDelimitedFrom(input)) {
      return builder.buildParsed();
    } else {
      return null;
    }
  }
  public static com.google.javascript.jscomp.Instrumentation parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    Builder builder = newBuilder();
    if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
      return builder.buildParsed();
    } else {
      return null;
    }
  }
  public static com.google.javascript.jscomp.Instrumentation parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return newBuilder().mergeFrom(input).buildParsed();
  }
  public static com.google.javascript.jscomp.Instrumentation parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return newBuilder().mergeFrom(input, extensionRegistry)
             .buildParsed();
  }

  public static Builder newBuilder() { return Builder.create(); }
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder(com.google.javascript.jscomp.Instrumentation prototype) {
    return newBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() { return newBuilder(this); }

  /**
   * Builder for {@link Instrumentation}. Mutates a private {@code result}
   * instance; {@code buildPartial()} hands the instance out and nulls
   * {@code result}, so a Builder is single-use after build().
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessage.Builder<Builder> {
    private com.google.javascript.jscomp.Instrumentation result;

    // Construct using com.google.javascript.jscomp.Instrumentation.newBuilder()
    private Builder() {}

    private static Builder create() {
      Builder builder = new Builder();
      builder.result = new com.google.javascript.jscomp.Instrumentation();
      return builder;
    }

    protected com.google.javascript.jscomp.Instrumentation internalGetResult() {
      return result;
    }

    public Builder clear() {
      if (result == null) {
        throw new IllegalStateException(
          "Cannot call clear() after build().");
      }
      result = new com.google.javascript.jscomp.Instrumentation();
      return this;
    }

    public Builder clone() {
      return create().mergeFrom(result);
    }

    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.javascript.jscomp.Instrumentation.getDescriptor();
    }

    public com.google.javascript.jscomp.Instrumentation getDefaultInstanceForType() {
      return com.google.javascript.jscomp.Instrumentation.getDefaultInstance();
    }

    public boolean isInitialized() {
      return result.isInitialized();
    }

    public com.google.javascript.jscomp.Instrumentation build() {
      if (result != null && !isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return buildPartial();
    }

    // Like build(), but reports missing required fields as a parse error.
    private com.google.javascript.jscomp.Instrumentation buildParsed()
        throws com.google.protobuf.InvalidProtocolBufferException {
      if (!isInitialized()) {
        throw newUninitializedMessageException(
          result).asInvalidProtocolBufferException();
      }
      return buildPartial();
    }

    public com.google.javascript.jscomp.Instrumentation buildPartial() {
      if (result == null) {
        throw new IllegalStateException(
          "build() has already been called on this Builder.");
      }
      // Freeze the repeated fields before handing the message out.
      if (result.declarationToRemove_ != java.util.Collections.EMPTY_LIST) {
        result.declarationToRemove_ =
          java.util.Collections.unmodifiableList(result.declarationToRemove_);
      }
      if (result.init_ != java.util.Collections.EMPTY_LIST) {
        result.init_ =
          java.util.Collections.unmodifiableList(result.init_);
      }
      com.google.javascript.jscomp.Instrumentation returnMe = result;
      result = null;
      return returnMe;
    }

    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.javascript.jscomp.Instrumentation) {
        return mergeFrom((com.google.javascript.jscomp.Instrumentation)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Field-by-field merge: set optionals overwrite, repeateds append.
    public Builder mergeFrom(com.google.javascript.jscomp.Instrumentation other) {
      if (other == com.google.javascript.jscomp.Instrumentation.getDefaultInstance()) return this;
      if (other.hasReportDefined()) {
        setReportDefined(other.getReportDefined());
      }
      if (other.hasReportCall()) {
        setReportCall(other.getReportCall());
      }
      if (other.hasReportExit()) {
        setReportExit(other.getReportExit());
      }
      if (!other.declarationToRemove_.isEmpty()) {
        if (result.declarationToRemove_.isEmpty()) {
          result.declarationToRemove_ = new java.util.ArrayList<java.lang.String>();
        }
        result.declarationToRemove_.addAll(other.declarationToRemove_);
      }
      if (!other.init_.isEmpty()) {
        if (result.init_.isEmpty()) {
          result.init_ = new java.util.ArrayList<java.lang.String>();
        }
        result.init_.addAll(other.init_);
      }
      if (other.hasAppNameSetter()) {
        setAppNameSetter(other.getAppNameSetter());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      return this;
    }

    // Wire-format parse loop; tag 0 marks end of input, unknown tags are
    // preserved in unknownFields.
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder(
          this.getUnknownFields());
      while (true) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            this.setUnknownFields(unknownFields.build());
            return this;
          default: {
            if (!parseUnknownField(input, unknownFields,
                                   extensionRegistry, tag)) {
              this.setUnknownFields(unknownFields.build());
              return this;
            }
            break;
          }
          case 10: {
            setReportDefined(input.readString());
            break;
          }
          case 18: {
            setReportCall(input.readString());
            break;
          }
          case 26: {
            addDeclarationToRemove(input.readString());
            break;
          }
          case 34: {
            addInit(input.readString());
            break;
          }
          case 42: {
            setAppNameSetter(input.readString());
            break;
          }
          case 50: {
            setReportExit(input.readString());
            break;
          }
        }
      }
    }

    // optional string report_defined = 1;
    public boolean hasReportDefined() {
      return result.hasReportDefined();
    }
    public java.lang.String getReportDefined() {
      return result.getReportDefined();
    }
    public Builder setReportDefined(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      result.hasReportDefined = true;
      result.reportDefined_ = value;
      return this;
    }
    public Builder clearReportDefined() {
      result.hasReportDefined = false;
      result.reportDefined_ = getDefaultInstance().getReportDefined();
      return this;
    }

    // optional string report_call = 2;
    public boolean hasReportCall() {
      return result.hasReportCall();
    }
    public java.lang.String getReportCall() {
      return result.getReportCall();
    }
    public Builder setReportCall(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      result.hasReportCall = true;
      result.reportCall_ = value;
      return this;
    }
    public Builder clearReportCall() {
      result.hasReportCall = false;
      result.reportCall_ = getDefaultInstance().getReportCall();
      return this;
    }

    // optional string report_exit = 6;
    public boolean hasReportExit() {
      return result.hasReportExit();
    }
    public java.lang.String getReportExit() {
      return result.getReportExit();
    }
    public Builder setReportExit(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      result.hasReportExit = true;
      result.reportExit_ = value;
      return this;
    }
    public Builder clearReportExit() {
      result.hasReportExit = false;
      result.reportExit_ = getDefaultInstance().getReportExit();
      return this;
    }

    // repeated string declaration_to_remove = 3;
    public java.util.List<java.lang.String> getDeclarationToRemoveList() {
      return java.util.Collections.unmodifiableList(result.declarationToRemove_);
    }
    public int getDeclarationToRemoveCount() {
      return result.getDeclarationToRemoveCount();
    }
    public java.lang.String getDeclarationToRemove(int index) {
      return result.getDeclarationToRemove(index);
    }
    public Builder setDeclarationToRemove(int index, java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      result.declarationToRemove_.set(index, value);
      return this;
    }
    public Builder addDeclarationToRemove(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Swap the immutable empty list for a mutable one on first add.
      if (result.declarationToRemove_.isEmpty()) {
        result.declarationToRemove_ = new java.util.ArrayList<java.lang.String>();
      }
      result.declarationToRemove_.add(value);
      return this;
    }
    public Builder addAllDeclarationToRemove(
        java.lang.Iterable<? extends java.lang.String> values) {
      if (result.declarationToRemove_.isEmpty()) {
        result.declarationToRemove_ = new java.util.ArrayList<java.lang.String>();
      }
      super.addAll(values, result.declarationToRemove_);
      return this;
    }
    public Builder clearDeclarationToRemove() {
      result.declarationToRemove_ = java.util.Collections.emptyList();
      return this;
    }

    // repeated string init = 4;
    public java.util.List<java.lang.String> getInitList() {
      return java.util.Collections.unmodifiableList(result.init_);
    }
    public int getInitCount() {
      return result.getInitCount();
    }
    public java.lang.String getInit(int index) {
      return result.getInit(index);
    }
    public Builder setInit(int index, java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      result.init_.set(index, value);
      return this;
    }
    public Builder addInit(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Swap the immutable empty list for a mutable one on first add.
      if (result.init_.isEmpty()) {
        result.init_ = new java.util.ArrayList<java.lang.String>();
      }
      result.init_.add(value);
      return this;
    }
    public Builder addAllInit(
        java.lang.Iterable<? extends java.lang.String> values) {
      if (result.init_.isEmpty()) {
        result.init_ = new java.util.ArrayList<java.lang.String>();
      }
      super.addAll(values, result.init_);
      return this;
    }
    public Builder clearInit() {
      result.init_ = java.util.Collections.emptyList();
      return this;
    }

    // optional string app_name_setter = 5;
    public boolean hasAppNameSetter() {
      return result.hasAppNameSetter();
    }
    public java.lang.String getAppNameSetter() {
      return result.getAppNameSetter();
    }
    public Builder setAppNameSetter(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      result.hasAppNameSetter = true;
      result.appNameSetter_ = value;
      return this;
    }
    public Builder clearAppNameSetter() {
      result.hasAppNameSetter = false;
      result.appNameSetter_ = getDefaultInstance().getAppNameSetter();
      return this;
    }

    // @@protoc_insertion_point(builder_scope:jscomp.Instrumentation)
  }

  static {
    defaultInstance = new Instrumentation(true);
    com.google.javascript.jscomp.InstrumentationTemplate.internalForceInit();
    defaultInstance.initFields();
  }

  // @@protoc_insertion_point(class_scope:jscomp.Instrumentation)
}
/* See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * Esri Inc. licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.esri.gpt.catalog.management; import com.esri.gpt.catalog.harvest.repository.HrRecord.HarvestFrequency; import com.esri.gpt.catalog.harvest.repository.HrRecord.RecentJobStatus; import com.esri.gpt.control.webharvest.IterationContext; import com.esri.gpt.control.webharvest.protocol.Protocol; import com.esri.gpt.framework.context.ApplicationContext; import com.esri.gpt.framework.request.Record; import com.esri.gpt.framework.resource.query.QueryBuilder; import com.esri.gpt.framework.util.UuidUtil; import com.esri.gpt.framework.util.Val; import java.sql.Timestamp; import java.util.LinkedHashMap; import java.util.Map; /** * Represents a metadata record associated with a manage metadata request. 
*/
public class MmdRecord extends Record {

  // class variables =============================================================

  // instance variables ==========================================================
  private String _approvalStatus = "";
  private String _approvalStatusMsg = "";
  private boolean _canEdit = false;
  private String _collectionMembership = "";
  private String _formattedUpdateDate = "";
  private String _ownerName = "";
  private String _publicationMethod;
  private String _publicationMethodMsg = "";
  private Timestamp _systemUpdateDate = null;
  private String _title = "";
  private String _uuid = "";
  private String _siteUuid = "";
  private String _metadataAccessPolicyType = "";
  private String _currentMetadataAccessPolicy = "";
  private String _currentMetadataAccessPolicyKeys = "";

  // resource specific attributes
  private int _localId;
  private String _hostUrl = "";
  private Protocol _protocol;
  private HarvestFrequency _harvestFrequency;
  private boolean _sendNotification;
  private Timestamp _lastHarvestDate;
  private RecentJobStatus _recentJobStatus;
  /** findable */
  private boolean findable;
  /** searchable */
  private boolean searchable;
  /** synchronizable */
  private boolean synchronizable;

  // lazily re-created in getObjectMap() if null
  private Map<String, Object> _objectMap = new LinkedHashMap<String, Object>();

  // constructors ================================================================

  /** Default constructor. */
  public MmdRecord() {
    super();
  }

  // properties ==================================================================

  /**
   * Gets the object map.
   * @return the object map (never null)
   */
  public Map<String, Object> getObjectMap() {
    if(_objectMap == null) {
      _objectMap = new LinkedHashMap<String, Object>();
    }
    return _objectMap;
  }

  /**
   * Gets the approval status.
   * @return the approval status
   */
  public String getApprovalStatus() {
    return _approvalStatus;
  }
  /**
   * Sets the approval status.
   * <p>The value is normalized through MmdEnums.ApprovalStatus.checkValue().
   * @param status the approval status
   */
  public void setApprovalStatus(String status) {
    _approvalStatus = MmdEnums.ApprovalStatus.checkValue(status).toString();
  }

  /**
   * Gets the approval status resource bundle message.
   * @return the approval status message
   */
  public String getApprovalStatusMsg() {
    return _approvalStatusMsg;
  }
  /**
   * Sets the approval status resource bundle message.
   * @param msg the approval status message
   */
  public void setApprovalStatusMsg(String msg) {
    _approvalStatusMsg = Val.chkStr(msg);
  }

  /**
   * Gets the editable status for the document.
   * @return true if the document can be edited
   */
  public boolean getCanEdit() {
    return _canEdit;
  }
  /**
   * Sets the editable status for the document.
   * @param canEdit true if the document can be edited
   */
  protected void setCanEdit(boolean canEdit) {
    _canEdit = canEdit;
  }

  /**
   * Gets the collection membership string.
   * @return the collection membership
   */
  public String getCollectionMembership() {
    return _collectionMembership;
  }
  /**
   * Sets the collection membership string.
   * @param membership the collection membership
   */
  public void setCollectionMembership(String membership) {
    _collectionMembership = Val.chkStr(membership);
  }

  /**
   * Gets the formatted update date.
   * @return the formatted update date
   */
  public String getFormattedUpdateDate() {
    return _formattedUpdateDate;
  }
  /**
   * Sets the formatted update date.
   * @param date the formatted update date
   */
  public void setFormattedUpdateDate(String date) {
    _formattedUpdateDate = Val.chkStr(date);
  }

  /**
   * Gets the document owner name (username).
   * @return the document owner name
   */
  public String getOwnerName() {
    return _ownerName;
  }
  /**
   * Sets the document owner name (username).
   * @param name the document owner name
   */
  public void setOwnerName(String name) {
    _ownerName = Val.chkStr(name);
  }

  /**
   * Gets the publication method.
   * @return the publication method
   */
  public String getPublicationMethod() {
    return _publicationMethod;
  }
  /**
   * Sets the publication method.
   * <p>The value is normalized through MmdEnums.PublicationMethod.checkValue().
   * @param method the publication method
   */
  public void setPublicationMethod(String method) {
    _publicationMethod = MmdEnums.PublicationMethod.checkValue(method).toString();
  }

  /**
   * Gets the publication method resource bundle message.
   * @return the publication method message
   */
  public String getPublicationMethodMsg() {
    return _publicationMethodMsg;
  }
  /**
   * Sets the publication method resource bundle message.
   * @param msg the publication method message
   */
  public void setPublicationMethodMsg(String msg) {
    _publicationMethodMsg = Val.chkStr(msg);
  }

  /**
   * Gets the system update date.
   * @return the system update date
   */
  public Timestamp getSystemUpdateDate() {
    return _systemUpdateDate;
  }
  /**
   * Sets the system update date.
   * @param date the system update date
   */
  public void setSystemUpdateDate(Timestamp date) {
    _systemUpdateDate = date;
  }

  /**
   * Gets the title.
   * @return the title
   */
  public String getTitle() {
    return _title;
  }
  /**
   * Sets the title.
   * @param title the title
   */
  public void setTitle(String title) {
    _title = Val.chkStr(title);
  }

  /**
   * Gets the document UUID.
   * @return the UUID
   */
  public String getUuid() {
    return _uuid;
  }
  /**
   * Sets the document UUID (normalized to curly-brace form).
   * @param uuid the UUID
   */
  public void setUuid(String uuid) {
    _uuid = UuidUtil.addCurlies(uuid);
  }

  /**
   * Gets the site UUID.
   * @return the site UUID
   */
  public String getSiteUuid() {
    return _siteUuid;
  }
  /**
   * Sets the site UUID (normalized to curly-brace form).
   * @param uuid site UUID
   */
  public void setSiteUuid(String uuid) {
    _siteUuid = UuidUtil.addCurlies(uuid);
  }

  /**
   * Gets the document metadata access policy type.
   * @return the access policy type
   */
  public String getMetadataAccessPolicyType() {
    return _metadataAccessPolicyType;
  }
  /**
   * Sets the document metadata access policy type.
   * @param metadataAccessPolicyType access policy type
   */
  public void setMetadataAccessPolicyType(String metadataAccessPolicyType) {
    this._metadataAccessPolicyType = metadataAccessPolicyType;
  }

  /**
   * Gets the document's current access policy.
   * @return access policy
   */
  public String getCurrentMetadataAccessPolicy() {
    return _currentMetadataAccessPolicy;
  }
  /**
   * Sets the document's current access policy.
   * @param currentMetadataAccessPolicy access policy
   */
  public void setCurrentMetadataAccessPolicy(String currentMetadataAccessPolicy) {
    this._currentMetadataAccessPolicy = currentMetadataAccessPolicy;
  }

  /**
   * Gets the document's current access policy keys.
   * @return access policy keys
   */
  public String getCurrentMetadataAccessPolicyKeys() {
    return _currentMetadataAccessPolicyKeys;
  }
  /**
   * Sets the document's current access policy keys.
   * @param currentMetadataAccessPolicyKeys access policy keys
   */
  public void setCurrentMetadataAccessPolicyKeys(String currentMetadataAccessPolicyKeys) {
    this._currentMetadataAccessPolicyKeys = currentMetadataAccessPolicyKeys;
  }

  /**
   * Gets the local id.
   * @return local id
   */
  public int getLocalId() {
    return _localId;
  }
  /**
   * Sets the local id.
   * @param localId local id
   */
  public void setLocalId(int localId) {
    this._localId = localId;
  }

  /**
   * Gets the host URL.
   * @return host URL
   */
  public String getHostUrl() {
    return _hostUrl;
  }
  /**
   * Sets the host URL.
   * @param hostUrl host URL
   */
  public void setHostUrl(String hostUrl) {
    this._hostUrl = Val.chkStr(hostUrl);
  }

  /**
   * Gets the harvest protocol.
   * @return protocol
   */
  public Protocol getProtocol() {
    return _protocol;
  }
  /**
   * Sets the harvest protocol.
   * @param harvestProtocol protocol
   */
  public void setProtocol(Protocol harvestProtocol) {
    this._protocol = harvestProtocol;
  }

  /**
   * Gets the harvest frequency.
   * @return harvest frequency
   */
  public HarvestFrequency getHarvestFrequency() {
    return _harvestFrequency;
  }
  /**
   * Sets the harvest frequency.
   * @param harvestFrequency harvest frequency
   */
  public void setHarvestFrequency(HarvestFrequency harvestFrequency) {
    this._harvestFrequency = harvestFrequency;
  }

  /**
   * Gets the flag indicating whether a harvest notification should be sent.
   * @return <code>true</code> if a harvest notification should be sent
   */
  public boolean getSendNotification() {
    return _sendNotification;
  }
  /**
   * Sets the flag indicating whether a harvest notification should be sent.
   * @param sendNotification <code>true</code> to send a harvest notification
   */
  public void setSendNotification(boolean sendNotification) {
    this._sendNotification = sendNotification;
  }

  /**
   * Gets the last harvest date.
   * @return last harvest date
   */
  public Timestamp getLastHarvestDate() {
    return _lastHarvestDate;
  }
  /**
   * Sets the last harvest date.
   * @param lastHarvestDate last harvest date
   */
  public void setLastHarvestDate(Timestamp lastHarvestDate) {
    this._lastHarvestDate = lastHarvestDate;
  }

  /**
   * Gets the recent job status.
   * @return recent job status
   */
  public RecentJobStatus getRecentJobStatus() {
    return _recentJobStatus;
  }

  /**
   * Checks if synchronization of this resource is being executed locally.
   * @return <code>true</code> if synchronization of this resource is being executed locally
   */
  public boolean isExecutingLocally() {
    return ApplicationContext.getInstance().getHarvestingEngine().isExecutingLocally(getUuid());
  }

  /**
   * Sets the recent job status.
   * @param _recentJobStatus recent job status
   */
  public void setRecentJobStatus(RecentJobStatus _recentJobStatus) {
    this._recentJobStatus = _recentJobStatus;
  }

  /**
   * Gets the record name (delegates to the title).
   * @return the title
   */
  public String getName() {
    return getTitle();
  }

  /**
   * Checks if the record is findable.
   * @return <code>true</code> if the record is findable
   */
  public boolean getFindable() {
    return findable;
  }
  /**
   * Sets whether the record is findable.
   * @param findable <code>true</code> to make the record findable
   */
  public void setFindable(boolean findable) {
    this.findable = findable;
  }

  /**
   * Checks if the record is synchronizable.
   * @return <code>true</code> if the record is synchronizable
   */
  public boolean getSynchronizable() {
    return synchronizable;
  }
  /**
   * Sets whether the record is synchronizable.
   * @param synchronizable <code>true</code> to make the record synchronizable
   */
  public void setSynchronizable(boolean synchronizable) {
    this.synchronizable = synchronizable;
  }

  /**
   * Checks if the record is searchable.
   * @return <code>true</code> if the record is searchable
   */
  public boolean getSearchable() {
    return searchable;
  }
  /**
   * Sets whether the record is searchable.
   * @param searchable <code>true</code> to make the record searchable
   */
  public void setSearchable(boolean searchable) {
    this.searchable = searchable;
  }

  /**
   * Creates a new query builder.
   * <p>If no iteration context is supplied, a no-op context (exceptions
   * ignored) is used.
   * @param iterationContext iteration context (can be <code>null</code>)
   * @return query builder or <code>null</code> if no protocol
   */
  public QueryBuilder newQueryBuilder(IterationContext iterationContext) {
    if (iterationContext==null) {
      iterationContext = new IterationContext() {
        public void onIterationException(Exception ex) {
        }
      };
    }
    return getProtocol()!=null? getProtocol().newQueryBuilder(iterationContext, getHostUrl()): null;
  }

  // methods =====================================================================
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.beam.sdk.extensions.protobuf;

import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkArgument;

import com.google.protobuf.DynamicMessage;
import com.google.protobuf.ExtensionRegistry;
import com.google.protobuf.Message;
import com.google.protobuf.Parser;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import org.apache.beam.sdk.coders.CannotProvideCoderException;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.CoderException;
import org.apache.beam.sdk.coders.CoderProvider;
import org.apache.beam.sdk.coders.CoderRegistry;
import org.apache.beam.sdk.coders.CustomCoder;
import org.apache.beam.sdk.coders.DefaultCoder;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.TypeDescriptor;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableSet;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Sets;

/**
 * A {@link Coder} using Google Protocol Buffers binary format. {@link ProtoCoder} supports both
 * Protocol Buffers syntax versions 2 and 3.
 *
 * <p>To learn more about Protocol Buffers, visit: <a
 * href="https://developers.google.com/protocol-buffers">https://developers.google.com/protocol-buffers</a>
 *
 * <p>{@link ProtoCoder} is registered in the global {@link CoderRegistry} as the default {@link
 * Coder} for any {@link Message} object. Custom message extensions are also supported, but these
 * extensions must be registered for a particular {@link ProtoCoder} instance and that instance must
 * be registered on the {@link PCollection} that needs the extensions:
 *
 * <pre>{@code
 * import MyProtoFile;
 * import MyProtoFile.MyMessage;
 *
 * Coder<MyMessage> coder = ProtoCoder.of(MyMessage.class).withExtensionsFrom(MyProtoFile.class);
 * PCollection<MyMessage> records = input.apply(...).setCoder(coder);
 * }</pre>
 *
 * <h3>Versioning</h3>
 *
 * <p>{@link ProtoCoder} supports both versions 2 and 3 of the Protocol Buffers syntax. However, the
 * Java runtime version of the <code>com.google.protobuf</code> library must match exactly the
 * version of <code>protoc</code> that was used to produce the JAR files containing the compiled
 * <code>.proto</code> messages.
 *
 * <p>For more information, see the <a
 * href="https://developers.google.com/protocol-buffers/docs/proto3#using-proto2-message-types">Protocol
 * Buffers documentation</a>.
 *
 * <h3>{@link ProtoCoder} and Determinism</h3>
 *
 * <p>In general, Protocol Buffers messages can be encoded deterministically within a single
 * pipeline as long as:
 *
 * <ul>
 *   <li>The encoded messages (and any transitively linked messages) do not use <code>map</code>
 *       fields.
 *   <li>Every Java VM that encodes or decodes the messages use the same runtime version of the
 *       Protocol Buffers library and the same compiled <code>.proto</code> file JAR.
 * </ul>
 *
 * <h3>{@link ProtoCoder} and Encoding Stability</h3>
 *
 * <p>When changing Protocol Buffers messages, follow the rules in the Protocol Buffers language
 * guides for <a href="https://developers.google.com/protocol-buffers/docs/proto#updating">{@code
 * proto2}</a> and <a
 * href="https://developers.google.com/protocol-buffers/docs/proto3#updating">{@code proto3}</a>
 * syntaxes, depending on your message type. Following these guidelines will ensure that the old
 * encoded data can be read by new versions of the code.
 *
 * <p>Generally, any change to the message type, registered extensions, runtime library, or compiled
 * proto JARs may change the encoding. Thus even if both the original and updated messages can be
 * encoded deterministically within a single job, these deterministic encodings may not be the same
 * across jobs.
 *
 * @param <T> the Protocol Buffers {@link Message} handled by this {@link Coder}.
 */
public class ProtoCoder<T extends Message> extends CustomCoder<T> {

  // Explicit serial version so serialized coder instances remain compatible across releases.
  public static final long serialVersionUID = -5043999806040629525L;

  /** Returns a {@link ProtoCoder} for the given Protocol Buffers {@link Message}. */
  public static <T extends Message> ProtoCoder<T> of(Class<T> protoMessageClass) {
    return new ProtoCoder<>(protoMessageClass, ImmutableSet.of());
  }

  /**
   * Returns a {@link ProtoCoder} for the Protocol Buffers {@link Message} indicated by the given
   * {@link TypeDescriptor}.
   */
  public static <T extends Message> ProtoCoder<T> of(TypeDescriptor<T> protoMessageType) {
    @SuppressWarnings("unchecked")
    Class<T> protoMessageClass = (Class<T>) protoMessageType.getRawType();
    return of(protoMessageClass);
  }

  /**
   * Validate that all extensionHosts are able to be registered: each must expose a static {@code
   * registerAllExtensions(ExtensionRegistry)} method, which protoc generates on extension host
   * classes.
   *
   * @param moreExtensionHosts candidate protoc-generated classes to validate
   * @throws IllegalArgumentException if any class lacks the required static method
   */
  void validateExtensions(Iterable<Class<?>> moreExtensionHosts) {
    for (Class<?> extensionHost : moreExtensionHosts) {
      // Attempt to access the required method, to make sure it's present.
      try {
        Method registerAllExtensions =
            extensionHost.getDeclaredMethod("registerAllExtensions", ExtensionRegistry.class);
        checkArgument(
            Modifier.isStatic(registerAllExtensions.getModifiers()),
            "Method registerAllExtensions() must be static");
      } catch (NoSuchMethodException | SecurityException e) {
        throw new IllegalArgumentException(
            String.format("Unable to register extensions for %s", extensionHost.getCanonicalName()),
            e);
      }
    }
  }

  /**
   * Returns a {@link ProtoCoder} like this one, but with the extensions from the given classes
   * registered.
   *
   * <p>Each of the extension host classes must be a class automatically generated by the Protocol
   * Buffers compiler, {@code protoc}, that contains messages.
   *
   * <p>Does not modify this object.
   */
  public ProtoCoder<T> withExtensionsFrom(Iterable<Class<?>> moreExtensionHosts) {
    validateExtensions(moreExtensionHosts);
    return new ProtoCoder<>(
        protoMessageClass,
        new ImmutableSet.Builder<Class<?>>()
            .addAll(extensionHostClasses)
            .addAll(moreExtensionHosts)
            .build());
  }

  /**
   * See {@link #withExtensionsFrom(Iterable)}.
   *
   * <p>Does not modify this object.
   */
  public ProtoCoder<T> withExtensionsFrom(Class<?>... moreExtensionHosts) {
    return withExtensionsFrom(Arrays.asList(moreExtensionHosts));
  }

  @Override
  public void encode(T value, OutputStream outStream) throws IOException {
    encode(value, outStream, Context.NESTED);
  }

  @Override
  public void encode(T value, OutputStream outStream, Context context) throws IOException {
    if (value == null) {
      throw new CoderException("cannot encode a null " + protoMessageClass.getSimpleName());
    }
    // Whole-stream encoding writes the raw message; nested encoding length-prefixes it so the
    // decoder knows where the message ends.
    if (context.isWholeStream) {
      value.writeTo(outStream);
    } else {
      value.writeDelimitedTo(outStream);
    }
  }

  @Override
  public T decode(InputStream inStream) throws IOException {
    return decode(inStream, Context.NESTED);
  }

  @Override
  public T decode(InputStream inStream, Context context) throws IOException {
    // Mirrors encode(): whole-stream reads to EOF, nested reads a length-delimited message.
    if (context.isWholeStream) {
      return getParser().parseFrom(inStream, getExtensionRegistry());
    } else {
      return getParser().parseDelimitedFrom(inStream, getExtensionRegistry());
    }
  }

  @Override
  public boolean equals(Object other) {
    if (this == other) {
      return true;
    }
    if (other == null || getClass() != other.getClass()) {
      return false;
    }
    ProtoCoder<?> otherCoder = (ProtoCoder<?>) other;
    // Extension hosts are compared as sets: registration order does not affect the encoding.
    return protoMessageClass.equals(otherCoder.protoMessageClass)
        && Sets.newHashSet(extensionHostClasses)
            .equals(Sets.newHashSet(otherCoder.extensionHostClasses));
  }

  @Override
  public int hashCode() {
    return Objects.hash(protoMessageClass, extensionHostClasses);
  }

  @Override
  public void verifyDeterministic() throws NonDeterministicException {
    ProtobufUtil.verifyDeterministic(this);
  }

  /** Returns the Protocol Buffers {@link Message} type this {@link ProtoCoder} supports. */
  public Class<T> getMessageType() {
    return protoMessageClass;
  }

  /** Returns the extension host classes registered with this coder. */
  public Set<Class<?>> getExtensionHosts() {
    return extensionHostClasses;
  }

  /**
   * Returns the {@link ExtensionRegistry} listing all known Protocol Buffers extension messages to
   * {@code T} registered with this {@link ProtoCoder}.
   */
  public ExtensionRegistry getExtensionRegistry() {
    // Lazily built and memoized. NOTE(review): not synchronized — concurrent first calls may each
    // build a registry; presumably the results are interchangeable since they derive from the same
    // host classes. Confirm if this coder is shared across threads before first use.
    if (memoizedExtensionRegistry == null) {
      ExtensionRegistry registry = ExtensionRegistry.newInstance();
      for (Class<?> extensionHost : extensionHostClasses) {
        try {
          // Invoke the protoc-generated static registerAllExtensions(ExtensionRegistry) method.
          extensionHost
              .getDeclaredMethod("registerAllExtensions", ExtensionRegistry.class)
              .invoke(null, registry);
        } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException e) {
          // Hosts were validated in validateExtensions(); failure here indicates an internal error.
          throw new IllegalStateException(e);
        }
      }
      memoizedExtensionRegistry = registry.getUnmodifiable();
    }
    return memoizedExtensionRegistry;
  }

  ////////////////////////////////////////////////////////////////////////////////////
  // Private implementation details below.

  /** The {@link Message} type to be coded. */
  final Class<T> protoMessageClass;

  /**
   * All extension host classes included in this {@link ProtoCoder}. The extensions from these
   * classes will be included in the {@link ExtensionRegistry} used during encoding and decoding.
   */
  final Set<Class<?>> extensionHostClasses;

  // Constants used to serialize and deserialize
  private static final String PROTO_MESSAGE_CLASS = "proto_message_class";
  private static final String PROTO_EXTENSION_HOSTS = "proto_extension_hosts";

  // Transient fields that are lazy initialized and then memoized.
  private transient ExtensionRegistry memoizedExtensionRegistry;
  transient Parser<T> memoizedParser;

  /** Private constructor. */
  protected ProtoCoder(Class<T> protoMessageClass, Set<Class<?>> extensionHostClasses) {
    this.protoMessageClass = protoMessageClass;
    this.extensionHostClasses = extensionHostClasses;
  }

  /** Get the memoized {@link Parser}, possibly initializing it lazily. */
  protected Parser<T> getParser() {
    if (memoizedParser == null) {
      try {
        // DynamicMessage has no static getDefaultInstance(); it needs its own coder.
        if (DynamicMessage.class.equals(protoMessageClass)) {
          throw new IllegalArgumentException(
              "DynamicMessage is not supported by the ProtoCoder, use the DynamicProtoCoder.");
        } else {
          // Obtain the parser from the protoc-generated default instance via reflection.
          @SuppressWarnings("unchecked")
          T protoMessageInstance = (T) protoMessageClass.getMethod("getDefaultInstance").invoke(null);
          @SuppressWarnings("unchecked")
          Parser<T> tParser = (Parser<T>) protoMessageInstance.getParserForType();
          memoizedParser = tParser;
        }
      } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException e) {
        throw new IllegalArgumentException(e);
      }
    }
    return memoizedParser;
  }

  /**
   * Returns a {@link CoderProvider} which uses the {@link ProtoCoder} for {@link Message proto
   * messages}.
   *
   * <p>This method is invoked reflectively from {@link DefaultCoder}.
   */
  public static CoderProvider getCoderProvider() {
    return new ProtoCoderProvider();
  }

  static final TypeDescriptor<Message> MESSAGE_TYPE = new TypeDescriptor<Message>() {};

  /** A {@link CoderProvider} for {@link Message proto messages}. */
  private static class ProtoCoderProvider extends CoderProvider {

    @Override
    public <T> Coder<T> coderFor(
        TypeDescriptor<T> typeDescriptor, List<? extends Coder<?>> componentCoders)
        throws CannotProvideCoderException {
      if (!typeDescriptor.isSubtypeOf(MESSAGE_TYPE)) {
        throw new CannotProvideCoderException(
            String.format(
                "Cannot provide %s because %s is not a subclass of %s",
                ProtoCoder.class.getSimpleName(), typeDescriptor, Message.class.getName()));
      }

      @SuppressWarnings("unchecked")
      TypeDescriptor<? extends Message> messageType =
          (TypeDescriptor<? extends Message>) typeDescriptor;
      try {
        @SuppressWarnings("unchecked")
        Coder<T> coder = (Coder<T>) ProtoCoder.of(messageType);
        return coder;
      } catch (IllegalArgumentException e) {
        throw new CannotProvideCoderException(e);
      }
    }
  }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.index.shard;

import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.StoredFieldVisitor;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.ConstantScoreScorer;
import org.apache.lucene.search.ConstantScoreWeight;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TwoPhaseIterator;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.BitSetIterator;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.FixedBitSet;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.routing.OperationRouting;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.mapper.IdFieldMapper;
import org.elasticsearch.index.mapper.RoutingFieldMapper;
import org.elasticsearch.index.mapper.Uid;

import java.io.IOException;
import java.util.function.Function;
import java.util.function.IntConsumer;
import java.util.function.Predicate;

/**
 * A query that selects all docs that do NOT belong in the current shards this query is executed on.
 * It can be used to split a shard into N shards marking every document that doesn't belong into the shard
 * as deleted. See {@link org.apache.lucene.index.IndexWriter#deleteDocuments(Query...)}
 */
final class ShardSplittingQuery extends Query {
    // Metadata of the index being split; drives OperationRouting#generateShardId.
    private final IndexMetadata indexMetadata;
    // The target shard id: documents whose routed shard differs from this one are matched (deleted).
    private final int shardId;
    // Lazily produces the set of nested *parent* docs per segment; null when the index has no nested docs.
    private final BitSetProducer nestedParentBitSetProducer;

    ShardSplittingQuery(IndexMetadata indexMetadata, int shardId, boolean hasNested) {
        this.indexMetadata = indexMetadata;
        this.shardId = shardId;
        this.nestedParentBitSetProducer =  hasNested ? newParentDocBitSetProducer() : null;
    }

    @Override
    public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) {
        return new ConstantScoreWeight(this, boost) {
            @Override
            public String toString() {
                return "weight(delete docs query)";
            }

            @Override
            public Scorer scorer(LeafReaderContext context) throws IOException {
                LeafReader leafReader = context.reader();
                // Collects the doc ids that do NOT belong to this shard (i.e. the ones to delete).
                FixedBitSet bitSet = new FixedBitSet(leafReader.maxDoc());
                Terms terms = leafReader.terms(RoutingFieldMapper.NAME);
                // True when routing by _id (no _routing value) keeps the doc on this shard.
                Predicate<BytesRef> includeInShard = ref -> {
                    int targetShardId = OperationRouting.generateShardId(indexMetadata,
                        Uid.decodeId(ref.bytes, ref.offset, ref.length), null);
                    return shardId == targetShardId;
                };
                if (terms == null) {
                    // this is the common case - no partitioning and no _routing values
                    // in this case we also don't do anything special with regards to nested docs since we basically delete
                    // by ID and parent and nested all have the same id.
                    assert indexMetadata.isRoutingPartitionedIndex() == false;
                    findSplitDocs(IdFieldMapper.NAME, includeInShard, leafReader, bitSet::set);
                } else {
                    final BitSet parentBitSet;
                    if (nestedParentBitSetProducer == null) {
                        parentBitSet = null;
                    } else {
                        parentBitSet = nestedParentBitSetProducer.getBitSet(context);
                        if (parentBitSet == null) {
                            return null; // no matches
                        }
                    }
                    if (indexMetadata.isRoutingPartitionedIndex()) {
                        // this is the heaviest invariant. Here we have to visit all docs' stored fields to extract
                        // _id and _routing, because this index is routing partitioned.
                        Visitor visitor = new Visitor(leafReader);
                        TwoPhaseIterator twoPhaseIterator =
                            parentBitSet == null ? new RoutingPartitionedDocIdSetIterator(visitor) :
                                new NestedRoutingPartitionedDocIdSetIterator(visitor, parentBitSet);
                        return new ConstantScoreScorer(this, score(), scoreMode, twoPhaseIterator);
                    } else {
                        // here we potentially guard the docID consumers with our parent bitset if we have one.
                        // this ensures that we are only marking root documents in the nested case and if necessary
                        // we do a second pass to mark the corresponding children in markChildDocs
                        Function<IntConsumer, IntConsumer> maybeWrapConsumer = consumer -> {
                            if (parentBitSet != null) {
                                return docId -> {
                                    if (parentBitSet.get(docId)) {
                                        consumer.accept(docId);
                                    }
                                };
                            }
                            return consumer;
                        };
                        // in the _routing case we first go and find all docs that have a routing value and mark the
                        // ones we have to delete
                        findSplitDocs(RoutingFieldMapper.NAME, ref -> {
                            int targetShardId = OperationRouting.generateShardId(indexMetadata, null, ref.utf8ToString());
                            return shardId == targetShardId;
                        }, leafReader, maybeWrapConsumer.apply(bitSet::set));

                        // now if we have a mixed index where some docs have a _routing value and some don't we have
                        // to exclude the ones with a routing value from the next iteration and delete / select based
                        // on the ID.
                        if (terms.getDocCount() != leafReader.maxDoc()) {
                            // this is a special case where some of the docs have no routing values; this sucks but
                            // it's possible today
                            FixedBitSet hasRoutingValue = new FixedBitSet(leafReader.maxDoc());
                            // ref -> false: never include, we only want the side effect of marking docs that carry
                            // a _routing value.
                            findSplitDocs(RoutingFieldMapper.NAME, ref -> false, leafReader,
                                maybeWrapConsumer.apply(hasRoutingValue::set));
                            IntConsumer bitSetConsumer = maybeWrapConsumer.apply(bitSet::set);
                            findSplitDocs(IdFieldMapper.NAME, includeInShard, leafReader, docId -> {
                                if (hasRoutingValue.get(docId) == false) {
                                    bitSetConsumer.accept(docId);
                                }
                            });
                        }
                    }
                    if (parentBitSet != null) {
                        // if nested docs are involved we also need to mark all child docs that belong to a matching
                        // parent doc.
                        markChildDocs(parentBitSet, bitSet);
                    }
                }

                return new ConstantScoreScorer(this, score(), scoreMode, new BitSetIterator(bitSet, bitSet.length()));
            }

            @Override
            public boolean isCacheable(LeafReaderContext ctx) {
                // This is not a regular query, let's not cache it. It wouldn't help
                // anyway.
                return false;
            }
        };
    }

    /**
     * For every matched root doc in {@code matchingDocs}, also marks the preceding child docs
     * (Lucene stores nested children immediately before their parent doc).
     */
    private void markChildDocs(BitSet parentDocs, BitSet matchingDocs) {
        int currentDeleted = 0;
        while (currentDeleted < matchingDocs.length() &&
            (currentDeleted = matchingDocs.nextSetBit(currentDeleted)) != DocIdSetIterator.NO_MORE_DOCS) {
            // Children occupy the doc-id range (previous parent, current matched parent).
            int previousParent = parentDocs.prevSetBit(Math.max(0, currentDeleted-1));
            for (int i = previousParent + 1; i < currentDeleted; i++) {
                matchingDocs.set(i);
            }
            currentDeleted++;
        }
    }

    @Override
    public String toString(String field) {
        return "shard_splitting_query";
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        ShardSplittingQuery that = (ShardSplittingQuery) o;
        if (shardId != that.shardId) return false;
        return indexMetadata.equals(that.indexMetadata);
    }

    @Override
    public int hashCode() {
        int result = indexMetadata.hashCode();
        result = 31 * result + shardId;
        return classHash() ^ result;
    }

    /**
     * Walks all terms of {@code idField} and feeds every doc whose term fails {@code includeInShard}
     * to {@code consumer} (i.e. docs that should be removed from this shard).
     */
    private static void findSplitDocs(String idField, Predicate<BytesRef> includeInShard, LeafReader leafReader,
                                      IntConsumer consumer) throws IOException {
        Terms terms = leafReader.terms(idField);
        TermsEnum iterator = terms.iterator();
        BytesRef idTerm;
        PostingsEnum postingsEnum = null;
        while ((idTerm = iterator.next()) != null) {
            if (includeInShard.test(idTerm) == false) {
                postingsEnum = iterator.postings(postingsEnum);
                int doc;
                while ((doc = postingsEnum.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
                    consumer.accept(doc);
                }
            }
        }
    }

    /* this class is a stored fields visitor that reads _id and/or _routing from the stored fields which is necessary in the case
       of a routing partitioned index since otherwise we would need to un-invert the _id and _routing field which is memory heavy */
    private final class Visitor extends StoredFieldVisitor {
        final LeafReader leafReader;
        // Countdown of fields still wanted (_id and _routing); lets needsField() stop early.
        private int leftToVisit = 2;
        private final BytesRef spare = new BytesRef();
        private String routing;
        private String id;

        Visitor(LeafReader leafReader) {
            this.leafReader = leafReader;
        }

        @Override
        public void binaryField(FieldInfo fieldInfo, byte[] value) throws IOException {
            switch (fieldInfo.name) {
                case IdFieldMapper.NAME:
                    id = Uid.decodeId(value);
                    break;
                default:
                    throw new IllegalStateException("Unexpected field: " + fieldInfo.name);
            }
        }

        @Override
        public void stringField(FieldInfo fieldInfo, byte[] value) throws IOException {
            spare.bytes = value;
            spare.offset = 0;
            spare.length = value.length;
            switch (fieldInfo.name) {
                case RoutingFieldMapper.NAME:
                    routing = spare.utf8ToString();
                    break;
                default:
                    throw new IllegalStateException("Unexpected field: " + fieldInfo.name);
            }
        }

        @Override
        public Status needsField(FieldInfo fieldInfo) throws IOException {
            // we don't support 5.x so no need for the uid field
            switch (fieldInfo.name) {
                case IdFieldMapper.NAME:
                case RoutingFieldMapper.NAME:
                    leftToVisit--;
                    return Status.YES;
                default:
                    return leftToVisit == 0 ? Status.STOP : Status.NO;
            }
        }

        /** Returns true when the given (root) doc routes to a different shard and must be deleted. */
        boolean matches(int doc) throws IOException {
            routing = id = null;
            leftToVisit = 2;
            leafReader.document(doc, this);
            assert id != null : "docID must not be null - we might have hit a nested document";
            int targetShardId = OperationRouting.generateShardId(indexMetadata, id, routing);
            return targetShardId != shardId;
        }
    }

    /**
     * This two phase iterator visits every live doc and selects all docs that don't belong into this
     * shard based on their id and routing value. This is only used in a routing partitioned index.
     */
    private static final class RoutingPartitionedDocIdSetIterator extends TwoPhaseIterator {
        private final Visitor visitor;

        RoutingPartitionedDocIdSetIterator(Visitor visitor) {
            super(DocIdSetIterator.all(visitor.leafReader.maxDoc())); // we iterate all live-docs
            this.visitor = visitor;
        }

        @Override
        public boolean matches() throws IOException {
            return visitor.matches(approximation.docID());
        }

        @Override
        public float matchCost() {
            return 42; // that's obvious, right?
        }
    }

    /**
     * This TwoPhaseIterator marks all nested docs of matching parents as matches as well.
     */
    private static final class NestedRoutingPartitionedDocIdSetIterator extends TwoPhaseIterator {
        private final Visitor visitor;
        private final BitSet parentDocs;
        private int nextParent = -1;
        private boolean nextParentMatches;

        NestedRoutingPartitionedDocIdSetIterator(Visitor visitor, BitSet parentDocs) {
            super(DocIdSetIterator.all(visitor.leafReader.maxDoc())); // we iterate all live-docs
            this.parentDocs = parentDocs;
            this.visitor = visitor;
        }

        @Override
        public boolean matches() throws IOException {
            // the educated reader might ask why this works, it does because all live doc ids (root docs and nested docs) are
            // evaluated in order and that way we don't need to seek backwards as we do in other nested docs cases.
            int doc = approximation.docID();
            if (doc > nextParent) {
                // we only check once per nested/parent set
                nextParent = parentDocs.nextSetBit(doc);
                // never check a child document against the visitor, they neither have _id nor _routing as stored fields
                nextParentMatches = visitor.matches(nextParent);
            }
            return nextParentMatches;
        }

        @Override
        public float matchCost() {
            return 42; // that's obvious, right?
        }
    }

    /*
     * this is used internally to obtain a bitset for parent documents. We don't cache this since we never access the same reader more
     * than once. There is no point in using BitsetFilterCache#BitSetProducerWarmer since we use this only as a delete by query which is
     * executed on a recovery-private index writer. There is no point in caching it and it won't have a cache hit either.
     */
    private static BitSetProducer newParentDocBitSetProducer() {
        return context -> BitsetFilterCache.bitsetFromQuery(Queries.newNonNestedFilter(), context);
    }
}
/**
 * Generated with Acceleo
 */
package org.wso2.developerstudio.eclipse.gmf.esb.parts.impl;

// Start of user code for imports
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.eef.runtime.api.component.IPropertiesEditionComponent;
import org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent;
import org.eclipse.emf.eef.runtime.api.parts.ISWTPropertiesEditionPart;
import org.eclipse.emf.eef.runtime.context.impl.EObjectPropertiesEditionContext;
import org.eclipse.emf.eef.runtime.impl.notify.PropertiesEditionEvent;
import org.eclipse.emf.eef.runtime.impl.parts.CompositePropertiesEditionPart;
import org.eclipse.emf.eef.runtime.policies.PropertiesEditingPolicy;
import org.eclipse.emf.eef.runtime.providers.PropertiesEditingProvider;
import org.eclipse.emf.eef.runtime.ui.parts.PartComposer;
import org.eclipse.emf.eef.runtime.ui.parts.sequence.BindingCompositionSequence;
import org.eclipse.emf.eef.runtime.ui.parts.sequence.CompositionSequence;
import org.eclipse.emf.eef.runtime.ui.widgets.ReferencesTable;
import org.eclipse.emf.eef.runtime.ui.widgets.ReferencesTable.ReferencesTableListener;
import org.eclipse.emf.eef.runtime.ui.widgets.TabElementTreeSelectionDialog;
import org.eclipse.emf.eef.runtime.ui.widgets.referencestable.ReferencesTableContentProvider;
import org.eclipse.emf.eef.runtime.ui.widgets.referencestable.ReferencesTableSettings;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.jface.viewers.ViewerFilter;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Group;
import org.wso2.developerstudio.eclipse.gmf.esb.parts.EsbViewsRepository;
import org.wso2.developerstudio.eclipse.gmf.esb.parts.StoreMediatorInputConnectorPropertiesEditionPart;
import org.wso2.developerstudio.eclipse.gmf.esb.providers.EsbMessages;

// End of user code

/**
 * SWT properties edition part for the Store mediator input connector.
 *
 * <p>NOTE(review): Acceleo-generated file — changes outside the "user code" regions will be lost
 * on regeneration. Renders a "Properties" group plus a read-only (move-disabled) "incoming links"
 * references table and forwards all table interactions as {@link PropertiesEditionEvent}s to the
 * owning {@link IPropertiesEditionComponent}.
 */
public class StoreMediatorInputConnectorPropertiesEditionPartImpl extends CompositePropertiesEditionPart
    implements ISWTPropertiesEditionPart, StoreMediatorInputConnectorPropertiesEditionPart {

    // Table widget listing incoming ESB links, plus its UI and business viewer filters.
    protected ReferencesTable incomingLinks;
    protected List<ViewerFilter> incomingLinksBusinessFilters = new ArrayList<ViewerFilter>();
    protected List<ViewerFilter> incomingLinksFilters = new ArrayList<ViewerFilter>();

    /**
     * Default constructor
     * @param editionComponent the {@link IPropertiesEditionComponent} that manage this part
     *
     */
    public StoreMediatorInputConnectorPropertiesEditionPartImpl(IPropertiesEditionComponent editionComponent) {
        super(editionComponent);
    }

    /**
     * {@inheritDoc}
     *
     * @see org.eclipse.emf.eef.runtime.api.parts.ISWTPropertiesEditionPart#
     *  createFigure(org.eclipse.swt.widgets.Composite)
     *
     */
    public Composite createFigure(final Composite parent) {
        view = new Composite(parent, SWT.NONE);
        GridLayout layout = new GridLayout();
        layout.numColumns = 3;
        view.setLayout(layout);
        createControls(view);
        return view;
    }

    /**
     * {@inheritDoc}
     *
     * @see org.eclipse.emf.eef.runtime.api.parts.ISWTPropertiesEditionPart#
     *  createControls(org.eclipse.swt.widgets.Composite)
     *
     */
    public void createControls(Composite view) {
        // Declare the composition sequence (properties group, then the table) and let the
        // PartComposer call back into the create* methods below for each step key.
        CompositionSequence storeMediatorInputConnectorStep = new BindingCompositionSequence(propertiesEditionComponent);
        storeMediatorInputConnectorStep
            .addStep(EsbViewsRepository.StoreMediatorInputConnector.Properties.class)
            .addStep(EsbViewsRepository.StoreMediatorInputConnector.Properties.incomingLinks);

        composer = new PartComposer(storeMediatorInputConnectorStep) {

            @Override
            public Composite addToPart(Composite parent, Object key) {
                if (key == EsbViewsRepository.StoreMediatorInputConnector.Properties.class) {
                    return createPropertiesGroup(parent);
                }
                if (key == EsbViewsRepository.StoreMediatorInputConnector.Properties.incomingLinks) {
                    return createIncomingLinksAdvancedReferencesTable(parent);
                }
                return parent;
            }
        };
        composer.compose(view);
    }

    /**
     * Creates the "Properties" group container spanning all three layout columns.
     */
    protected Composite createPropertiesGroup(Composite parent) {
        Group propertiesGroup = new Group(parent, SWT.NONE);
        propertiesGroup.setText(EsbMessages.StoreMediatorInputConnectorPropertiesEditionPart_PropertiesGroupLabel);
        GridData propertiesGroupData = new GridData(GridData.FILL_HORIZONTAL);
        propertiesGroupData.horizontalSpan = 3;
        propertiesGroup.setLayoutData(propertiesGroupData);
        GridLayout propertiesGroupLayout = new GridLayout();
        propertiesGroupLayout.numColumns = 3;
        propertiesGroup.setLayout(propertiesGroupLayout);
        return propertiesGroup;
    }

    /**
     * Creates the "incoming links" references table and wires its add/edit/move/remove callbacks.
     */
    protected Composite createIncomingLinksAdvancedReferencesTable(Composite parent) {
        String label = getDescription(EsbViewsRepository.StoreMediatorInputConnector.Properties.incomingLinks,
            EsbMessages.StoreMediatorInputConnectorPropertiesEditionPart_IncomingLinksLabel);
        this.incomingLinks = new ReferencesTable(label, new ReferencesTableListener() {
            public void handleAdd() {
                addIncomingLinks();
            }
            public void handleEdit(EObject element) {
                editIncomingLinks(element);
            }
            public void handleMove(EObject element, int oldIndex, int newIndex) {
                moveIncomingLinks(element, oldIndex, newIndex);
            }
            public void handleRemove(EObject element) {
                removeFromIncomingLinks(element);
            }
            public void navigateTo(EObject element) {
            }
        });
        this.incomingLinks.setHelpText(propertiesEditionComponent.getHelpContent(
            EsbViewsRepository.StoreMediatorInputConnector.Properties.incomingLinks, EsbViewsRepository.SWT_KIND));
        this.incomingLinks.createControls(parent);
        // Forward row selection to the edition component as a SELECTION_CHANGED event.
        this.incomingLinks.addSelectionListener(new SelectionAdapter() {
            public void widgetSelected(SelectionEvent e) {
                if (e.item != null && e.item.getData() instanceof EObject) {
                    propertiesEditionComponent.firePropertiesChanged(
                        new PropertiesEditionEvent(StoreMediatorInputConnectorPropertiesEditionPartImpl.this,
                            EsbViewsRepository.StoreMediatorInputConnector.Properties.incomingLinks,
                            PropertiesEditionEvent.CHANGE, PropertiesEditionEvent.SELECTION_CHANGED, null,
                            e.item.getData()));
                }
            }
        });
        GridData incomingLinksData = new GridData(GridData.FILL_HORIZONTAL);
        incomingLinksData.horizontalSpan = 3;
        this.incomingLinks.setLayoutData(incomingLinksData);
        this.incomingLinks.disableMove();
        incomingLinks.setID(EsbViewsRepository.StoreMediatorInputConnector.Properties.incomingLinks);
        incomingLinks.setEEFType("eef::AdvancedReferencesTable"); //$NON-NLS-1$
        return parent;
    }

    /**
     * Opens the element selection dialog and fires a COMMIT/ADD event per selected element.
     */
    protected void addIncomingLinks() {
        TabElementTreeSelectionDialog dialog = new TabElementTreeSelectionDialog(incomingLinks.getInput(),
            incomingLinksFilters, incomingLinksBusinessFilters, "incomingLinks",
            propertiesEditionComponent.getEditingContext().getAdapterFactory(), current.eResource()) {
            @Override
            public void process(IStructuredSelection selection) {
                for (Iterator<?> iter = selection.iterator(); iter.hasNext();) {
                    EObject elem = (EObject) iter.next();
                    propertiesEditionComponent.firePropertiesChanged(
                        new PropertiesEditionEvent(StoreMediatorInputConnectorPropertiesEditionPartImpl.this,
                            EsbViewsRepository.StoreMediatorInputConnector.Properties.incomingLinks,
                            PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.ADD, null, elem));
                }
                incomingLinks.refresh();
            }
        };
        dialog.open();
    }

    /**
     * Fires a COMMIT/MOVE event for the element and refreshes the table.
     */
    protected void moveIncomingLinks(EObject element, int oldIndex, int newIndex) {
        propertiesEditionComponent.firePropertiesChanged(
            new PropertiesEditionEvent(StoreMediatorInputConnectorPropertiesEditionPartImpl.this,
                EsbViewsRepository.StoreMediatorInputConnector.Properties.incomingLinks,
                PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.MOVE, element, newIndex));
        incomingLinks.refresh();
    }

    /**
     * Fires a COMMIT/REMOVE event for the element and refreshes the table.
     */
    protected void removeFromIncomingLinks(EObject element) {
        propertiesEditionComponent.firePropertiesChanged(
            new PropertiesEditionEvent(StoreMediatorInputConnectorPropertiesEditionPartImpl.this,
                EsbViewsRepository.StoreMediatorInputConnector.Properties.incomingLinks,
                PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.REMOVE, null, element));
        incomingLinks.refresh();
    }

    /**
     * Delegates in-place editing of an element to its {@link PropertiesEditingProvider}, if any.
     */
    protected void editIncomingLinks(EObject element) {
        EObjectPropertiesEditionContext context = new EObjectPropertiesEditionContext(
            propertiesEditionComponent.getEditingContext(), propertiesEditionComponent, element, adapterFactory);
        PropertiesEditingProvider provider =
            (PropertiesEditingProvider)adapterFactory.adapt(element, PropertiesEditingProvider.class);
        if (provider != null) {
            PropertiesEditingPolicy policy = provider.getPolicy(context);
            if (policy != null) {
                policy.execute();
                incomingLinks.refresh();
            }
        }
    }

    /**
     * {@inheritDoc}
     *
     * @see org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionListener#firePropertiesChanged(org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent)
     *
     */
    public void firePropertiesChanged(IPropertiesEditionEvent event) {
        // Start of user code for tab synchronization
        // End of user code
    }

    /**
     * {@inheritDoc}
     *
     * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.StoreMediatorInputConnectorPropertiesEditionPart#initIncomingLinks(org.eclipse.emf.eef.runtime.ui.widgets.referencestable.ReferencesTableSettings)
     */
    public void initIncomingLinks(ReferencesTableSettings settings) {
        if (current.eResource() != null && current.eResource().getResourceSet() != null)
            this.resourceSet = current.eResource().getResourceSet();
        ReferencesTableContentProvider contentProvider = new ReferencesTableContentProvider();
        incomingLinks.setContentProvider(contentProvider);
        incomingLinks.setInput(settings);
        incomingLinksBusinessFilters.clear();
        incomingLinksFilters.clear();
        // Enable/disable the table to reflect the editor's read-only state for this view element.
        boolean eefElementEditorReadOnlyState =
            isReadOnly(EsbViewsRepository.StoreMediatorInputConnector.Properties.incomingLinks);
        if (eefElementEditorReadOnlyState && incomingLinks.getTable().isEnabled()) {
            incomingLinks.setEnabled(false);
            incomingLinks.setToolTipText(EsbMessages.StoreMediatorInputConnector_ReadOnly);
        } else if (!eefElementEditorReadOnlyState && !incomingLinks.getTable().isEnabled()) {
            incomingLinks.setEnabled(true);
        }
    }

    /**
     * {@inheritDoc}
     *
     * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.StoreMediatorInputConnectorPropertiesEditionPart#updateIncomingLinks()
     *
     */
    public void updateIncomingLinks() {
        incomingLinks.refresh();
    }

    /**
     * {@inheritDoc}
     *
     * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.StoreMediatorInputConnectorPropertiesEditionPart#addFilterIncomingLinks(ViewerFilter filter)
     *
     */
    public void addFilterToIncomingLinks(ViewerFilter filter) {
        incomingLinksFilters.add(filter);
    }

    /**
     * {@inheritDoc}
     *
     * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.StoreMediatorInputConnectorPropertiesEditionPart#addBusinessFilterIncomingLinks(ViewerFilter filter)
     *
     */
    public void addBusinessFilterToIncomingLinks(ViewerFilter filter) {
        incomingLinksBusinessFilters.add(filter);
    }

    /**
     * {@inheritDoc}
     *
     * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.StoreMediatorInputConnectorPropertiesEditionPart#isContainedInIncomingLinksTable(EObject element)
     *
     */
    public boolean isContainedInIncomingLinksTable(EObject element) {
        return ((ReferencesTableSettings)incomingLinks.getInput()).contains(element);
    }

    /**
     * {@inheritDoc}
     *
     * @see org.eclipse.emf.eef.runtime.api.parts.IPropertiesEditionPart#getTitle()
     *
     */
    public String getTitle() {
        return EsbMessages.StoreMediatorInputConnector_Part_Title;
    }

    // Start of user code additional methods
    // End of user code

}
package com.planet_ink.coffee_mud.WebMacros; import com.planet_ink.coffee_web.http.HTTPException; import com.planet_ink.coffee_web.http.HTTPMethod; import com.planet_ink.coffee_web.http.MIMEType; import com.planet_ink.coffee_web.http.MultiPartData; import com.planet_ink.coffee_web.interfaces.*; import com.planet_ink.coffee_web.util.CWThread; import com.planet_ink.coffee_web.util.CWConfig; import com.planet_ink.coffee_mud.WebMacros.interfaces.*; import com.planet_ink.coffee_mud.core.interfaces.*; import com.planet_ink.coffee_mud.core.*; import com.planet_ink.coffee_mud.core.CMSecurity.DbgFlag; import com.planet_ink.coffee_mud.core.collections.*; import com.planet_ink.coffee_mud.Abilities.interfaces.*; import com.planet_ink.coffee_mud.Areas.interfaces.*; import com.planet_ink.coffee_mud.Behaviors.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.*; import com.planet_ink.coffee_mud.Libraries.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.*; import com.planet_ink.coffee_mud.Exits.interfaces.*; import com.planet_ink.coffee_mud.Items.interfaces.*; import com.planet_ink.coffee_mud.Locales.interfaces.*; import com.planet_ink.coffee_mud.MOBS.interfaces.*; import com.planet_ink.coffee_mud.Races.interfaces.*; import com.planet_ink.coffee_mud.core.exceptions.*; import java.io.File; import java.io.InputStream; import java.net.InetAddress; import java.util.*; /* Copyright 2002-2022 Bo Zimmerman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ public class StdWebMacro implements WebMacro { @Override public String ID() { return name(); } @Override public String name() { return "UNKNOWN"; } @Override public boolean isAWebPath() { return false; } @Override public boolean preferBinary() { return false; } @Override public boolean isAdminMacro() { return false; } @Override public CMObject newInstance() { return this; } @Override public void initializeClass() { } @Override public CMObject copyOf() { return this; } @Override public byte[] runBinaryMacro(final HTTPRequest httpReq, final String parm, final HTTPResponse httpResp) throws HTTPServerException { return runMacro(httpReq,parm, null).getBytes(); } @Override public String runMacro(final HTTPRequest httpReq, final String parm, final HTTPResponse httpResp) throws HTTPServerException { return "[Unimplemented macro!]"; } @Override public int compareTo(final CMObject o) { return CMClass.classID(this).compareToIgnoreCase(CMClass.classID(o)); } protected static StringBuffer colorwebifyOnly(final StringBuffer s) { if(s==null) return null; int i=0; final String[] lookup=CMLib.color().standardHTMLlookups(); while(i<s.length()) { if(s.charAt(i)=='^') { if(i<(s.length()-1)) { final char c=s.charAt(i+1); //TODO: handle ~ and # here? 
final String code=lookup[c]; if(code!=null) { s.delete(i,i+2); if(code.startsWith("<")) { s.insert(i,code+">"); i+=code.length(); } else { s.insert(i,code); i+=code.length()-1; } } else if(c=='?') { s.delete(i,i+2); s.insert(i,"</FONT>"); i+=7; } } } i++; } return s; } protected static String webify(final String s) { return webify(new StringBuffer(s)).toString(); } protected static StringBuffer webify(StringBuffer s) { if(s==null) return null; int i=0; while(i<s.length()) { switch(s.charAt(i)) { case '\n': case '\r': if((i<s.length()-1) &&(s.charAt(i+1)!=s.charAt(i)) &&((s.charAt(i+1)=='\r')||(s.charAt(i+1)=='\n'))) { s.delete(i,i+2); s.insert(i,"<BR>"); i+=3; } else { s.delete(i,i+1); s.insert(i,"<BR>"); i+=3; } break; case ' ': s.setCharAt(i,'&'); s.insert(i+1,"nbsp;"); i+=5; break; case '>': s.setCharAt(i,'&'); s.insert(i+1,"gt;"); i+=3; break; case '<': s.setCharAt(i,'&'); s.insert(i+1,"lt;"); i+=3; break; } i++; } s=colorwebifyOnly(s); return s; } protected String clearWebMacros(final String s) { return CMLib.webMacroFilter().clearWebMacros(s); } protected String clearWebMacros(final StringBuffer s) { return CMLib.webMacroFilter().clearWebMacros(s); } protected StringBuilder helpHelp(final StringBuilder s) { return helpHelp(s, 70); } protected StringBuilder helpHelp(final String s) { return helpHelp(new StringBuilder(s), 70); } protected StringBuilder helpHelp(StringBuilder s, final int limit) { if(s!=null) { final String[] lookup=CMLib.color().standardHTMLlookups(); s=new StringBuilder(s.toString()); int x=0; int count=0; x=0; int lastSpace=0; //TODO: limit should adjust or lastspace should -- something is wrong RIGHT HERE! 
while((x>=0)&&(x<s.length())) { count++; switch(s.charAt(x)) { case '\\': if(x<s.length()-1) { if(s.charAt(x+1)=='n') { s.delete(x, x+2); s.insert(x, " <BR>"); lastSpace=x; } } break; case '\n': if(x<s.length()-1) { if(s.charAt(x+1)=='\r') { s.delete(x, x+2); s.insert(x, " <BR>"); } else { s.delete(x, x+1); s.insert(x, " <BR>"); } lastSpace=x; } break; case '\r': if(x<s.length()-1) { if(s.charAt(x+1)=='\n') { s.delete(x, x+2); s.insert(x, " <BR>"); } else { s.delete(x, x+1); s.insert(x, " <BR>"); } lastSpace=x; } break; case ' ': lastSpace=x; break; case '<': if((x<=s.length()-4) &&(s.substring(x,x+4).equalsIgnoreCase("<BR>"))) { count=0; x=x+3; lastSpace=x+4; } else { s.setCharAt(x,'&'); s.insert(x+1,"lt;"); x+=3; } break; case '-': if((x>4) &&(s.charAt(x-1)=='-') &&(s.charAt(x-2)=='-') &&(s.charAt(x-3)=='-')) { count=0; lastSpace=x; } break; case '!': if((x>4) &&(s.charAt(x-1)==' ') &&(s.charAt(x-2)==' ') &&(s.charAt(x-3)==' ')) { count=0; lastSpace=x; } else if((x<s.length()-10) // remove music &&(s.charAt(x+1)=='!') &&((s.substring(x+2,x+7).equalsIgnoreCase("sound")) ||(s.substring(x+2,x+7).equalsIgnoreCase("music")))) { final int x1=s.indexOf("(",x+7); final int y1=s.indexOf(")",x+7); if((x1>=0)&&(y1>=x1)) { s.delete(x,y1+1); x--; } } break; case '^': if(x<(s.length()-1)) { char c=s.charAt(x+1); if(c=='?') c='w'; final String code; if((c==ColorLibrary.COLORCODE_BACKGROUND) &&(x<(s.length()-2))) { c=s.charAt(x+2); s.delete(x, x+1); code=CMLib.color().getBackgroundHtmlTag(c); } else code=lookup[c]; if(code!=null) { s.delete(x,x+2); if(code.startsWith("<")) { s.insert(x,code+">"); x+=code.length(); } else { s.insert(x-1,code); x+=code.length()-1; } } count--; } break; } if(count==limit) { //int brx=s.indexOf("<BR>",lastSpace); //if((brx<0)||(brx>lastSpace+12)) s.replace(lastSpace,lastSpace+1,"<BR>"); lastSpace=lastSpace+4; x=lastSpace; count=0; } else x++; } return s; } return new StringBuilder(""); } protected PairSVector<String,String> parseOrderedParms(final 
String parm, final boolean preserveCase) { final PairSVector<String,String> requestParms=new PairSVector<String,String>(); if((parm!=null)&&(parm.length()>0)) { int lastDex=0; CharSequence varSeq=null; for(int i=0;i<parm.length();i++) { switch(parm.charAt(i)) { case '\\': i++; break; case '&': { if(varSeq==null) { if(preserveCase) requestParms.add(parm.substring(lastDex,i),parm.substring(lastDex,i)); else requestParms.add(parm.substring(lastDex,i).toUpperCase().trim(),parm.substring(lastDex,i).trim()); } else { if(preserveCase) requestParms.add(varSeq.toString(),parm.substring(lastDex,i)); else requestParms.add(varSeq.toString().trim().toUpperCase(),parm.substring(lastDex,i).trim()); } lastDex=i+1; varSeq=null; break; } case '=': { if(varSeq==null) { varSeq=parm.subSequence(lastDex,i); lastDex=i+1; } break; } } } final int i=parm.length(); if(varSeq==null) { if(preserveCase) requestParms.add(parm.substring(lastDex,i),parm.substring(lastDex,i)); else requestParms.add(parm.substring(lastDex,i).trim().toUpperCase(),parm.substring(lastDex,i).trim()); } else { if(preserveCase) requestParms.add(varSeq.toString(),parm.substring(lastDex,i)); else requestParms.add(varSeq.toString().trim().toUpperCase(),parm.substring(lastDex,i).trim()); } } return requestParms; } protected String safeIncomingfilter(final String buf) { if(buf==null) return null; if(buf.length()==0) return ""; return CMLib.coffeeFilter().simpleInFilter(new StringBuilder(buf)); } protected String htmlIncomingFilter(final String buf) { return htmlIncomingFilter(new StringBuffer(buf)).toString(); } protected StringBuffer htmlIncomingFilter(final StringBuffer buf) { int loop=0; while(buf.length()>loop) { if((buf.charAt(loop)=='&') &&(loop<buf.length()-3)) { int endloop=loop+1; while((endloop<buf.length())&&(endloop<loop+10)&&(buf.charAt(endloop)!=';')) endloop++; if(endloop<buf.length()) { final String s=buf.substring(loop,endloop+1); if(s.equalsIgnoreCase("&gt;")) { buf.setCharAt(loop,'>'); 
buf.delete(loop+1,endloop+1); } else if(s.equalsIgnoreCase("&lt;")) { buf.setCharAt(loop,'<'); buf.delete(loop+1,endloop+1); } else if(s.equalsIgnoreCase("&amp;")) { buf.setCharAt(loop,'&'); buf.delete(loop+1,endloop+1); } else if(s.equalsIgnoreCase("&quot;")) { buf.setCharAt(loop,'\"'); buf.delete(loop+1,endloop+1); } } } loop++; } return buf; } protected static String htmlOutgoingFilter(final String buf) { return htmlOutgoingFilter(new StringBuffer(buf)).toString(); } protected static StringBuffer htmlOutgoingFilter(final StringBuffer buf) { int loop=0; while(buf.length()>loop) { switch(buf.charAt(loop)) { case '>': buf.delete(loop,loop+1); buf.insert(loop,"&gt;".toCharArray()); loop+=3; break; case '"': buf.delete(loop,loop+1); buf.insert(loop,"&quot;".toCharArray()); loop+=5; break; case '&': if((loop+3>=buf.length()) ||((!buf.substring(loop,loop+3).equalsIgnoreCase("lt;")) &&(!buf.substring(loop,loop+3).equalsIgnoreCase("amp;")) &&(!buf.substring(loop,loop+3).equalsIgnoreCase("quot;")) &&(!buf.substring(loop,loop+3).equalsIgnoreCase("gt;")))) { buf.delete(loop,loop+1); buf.insert(loop,"&amp;".toCharArray()); loop+=4; } else loop++; break; case '<': buf.delete(loop,loop+1); buf.insert(loop,"&lt;".toCharArray()); loop+=3; break; default: loop++; } } return buf; } protected byte[] getHTTPFileData(final HTTPRequest httpReq, final String file) throws HTTPException { if(Thread.currentThread() instanceof CWThread) { final CWConfig config=((CWThread)Thread.currentThread()).getConfig(); final HTTPRequest newReq=new HTTPRequest() { final Hashtable<String,String> params=new XHashtable<String,String>(httpReq.getUrlParametersCopy()); @Override public String getHost() { return httpReq.getHost(); } @Override public String getUrlPath() { return file; } @Override public String getFullRequest() { return httpReq.getMethod().name() + " " + getUrlPath(); } @Override public String getUrlParameter(final String name) { return params.get(name.toLowerCase()); } @Override public 
Map<String, String> getUrlParametersCopy() { return new XHashtable<String, String>(params); } @Override public boolean isUrlParameter(final String name) { return params.containsKey(name.toLowerCase()); } @Override public Set<String> getUrlParameters() { return params.keySet(); } @Override public HTTPMethod getMethod() { return httpReq.getMethod(); } @Override public String getHeader(final String name) { return httpReq.getHeader(name); } @Override public InetAddress getClientAddress() { return httpReq.getClientAddress(); } @Override public int getClientPort() { return httpReq.getClientPort(); } @Override public InputStream getBody() { return httpReq.getBody(); } @Override public String getCookie(final String name) { return httpReq.getCookie(name); } @Override public Set<String> getCookieNames() { return httpReq.getCookieNames(); } @Override public List<MultiPartData> getMultiParts() { return httpReq.getMultiParts(); } @Override public double getSpecialEncodingAcceptability(final String type) { return httpReq.getSpecialEncodingAcceptability(type); } @Override public String getFullHost() { return httpReq.getFullHost(); } @Override public List<long[]> getRangeAZ() { return httpReq.getRangeAZ(); } @Override public void addFakeUrlParameter(final String name, final String value) { params.put(name.toLowerCase(), value); } @Override public void removeUrlParameter(final String name) { params.remove(name.toLowerCase()); } @Override public Map<String, Object> getRequestObjects() { return httpReq.getRequestObjects(); } @Override public float getHttpVer() { return httpReq.getHttpVer(); } @Override public String getQueryString() { return httpReq.getQueryString(); } }; final DataBuffers data=config.getFileGetter().getFileData(newReq); return data.flushToBuffer().array(); } return new byte[0]; } protected File grabFile(final HTTPRequest httpReq, String filename) { if(Thread.currentThread() instanceof CWThread) { filename=filename.replace(File.separatorChar,'/'); if 
(!filename.startsWith("/")) filename = '/' + filename; final String file=filename; final CWConfig config=((CWThread)Thread.currentThread()).getConfig(); final HTTPRequest newReq=new HTTPRequest() { public final Hashtable<String,String> params=new XHashtable<String,String>(httpReq.getUrlParametersCopy()); @Override public String getHost() { return httpReq.getHost(); } @Override public String getUrlPath() { return file; } @Override public String getFullRequest() { return httpReq.getMethod().name() + " " + getUrlPath(); } @Override public String getUrlParameter(final String name) { return params.get(name.toUpperCase()); } @Override public boolean isUrlParameter(final String name) { return params.containsKey(name.toUpperCase()); } @Override public Map<String, String> getUrlParametersCopy() { return new XHashtable<String, String>(params); } @Override public Set<String> getUrlParameters() { return params.keySet(); } @Override public HTTPMethod getMethod() { return httpReq.getMethod(); } @Override public String getHeader(final String name) { return httpReq.getHeader(name); } @Override public InetAddress getClientAddress() { return httpReq.getClientAddress(); } @Override public int getClientPort() { return httpReq.getClientPort(); } @Override public InputStream getBody() { return httpReq.getBody(); } @Override public String getCookie(final String name) { return httpReq.getCookie(name); } @Override public Set<String> getCookieNames() { return httpReq.getCookieNames(); } @Override public List<MultiPartData> getMultiParts() { return httpReq.getMultiParts(); } @Override public double getSpecialEncodingAcceptability(final String type) { return httpReq.getSpecialEncodingAcceptability(type); } @Override public String getFullHost() { return httpReq.getFullHost(); } @Override public List<long[]> getRangeAZ() { return httpReq.getRangeAZ(); } @Override public void addFakeUrlParameter(final String name, final String value) { params.put(name.toUpperCase(), value); } @Override public 
void removeUrlParameter(final String name) { params.remove(name.toUpperCase()); } @Override public Map<String, Object> getRequestObjects() { return httpReq.getRequestObjects(); } @Override public float getHttpVer() { return httpReq.getHttpVer(); } @Override public String getQueryString() { return httpReq.getQueryString(); } }; return config.getFileGetter().createFile(newReq,config.getFileGetter().assembleFilePath(newReq)); } return null; } protected java.util.Map<String,String> parseParms(final String parm) { final Hashtable<String,String> requestParms=new Hashtable<String,String>(); final PairSVector<String,String> requestParsed = parseOrderedParms(parm,false); for(final Pair<String,String> P : requestParsed) requestParms.put(P.first,P.second); return requestParms; } protected java.util.Map<String,String> parseParms(final String parm, final boolean preserveCase) { final Hashtable<String,String> requestParms=new Hashtable<String,String>(); final PairSVector<String,String> requestParsed = parseOrderedParms(parm,preserveCase); for(final Pair<String,String> P : requestParsed) requestParms.put(P.first,P.second); return requestParms; } public String L(final String str, final String ... xs) { return CMLib.lang().fullSessionTranslation(str, xs); } }
import java.awt.*; import java.io.*; import java.util.ArrayList; import static java.awt.GraphicsEnvironment.*; public class ourGrammarListeners extends ourGrammarBaseListener { ourGrammarParser parser; ArrayList<String> options = new ArrayList<>(); File fout; BufferedWriter gramgram; FileOutputStream fos; public int xOffset=0, yOffset=0; private Rectangle boundaries = getLocalGraphicsEnvironment().getMaximumWindowBounds(); public int maxWidth = (int) boundaries.getMaxX(); public int maxHeight = (int) boundaries.getMaxY(); public int halfWidth = (int) boundaries.getCenterX(); public int halfHeight = (int) boundaries.getCenterY()- 50; public int halfwayHeight = (int) boundaries.getCenterY(); //This is because the windows were too tall, but the midway point was correct public String finalView = ""; public String sleepString = "\nsleep 1\n"; public String filename; /* Constructor */ public ourGrammarListeners(ourGrammarParser parser){ this.parser = parser; } @Override public void enterWorkspace(ourGrammarParser.WorkspaceContext ctx){ // switched wmctrl to a different workspace setViewport(ctx.children.get(1).toString()); } private void setViewport(String workspaceNum) { try { gramgram.write("###### Start Workspace " + workspaceNum + " ######"); gramgram.write(sleepString); } catch (IOException e) { e.printStackTrace(); } switch (workspaceNum) { case "1": try { gramgram.write("wmctrl -o 0,0\n"); } catch (IOException e) { e.printStackTrace(); } break; case "2": try { gramgram.write("wmctrl -o "+ maxWidth+",0\n"); } catch (IOException e) { e.printStackTrace(); } break; case "3": try { gramgram.write("wmctrl -o"+ " 0,"+ maxHeight+"\n"); } catch (IOException e) { e.printStackTrace(); } break; case "4": try { gramgram.write("wmctrl -o " + maxWidth + "," + maxHeight + "\n"); } catch (IOException e) { e.printStackTrace(); } break; } } @Override public void enterApplication(ourGrammarParser.ApplicationContext ctx) { try { gramgram.write("\n###### Start Application ######\n"); 
gramgram.write(ctx.children.get(0).toString()); } catch (IOException e) { e.printStackTrace(); } } @Override public void enterAppoption(ourGrammarParser.AppoptionContext ctx) { if(ctx.children.get(0).toString().equals("args")) { //gets the string without the qoutes String appvalue = ctx.appvalue().getText().substring(1, ctx.appvalue().getText().length() - 1); //split on comma seperated commands String[] splitAppvalue = appvalue.split(","); String args = ""; for (int i = 0; i < splitAppvalue.length; i++) { args = args.concat(splitAppvalue[i]) + " "; } options.add(0, " " + args + " &" + sleepString); } else if(ctx.children.get(0).toString().equals("snap")) { String actions; String dimensions = ctx.children.get(2).getText().toLowerCase(); actions = getWmctrlActions(dimensions); options.add(actions); } } private String getWmctrlActions(String dimensions) { dimensions = dimensions.substring(1,dimensions.length()-1); String actions = ""; switch (dimensions){ case "fullscreen": actions = "wmctrl -r :ACTIVE: -b add,maximized_horz,maximized_vert"; break; case "topleft": actions = "wmctrl -r :ACTIVE: -e 0," + xOffset + "," + yOffset + "," + halfWidth + "," + halfHeight+"\n"; break; case "topright": actions = "wmctrl -r :ACTIVE: -e 0," + (xOffset + halfWidth) + "," + yOffset +"," + halfWidth + "," + halfHeight + "\n"; break; case "bottomright": actions = "wmctrl -r :ACTIVE: -e 0," + (xOffset + halfWidth) + "," + (yOffset+halfwayHeight) +"," + halfWidth + "," + halfHeight + "\n"; break; case "bottomleft": actions = "wmctrl -r :ACTIVE: -e 0," + (xOffset) + "," + (yOffset+halfwayHeight) +"," + halfWidth + "," + halfHeight + "\n"; break; case "top": actions = "wmctrl -r :ACTIVE: -e 0," + xOffset + "," + yOffset +"," + 0 + "," + halfHeight + "\nwmctrl -r :ACTIVE: -b add,maximized_horz" + "\n"; break; case "bottom": actions = "wmctrl -r :ACTIVE: -e 0," + xOffset + "," + (yOffset+halfwayHeight) +"," + 0 + "," + halfHeight + "\nwmctrl -r :ACTIVE: -b add,maximized_horz" + "\n"; break; 
case "left": actions = "wmctrl -r :ACTIVE: -e 0," + xOffset + "," + yOffset +"," + halfWidth + "," + 0 + "\nwmctrl -r :ACTIVE: -b add,maximized_vert" + "\n"; break; case "right": actions = "wmctrl -r :ACTIVE: -e 0," + (xOffset + halfWidth) + "," + yOffset +"," + halfWidth + "," + 0 + "\nwmctrl -r :ACTIVE: -b add,maximized_vert" + "\n"; break; } return actions; } @Override public void exitApplication(ourGrammarParser.ApplicationContext ctx) { // Check to see if wmctrl is the first thing. If it is, that means there were no args if(options.get(0).contains("wmctrl")){ try { gramgram.write(" &" + sleepString); } catch (IOException e) { e.printStackTrace(); } } for(String items:options){ try { gramgram.write(items); } catch (IOException e) { e.printStackTrace(); } } options.clear(); try { gramgram.write("\n"); } catch (IOException e) { e.printStackTrace(); } } @Override public void enterOption(ourGrammarParser.OptionContext ctx) { if (ctx.ID().getText().equals("templateName")){ filename = ctx.value().getText(); fout = new File(filename); fout.setExecutable(true); try { fos = new FileOutputStream(fout); gramgram = new BufferedWriter(new OutputStreamWriter(fos)); } catch (FileNotFoundException e) { e.printStackTrace(); } } if (ctx.ID().getText().equals("view")){ setFinalView(ctx.value().getText()); } } private void setFinalView(String workspace) { switch (workspace) { case "workspace1": finalView = "wmctrl -o 0,0\n"; break; case "workspace2": finalView = "wmctrl -o "+ maxWidth+",0\n"; break; case "workspace3": finalView = "wmctrl -o"+ " 0,"+ maxHeight+"\n"; break; case "workspace4": finalView = "wmctrl -o " + maxWidth + "," + maxHeight + "\n"; break; } } // Exit start // write out the final view @Override public void exitWorkspace(ourGrammarParser.WorkspaceContext ctx) { try { gramgram.write("\n"); } catch (IOException e) { e.printStackTrace(); } } @Override public void exitStart(ourGrammarParser.StartContext ctx) { try { gramgram.write("###### Switch to view setting 
######"); gramgram.write(sleepString); gramgram.write(finalView); } catch (IOException e) { e.printStackTrace(); } } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.builder; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.RandomQueryBuilder; import org.elasticsearch.search.AbstractSearchTestCase; import org.elasticsearch.search.rescore.QueryRescorerBuilder; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.ScoreSortBuilder; 
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.EqualsHashCodeTestUtils;

import java.io.IOException;
import java.util.Map;

import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasToString;

/**
 * Tests for {@code SearchSourceBuilder}: XContent round-trips, wire
 * serialization, equals/hashCode, and parsing of individual top-level
 * sections (_source, sort, aggs, rescore, timeout, indices_boost).
 */
public class SearchSourceBuilderTests extends AbstractSearchTestCase {

    /** Round-trips a random builder through XContent and re-parses it. */
    public void testFromXContent() throws IOException {
        SearchSourceBuilder testSearchSourceBuilder = createSearchSourceBuilder();
        XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
        if (randomBoolean()) {
            builder.prettyPrint();
        }
        testSearchSourceBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS);
        assertParseSearchSource(testSearchSourceBuilder, createParser(builder));
    }

    /** Parses the content and asserts equality (and hash equality) with the original builder. */
    private static void assertParseSearchSource(SearchSourceBuilder testBuilder, XContentParser parser) throws IOException {
        QueryParseContext parseContext = new QueryParseContext(parser);
        if (randomBoolean()) {
            // sometimes we move it on the START_OBJECT to
            // test the embedded case
            parser.nextToken();
        }
        SearchSourceBuilder newBuilder = SearchSourceBuilder.fromXContent(parseContext);
        // The parser must be fully consumed.
        assertNull(parser.nextToken());
        assertEquals(testBuilder, newBuilder);
        assertEquals(testBuilder.hashCode(), newBuilder.hashCode());
    }

    private QueryParseContext createParseContext(XContentParser parser) {
        return new QueryParseContext(parser);
    }

    /** Round-trips a random builder through the transport wire format. */
    public void testSerialization() throws IOException {
        SearchSourceBuilder testBuilder = createSearchSourceBuilder();
        try (BytesStreamOutput output = new BytesStreamOutput()) {
            testBuilder.writeTo(output);
            try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) {
                SearchSourceBuilder deserializedBuilder = new SearchSourceBuilder(in);
                assertEquals(deserializedBuilder, testBuilder);
                assertEquals(deserializedBuilder.hashCode(), testBuilder.hashCode());
                assertNotSame(deserializedBuilder, testBuilder);
            }
        }
    }

    public void testEqualsAndHashcode() throws IOException {
        // TODO add test checking that changing any member of this class produces an object that is not equal to the original
        EqualsHashCodeTestUtils.checkEqualsAndHashCode(createSearchSourceBuilder(), this::copyBuilder);
    }

    //we use the streaming infra to create a copy of the builder provided as argument
    private SearchSourceBuilder copyBuilder(SearchSourceBuilder original) throws IOException {
        return ESTestCase.copyWriteable(original, namedWriteableRegistry, SearchSourceBuilder::new);
    }

    /** Parses the _source section: include/exclude patterns and the boolean short form. */
    public void testParseIncludeExclude() throws IOException {
        {
            String restContent = " { \"_source\": { \"includes\": \"include\", \"excludes\": \"*.field2\"}}";
            try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) {
                SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser));
                assertArrayEquals(new String[]{"*.field2"}, searchSourceBuilder.fetchSource().excludes());
                assertArrayEquals(new String[]{"include"}, searchSourceBuilder.fetchSource().includes());
            }
        }
        {
            // "_source": false disables source fetching entirely.
            String restContent = " { \"_source\": false}";
            try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) {
                SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser));
                assertArrayEquals(new String[]{}, searchSourceBuilder.fetchSource().excludes());
                assertArrayEquals(new String[]{}, searchSourceBuilder.fetchSource().includes());
                assertFalse(searchSourceBuilder.fetchSource().fetchSource());
            }
        }
    }

    /** A second object inside the query section must be rejected with a parse error. */
    public void testMultipleQueryObjectsAreRejected() throws Exception {
        String restContent = " { \"query\": {\n" + " \"multi_match\": {\n" + " \"query\": \"workd\",\n" + " \"fields\": [\"title^5\", \"plain_body\"]\n" + " },\n" + " \"filters\": {\n" + " \"terms\": {\n" + " \"status\": [ 3 ]\n" + " }\n" + " }\n" + " } }";
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) {
            ParsingException e = expectThrows(ParsingException.class, () -> SearchSourceBuilder.fromXContent(createParseContext(parser)));
            assertEquals("[multi_match] malformed query, expected [END_OBJECT] but found [FIELD_NAME]", e.getMessage());
        }
    }

    /** Parses sort as a single string and as a mixed array of string/object forms. */
    public void testParseSort() throws IOException {
        {
            String restContent = " { \"sort\": \"foo\"}";
            try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) {
                SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser));
                assertEquals(1, searchSourceBuilder.sorts().size());
                assertEquals(new FieldSortBuilder("foo"), searchSourceBuilder.sorts().get(0));
            }
        }
        {
            String restContent = "{\"sort\" : [\n" + " { \"post_date\" : {\"order\" : \"asc\"}},\n" + " \"user\",\n" + " { \"name\" : \"desc\" },\n" + " { \"age\" : \"desc\" },\n" + " \"_score\"\n" + " ]}";
            try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) {
                SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser));
                assertEquals(5, searchSourceBuilder.sorts().size());
                assertEquals(new FieldSortBuilder("post_date"), searchSourceBuilder.sorts().get(0));
                assertEquals(new FieldSortBuilder("user"), searchSourceBuilder.sorts().get(1));
                assertEquals(new FieldSortBuilder("name").order(SortOrder.DESC), searchSourceBuilder.sorts().get(2));
                assertEquals(new FieldSortBuilder("age").order(SortOrder.DESC), searchSourceBuilder.sorts().get(3));
                assertEquals(new ScoreSortBuilder(), searchSourceBuilder.sorts().get(4));
            }
        }
    }

    /** Both the "aggs" and "aggregations" keys must be accepted. */
    public void testAggsParsing() throws IOException {
        {
            String restContent = "{\n" + " " + "\"aggs\": {" + " \"test_agg\": {\n" + " " + "\"terms\" : {\n" + " \"field\": \"foo\"\n" + " }\n" + " }\n" + " }\n" + "}\n";
            try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) {
                SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser));
                assertEquals(1, searchSourceBuilder.aggregations().count());
            }
        }
        {
            String restContent = "{\n" + " \"aggregations\": {" + " \"test_agg\": {\n" + " \"terms\" : {\n" + " \"field\": \"foo\"\n" + " }\n" + " }\n" + " }\n" + "}\n";
            try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) {
                SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser));
                assertEquals(1, searchSourceBuilder.aggregations().count());
            }
        }
    }

    /**
     * test that we can parse the `rescore` element either as single object or as array
     */
    public void testParseRescore() throws IOException {
        {
            String restContent = "{\n" +
                " \"query\" : {\n" +
                " \"match\": { \"content\": { \"query\": \"foo bar\" }}\n" +
                " },\n" +
                " \"rescore\": {" +
                " \"window_size\": 50,\n" +
                " \"query\": {\n" +
                " \"rescore_query\" : {\n" +
                " \"match\": { \"content\": { \"query\": \"baz\" } }\n" +
                " }\n" +
                " }\n" +
                " }\n" +
                "}\n";
            try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) {
                SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser));
                assertEquals(1, searchSourceBuilder.rescores().size());
                assertEquals(new QueryRescorerBuilder(QueryBuilders.matchQuery("content", "baz")).windowSize(50),
                        searchSourceBuilder.rescores().get(0));
            }
        }
        {
            String restContent = "{\n" +
                " \"query\" : {\n" +
                " \"match\": { \"content\": { \"query\": \"foo bar\" }}\n" +
                " },\n" +
                " \"rescore\": [ {" +
                " \"window_size\": 50,\n" +
                " \"query\": {\n" +
                " \"rescore_query\" : {\n" +
                " \"match\": { \"content\": { \"query\": \"baz\" } }\n" +
                " }\n" +
                " }\n" +
                " } ]\n" +
                "}\n";
            try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) {
                SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser));
                assertEquals(1, searchSourceBuilder.rescores().size());
                assertEquals(new QueryRescorerBuilder(QueryBuilders.matchQuery("content", "baz")).windowSize(50),
                        searchSourceBuilder.rescores().get(0));
            }
        }
    }

    /** A timeout with an explicit unit must parse to the matching TimeValue. */
    public void testTimeoutWithUnits() throws IOException {
        final String timeout = randomTimeValue();
        final String query = "{ \"query\": { \"match_all\": {}}, \"timeout\": \"" + timeout + "\"}";
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, query)) {
            final SearchSourceBuilder builder = SearchSourceBuilder.fromXContent(createParseContext(parser));
            assertThat(builder.timeout(), equalTo(TimeValue.parseTimeValue(timeout, null, "timeout")));
        }
    }

    /** A bare numeric timeout (no unit) must be rejected. */
    public void testTimeoutWithoutUnits() throws IOException {
        final int timeout = randomIntBetween(1, 1024);
        final String query = "{ \"query\": { \"match_all\": {}}, \"timeout\": \"" + timeout + "\"}";
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, query)) {
            final ElasticsearchParseException e =
                    expectThrows(ElasticsearchParseException.class, () -> SearchSourceBuilder.fromXContent( createParseContext(parser)));
            assertThat(e, hasToString(containsString("unit is missing or unrecognized")));
        }
    }

    public void testToXContent() throws IOException {
        //verify that only what is set gets printed out through toXContent
        XContentType xContentType = randomFrom(XContentType.values());
        {
            // An empty builder must render an empty object.
            SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
            XContentBuilder builder = XContentFactory.contentBuilder(xContentType);
            searchSourceBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS);
            BytesReference source = builder.bytes();
            Map<String, Object> sourceAsMap = XContentHelper.convertToMap(source, false, xContentType).v2();
            assertEquals(0, sourceAsMap.size());
        }
        {
            // Setting only the query must render exactly one key: "query".
            SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
            searchSourceBuilder.query(RandomQueryBuilder.createQuery(random()));
            XContentBuilder builder = XContentFactory.contentBuilder(xContentType);
            searchSourceBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS);
            BytesReference source = builder.bytes();
            Map<String, Object> sourceAsMap = XContentHelper.convertToMap(source, false, xContentType).v2();
            assertEquals(1, sourceAsMap.size());
            assertEquals("query", sourceAsMap.keySet().iterator().next());
        }
    }

    /** Parses indices_boost in both the deprecated object form and the array form. */
    public void testParseIndicesBoost() throws IOException {
        {
            String restContent = " { \"indices_boost\": {\"foo\": 1.0, \"bar\": 2.0}}";
            try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) {
                SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser));
                assertEquals(2, searchSourceBuilder.indexBoosts().size());
                assertEquals(new SearchSourceBuilder.IndexBoost("foo", 1.0f), searchSourceBuilder.indexBoosts().get(0));
                assertEquals(new SearchSourceBuilder.IndexBoost("bar", 2.0f), searchSourceBuilder.indexBoosts().get(1));
                assertWarnings("Object format in indices_boost is deprecated, please use array format instead");
            }
        }
        {
            String restContent = "{" +
                " \"indices_boost\" : [\n" +
                " { \"foo\" : 1.0 },\n" +
                " { \"bar\" : 2.0 },\n" +
                " { \"baz\" : 3.0 }\n" +
                " ]}";
            try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) {
                SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser));
                assertEquals(3, searchSourceBuilder.indexBoosts().size());
                assertEquals(new SearchSourceBuilder.IndexBoost("foo", 1.0f), searchSourceBuilder.indexBoosts().get(0));
                assertEquals(new SearchSourceBuilder.IndexBoost("bar", 2.0f), searchSourceBuilder.indexBoosts().get(1));
                assertEquals(new SearchSourceBuilder.IndexBoost("baz", 3.0f), searchSourceBuilder.indexBoosts().get(2));
            }
        }
        {
            String restContent = "{" +
                " \"indices_boost\" : [\n" +
                " { \"foo\" : 1.0, \"bar\": 2.0}\n" + // invalid format
                " ]}";
            assertIndicesBoostParseErrorMessage(restContent, "Expected [END_OBJECT] in [indices_boost] but found [FIELD_NAME]");
        }
        {
            String restContent = "{" +
                " \"indices_boost\" : [\n" +
                " {}\n" + // invalid format
                " ]}";
            assertIndicesBoostParseErrorMessage(restContent, "Expected [FIELD_NAME] in [indices_boost] but found [END_OBJECT]");
        }
        {
            String restContent = "{" +
                " \"indices_boost\" : [\n" +
                " { \"foo\" : \"bar\"}\n" + // invalid
format " ]}"; assertIndicesBoostParseErrorMessage(restContent, "Expected [VALUE_NUMBER] in [indices_boost] but found [VALUE_STRING]"); } { String restContent = "{" + " \"indices_boost\" : [\n" + " { \"foo\" : {\"bar\": 1}}\n" + // invalid format " ]}"; assertIndicesBoostParseErrorMessage(restContent, "Expected [VALUE_NUMBER] in [indices_boost] but found [START_OBJECT]"); } } public void testNegativeFromErrors() { IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, () -> new SearchSourceBuilder().from(-2)); assertEquals("[from] parameter cannot be negative", expected.getMessage()); } private void assertIndicesBoostParseErrorMessage(String restContent, String expectedErrorMessage) throws IOException { try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) { ParsingException e = expectThrows(ParsingException.class, () -> SearchSourceBuilder.fromXContent(createParseContext(parser))); assertEquals(expectedErrorMessage, e.getMessage()); } } }
/** * ASM: a very small and fast Java bytecode manipulation framework * Copyright (c) 2000-2011 INRIA, France Telecom * All rights reserved. * <p/> * Redistribution and use in srccode and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of srccode code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. Neither the className of the copyright holders nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * <p/> * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF * THE POSSIBILITY OF SUCH DAMAGE. */ package act.asm; /** * A dynamically extensible vector of bytes. This class is roughly equivalent to * a DataOutputStream on top of a ByteArrayOutputStream, but is more efficient. * * @author Eric Bruneton */ public class ByteVector { /** * The content of this vector. */ byte[] data; /** * Actual number of bytes in this vector. 
*/ int length; /** * Constructs a new {@link ByteVector ByteVector} with a default initial * size. */ public ByteVector() { data = new byte[64]; } /** * Constructs a new {@link ByteVector ByteVector} with the given initial * size. * * @param initialSize * the initial size of the byte vector to be constructed. */ public ByteVector(final int initialSize) { data = new byte[initialSize]; } /** * Puts a byte into this byte vector. The byte vector is automatically * enlarged if necessary. * * @param b * a byte. * @return this byte vector. */ public ByteVector putByte(final int b) { int length = this.length; if (length + 1 > data.length) { enlarge(1); } data[length++] = (byte) b; this.length = length; return this; } /** * Puts two bytes into this byte vector. The byte vector is automatically * enlarged if necessary. * * @param b1 * a byte. * @param b2 * another byte. * @return this byte vector. */ ByteVector put11(final int b1, final int b2) { int length = this.length; if (length + 2 > data.length) { enlarge(2); } byte[] data = this.data; data[length++] = (byte) b1; data[length++] = (byte) b2; this.length = length; return this; } /** * Puts a short into this byte vector. The byte vector is automatically * enlarged if necessary. * * @param s * a short. * @return this byte vector. */ public ByteVector putShort(final int s) { int length = this.length; if (length + 2 > data.length) { enlarge(2); } byte[] data = this.data; data[length++] = (byte) (s >>> 8); data[length++] = (byte) s; this.length = length; return this; } /** * Puts a byte and a short into this byte vector. The byte vector is * automatically enlarged if necessary. * * @param b * a byte. * @param s * a short. * @return this byte vector. 
*/ ByteVector put12(final int b, final int s) { int length = this.length; if (length + 3 > data.length) { enlarge(3); } byte[] data = this.data; data[length++] = (byte) b; data[length++] = (byte) (s >>> 8); data[length++] = (byte) s; this.length = length; return this; } /** * Puts an int into this byte vector. The byte vector is automatically * enlarged if necessary. * * @param i * an int. * @return this byte vector. */ public ByteVector putInt(final int i) { int length = this.length; if (length + 4 > data.length) { enlarge(4); } byte[] data = this.data; data[length++] = (byte) (i >>> 24); data[length++] = (byte) (i >>> 16); data[length++] = (byte) (i >>> 8); data[length++] = (byte) i; this.length = length; return this; } /** * Puts a long into this byte vector. The byte vector is automatically * enlarged if necessary. * * @param l * a long. * @return this byte vector. */ public ByteVector putLong(final long l) { int length = this.length; if (length + 8 > data.length) { enlarge(8); } byte[] data = this.data; int i = (int) (l >>> 32); data[length++] = (byte) (i >>> 24); data[length++] = (byte) (i >>> 16); data[length++] = (byte) (i >>> 8); data[length++] = (byte) i; i = (int) l; data[length++] = (byte) (i >>> 24); data[length++] = (byte) (i >>> 16); data[length++] = (byte) (i >>> 8); data[length++] = (byte) i; this.length = length; return this; } /** * Puts an UTF8 string into this byte vector. The byte vector is * automatically enlarged if necessary. * * @param s * a String whose UTF8 encoded length must be less than 65536. * @return this byte vector. 
*/ public ByteVector putUTF8(final String s) { int charLength = s.length(); if (charLength > 65535) { throw new IllegalArgumentException(); } int len = length; if (len + 2 + charLength > data.length) { enlarge(2 + charLength); } byte[] data = this.data; // optimistic algorithm: instead of computing the byte length and then // serializing the string (which requires two loops), we assume the byte // length is equal to char length (which is the most frequent case), and // we start serializing the string right away. During the serialization, // if we find that this assumption is wrong, we continue with the // general method. data[len++] = (byte) (charLength >>> 8); data[len++] = (byte) charLength; for (int i = 0; i < charLength; ++i) { char c = s.charAt(i); if (c >= '\001' && c <= '\177') { data[len++] = (byte) c; } else { length = len; return encodeUTF8(s, i, 65535); } } length = len; return this; } /** * Puts an UTF8 string into this byte vector. The byte vector is * automatically enlarged if necessary. The string length is encoded in two * bytes before the encoded characters, if there is space for that (i.e. if * this.length - i - 2 >= 0). * * @param s * the String to encode. * @param i * the index of the first character to encode. The previous * characters are supposed to have already been encoded, using * only one byte per character. * @param maxByteLength * the maximum byte length of the encoded string, including the * already encoded characters. * @return this byte vector. 
*/ ByteVector encodeUTF8(final String s, int i, int maxByteLength) { int charLength = s.length(); int byteLength = i; char c; for (int j = i; j < charLength; ++j) { c = s.charAt(j); if (c >= '\001' && c <= '\177') { byteLength++; } else if (c > '\u07FF') { byteLength += 3; } else { byteLength += 2; } } if (byteLength > maxByteLength) { throw new IllegalArgumentException(); } int start = length - i - 2; if (start >= 0) { data[start] = (byte) (byteLength >>> 8); data[start + 1] = (byte) byteLength; } if (length + byteLength - i > data.length) { enlarge(byteLength - i); } int len = length; for (int j = i; j < charLength; ++j) { c = s.charAt(j); if (c >= '\001' && c <= '\177') { data[len++] = (byte) c; } else if (c > '\u07FF') { data[len++] = (byte) (0xE0 | c >> 12 & 0xF); data[len++] = (byte) (0x80 | c >> 6 & 0x3F); data[len++] = (byte) (0x80 | c & 0x3F); } else { data[len++] = (byte) (0xC0 | c >> 6 & 0x1F); data[len++] = (byte) (0x80 | c & 0x3F); } } length = len; return this; } /** * Puts an array of bytes into this byte vector. The byte vector is * automatically enlarged if necessary. * * @param b * an array of bytes. May be <tt>null</tt> to put <tt>len</tt> * null bytes into this byte vector. * @param off * index of the fist byte of b that must be copied. * @param len * number of bytes of b that must be copied. * @return this byte vector. */ public ByteVector putByteArray(final byte[] b, final int off, final int len) { if (length + len > data.length) { enlarge(len); } if (b != null) { System.arraycopy(b, off, data, length, len); } length += len; return this; } /** * Enlarge this byte vector so that it can receive n more bytes. * * @param size * number of additional bytes that this byte vector should be * able to receive. */ private void enlarge(final int size) { int length1 = 2 * data.length; int length2 = length + size; byte[] newData = new byte[length1 > length2 ? length1 : length2]; System.arraycopy(data, 0, newData, 0, length); data = newData; } }
package dorfgen.worldgen; import static dorfgen.WorldGenerator.scale; import static net.minecraftforge.common.ChestGenHooks.DUNGEON_CHEST; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Random; import dorfgen.WorldGenerator; import dorfgen.conversion.DorfMap; import dorfgen.conversion.SiteTerrain; import dorfgen.conversion.DorfMap.Site; import dorfgen.conversion.DorfMap.SiteType; import dorfgen.conversion.SiteStructureGenerator.SiteStructures; import net.minecraft.init.Blocks; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.tileentity.TileEntityChest; import net.minecraft.tileentity.TileEntityMobSpawner; import net.minecraft.util.WeightedRandomChestContent; import net.minecraft.world.World; import net.minecraft.world.biome.BiomeGenBase; import net.minecraft.world.gen.feature.WorldGenDungeons; import net.minecraft.world.gen.structure.ComponentScatteredFeaturePieces; import net.minecraft.world.gen.structure.MapGenScatteredFeature; import net.minecraft.world.gen.structure.MapGenStronghold; import net.minecraft.world.gen.structure.MapGenVillage; import net.minecraft.world.gen.structure.StructureComponent; import net.minecraft.world.gen.structure.StructureStart; import net.minecraft.world.gen.structure.StructureStrongholdPieces; import net.minecraft.world.gen.structure.StructureVillagePieces; import net.minecraftforge.common.ChestGenHooks; import net.minecraftforge.common.DungeonHooks; public class MapGenSites extends MapGenVillage { HashSet<Integer> set = new HashSet(); HashSet<Integer> made = new HashSet(); Site siteToGen = null; public MapGenSites() { super(); } public MapGenSites(Map p_i2093_1_) { super(p_i2093_1_); } @Override protected boolean canSpawnStructureAtCoords(int x, int z) { int chunkX = x, chunkZ = z; x *= 16; z *= 16; x -= WorldGenerator.shift.getX(); z -= WorldGenerator.shift.getZ(); DorfMap dorfs = WorldGenerator.instance.dorfs; HashSet<Site> 
sites = dorfs.getSiteForCoords(x, z); if(sites==null) return false; for(Site site: sites) { if(shouldSiteSpawn(x, z, site) && !set.contains(site.id)) { set.add(site.id); siteToGen = site; return true; } } return false; } private boolean shouldSiteSpawn(int x, int z, Site site) { int[][] coords = site.corners; if(site.type == SiteType.LAIR) { int embarkX = (x/scale)*scale; int embarkZ = (z/scale)*scale; if(embarkX/scale != site.x || embarkZ/scale != site.z) return false; int relX = x%scale; int relZ = z%scale; boolean middle = relX/16 == scale/32 && relZ/16 == scale/32; return middle; } return false; } @Override protected StructureStart getStructureStart(int x, int z) { Site site = siteToGen; siteToGen = null; if(site==null) { return super.getStructureStart(x, z); } made.add(site.id); if(site.type == SiteType.FORTRESS) { MapGenStronghold.Start start; for (start = new MapGenStronghold.Start(this.worldObj, this.rand, x, z); start.getComponents().isEmpty() || ((StructureStrongholdPieces.Stairs2)start.getComponents().get(0)).strongholdPortalRoom == null; start = new MapGenStronghold.Start(this.worldObj, this.rand, x, z)) { ; } return start; } else if(site.type == SiteType.DARKFORTRESS) { } else if(site.type == SiteType.DARKPITS) { } else if(site.type == SiteType.HIPPYHUTS) { return new Start(worldObj, rand, x, z, 0); } else if(site.type == SiteType.SHRINE) { return new Start(worldObj, rand, x, z, 2); } else if(site.type == SiteType.LAIR) { return new Start(worldObj, rand, x, z, 3); } else if(site.type == SiteType.CAVE) { return new Start(worldObj, rand, x, z, 1); } return super.getStructureStart(x, z); } public static class Start extends StructureStart { public Start() {} public Start(World world_, Random rand, int x, int z, int type) { super(x, z); if (type==0) { for(int k = 0; k<15; k++) { int x1 = 40 - rand.nextInt(40); int z1 = 40 - rand.nextInt(40); for(int i = 0; i<rand.nextInt(20); i++) { ComponentScatteredFeaturePieces.SwampHut swamphut = new 
ComponentScatteredFeaturePieces.SwampHut(rand, x * 16 + x1, z * 16 + z1); this.components.add(swamphut); } } } else if(type==1) { ComponentScatteredFeaturePieces.DesertPyramid desertpyramid = new ComponentScatteredFeaturePieces.DesertPyramid(rand, x * 16, z * 16); this.components.add(desertpyramid); } else if(type==2) { ComponentScatteredFeaturePieces.JunglePyramid junglepyramid = new ComponentScatteredFeaturePieces.JunglePyramid(rand, x * 16, z * 16); this.components.add(junglepyramid); } else if(type==3) { //TODO re-copy dungeon code from 1.8 again for this // int h = 0; // int[][] map = WorldGenerator.instance.dorfs.elevationMap; // int x1 = x * 16 - WorldGenerator.shift.getX(); // int z1 = z * 16 - WorldGenerator.shift.getZ(); // // if(x1>0&&z1>0&&x1/scale < map.length && z1/scale < map[0].length) // { // h = map[x1/scale][z1/scale]; // } // else // { // h = world_.getHeightValue(x*16, z*16); // } // x1 = x*16 + rand.nextInt(8); // z1 = z*16 + rand.nextInt(8); // for(int i = -4; i<=4; i++) // for(int j = -4; j<=4; j++) // for(int k = -4; k<=4; k++) // { // world_.setBlock(x1 + i, h+j, z1+k, Blocks.cobblestone, 0, 2); // } // // byte b0 = 3; // int l = rand.nextInt(2) + 2; // int i1 = rand.nextInt(2) + 2; // int j1 = 3; // int k1; // int l1; // int i2; // // if (j1 >= 1 && j1 <= 5) // { // for (k1 = x1 - l - 1; k1 <= x1 + l + 1; ++k1) // { // for (l1 = h + b0; l1 >= h - 1; --l1) // { // for (i2 = z1 - i1 - 1; i2 <= z1 + i1 + 1; ++i2) // { // if (k1 != x1 - l - 1 && l1 != h - 1 && i2 != z1 - i1 - 1 && k1 != x1 + l + 1 && l1 != h + b0 + 1 && i2 != z1 + i1 + 1) // { // world_.setBlockToAir(k1, l1, i2); // } // else if (l1 >= 0 && !world_.getBlock(k1, l1 - 1, i2).getMaterial().isSolid()) // { // world_.setBlockToAir(k1, l1, i2); // } // else if (world_.getBlock(k1, l1, i2).getMaterial().isSolid()) // { // if (l1 == h - 1 && rand.nextInt(4) != 0) // { // world_.setBlock(k1, l1, i2, Blocks.mossy_cobblestone, 0, 2); // } // else // { // world_.setBlock(k1, l1, i2, 
Blocks.cobblestone, 0, 2); // } // } // } // } // } // // k1 = 0; // // while (k1 < 2) // { // l1 = 0; // // while (true) // { // if (l1 < 3) // { // label197: // { // i2 = x1 + rand.nextInt(l * 2 + 1) - l; // int j2 = z1 + rand.nextInt(i1 * 2 + 1) - i1; // // if (world_.isAirBlock(i2, h, j2)) // { // int k2 = 0; // // if (world_.getBlock(i2 - 1, h, j2).getMaterial().isSolid()) // { // ++k2; // } // // if (world_.getBlock(i2 + 1, h, j2).getMaterial().isSolid()) // { // ++k2; // } // // if (world_.getBlock(i2, h, j2 - 1).getMaterial().isSolid()) // { // ++k2; // } // // if (world_.getBlock(i2, h, j2 + 1).getMaterial().isSolid()) // { // ++k2; // } // // if (k2 == 1) // { // world_.setBlock(i2, h, j2, Blocks.chest, 0, 2); // TileEntityChest tileentitychest = (TileEntityChest)world_.getTileEntity(i2, h, j2); // // if (tileentitychest != null) // { // WeightedRandomChestContent.generateChestContents(rand, ChestGenHooks.getItems(DUNGEON_CHEST, rand), tileentitychest, ChestGenHooks.getCount(DUNGEON_CHEST, rand)); // } // // break label197; // } // } // // ++l1; // continue; // } // } // // ++k1; // break; // } // } // // world_.setBlock(x1, h, z1, Blocks.mob_spawner, 0, 2); // TileEntityMobSpawner tileentitymobspawner = (TileEntityMobSpawner)world_.getTileEntity(x1, h, z1); // // if (tileentitymobspawner != null) // { // tileentitymobspawner.func_145881_a().setEntityName(DungeonHooks.getRandomDungeonMob(rand)); // } // else // { // System.err.println("Failed to fetch mob spawner entity at (" + x1 + ", " + h + ", " + z1 + ")"); // } // } } this.updateBoundingBox(); } } }
package js.tinyvm; import java.util.HashMap; import java.util.ArrayList; import java.util.HashSet; import js.common.ToolProgressMonitor; import js.tinyvm.io.IByteWriter; import js.tinyvm.util.HashVector; import java.io.FileOutputStream; import java.io.ObjectOutputStream; import java.io.IOException; import java.io.OutputStream; import org.apache.bcel.Constants; import org.apache.bcel.generic.Type; /** * Abstraction for dumped binary. */ public class Binary { // State that is written to the binary: final RecordTable<WritableData> iEntireBinary = new RecordTable<WritableData>("binary", true, true); final RecordTable<WritableData> iStaticStorage = new RecordTable<WritableData>("binary", true, true); // Contents of binary: final MasterRecord iMasterRecord = new MasterRecord(this); RecordTable<ClassRecord> iClassTable = new RecordTable<ClassRecord>("class table", false, true); RecordTable<StaticValue> iStaticState = new RecordTable<StaticValue>("static state", true, true); RecordTable<StaticFieldRecord> iStaticFields = new RecordTable<StaticFieldRecord>("static fields", true, true); RecordTable<ConstantRecord> iConstantTable = new RecordTable<ConstantRecord>("constants", false, true); RecordTable<RecordTable<MethodRecord>> iMethodTables = new RecordTable<RecordTable<MethodRecord>>("methods", true, true); RecordTable<RecordTable<ExceptionRecord>> iExceptionTables = new RecordTable<RecordTable<ExceptionRecord>>("exceptions", false, true); RecordTable<RecordTable<InstanceFieldRecord>> iInstanceFieldTables = new RecordTable<RecordTable<InstanceFieldRecord>>("instance fields", true, true); final RecordTable<CodeSequence> iCodeSequences = new RecordTable<CodeSequence>("code", true, true); RecordTable<ConstantValue> iConstantValues = new RecordTable<ConstantValue>("constant values", true, true); final RecordTable<EntryClassIndex> iEntryClassIndices = new RecordTable<EntryClassIndex>( "entry class indices", true, true); final RecordTable<InterfaceMap> iInterfaceMaps = new 
RecordTable<InterfaceMap>("interface", true, true); // Other state: final HashSet<Signature> iSpecialSignatures = new HashSet<Signature>(); final HashMap<String, ClassRecord> iClasses = new HashMap<String, ClassRecord>(); final HashVector<Signature> iSignatures = new HashVector<Signature>(); final DebugData debugData = new DebugData(); int usedClassCount = 0; int markGeneration = 0; boolean useAll = false; // Optimal order for storing constants/statics etc. Note we store 4 byte // items first to maximize the chance of using optimized load/store operations // on them. final int[] alignments = {4, 8, 2, 1}; int constOpLoads = 0; int constNormLoads = 0; int constWideLoads = 0; int constString = 0; int staticOpLoads = 0; int staticNormLoads = 0; int fieldOpOp = 0; int fieldNormOp = 0; int interfaceClasses = 0; int usedInterfaceClasses = 0; int implementedInterfaces = 0; int usedImplementedInterfaces = 0; /** * Constructor. * @param useAll true if all classes/methods etc. should be included */ public Binary (boolean useAll) { this.useAll = useAll; } /** * Dump. * * @param writer * @throws TinyVMException */ public void dump (IByteWriter writer) throws TinyVMException { iEntireBinary.dump(writer); } /** * Dump debug data. * * @param fos FileOutputStream * @throws TinyVMException */ public void dumpDebug (OutputStream fos) throws IOException { DebugData.save(this.debugData, fos); } // // TODO public interface // // // TODO protected interface // // // classes // /** * Add a class. * * @param className class name with '/' * @param classRecord */ protected void addClassRecord (String className, ClassRecord classRecord) { assert className != null: "Precondition: className != null"; assert classRecord != null: "Precondition: classRecord != null"; assert className.indexOf('.') == -1: "Precondition: className is in correct form"; iClasses.put(className, classRecord); iClassTable.add(classRecord); } /** * Has class in binary a public static void main (String[] args) method? 
* * @param className class name with '/' * @return */ public boolean hasMain (String className) { assert className != null: "Precondition: className != null"; assert className.indexOf('.') == -1: "Precondition: className is in correct form"; ClassRecord pRec = getClassRecord(className); return pRec.hasMethod(new Signature("main", "([Ljava/lang/String;)V"), true); } /** * Get class record with given signature. * * @param className class name with '/' * @return class record or null if not found */ public ClassRecord getClassRecord (String className) { assert className != null: "Precondition: className != null"; assert className.indexOf('.') == -1: "Precondition: className is in correct form"; return iClasses.get(className); } /** * Return the class the represents an array of the given type and dimension. * * * @param elementClass * @return class record or null if not found or the array is a primitive array. * @throws TinyVMException */ public ClassRecord getClassRecordForArray (ClassRecord elementClass) throws TinyVMException { int dims = 1; if (elementClass.isArray()) { dims += elementClass.getArrayDimension(); } int pSize = iClassTable.size(); for (int pIndex = 0; pIndex < pSize; pIndex++) { ClassRecord pRec = iClassTable.get(pIndex); if (pRec.getArrayDimension() == dims && pRec.getArrayElementClass() == elementClass) return pRec; } // Not found so we will create one... String sig = ""; for(int i = 0; i < dims; i++) sig += "["; sig += elementClass.signature(); return ClassRecord.storeArrayClass(sig, iClasses, iClassTable, null, this); } /** * Get index of class in binary by its signature. 
* * @param className class name with '/' * @return index of class in binary or -1 if not found */ public int getClassIndex (String className) { assert className != null: "Precondition: className != null"; assert className.indexOf('.') == -1: "Precondition: className is in correct form"; return getClassIndex(getClassRecord(className)); } /** * Get index of class in binary by its class record. * * @param classRecord * @return index of class in binary or -1 if not found */ public int getClassIndex (ClassRecord classRecord) { if (classRecord == null) { return -1; } return iClassTable.indexOf(classRecord); } /** * Mark the given class as actually used. * * @param classRecord the class to be marked * @param instance */ public void markClassUsed(ClassRecord classRecord, boolean instance) { if (instance && !classRecord.instanceUsed()) { classRecord.markInstanceUsed(); usedClassCount++; } if (!classRecord.used()) { classRecord.markUsed(); usedClassCount++; } } /** * Return the current marking generation. This is used to ensure that for * each new iteration (or generation) of the recursive mark we will * walk all of the code at least once. * @return current generation */ public int getGeneration() { return markGeneration; } // // constants // /** * Get constant record with given index. * * @param index * @return constant record or null if not found */ public ConstantRecord getConstantRecord (int index) { assert index >= 0: "Precondition: index >= 0"; return iConstantTable.get(index); } /** * Get index of constant in binary by its constant record. * * @param constantRecord * @return index of constant in binary or -1 if not found */ public int getConstantIndex (ConstantRecord constantRecord) { if (constantRecord == null) { return -1; } return iConstantTable.indexOf(constantRecord); } /** * Return true if unused methods/classes etc. should still be included in * the output file. * @return */ public boolean useAll() { return useAll; } // // processing // /** * Create closure. 
* * @param entryClassNames names of entry class with '/' * @param classPath class path * @param all do not filter classes? * @return * @throws TinyVMException */ public static Binary createFromClosureOf (String[] entryClassNames, ClassPath classPath, boolean all) throws TinyVMException { Binary result = new Binary(all); // From special classes and entry class, store closure result.processClasses(entryClassNames, classPath); // Store special signatures result.processSpecialSignatures(); result.processConstants(); result.processMethods(); result.processFields(); // Remove unused methods/classes/fields/constants. result.markUsed(entryClassNames); result.processOptimizedClasses(); result.processOptimizedConstants(); result.processOptimizedMethods(); result.processOptimizedFields(); // Copy code as is (first pass) result.processCode(false); result.storeComponents(); result.initOffsets(); // Post-process code after offsets are set (second pass) result.processCode(true); result.debugData.create(result); assert result != null: "Postconditon: result != null"; return result; } public void processClasses (String[] entryClassNames, ClassPath classPath) throws TinyVMException { assert entryClassNames != null: "Precondition: entryClassNames != null"; assert classPath != null: "Precondition: classPath != null"; ArrayList<String> pInterfaceMethods = new ArrayList<String>(); // Add special all classes first String[] specialClasses = SpecialConstants.CLASSES; //_logger.log(Level.INFO, "Starting with " + specialClasses.length // + " special classes."); for (int i = 0; i < specialClasses.length; i++) { String className = specialClasses[i]; if (className.charAt(0) == '[') ClassRecord.storeArrayClass(className, iClasses, iClassTable, classPath, this); else if (className.indexOf('/') != -1) addClassRecord(className, ClassRecord.getClassRecord(className, classPath, this)); else addClassRecord(className, PrimitiveClassRecord.getClassRecord(className, this, (byte)i)); } // Now add entry 
classes // _logger.log(Level.INFO, "Starting with " + entryClassNames.length // + " entry classes."); for (int i = 0; i < entryClassNames.length; i++) { String className = entryClassNames[i]; ClassRecord classRecord = ClassRecord.getClassRecord(className, classPath, this); // Convert name into standard form. className = classRecord.getName().replace('.', '/'); classRecord = ClassRecord.getClassRecord(className, classPath, this); entryClassNames[i] = className; addClassRecord(className, classRecord); classRecord.useAllMethods(); // Update table of indices to entry classes iEntryClassIndices.add(new EntryClassIndex(this, className)); } // Now add the closure. // _logger.log(Level.INFO, "Starting with " + iClassTable.size() // + " classes."); // Yes, call iClassTable.size() in every pass of the loop. for (int pIndex = 0; pIndex < iClassTable.size(); pIndex++) { ClassRecord classRecord = iClassTable.get(pIndex); classRecord.storeReferredClasses(iClasses, iClassTable, classPath, pInterfaceMethods); } // Initialize indices and flags int pSize = iClassTable.size(); for (int pIndex = 0; pIndex < pSize; pIndex++) { ClassRecord classRecord = iClassTable.get(pIndex); for (int i = 0; i < pInterfaceMethods.size(); i++) { classRecord.addUsedMethod(pInterfaceMethods.get(i)); } classRecord.iIndex = pIndex; classRecord.initFlags(); classRecord.initParent(); } } /** * Optimize the number and order of classes. * We make a pass of all of the classes and select only those that are * actually used. We also optimize the order of the classes in an attempt * to minimize the size of the interface maps. 
 * @throws TinyVMException
 */
public void processOptimizedClasses () throws TinyVMException
{
   RecordTable<ClassRecord> iNewClassTable = new RecordTable<ClassRecord>("class table", false, true);
   int pSize = iClassTable.size();
   // First copy over the special classes
   for (int pIndex = 0; pIndex < SpecialConstants.CLASSES.length; pIndex++)
   {
      ClassRecord classRecord = iClassTable.get(pIndex);
      iNewClassTable.add(classRecord);
   }
   // Now we add in any classes that have interfaces. This keeps them all
   // together and keeps the interface map small. Note duplicates are
   // not allowed, so we can safely attempt to add the same entry multiple
   // times (the table silently ignores repeats).
   for (int pIndex = SpecialConstants.CLASSES.length; pIndex < pSize; pIndex++)
   {
      ClassRecord classRecord = iClassTable.get(pIndex);
      if (classRecord.isInterface() && (useAll() || classRecord.used()))
         classRecord.storeOptimizedImplementingClasses(iNewClassTable);
   }
   // now add in the rest of the used classes
   for (int pIndex = SpecialConstants.CLASSES.length; pIndex < pSize; pIndex++)
   {
      ClassRecord classRecord = iClassTable.get(pIndex);
      if (useAll() || classRecord.used())
         iNewClassTable.add(classRecord);
   }
   iClassTable = iNewClassTable;
   pSize = iClassTable.size();
   // Now we selected all of the classes we can fix up any linkages (which
   // may use the class index) and create the interface maps.
   for (int pIndex = 0; pIndex < pSize; pIndex++)
   {
      ClassRecord classRecord = iClassTable.get(pIndex);
      classRecord.initParent();
      if (classRecord.isInterface())
         classRecord.storeInterfaceMap(iInterfaceMaps);
   }
}

/**
 * First stage of unused method/class/field elimination: mark everything
 * reachable from the entry classes and the VM's own entry points.
 *
 * @param entryClassNames names of the entry classes (already normalized)
 * @throws TinyVMException on any linkage error
 */
public void markUsed (String[] entryClassNames) throws TinyVMException
{
   /* First stage of unused method/class/field elimination.
    * Starting with the callable root methods we need to mark all callable
    * methods. We recursively walk the code for each method marking and
    * walking new methods as we come to them.
    * As we walk the code we also mark all used classes and fields.
    * We need to take particular care with interfaces and with
    * over-ridden methods to ensure that all possible destinations are
    * included.
    */
   /* For interfaces we need to ensure that for every method in an interface
    * that ends up being marked we locate all possible implementations
    * of that method. To do that we search all classes for those that
    * implement an interface and associate those classes with the interface.
    * Then when we mark a method in the interface we can mark all possible
    * implementations.
    *
    * We also need to handle marking methods that may be over-ridden by a
    * method in a sub class. We locate all such methods and link them to the
    * "super-method" if this gets marked we also mark the sub-methods.
    */
   int pSize = iClassTable.size();
   // Mark special classes as being used (they may be generated by the vm)
   String[] specialClasses = SpecialConstants.CLASSES;
   for (int i = 0; i < specialClasses.length; i++)
   {
      String className = specialClasses[i];
      ClassRecord classRecord = getClassRecord(className);
      classRecord.markUsed();
      classRecord.markInstanceUsed();
   }
   // Signatures of the entry points that the VM can invoke directly.
   Signature staticInit = new Signature(Constants.STATIC_INITIALIZER_NAME, "()V");
   Signature runMethod = new Signature("run", "()V");
   Signature mainMethod = new Signature("main", "([Ljava/lang/String;)V");
   // Now add entry classes
   for (int i = 0; i < entryClassNames.length; i++)
   {
      ClassRecord classRecord = getClassRecord(entryClassNames[i]);
      classRecord.markUsed();
      classRecord.markInstanceUsed();
   }
   // We now add in the static initializers of all marked classes.
   // We also add in the special entry points that may be called
   // directly from the VM.
   // Note: The set of used classes may increase as a result of marking,
   // in which case we do the whole thing over again (fixpoint iteration).
   int classCount;
   do
   {
      classCount = usedClassCount;
      markGeneration++;
      // First make sure all interfaces implementors and hidden
      // methods are exposed
      for (int pIndex = 0; pIndex < pSize; pIndex++)
      {
         ClassRecord classRecord = iClassTable.get(pIndex);
         if (classRecord.used())
         {
            if (classRecord.instanceUsed())
            {
               classRecord.addInterfaces(classRecord);
               classRecord.findHiddenMethods();
            }
         }
      }
      // Now recursively mark any classes that can be called directly by
      // the VM
      for (int pIndex = 0; pIndex < pSize; pIndex++)
      {
         ClassRecord classRecord = iClassTable.get(pIndex);
         if (classRecord.used())
         {
            if (classRecord.hasMethod(runMethod, false))
            {
               MethodRecord pRec = classRecord.getMethodRecord(runMethod);
               classRecord.markMethod(pRec, true);
            }
            if (classRecord.hasStaticInitializer())
            {
               MethodRecord pRec = classRecord.getMethodRecord(staticInit);
               classRecord.markMethod(pRec, true);
            }
            if (useAll)
               classRecord.markMethods();
         }
      }
      // Finally mark starting from all of the entry classes
      for (int i = 0; i < entryClassNames.length; i++)
      {
         ClassRecord classRecord = getClassRecord(entryClassNames[i]);
         if (classRecord.hasMethod(mainMethod, true))
         {
            MethodRecord pRec = classRecord.getMethodRecord(mainMethod);
            classRecord.markMethod(pRec, true);
         }
      }
   } while (classCount != usedClassCount);
}

/**
 * Register the signatures the VM treats specially, keeping both the global
 * signature list and the special-signature set in sync.
 */
public void processSpecialSignatures ()
{
   for (int i = 0; i < SpecialConstants.SIGNATURES.length; i++)
   {
      Signature pSig = new Signature(SpecialConstants.SIGNATURES[i]);
      iSignatures.addElement(pSig);
      iSpecialSignatures.add(pSig);
   }
}

/**
 * @param aSig signature to test
 * @return true if the signature is one the VM treats specially
 */
public boolean isSpecialSignature (Signature aSig)
{
   return iSpecialSignatures.contains(aSig);
}

/**
 * Collect the constant pool entries of every class in the closure.
 *
 * @throws TinyVMException on any linkage error
 */
public void processConstants () throws TinyVMException
{
   int pSize = iClassTable.size();
   for (int pIndex = 0; pIndex < pSize; pIndex++)
   {
      ClassRecord pRec = iClassTable.get(pIndex);
      pRec.storeConstants(iConstantTable, iConstantValues);
   }
}

/**
 * Store constant values in an optimal fashion.
 * We only include constants that are actually used.
 * We also arrange to store
 * constant values correctly aligned so that they can be accessed directly
 * by the VM. We order the constants to allow fast access to the commonly used
 * int/float types.
 * @throws TinyVMException
 */
public void processOptimizedConstants () throws TinyVMException
{
   int pSize = iConstantTable.size();
   RecordTable<ConstantRecord> iOptConstantTable = new RecordTable<ConstantRecord>("constants", false, true);
   RecordTable<ConstantValue> iOptConstantValues = new RecordTable<ConstantValue>("constant values", true, true);
   // One pass per alignment class keeps values of equal alignment adjacent,
   // so no padding is wasted between them.
   for(int align : alignments)
   {
      for (int pIndex = 0; pIndex < pSize; pIndex++)
      {
         ConstantRecord pRec = iConstantTable.get(pIndex);
         if (pRec.constantValue().getAlignment() == align && (useAll() || pRec.used()))
         {
            iOptConstantTable.add(pRec);
            iOptConstantValues.add(pRec.constantValue());
         }
      }
   }
   iConstantTable = iOptConstantTable;
   iConstantValues = iOptConstantValues;
}

/**
 * Calls storeMethods on all the classes of the closure previously computed
 * with processClasses.
 *
 * @throws TinyVMException
 */
public void processMethods () throws TinyVMException
{
   int pSize = iClassTable.size();
   for (int pIndex = 0; pIndex < pSize; pIndex++)
   {
      ClassRecord classRecord = iClassTable.get(pIndex);
      classRecord.storeMethods(iMethodTables, iExceptionTables, iSignatures, useAll());
   }
}

/**
 * Second stage of unused-method elimination: rebuild the method and
 * exception tables so they only contain methods that are actually called.
 *
 * @throws TinyVMException on any linkage error
 */
public void processOptimizedMethods () throws TinyVMException
{
   /* This is the second stage of the unused methods elimination code.
    * We need to re-create the method and exception tables so that they
    * only contain methods that are actually called.
    */
   int pSize = iClassTable.size();
   // We need an optimized version of the method and exception tables
   // so create new ones and repopulate.
   iMethodTables = new RecordTable<RecordTable<MethodRecord>>("methods", true, true);
   iExceptionTables = new RecordTable<RecordTable<ExceptionRecord>>("exceptions", false, true);
   for (int pIndex = 0; pIndex < pSize; pIndex++)
   {
      ClassRecord classRecord = iClassTable.get(pIndex);
      classRecord.storeOptimizedMethods(iMethodTables, iExceptionTables, iSignatures);
   }
}

/**
 * Collect instance fields, static fields and static state for every class
 * in the closure.
 *
 * @throws TinyVMException on any linkage error
 */
public void processFields () throws TinyVMException
{
   int pSize = iClassTable.size();
   for (int pIndex = 0; pIndex < pSize; pIndex++)
   {
      ClassRecord pRec = iClassTable.get(pIndex);
      pRec.storeFields(iInstanceFieldTables, iStaticFields, iStaticState);
   }
}

/**
 * Debug aid: print each used interface and the classes implementing it
 * to standard output.
 *
 * @throws TinyVMException on any linkage error
 */
public void printInterfaces () throws TinyVMException
{
   int pSize = iClassTable.size();
   for (int pIndex = 0; pIndex < pSize; pIndex++)
   {
      ClassRecord pRec = iClassTable.get(pIndex);
      if (pRec.isInterface() && pRec.used())
      {
         System.out.println("Interface: " + pRec.iName + " is implemented by:");
         for(ClassRecord cr : pRec.iImplementedBy)
         {
            System.out.println("Active class: " + cr.iName + " id " + this.getClassIndex(cr));
         }
      }
   }
}

/**
 * Rebuild the static and instance field tables keeping only what is used,
 * laying static state out one alignment class at a time.
 *
 * @throws TinyVMException on any linkage error
 */
public void processOptimizedFields () throws TinyVMException
{
   int pSize = iClassTable.size();
   // We need an optimized version of the static tables
   // so create new ones and repopulate.
   iStaticState = new RecordTable<StaticValue>("static state", true, true);
   iStaticFields = new RecordTable<StaticFieldRecord>("static fields", true, true);
   iInstanceFieldTables = new RecordTable<RecordTable<InstanceFieldRecord>>("instance fields", true, true);
   // One pass per alignment class, as for constants, to avoid padding.
   for(int align : alignments)
   {
      for (int pIndex = 0; pIndex < pSize; pIndex++)
      {
         ClassRecord pRec = iClassTable.get(pIndex);
         pRec.storeOptimizedStaticFields(iStaticFields, iStaticState, align);
      }
   }
   for (int pIndex = 0; pIndex < pSize; pIndex++)
   {
      ClassRecord pRec = iClassTable.get(pIndex);
      pRec.storeOptimizedFields(iInstanceFieldTables);
   }
}

/**
 * Store (first pass) or post-process (second pass) the code of every class.
 *
 * @param aPostProcess false for the initial copy, true for the fix-up pass
 *        run after offsets have been assigned
 * @throws TinyVMException on any linkage error
 */
public void processCode (boolean aPostProcess) throws TinyVMException
{
   int pSize = iClassTable.size();
   for (int pIndex = 0; pIndex < pSize; pIndex++)
   {
      ClassRecord pRec = iClassTable.get(pIndex);
      pRec.storeCode(iCodeSequences, aPostProcess);
   }
}

//
// storing
//

public void storeComponents ()
{
   // Master record and class table are always the first two, all
   // tables are aligned on 4 byte boundaries.
iEntireBinary.add(iMasterRecord); iEntireBinary.add(iClassTable); // We do not need to store the static fields, just calculate layout iStaticStorage.add(iStaticState); iEntireBinary.add(iStaticFields); iEntireBinary.add(iConstantTable); iEntireBinary.add(iMethodTables); iEntireBinary.add(iExceptionTables); iEntireBinary.add(iInstanceFieldTables); iEntireBinary.add(iConstantValues); iEntireBinary.add(iInterfaceMaps); iEntireBinary.add(iEntryClassIndices); iEntireBinary.add(iCodeSequences); } public void initOffsets () throws TinyVMException { iEntireBinary.initOffset(0); iStaticStorage.initOffset(0); } public void setRunTimeOptions(int opt) { iMasterRecord.setRunTimeOptions(opt); } public int getTotalNumMethods () { int pTotal = 0; int pSize = iMethodTables.size(); for (int i = 0; i < pSize; i++) { pTotal += iMethodTables.get(i).size(); } return pTotal; } public int getTotalNumInstanceFields () { int pTotal = 0; int pSize = iInstanceFieldTables.size(); for (int i = 0; i < pSize; i++) { pTotal += iInstanceFieldTables.get(i).size(); } return pTotal; } public int getTotalNumExceptionRecords() { int pTotal = 0; int pSize = iExceptionTables.size(); for (int i = 0; i < pSize; i++) { pTotal += iExceptionTables.get(i).size(); } return pTotal; } // private static final Logger _logger = Logger.getLogger("TinyVM"); public void log(ToolProgressMonitor monitor) throws TinyVMException { // all classes for (int pIndex = 0; pIndex < iClassTable.size(); pIndex++) { ClassRecord pRec = iClassTable.get(pIndex); monitor.log("Class " + pIndex + ": " + pRec.getCanonicalName()); } int pSize = iMethodTables.size(); int methodNo = 0; for (int i = 0; i < pSize; i++) { RecordTable<MethodRecord> rt = iMethodTables.get(i); int cnt = rt.size(); for(int j = 0; j < cnt; j++) { MethodRecord mr = rt.get(j); // String s = "Method " + methodNo + ": Class: " + mr.iClassRecord.getName() + " Signature: " + // (iSignatures.elementAt(mr.iSignatureId)).getImage(); Signature sig = 
iSignatures.elementAt(mr.iSignatureId); String s = "Method " + methodNo + ": " + toPrettyString(sig, mr.iClassRecord.getCanonicalName(), mr.iClassRecord.getSimpleName(), true); if ((mr.iFlags & TinyVMConstants.M_NATIVE) == 0) monitor.log(s + " PC " + mr.getCodeStart() + " Signature id " + mr.iSignatureId); else monitor.log(s + " Native id " + mr.iSignatureId); methodNo++; } } monitor.log("Master record : " + iMasterRecord.getLength() + " bytes."); monitor.log("Class records : " + iClassTable.size() + " (" + iClassTable.getLength() + " bytes)."); monitor.log("Field records : " + getTotalNumInstanceFields() + " (" + iInstanceFieldTables.getLength() + " bytes)."); monitor.log("Static fields : " + iStaticFields.size() + " (" + iStaticFields.getLength() + " bytes)."); monitor.log("Static state : " + iStaticState.size() + " (" + iStaticState.getLength() + " bytes)."); monitor.log("Constant records : " + iConstantTable.size() + " (" + iConstantTable.getLength() + " bytes)."); monitor.log("Constant values : " + iConstantValues.size() + " (" + iConstantValues.getLength() + " bytes)."); monitor.log("Method records : " + getTotalNumMethods() + " (" + iMethodTables.getLength() + " bytes)."); monitor.log("Exception records: " + getTotalNumExceptionRecords() + " (" + iExceptionTables.getLength() + " bytes)."); monitor.log("Interface maps : " + iInterfaceMaps.size() + " (" + iInterfaceMaps.getLength() + " bytes)."); monitor.log("Code : " + iCodeSequences.size() + " (" + iCodeSequences.getLength() + " bytes)."); monitor.log("Total : " + iEntireBinary.getLength() + " bytes."); monitor.log("Run time options : " + iMasterRecord.getRunTimeOptions()); monitor.log("Constant loads : " + this.constNormLoads + "N " + this.constOpLoads + "O " + this.constWideLoads + "W " + this.constString + "S"); monitor.log("Static load/store: " + this.staticNormLoads + "N " + this.staticOpLoads + "O"); monitor.log("Field load/store: " + this.fieldNormOp + "N " + this.fieldOpOp + "O"); 
//printInterfaces(); } private static String toPrettyString(Signature sig, String fullclass, String simpleclass, boolean omitReturn) { String name = sig.getName(); String descriptor = sig.getDescriptor(); boolean omitEmptyArgs; String friendlyName; Type[] args = Type.getArgumentTypes(descriptor); Type rv = Type.getReturnType(descriptor); if (sig.isConstructor()) { // alternative: friendlyName = simpleclass; friendlyName = name; omitEmptyArgs = false; omitReturn |= rv.equals(Type.VOID); } else if (sig.isStaticInitializer()) { // alternative: friendlyName = "static{}" friendlyName = name; omitEmptyArgs = true; omitReturn |= rv.equals(Type.VOID); } else { friendlyName = name; omitEmptyArgs = false; } StringBuilder sb = new StringBuilder(); if (!omitReturn) { sb.append(rv); sb.append(' '); } if (fullclass != null) { sb.append(fullclass); sb.append("."); } sb.append(friendlyName); if (!omitEmptyArgs || args.length > 0) { sb.append('('); for (int j=0; j<args.length; j++) { if (j > 0) sb.append(", "); sb.append(args[j]); } sb.append(')'); } return sb.toString(); } }
/**
 * Copyright (C) 2016 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.strata.product.index;

import java.io.Serializable;
import java.time.LocalDate;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;

import org.joda.beans.Bean;
import org.joda.beans.BeanDefinition;
import org.joda.beans.ImmutableBean;
import org.joda.beans.ImmutableDefaults;
import org.joda.beans.ImmutablePreBuild;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.direct.DirectFieldsBeanBuilder;
import org.joda.beans.impl.direct.DirectMetaBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import org.joda.beans.impl.direct.DirectMetaPropertyMap;

import com.opengamma.strata.basics.ReferenceData;
import com.opengamma.strata.basics.currency.Currency;
import com.opengamma.strata.basics.index.IborIndex;
import com.opengamma.strata.basics.value.Rounding;
import com.opengamma.strata.collect.ArgChecker;
import com.opengamma.strata.product.ResolvedProduct;
import com.opengamma.strata.product.SecurityId;
import com.opengamma.strata.product.rate.IborRateComputation;

/**
 * A futures contract based on an Ibor index, resolved for pricing.
 * <p>
 * This is the resolved form of {@link IborFuture} and is an input to the pricers.
 * Applications will typically create a {@code ResolvedIborFuture} from a {@code IborFuture}
 * using {@link IborFuture#resolve(ReferenceData)}.
 * <p>
 * A {@code ResolvedIborFuture} is bound to data that changes over time, such as holiday calendars.
 * If the data changes, such as the addition of a new holiday, the resolved form will not be updated.
 * Care must be taken when placing the resolved form in a cache or persistence layer.
 *
 * <h4>Price</h4>
 * The price of an Ibor future is based on the interest rate of the underlying index.
 * It is defined as {@code (100 - percentRate)}.
 * <p>
 * Strata uses <i>decimal prices</i> for Ibor futures in the trade model, pricers and market data.
 * The decimal price is based on the decimal rate equivalent to the percentage.
 * For example, a price of 99.32 implies an interest rate of 0.68% which is represented in Strata by 0.9932.
 */
@BeanDefinition(constructorScope = "package")
public final class ResolvedIborFuture
    implements ResolvedProduct, ImmutableBean, Serializable {

  /**
   * The security identifier.
   * <p>
   * This identifier uniquely identifies the security within the system.
   */
  @PropertyDefinition(validate = "notNull")
  private final SecurityId securityId;
  /**
   * The currency that the future is traded in.
   */
  @PropertyDefinition(validate = "notNull")
  private final Currency currency;
  /**
   * The notional amount.
   * <p>
   * This is the full notional of the deposit, such as 1 million dollars.
   * The notional expressed here must be positive.
   * The currency of the notional is specified by {@code currency}.
   */
  @PropertyDefinition(validate = "ArgChecker.notNegativeOrZero")
  private final double notional;
  /**
   * The accrual factor, defaulted from the index if not set.
   * <p>
   * This is the year fraction of the contract, typically 0.25 for a 3 month deposit.
   * <p>
   * When building, this will default to the number of months in the index divided by 12
   * if not specified. However, if the index is not month-based, no defaulting will occur.
   */
  @PropertyDefinition(validate = "ArgChecker.notNegativeOrZero")
  private final double accrualFactor;
  /**
   * The Ibor rate observation.
   * <p>
   * The future is based on this index.
   * It will be a well known market index such as 'USD-LIBOR-3M'.
   */
  @PropertyDefinition(validate = "notNull")
  private final IborRateComputation iborRate;
  /**
   * The definition of how to round the futures price, defaulted to no rounding.
   * <p>
   * The price is represented in decimal form, not percentage form.
   * As such, the decimal places expressed by the rounding refers to this decimal form.
   * For example, the common market price of 99.7125 for a 0.2875% rate is
   * represented as 0.997125 which has 6 decimal places.
   */
  @PropertyDefinition(validate = "notNull")
  private final Rounding rounding;

  //-------------------------------------------------------------------------
  @ImmutableDefaults
  private static void applyDefaults(Builder builder) {
    builder.rounding(Rounding.none());
  }

  @ImmutablePreBuild
  private static void preBuild(Builder builder) {
    if (builder.iborRate != null) {
      // Default the accrual factor to months/12 when the index tenor is month-based.
      if (builder.accrualFactor == 0d && builder.iborRate.getIndex().getTenor().isMonthBased()) {
        builder.accrualFactor(builder.iborRate.getIndex().getTenor().getPeriod().toTotalMonths() / 12d);
      }
      // Default the trading currency to the currency of the index.
      if (builder.currency == null) {
        builder.currency = builder.iborRate.getIndex().getCurrency();
      }
    }
  }

  //-------------------------------------------------------------------------
  /**
   * Gets the Ibor index that the future is based on.
   *
   * @return the Ibor index
   */
  public IborIndex getIndex() {
    return iborRate.getIndex();
  }

  /**
   * Gets the last date of trading, which is the same as the fixing date.
   * <p>
   * This is typically 2 business days before the IMM date (3rd Wednesday of the month).
   * By including this method, it allows for the possibility of a future where the fixing date
   * and last trade date differ.
   *
   * @return the last trade date
   */
  public LocalDate getLastTradeDate() {
    return iborRate.getFixingDate();
  }

  //------------------------- AUTOGENERATED START -------------------------
  ///CLOVER:OFF
  /**
   * The meta-bean for {@code ResolvedIborFuture}.
   * @return the meta-bean, not null
   */
  public static ResolvedIborFuture.Meta meta() {
    return ResolvedIborFuture.Meta.INSTANCE;
  }

  static {
    JodaBeanUtils.registerMetaBean(ResolvedIborFuture.Meta.INSTANCE);
  }

  /**
   * The serialization version id.
   */
  private static final long serialVersionUID = 1L;

  /**
   * Returns a builder used to create an instance of the bean.
   * @return the builder, not null
   */
  public static ResolvedIborFuture.Builder builder() {
    return new ResolvedIborFuture.Builder();
  }

  // NOTE(review): code between the AUTOGENERATED markers is produced by the
  // Joda-Beans code generator; regenerate rather than hand-edit.

  /**
   * Creates an instance.
   * @param securityId  the value of the property, not null
   * @param currency  the value of the property, not null
   * @param notional  the value of the property
   * @param accrualFactor  the value of the property
   * @param iborRate  the value of the property, not null
   * @param rounding  the value of the property, not null
   */
  ResolvedIborFuture(
      SecurityId securityId,
      Currency currency,
      double notional,
      double accrualFactor,
      IborRateComputation iborRate,
      Rounding rounding) {
    JodaBeanUtils.notNull(securityId, "securityId");
    JodaBeanUtils.notNull(currency, "currency");
    ArgChecker.notNegativeOrZero(notional, "notional");
    ArgChecker.notNegativeOrZero(accrualFactor, "accrualFactor");
    JodaBeanUtils.notNull(iborRate, "iborRate");
    JodaBeanUtils.notNull(rounding, "rounding");
    this.securityId = securityId;
    this.currency = currency;
    this.notional = notional;
    this.accrualFactor = accrualFactor;
    this.iborRate = iborRate;
    this.rounding = rounding;
  }

  @Override
  public ResolvedIborFuture.Meta metaBean() {
    return ResolvedIborFuture.Meta.INSTANCE;
  }

  @Override
  public <R> Property<R> property(String propertyName) {
    return metaBean().<R>metaProperty(propertyName).createProperty(this);
  }

  @Override
  public Set<String> propertyNames() {
    return metaBean().metaPropertyMap().keySet();
  }

  //-----------------------------------------------------------------------
  /**
   * Gets the security identifier.
   * <p>
   * This identifier uniquely identifies the security within the system.
   * @return the value of the property, not null
   */
  public SecurityId getSecurityId() {
    return securityId;
  }

  //-----------------------------------------------------------------------
  /**
   * Gets the currency that the future is traded in.
   * @return the value of the property, not null
   */
  public Currency getCurrency() {
    return currency;
  }

  //-----------------------------------------------------------------------
  /**
   * Gets the notional amount.
   * <p>
   * This is the full notional of the deposit, such as 1 million dollars.
   * The notional expressed here must be positive.
   * The currency of the notional is specified by {@code currency}.
   * @return the value of the property
   */
  public double getNotional() {
    return notional;
  }

  //-----------------------------------------------------------------------
  /**
   * Gets the accrual factor, defaulted from the index if not set.
   * <p>
   * This is the year fraction of the contract, typically 0.25 for a 3 month deposit.
   * <p>
   * When building, this will default to the number of months in the index divided by 12
   * if not specified. However, if the index is not month-based, no defaulting will occur.
   * @return the value of the property
   */
  public double getAccrualFactor() {
    return accrualFactor;
  }

  //-----------------------------------------------------------------------
  /**
   * Gets the Ibor rate observation.
   * <p>
   * The future is based on this index.
   * It will be a well known market index such as 'USD-LIBOR-3M'.
   * @return the value of the property, not null
   */
  public IborRateComputation getIborRate() {
    return iborRate;
  }

  //-----------------------------------------------------------------------
  /**
   * Gets the definition of how to round the futures price, defaulted to no rounding.
   * <p>
   * The price is represented in decimal form, not percentage form.
   * As such, the decimal places expressed by the rounding refers to this decimal form.
   * For example, the common market price of 99.7125 for a 0.2875% rate is
   * represented as 0.997125 which has 6 decimal places.
   * @return the value of the property, not null
   */
  public Rounding getRounding() {
    return rounding;
  }

  //-----------------------------------------------------------------------
  /**
   * Returns a builder that allows this bean to be mutated.
   * @return the mutable builder, not null
   */
  public Builder toBuilder() {
    return new Builder(this);
  }

  @Override
  public boolean equals(Object obj) {
    if (obj == this) {
      return true;
    }
    if (obj != null && obj.getClass() == this.getClass()) {
      ResolvedIborFuture other = (ResolvedIborFuture) obj;
      return JodaBeanUtils.equal(securityId, other.securityId) &&
          JodaBeanUtils.equal(currency, other.currency) &&
          JodaBeanUtils.equal(notional, other.notional) &&
          JodaBeanUtils.equal(accrualFactor, other.accrualFactor) &&
          JodaBeanUtils.equal(iborRate, other.iborRate) &&
          JodaBeanUtils.equal(rounding, other.rounding);
    }
    return false;
  }

  @Override
  public int hashCode() {
    int hash = getClass().hashCode();
    hash = hash * 31 + JodaBeanUtils.hashCode(securityId);
    hash = hash * 31 + JodaBeanUtils.hashCode(currency);
    hash = hash * 31 + JodaBeanUtils.hashCode(notional);
    hash = hash * 31 + JodaBeanUtils.hashCode(accrualFactor);
    hash = hash * 31 + JodaBeanUtils.hashCode(iborRate);
    hash = hash * 31 + JodaBeanUtils.hashCode(rounding);
    return hash;
  }

  @Override
  public String toString() {
    StringBuilder buf = new StringBuilder(224);
    buf.append("ResolvedIborFuture{");
    buf.append("securityId").append('=').append(securityId).append(',').append(' ');
    buf.append("currency").append('=').append(currency).append(',').append(' ');
    buf.append("notional").append('=').append(notional).append(',').append(' ');
    buf.append("accrualFactor").append('=').append(accrualFactor).append(',').append(' ');
    buf.append("iborRate").append('=').append(iborRate).append(',').append(' ');
    buf.append("rounding").append('=').append(JodaBeanUtils.toString(rounding));
    buf.append('}');
    return buf.toString();
  }

  //-----------------------------------------------------------------------
  /**
   * The meta-bean for {@code ResolvedIborFuture}.
   */
  public static final class Meta extends DirectMetaBean {
    /**
     * The singleton instance of the meta-bean.
     */
    static final Meta INSTANCE = new Meta();

    /**
     * The meta-property for the {@code securityId} property.
     */
    private final MetaProperty<SecurityId> securityId = DirectMetaProperty.ofImmutable(
        this, "securityId", ResolvedIborFuture.class, SecurityId.class);
    /**
     * The meta-property for the {@code currency} property.
     */
    private final MetaProperty<Currency> currency = DirectMetaProperty.ofImmutable(
        this, "currency", ResolvedIborFuture.class, Currency.class);
    /**
     * The meta-property for the {@code notional} property.
     */
    private final MetaProperty<Double> notional = DirectMetaProperty.ofImmutable(
        this, "notional", ResolvedIborFuture.class, Double.TYPE);
    /**
     * The meta-property for the {@code accrualFactor} property.
     */
    private final MetaProperty<Double> accrualFactor = DirectMetaProperty.ofImmutable(
        this, "accrualFactor", ResolvedIborFuture.class, Double.TYPE);
    /**
     * The meta-property for the {@code iborRate} property.
     */
    private final MetaProperty<IborRateComputation> iborRate = DirectMetaProperty.ofImmutable(
        this, "iborRate", ResolvedIborFuture.class, IborRateComputation.class);
    /**
     * The meta-property for the {@code rounding} property.
     */
    private final MetaProperty<Rounding> rounding = DirectMetaProperty.ofImmutable(
        this, "rounding", ResolvedIborFuture.class, Rounding.class);
    /**
     * The meta-properties.
     */
    private final Map<String, MetaProperty<?>> metaPropertyMap$ = new DirectMetaPropertyMap(
        this, null,
        "securityId",
        "currency",
        "notional",
        "accrualFactor",
        "iborRate",
        "rounding");

    /**
     * Restricted constructor.
     */
    private Meta() {
    }

    // NOTE(review): Joda-Beans generated dispatch; the case labels are the
    // hash codes of the property names.
    @Override
    protected MetaProperty<?> metaPropertyGet(String propertyName) {
      switch (propertyName.hashCode()) {
        case 1574023291:  // securityId
          return securityId;
        case 575402001:  // currency
          return currency;
        case 1585636160:  // notional
          return notional;
        case -1540322338:  // accrualFactor
          return accrualFactor;
        case -1621804100:  // iborRate
          return iborRate;
        case -142444:  // rounding
          return rounding;
      }
      return super.metaPropertyGet(propertyName);
    }

    @Override
    public ResolvedIborFuture.Builder builder() {
      return new ResolvedIborFuture.Builder();
    }

    @Override
    public Class<? extends ResolvedIborFuture> beanType() {
      return ResolvedIborFuture.class;
    }

    @Override
    public Map<String, MetaProperty<?>> metaPropertyMap() {
      return metaPropertyMap$;
    }

    //-----------------------------------------------------------------------
    /**
     * The meta-property for the {@code securityId} property.
     * @return the meta-property, not null
     */
    public MetaProperty<SecurityId> securityId() {
      return securityId;
    }

    /**
     * The meta-property for the {@code currency} property.
     * @return the meta-property, not null
     */
    public MetaProperty<Currency> currency() {
      return currency;
    }

    /**
     * The meta-property for the {@code notional} property.
     * @return the meta-property, not null
     */
    public MetaProperty<Double> notional() {
      return notional;
    }

    /**
     * The meta-property for the {@code accrualFactor} property.
     * @return the meta-property, not null
     */
    public MetaProperty<Double> accrualFactor() {
      return accrualFactor;
    }

    /**
     * The meta-property for the {@code iborRate} property.
     * @return the meta-property, not null
     */
    public MetaProperty<IborRateComputation> iborRate() {
      return iborRate;
    }

    /**
     * The meta-property for the {@code rounding} property.
     * @return the meta-property, not null
     */
    public MetaProperty<Rounding> rounding() {
      return rounding;
    }

    //-----------------------------------------------------------------------
    @Override
    protected Object propertyGet(Bean bean, String propertyName, boolean quiet) {
      switch (propertyName.hashCode()) {
        case 1574023291:  // securityId
          return ((ResolvedIborFuture) bean).getSecurityId();
        case 575402001:  // currency
          return ((ResolvedIborFuture) bean).getCurrency();
        case 1585636160:  // notional
          return ((ResolvedIborFuture) bean).getNotional();
        case -1540322338:  // accrualFactor
          return ((ResolvedIborFuture) bean).getAccrualFactor();
        case -1621804100:  // iborRate
          return ((ResolvedIborFuture) bean).getIborRate();
        case -142444:  // rounding
          return ((ResolvedIborFuture) bean).getRounding();
      }
      return super.propertyGet(bean, propertyName, quiet);
    }

    @Override
    protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) {
      metaProperty(propertyName);
      if (quiet) {
        return;
      }
      throw new UnsupportedOperationException("Property cannot be written: " + propertyName);
    }

  }

  //-----------------------------------------------------------------------
  /**
   * The bean-builder for {@code ResolvedIborFuture}.
   */
  public static final class Builder extends DirectFieldsBeanBuilder<ResolvedIborFuture> {

    private SecurityId securityId;
    private Currency currency;
    private double notional;
    private double accrualFactor;
    private IborRateComputation iborRate;
    private Rounding rounding;

    /**
     * Restricted constructor.
     */
    private Builder() {
      applyDefaults(this);
    }

    /**
     * Restricted copy constructor.
     * @param beanToCopy  the bean to copy from, not null
     */
    private Builder(ResolvedIborFuture beanToCopy) {
      this.securityId = beanToCopy.getSecurityId();
      this.currency = beanToCopy.getCurrency();
      this.notional = beanToCopy.getNotional();
      this.accrualFactor = beanToCopy.getAccrualFactor();
      this.iborRate = beanToCopy.getIborRate();
      this.rounding = beanToCopy.getRounding();
    }

    // NOTE(review): Joda-Beans generated builder plumbing; regenerate rather
    // than hand-edit.
    //-----------------------------------------------------------------------
    @Override
    public Object get(String propertyName) {
      switch (propertyName.hashCode()) {
        case 1574023291:  // securityId
          return securityId;
        case 575402001:  // currency
          return currency;
        case 1585636160:  // notional
          return notional;
        case -1540322338:  // accrualFactor
          return accrualFactor;
        case -1621804100:  // iborRate
          return iborRate;
        case -142444:  // rounding
          return rounding;
        default:
          throw new NoSuchElementException("Unknown property: " + propertyName);
      }
    }

    @Override
    public Builder set(String propertyName, Object newValue) {
      switch (propertyName.hashCode()) {
        case 1574023291:  // securityId
          this.securityId = (SecurityId) newValue;
          break;
        case 575402001:  // currency
          this.currency = (Currency) newValue;
          break;
        case 1585636160:  // notional
          this.notional = (Double) newValue;
          break;
        case -1540322338:  // accrualFactor
          this.accrualFactor = (Double) newValue;
          break;
        case -1621804100:  // iborRate
          this.iborRate = (IborRateComputation) newValue;
          break;
        case -142444:  // rounding
          this.rounding = (Rounding) newValue;
          break;
        default:
          throw new NoSuchElementException("Unknown property: " + propertyName);
      }
      return this;
    }

    @Override
    public Builder set(MetaProperty<?> property, Object value) {
      super.set(property, value);
      return this;
    }

    @Override
    public Builder setString(String propertyName, String value) {
      setString(meta().metaProperty(propertyName), value);
      return this;
    }

    @Override
    public Builder setString(MetaProperty<?> property, String value) {
      super.setString(property, value);
      return this;
    }

    @Override
    public Builder setAll(Map<String, ? extends Object> propertyValueMap) {
      super.setAll(propertyValueMap);
      return this;
    }

    @Override
    public ResolvedIborFuture build() {
      preBuild(this);
      return new ResolvedIborFuture(
          securityId,
          currency,
          notional,
          accrualFactor,
          iborRate,
          rounding);
    }

    //-----------------------------------------------------------------------
    /**
     * Sets the security identifier.
     * <p>
     * This identifier uniquely identifies the security within the system.
     * @param securityId  the new value, not null
     * @return this, for chaining, not null
     */
    public Builder securityId(SecurityId securityId) {
      JodaBeanUtils.notNull(securityId, "securityId");
      this.securityId = securityId;
      return this;
    }

    /**
     * Sets the currency that the future is traded in.
     * @param currency  the new value, not null
     * @return this, for chaining, not null
     */
    public Builder currency(Currency currency) {
      JodaBeanUtils.notNull(currency, "currency");
      this.currency = currency;
      return this;
    }

    /**
     * Sets the notional amount.
     * <p>
     * This is the full notional of the deposit, such as 1 million dollars.
     * The notional expressed here must be positive.
     * The currency of the notional is specified by {@code currency}.
     * @param notional  the new value
     * @return this, for chaining, not null
     */
    public Builder notional(double notional) {
      ArgChecker.notNegativeOrZero(notional, "notional");
      this.notional = notional;
      return this;
    }

    /**
     * Sets the accrual factor, defaulted from the index if not set.
     * <p>
     * This is the year fraction of the contract, typically 0.25 for a 3 month deposit.
     * <p>
     * When building, this will default to the number of months in the index divided by 12
     * if not specified. However, if the index is not month-based, no defaulting will occur.
     * @param accrualFactor  the new value
     * @return this, for chaining, not null
     */
    public Builder accrualFactor(double accrualFactor) {
      ArgChecker.notNegativeOrZero(accrualFactor, "accrualFactor");
      this.accrualFactor = accrualFactor;
      return this;
    }

    /**
     * Sets the Ibor rate observation.
     * <p>
     * The future is based on this index.
     * It will be a well known market index such as 'USD-LIBOR-3M'.
     * @param iborRate  the new value, not null
     * @return this, for chaining, not null
     */
    public Builder iborRate(IborRateComputation iborRate) {
      JodaBeanUtils.notNull(iborRate, "iborRate");
      this.iborRate = iborRate;
      return this;
    }

    /**
     * Sets the definition of how to round the futures price, defaulted to no rounding.
     * <p>
     * The price is represented in decimal form, not percentage form.
     * As such, the decimal places expressed by the rounding refers to this decimal form.
     * For example, the common market price of 99.7125 for a 0.2875% rate is
     * represented as 0.997125 which has 6 decimal places.
     * @param rounding  the new value, not null
     * @return this, for chaining, not null
     */
    public Builder rounding(Rounding rounding) {
      JodaBeanUtils.notNull(rounding, "rounding");
      this.rounding = rounding;
      return this;
    }

    //-----------------------------------------------------------------------
    @Override
    public String toString() {
      StringBuilder buf = new StringBuilder(224);
      buf.append("ResolvedIborFuture.Builder{");
      buf.append("securityId").append('=').append(JodaBeanUtils.toString(securityId)).append(',').append(' ');
      buf.append("currency").append('=').append(JodaBeanUtils.toString(currency)).append(',').append(' ');
      buf.append("notional").append('=').append(JodaBeanUtils.toString(notional)).append(',').append(' ');
      buf.append("accrualFactor").append('=').append(JodaBeanUtils.toString(accrualFactor)).append(',').append(' ');
      buf.append("iborRate").append('=').append(JodaBeanUtils.toString(iborRate)).append(',').append(' ');
      buf.append("rounding").append('=').append(JodaBeanUtils.toString(rounding));
      buf.append('}');
      return buf.toString();
    }

  }

  ///CLOVER:ON
  //-------------------------- AUTOGENERATED END --------------------------
}
// Start of user code Copyright /******************************************************************************* * Copyright (c) 2012 IBM Corporation and others. * * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * and Eclipse Distribution License v. 1.0 which accompanies this distribution. * * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * * Russell Boykin - initial API and implementation * Alberto Giammaria - initial API and implementation * Chris Peters - initial API and implementation * Gianluca Bernardini - initial API and implementation * Sam Padgett - initial API and implementation * Michael Fiedler - adapted for OSLC4J * Jad El-khoury - initial implementation of code generator (422448) * Matthieu Helleboid - Support for multiple Service Providers. * Anass Radouani - Support for multiple Service Providers. 
* * This file is generated by org.eclipse.lyo.oslc4j.codegenerator *******************************************************************************/ // End of user code package eu.scott.warehouse.domains.pddl; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; import java.util.HashMap; import java.util.Map; import java.text.SimpleDateFormat; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.TreeSet; import java.util.Iterator; import org.eclipse.lyo.oslc4j.core.OSLC4JUtils; import org.eclipse.lyo.oslc4j.core.exception.OslcCoreApplicationException; import org.eclipse.lyo.oslc4j.core.annotation.OslcAllowedValue; import org.eclipse.lyo.oslc4j.core.annotation.OslcDescription; import org.eclipse.lyo.oslc4j.core.annotation.OslcMemberProperty; import org.eclipse.lyo.oslc4j.core.annotation.OslcName; import org.eclipse.lyo.oslc4j.core.annotation.OslcNamespace; import org.eclipse.lyo.oslc4j.core.annotation.OslcOccurs; import org.eclipse.lyo.oslc4j.core.annotation.OslcPropertyDefinition; import org.eclipse.lyo.oslc4j.core.annotation.OslcRange; import org.eclipse.lyo.oslc4j.core.annotation.OslcReadOnly; import org.eclipse.lyo.oslc4j.core.annotation.OslcRepresentation; import org.eclipse.lyo.oslc4j.core.annotation.OslcResourceShape; import org.eclipse.lyo.oslc4j.core.annotation.OslcTitle; import org.eclipse.lyo.oslc4j.core.annotation.OslcValueType; import org.eclipse.lyo.oslc4j.core.model.AbstractResource; import org.eclipse.lyo.oslc4j.core.model.Link; import org.eclipse.lyo.oslc4j.core.model.Occurs; import org.eclipse.lyo.oslc4j.core.model.OslcConstants; import org.eclipse.lyo.oslc4j.core.model.Representation; import org.eclipse.lyo.oslc4j.core.model.ValueType; import org.eclipse.lyo.oslc4j.core.model.ResourceShape; import org.eclipse.lyo.oslc4j.core.model.ResourceShapeFactory; import eu.scott.warehouse.domains.pddl.PddlDomainConstants; import 
eu.scott.warehouse.domains.RdfsDomainConstants;
// NOTE(review): duplicate import — PddlDomainConstants is already imported
// above; harmless to the compiler, but the generator template could be tidied.
import eu.scott.warehouse.domains.pddl.PddlDomainConstants;
import eu.scott.warehouse.domains.pddl.Action;
import eu.scott.warehouse.domains.pddl.Constant;
import eu.scott.warehouse.domains.pddl.EitherType;

// Start of user code imports
// End of user code

// Start of user code preClassCode
// End of user code

// Start of user code classAnnotations
// End of user code
// NOTE(review): generated by org.eclipse.lyo.oslc4j.codegenerator (see file
// header). Hand edits belong only between the "Start of user code" /
// "End of user code" markers; anything else is lost on regeneration.
// OSLC resource class for a PDDL planning domain: one action, one type, a
// label, and sets of constants, functions and predicates, all as RDF links.
@OslcNamespace(PddlDomainConstants.DOMAIN_NAMESPACE)
@OslcName(PddlDomainConstants.DOMAIN_LOCALNAME)
@OslcResourceShape(title = "Domain Resource Shape", describes = PddlDomainConstants.DOMAIN_TYPE)
public class Domain
    extends AbstractResource
    implements IDomain
{
    // Start of user code attributeAnnotation:action
    // End of user code
    private Link action;
    // Start of user code attributeAnnotation:constant
    // End of user code
    private Set<Link> constant = new HashSet<Link>();
    // Start of user code attributeAnnotation:function
    // End of user code
    private Set<Link> function = new HashSet<Link>();
    // Start of user code attributeAnnotation:predicate
    // End of user code
    private Set<Link> predicate = new HashSet<Link>();
    // Start of user code attributeAnnotation:type
    // End of user code
    private Link type;
    // Start of user code attributeAnnotation:label
    // End of user code
    private String label;

    // Start of user code classAttributes
    // End of user code
    // Start of user code classMethods
    // End of user code

    public Domain()
    {
        super();
        // Start of user code constructor1
        // End of user code
    }

    public Domain(final URI about)
    {
        super(about);
        // Start of user code constructor2
        // End of user code
    }

    /** Builds the OSLC resource shape that describes Domain resources. */
    public static ResourceShape createResourceShape() throws OslcCoreApplicationException, URISyntaxException
    {
        return ResourceShapeFactory.createResourceShape(OSLC4JUtils.getServletURI(),
            OslcConstants.PATH_RESOURCE_SHAPES,
            PddlDomainConstants.DOMAIN_PATH,
            Domain.class);
    }

    public String toString()
    {
        return toString(false);
    }

    /**
     * @param asLocalResource when true, renders a placeholder local
     *        representation; otherwise renders the resource URI.
     */
    public String toString(boolean asLocalResource)
    {
        String result = "";
        // Start of user code toString_init
        // End of user code

        if (asLocalResource) {
            result = result + "{a Local Domain Resource} - update Domain.toString() to present resource as desired.";
            // Start of user code toString_bodyForLocalResource
            // End of user code
        }
        else {
            result = String.valueOf(getAbout());
        }
        // Start of user code toString_finalize
        // End of user code

        return result;
    }

    public void addConstant(final Link constant)
    {
        this.constant.add(constant);
    }

    public void addFunction(final Link function)
    {
        this.function.add(function);
    }

    public void addPredicate(final Link predicate)
    {
        this.predicate.add(predicate);
    }

    // Start of user code getterAnnotation:action
    // End of user code
    @OslcName("action")
    @OslcPropertyDefinition(PddlDomainConstants.SCOTT_PDDL_2_1_SUBSET_SPEC_NAMSPACE + "action")
    @OslcDescription("Action of the plan step.")
    @OslcOccurs(Occurs.ExactlyOne)
    @OslcValueType(ValueType.Resource)
    @OslcRange({PddlDomainConstants.ACTION_TYPE})
    @OslcReadOnly(false)
    public Link getAction()
    {
        // Start of user code getterInit:action
        // End of user code
        return action;
    }

    // Start of user code getterAnnotation:constant
    // End of user code
    @OslcName("constant")
    @OslcPropertyDefinition(PddlDomainConstants.SCOTT_PDDL_2_1_SUBSET_SPEC_NAMSPACE + "constant")
    @OslcDescription("Domain constants.")
    @OslcOccurs(Occurs.ZeroOrMany)
    @OslcValueType(ValueType.Resource)
    @OslcRange({PddlDomainConstants.CONSTANT_TYPE})
    @OslcReadOnly(false)
    public Set<Link> getConstant()
    {
        // Start of user code getterInit:constant
        // End of user code
        return constant;
    }

    // Start of user code getterAnnotation:function
    // End of user code
    @OslcName("function")
    @OslcPropertyDefinition(PddlDomainConstants.SCOTT_PDDL_2_1_SUBSET_SPEC_NAMSPACE + "function")
    @OslcDescription("Domain functions.")
    @OslcOccurs(Occurs.ZeroOrMany)
    @OslcValueType(ValueType.Resource)
    @OslcReadOnly(false)
    public Set<Link> getFunction()
    {
        // Start of user code getterInit:function
        // End of user code
        return function;
    }

    // Start of user code getterAnnotation:predicate
    // End of user code
    @OslcName("predicate")
    @OslcPropertyDefinition(PddlDomainConstants.SCOTT_PDDL_2_1_SUBSET_SPEC_NAMSPACE + "predicate")
    @OslcDescription("Domain predicates.")
    @OslcOccurs(Occurs.ZeroOrMany)
    @OslcValueType(ValueType.Resource)
    @OslcReadOnly(false)
    public Set<Link> getPredicate()
    {
        // Start of user code getterInit:predicate
        // End of user code
        return predicate;
    }

    // Start of user code getterAnnotation:type
    // End of user code
    @OslcName("type")
    @OslcPropertyDefinition(PddlDomainConstants.SCOTT_PDDL_2_1_SUBSET_SPEC_NAMSPACE + "type")
    @OslcDescription("Parameter type.")
    @OslcOccurs(Occurs.ExactlyOne)
    @OslcValueType(ValueType.Resource)
    @OslcRange({PddlDomainConstants.EITHERTYPE_TYPE})
    @OslcReadOnly(false)
    public Link getType()
    {
        // Start of user code getterInit:type
        // End of user code
        return type;
    }

    // Start of user code getterAnnotation:label
    // End of user code
    @OslcName("label")
    @OslcPropertyDefinition(RdfsDomainConstants.RDFS_NAMSPACE + "label")
    @OslcDescription("Parameter name.")
    @OslcOccurs(Occurs.ExactlyOne)
    @OslcValueType(ValueType.String)
    @OslcReadOnly(false)
    public String getLabel()
    {
        // Start of user code getterInit:label
        // End of user code
        return label;
    }

    // Start of user code setterAnnotation:action
    // End of user code
    public void setAction(final Link action )
    {
        // Start of user code setterInit:action
        // End of user code
        this.action = action;
        // Start of user code setterFinalize:action
        // End of user code
    }

    // Start of user code setterAnnotation:constant
    // End of user code
    // Replaces the current contents; a null argument clears the set.
    public void setConstant(final Set<Link> constant )
    {
        // Start of user code setterInit:constant
        // End of user code
        this.constant.clear();
        if (constant != null)
        {
            this.constant.addAll(constant);
        }
        // Start of user code setterFinalize:constant
        // End of user code
    }

    // Start of user code setterAnnotation:function
    // End of user code
    // Replaces the current contents; a null argument clears the set.
    public void setFunction(final Set<Link> function )
    {
        // Start of user code setterInit:function
        // End of user code
        this.function.clear();
        if (function != null)
        {
            this.function.addAll(function);
        }
        // Start of user code setterFinalize:function
        // End of user code
    }

    // Start of user code setterAnnotation:predicate
    // End of user code
    // Replaces the current contents; a null argument clears the set.
    public void setPredicate(final Set<Link> predicate )
    {
        // Start of user code setterInit:predicate
        // End of user code
        this.predicate.clear();
        if (predicate != null)
        {
            this.predicate.addAll(predicate);
        }
        // Start of user code setterFinalize:predicate
        // End of user code
    }

    // Start of user code setterAnnotation:type
    // End of user code
    public void setType(final Link type )
    {
        // Start of user code setterInit:type
        // End of user code
        this.type = type;
        // Start of user code setterFinalize:type
        // End of user code
    }

    // Start of user code setterAnnotation:label
    // End of user code
    public void setLabel(final String label )
    {
        // Start of user code setterInit:label
        // End of user code
        this.label = label;
        // Start of user code setterFinalize:label
        // End of user code
    }
}
/* Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.harmony.tests.java.lang; import java.util.Locale; import junit.framework.TestCase; // TODO(kstanger): Some tests are skipped on armv7 devices due to imprecise // floating point operations. Fix these to allow some tolerance for imprecise // results. 
public class DoubleTest extends TestCase { private static final long rawBitsFor3_4en324ToN1[] = { 0x1L, 0x7L, 0x45L, 0x2b0L, 0x1ae2L, 0x10cd1L, 0xa8028L, 0x69018dL, 0x41a0f7eL, 0x29049aedL, 0x19a2e0d44L, 0x1005cc84acL, 0xa039fd2ebdL, 0x64243e3d361L, 0x3e96a6e641c6L, 0x271e284fe91b8L, 0x1872d931f1b131L, 0x4e8f8f7e6e1d7dL, 0x8319b9af04d26eL, 0xb7e0281ac6070aL, 0xedd832217788ccL, 0x122a71f54eab580L, 0x15750e72a2562e0L, 0x18d2520f4aebb98L, 0x1c2373498ed353fL, 0x1f6c501bf28828eL, 0x22c76422ef2a332L, 0x261c9e95d57a5ffL, 0x2963c63b4ad8f7fL, 0x2cbcb7ca1d8f35fL, 0x3015f2de527981bL, 0x335b6f95e717e22L, 0x36b24b7b60dddabL, 0x3a0f6f2d1c8aa8bL, 0x3d534af863ad52dL, 0x40a81db67c98a79L, 0x440912920ddf68bL, 0x474b5736915742eL, 0x4a9e2d0435ad13aL, 0x4e02dc22a18c2c4L, 0x5143932b49ef375L, 0x549477f61c6b052L, 0x57f995f3a385c67L, 0x5b3bfdb846339c0L, 0x5e8afd2657c0830L, 0x61edbc6fedb0a3dL, 0x653495c5f48e666L, 0x6881bb3771b1fffL, 0x6be22a054e1e7ffL, 0x6f2d5a4350d30ffL, 0x7278b0d42507d3fL, 0x75d6dd092e49c8fL, 0x79264a25bcee1daL, 0x7c6fdcaf2c29a50L, 0x7fcbd3daf7340e4L, 0x831f6468da8088eL, 0x86673d831120ab2L, 0x89c10ce3d568d5fL, 0x8d18a80e656185bL, 0x905ed211feb9e72L, 0x93b686967e6860eL, 0x9712141e0f013c9L, 0x9a56992592c18bbL, 0x9dac3f6ef771eeaL, 0xa10ba7a55aa7352L, 0xa44e918eb151027L, 0xa7a235f25da5430L, 0xab0561b77a8749eL, 0xae46ba2559291c6L, 0xb19868aeaf73637L, 0xb4fe82da5b503c5L, 0xb83f11c8791225bL, 0xbb8ed63a9756af2L, 0xbef28bc93d2c5afL, 0xc237975dc63bb8dL, 0xc5857d3537caa70L, 0xc8e6dc8285bd50cL, 0xcc3049d19396528L, 0xcf7c5c45f87be72L, 0xd2db7357769ae0eL, 0xd6292816aa20cc9L, 0xd973721c54a8ffbL, 0xdcd04ea369d33faL, 0xe0223126222407cL, 0xe36abd6faaad09bL, 0xe6c56ccb95584c2L, 0xea1b63ff3d572f9L, 0xed623cff0cacfb8L, 0xf0bacc3ecfd83a5L, 0xf414bfa741e7247L, 0xf759ef911260ed9L, 0xfab06b7556f9290L, 0xfe0e4329565bb9aL, 0x10151d3f3abf2a80L, 0x104a648f096ef520L, 0x10807ed965e55934L, 0x10b49e8fbf5eaf81L, 0x10e9c633af365b61L, 0x11201be04d81f91dL, 0x115422d860e27764L, 0x11892b8e791b153dL, 
0x11bf76721761da8cL, 0x11f3aa074e9d2898L, 0x12289489224472beL, 0x125eb9ab6ad58f6dL, 0x1293340b22c579a4L, 0x12c8010deb76d80dL, 0x12fe015166548e11L, 0x1332c0d2dff4d8caL, 0x1367710797f20efdL, 0x139d4d497dee92bcL, 0x13d2504deeb51bb6L, 0x1406e4616a6262a3L, 0x143c9d79c4fafb4cL, 0x1471e26c1b1cdd0fL, 0x14a65b0721e41453L, 0x14dbf1c8ea5d1968L, 0x1511771d927a2fe1L, 0x1545d4e4f718bbd9L, 0x157b4a1e34deead0L, 0x15b10e52e10b52c2L, 0x15e551e7994e2772L, 0x161aa6617fa1b14fL, 0x1650a7fcefc50ed1L, 0x1684d1fc2bb65286L, 0x16ba067b36a3e727L, 0x16f0440d02267078L, 0x1724551042b00c96L, 0x17596a54535c0fbcL, 0x178fc4e9683313abL, 0x17c3db11e11fec4bL, 0x17f8d1d65967e75eL, 0x182f064befc1e135L, 0x186363ef75d92cc1L, 0x18983ceb534f77f1L, 0x18ce4c26282355eeL, 0x1902ef97d91615b5L, 0x1937ab7dcf5b9b22L, 0x196d965d433281eaL, 0x19a27dfa49ff9132L, 0x19d71d78dc7f757fL, 0x1a0ce4d7139f52dfL, 0x1a420f066c4393cbL, 0x1a7692c8075478beL, 0x1aac377a092996edL, 0x1ae1a2ac45b9fe54L, 0x1b160b5757287de9L, 0x1b4b8e2d2cf29d64L, 0x1b8138dc3c17a25eL, 0x1bb587134b1d8af6L, 0x1beae8d81de4edb4L, 0x1c20d18712af1490L, 0x1c5505e8d75ad9b4L, 0x1c8a47630d319021L, 0x1cc06c9de83efa15L, 0x1cf487c5624eb89aL, 0x1d29a9b6bae266c1L, 0x1d600a1234cd8038L, 0x1d940c96c200e046L, 0x1dc90fbc72811858L, 0x1dff53ab8f215e6eL, 0x1e33944b3974db05L, 0x1e68795e07d211c6L, 0x1e9e97b589c69637L, 0x1ed31ed1761c1de3L, 0x1f07e685d3a3255bL, 0x1f3de027488beeb2L, 0x1f72ac188d57752fL, 0x1fa7571eb0ad527bL, 0x1fdd2ce65cd8a71aL, 0x20123c0ffa076870L, 0x2046cb13f889428cL, 0x207c7dd8f6ab932fL, 0x20b1cea79a2b3bfeL, 0x20e6425180b60afdL, 0x211bd2e5e0e38dbcL, 0x215163cfac8e3896L, 0x2185bcc397b1c6bbL, 0x21bb2bf47d9e386aL, 0x21f0fb78ce82e342L, 0x22253a5702239c13L, 0x225a88ecc2ac8317L, 0x22909593f9abd1efL, 0x22c4baf8f816c66aL, 0x22f9e9b7361c7805L, 0x2330321281d1cb03L, 0x23643e9722463dc4L, 0x23994e3cead7cd35L, 0x23cfa1cc258dc082L, 0x2403c51f97789851L, 0x2438b6677d56be65L, 0x246ee4015cac6dffL, 0x24a34e80d9ebc4bfL, 0x24d822211066b5efL, 0x250e2aa95480636bL, 0x2542daa9d4d03e23L, 
0x257791544a044dabL, 0x25ad75a95c856116L, 0x25e26989d9d35caeL, 0x261703ec504833d9L, 0x264cc4e7645a40d0L, 0x2681fb109eb86882L, 0x26b679d4c66682a2L, 0x26ec1849f800234bL, 0x27218f2e3b00160fL, 0x2755f2f9c9c01b93L, 0x278b6fb83c302277L, 0x27c125d3259e158bL, 0x27f56f47ef059aedL, 0x282acb19eac701a8L, 0x2860bef032bc6109L, 0x2894eeac3f6b794cL, 0x28ca2a574f46579eL, 0x29005a76918bf6c3L, 0x2934711435eef474L, 0x29698d59436ab191L, 0x299ff0af94455df5L, 0x29d3f66dbcab5ab9L, 0x2a08f4092bd63167L, 0x2a3f310b76cbbdc1L, 0x2a737ea72a3f5699L, 0x2aa85e50f4cf2c3fL, 0x2ade75e53202f74fL, 0x2b1309af3f41da91L, 0x2b47cc1b0f125135L, 0x2b7dbf21d2d6e583L, 0x2bb2977523c64f72L, 0x2be73d526cb7e34eL, 0x2c1d0ca707e5dc22L, 0x2c5227e864efa995L, 0x2c86b1e27e2b93faL, 0x2cbc5e5b1db678f9L, 0x2cf1baf8f2920b9cL, 0x2d2629b72f368e83L, 0x2d5bb424fb043223L, 0x2d9150971ce29f56L, 0x2dc5a4bce41b472bL, 0x2dfb0dec1d2218f6L, 0x2e30e8b392354f9aL, 0x2e6522e076c2a380L, 0x2e9a6b9894734c61L, 0x2ed0833f5cc80fbcL, 0x2f04a40f33fa13abL, 0x2f39cd1300f89896L, 0x2f70202be09b5f5eL, 0x2fa42836d8c23735L, 0x2fd932448ef2c503L, 0x300f7ed5b2af7643L, 0x3043af458fada9eaL, 0x30789b16f3991465L, 0x30aec1dcb07f597eL, 0x30e33929ee4f97efL, 0x3118077469e37deaL, 0x314e0951845c5d65L, 0x3182c5d2f2b9ba5fL, 0x31b77747af6828f7L, 0x31ed55199b423335L, 0x3222553001096001L, 0x3256ea7c014bb801L, 0x328ca51b019ea601L, 0x32c1e730e10327c1L, 0x32f660fd1943f1b1L, 0x332bf93c5f94ee1dL, 0x33617bc5bbbd14d2L, 0x3395dab72aac5a07L, 0x33cb5164f5577089L, 0x340112df1956a655L, 0x34355796dfac4febL, 0x346aad7c979763e5L, 0x34a0ac6ddebe9e6fL, 0x34d4d789566e460bL, 0x350a0d6bac09d78eL, 0x354048634b8626b9L, 0x35745a7c1e67b067L, 0x35a9711b26019c81L, 0x35dfcd61ef8203a1L, 0x3613e05d35b14245L, 0x3648d874831d92d6L, 0x367f0e91a3e4f78bL, 0x36b3691b066f1ab7L, 0x36e84361c80ae165L, 0x371e543a3a0d99beL, 0x3752f4a464488017L, 0x3787b1cd7d5aa01cL, 0x37bd9e40dcb14823L, 0x37f282e889eecd16L, 0x382723a2ac6a805cL, 0x385cec8b57852073L, 0x389213d716b33448L, 0x38c698ccdc60015aL, 0x38fc3f00137801b0L, 
0x3931a7600c2b010eL, 0x396611380f35c151L, 0x399b9586130331a6L, 0x39d13d73cbe1ff08L, 0x3a058cd0beda7ec9L, 0x3a3af004ee911e7cL, 0x3a70d603151ab30dL, 0x3aa50b83da615fd1L, 0x3ada4e64d0f9b7c5L, 0x3b1070ff029c12dbL, 0x3b448d3ec3431792L, 0x3b79b08e7413dd76L, 0x3bb00e59088c6a6aL, 0x3be411ef4aaf8504L, 0x3c19166b1d5b6646L, 0x3c4f5c05e4b23fd7L, 0x3c839983aeef67e6L, 0x3cb87fe49aab41e0L, 0x3cee9fddc1561258L, 0x3d2323ea98d5cb77L, 0x3d57ece53f0b3e55L, 0x3d8de81e8ece0deaL, 0x3dc2b1131940c8b2L, 0x3df75d57df90fadfL, 0x3e2d34add7753996L, 0x3e6240eca6a943feL, 0x3e96d127d05394fdL, 0x3ecc8571c4687a3dL, 0x3f01d3671ac14c66L, 0x3f364840e1719f80L, 0x3f6bda5119ce075fL, 0x3fa16872b020c49cL, 0x3fd5c28f5c28f5c3L, 0x400B333333333333L }; private static final long rawBitsFor1_2e0To309[] = { 0x3ff3333333333333L, 0x4028000000000000L, 0x405e000000000000L, 0x4092c00000000000L, 0x40c7700000000000L, 0x40fd4c0000000000L, 0x41324f8000000000L, 0x4166e36000000000L, 0x419c9c3800000000L, 0x41d1e1a300000000L, 0x42065a0bc0000000L, 0x423bf08eb0000000L, 0x427176592e000000L, 0x42a5d3ef79800000L, 0x42db48eb57e00000L, 0x43110d9316ec0000L, 0x434550f7dca70000L, 0x437aa535d3d0c000L, 0x43b0a741a4627800L, 0x43e4d1120d7b1600L, 0x441a055690d9db80L, 0x445043561a882930L, 0x4484542ba12a337cL, 0x44b969368974c05bL, 0x44efc3842bd1f072L, 0x4523da329b633647L, 0x4558d0bf423c03d9L, 0x458f04ef12cb04cfL, 0x45c363156bbee301L, 0x45f83bdac6ae9bc2L, 0x462e4ad1785a42b2L, 0x4662eec2eb3869afL, 0x4697aa73a606841bL, 0x46cd95108f882522L, 0x47027d2a59b51735L, 0x47371c74f0225d03L, 0x476ce3922c2af443L, 0x47a20e3b5b9ad8aaL, 0x47d691ca32818ed5L, 0x480c363cbf21f28aL, 0x4841a1e5f7753796L, 0x48760a5f7552857cL, 0x48ab8cf752a726daL, 0x48e1381a93a87849L, 0x491586213892965bL, 0x494ae7a986b73bf1L, 0x4980d0c9f4328577L, 0x49b504fc713f26d5L, 0x49ea463b8d8ef08aL, 0x4a206be538795656L, 0x4a5486de8697abecL, 0x4a89a896283d96e6L, 0x4ac0095dd9267e50L, 0x4af40bb54f701de4L, 0x4b290ea2a34c255dL, 0x4b5f524b4c1f2eb4L, 0x4b93936f0f937d31L, 0x4bc8784ad3785c7dL, 
0x4bfe965d8856739cL, 0x4c331dfa75360842L, 0x4c67e57912838a52L, 0x4c9dded757246ce6L, 0x4cd2ab469676c410L, 0x4d0756183c147514L, 0x4d3d2b9e4b199259L, 0x4d723b42eeeffb78L, 0x4da6ca13aaabfa56L, 0x4ddc7c989556f8ebL, 0x4e11cddf5d565b93L, 0x4e46415734abf278L, 0x4e7bd1ad01d6ef15L, 0x4eb1630c2126556dL, 0x4ee5bbcf296feac9L, 0x4f1b2ac2f3cbe57bL, 0x4f50fab9d85f6f6dL, 0x4f8539684e774b48L, 0x4fba87c262151e1aL, 0x4ff094d97d4d32d0L, 0x5024ba0fdca07f84L, 0x5059e893d3c89f65L, 0x5090315c645d639fL, 0x50c43db37d74bc87L, 0x50f94d205cd1eba9L, 0x512fa06874066693L, 0x5163c4414884001cL, 0x5198b5519aa50023L, 0x51cee2a6014e402cL, 0x52034da7c0d0e81bL, 0x52382111b1052222L, 0x526e29561d466aabL, 0x52a2d9d5d24c02abL, 0x52d7904b46df0355L, 0x530d745e1896c42bL, 0x534268bacf5e3a9bL, 0x537702e98335c941L, 0x53acc3a3e4033b92L, 0x53e1fa466e82053bL, 0x541678d80a22868aL, 0x544c170e0cab282cL, 0x54818e68c7eaf91cL, 0x54b5f202f9e5b763L, 0x54eb6e83b85f253bL, 0x55212512533b7745L, 0x55556e56e80a5516L, 0x558ac9eca20cea5cL, 0x55c0be33e5481279L, 0x55f4edc0de9a1718L, 0x562a293116409cdeL, 0x566059beade8620bL, 0x5694702e59627a8dL, 0x56c98c39efbb1931L, 0x56ffef486ba9df7dL, 0x5733f58d434a2baeL, 0x5768f2f0941cb699L, 0x579f2facb923e440L, 0x57d37dcbf3b66ea8L, 0x58085d3ef0a40a52L, 0x583e748eaccd0ce6L, 0x587308d92c002810L, 0x58a7cb0f77003214L, 0x58ddbdd354c03e99L, 0x591296a414f82720L, 0x59473c4d1a3630e8L, 0x597d0b6060c3bd21L, 0x59b2271c3c7a5635L, 0x59e6b0e34b98ebc2L, 0x5a1c5d1c1e7f26b3L, 0x5a51ba31930f7830L, 0x5a8628bdf7d3563cL, 0x5abbb2ed75c82bcaL, 0x5af14fd4699d1b5fL, 0x5b25a3c984046236L, 0x5b5b0cbbe5057ac4L, 0x5b90e7f56f236cbaL, 0x5bc521f2caec47e9L, 0x5bfa6a6f7da759e3L, 0x5c308285ae88982eL, 0x5c64a3271a2abe39L, 0x5c99cbf0e0b56dc8L, 0x5cd01f768c71649dL, 0x5d0427542f8dbdc4L, 0x5d3931293b712d35L, 0x5d6f7d738a4d7882L, 0x5da3ae6836706b51L, 0x5dd89a02440c8626L, 0x5e0ec082d50fa7afL, 0x5e433851c529c8ceL, 0x5e78066636743b01L, 0x5eae07ffc41149c1L, 0x5ee2c4ffda8ace19L, 0x5f17763fd12d819fL, 0x5f4d53cfc578e207L, 0x5f825461db6b8d44L, 
0x5fb6e97a52467095L, 0x5feca3d8e6d80cbbL, 0x6021e667904707f5L, 0x605660017458c9f2L, 0x608bf801d16efc6eL, 0x60c17b0122e55dc5L, 0x60f5d9c16b9eb536L, 0x612b5031c6866284L, 0x6161121f1c13fd92L, 0x619556a6e318fcf7L, 0x61caac509bdf3c34L, 0x6200abb2616b85a1L, 0x6234d69ef9c66709L, 0x626a0c46b83800cbL, 0x62a047ac3323007fL, 0x62d459973febc09fL, 0x63096ffd0fe6b0c6L, 0x633fcbfc53e05cf8L, 0x6373df7db46c3a1bL, 0x63a8d75d218748a2L, 0x63df0d3469e91acaL, 0x64136840c231b0beL, 0x64484250f2be1ceeL, 0x647e52e52f6da42aL, 0x64b2f3cf3da4869aL, 0x64e7b0c30d0da840L, 0x651d9cf3d0511251L, 0x655282186232ab72L, 0x6587229e7abf564fL, 0x65bceb46196f2be3L, 0x65f2130bcfe57b6eL, 0x662697cec3deda49L, 0x665c3dc274d690dbL, 0x6691a69989061a89L, 0x66c6103feb47a12bL, 0x66fb944fe6198976L, 0x67313cb1efcff5eaL, 0x67658bde6bc3f364L, 0x679aeed606b4f03dL, 0x67d0d545c4311626L, 0x68050a97353d5bb0L, 0x683a4d3d028cb29cL, 0x687070462197efa2L, 0x68a48c57a9fdeb8aL, 0x68d9af6d947d666cL, 0x69100da47cce6004L, 0x6944110d9c01f805L, 0x6979155103027606L, 0x69af5aa543c31387L, 0x69e398a74a59ec35L, 0x6a187ed11cf06742L, 0x6a4e9e85642c8112L, 0x6a8323135e9bd0abL, 0x6ab7ebd83642c4d6L, 0x6aede6ce43d3760cL, 0x6b22b040ea6429c7L, 0x6b575c5124fd3439L, 0x6b8d33656e3c8147L, 0x6bc2401f64e5d0cdL, 0x6bf6d0273e1f4500L, 0x6c2c84310da71640L, 0x6c61d29ea8886de8L, 0x6c96474652aa8962L, 0x6ccbd917e7552bbaL, 0x6d0167aef0953b54L, 0x6d35c19aacba8a29L, 0x6d6b320157e92cb4L, 0x6da0ff40d6f1bbf0L, 0x6dd53f110cae2aedL, 0x6e0a8ed54fd9b5a8L, 0x6e40994551e81189L, 0x6e74bf96a66215ebL, 0x6ea9ef7c4ffa9b66L, 0x6ee035adb1fca120L, 0x6f1443191e7bc967L, 0x6f4953df661abbc1L, 0x6f7fa8d73fa16ab2L, 0x6fb3c98687c4e2afL, 0x6fe8bbe829b61b5bL, 0x701eeae23423a232L, 0x705352cd6096455fL, 0x70882780b8bbd6b7L, 0x70be3160e6eacc64L, 0x70f2dedc9052bfbfL, 0x71279693b4676faeL, 0x715d7c38a1814b9aL, 0x71926da364f0cf40L, 0x71c7090c3e2d0310L, 0x71fccb4f4db843d4L, 0x7231ff1190932a65L, 0x72667ed5f4b7f4feL, 0x729c1e8b71e5f23dL, 0x72d19317272fb766L, 0x7305f7dcf0fba540L, 0x733b75d42d3a8e90L, 
0x737129a49c44991aL, 0x73a5740dc355bf60L, 0x73dad111342b2f39L, 0x7410c2aac09afd83L,
            0x7444f35570c1bce4L, 0x747a302accf22c1dL, 0x74b05e1ac0175b92L, 0x74e475a1701d3277L,
            0x75199309cc247f15L, 0x754ff7cc3f2d9edaL, 0x7583fadfa77c8348L, 0x75b8f997915ba41aL,
            0x75ef37fd75b28d21L, 0x762382fe698f9834L, 0x765863be03f37e41L, 0x768e7cad84f05dd2L,
            0x76c30dec73163aa3L, 0x76f7d1678fdbc94cL, 0x772dc5c173d2bb9fL, 0x77629b98e863b543L,
            0x7797427f227ca294L, 0x77cd131eeb1bcb39L, 0x78022bf352f15f04L, 0x7836b6f027adb6c5L,
            0x786c64ac31992476L, 0x78a1beeb9effb6caL, 0x78d62ea686bfa47cL, 0x790bba50286f8d9bL,
            0x794154721945b881L, 0x7975a98e9f9726a1L, 0x79ab13f2477cf049L, 0x79e0ec776cae162eL,
            0x7a15279547d99bb9L, 0x7a4a717a99d002a8L, 0x7a8086eca02201a9L, 0x7ab4a8a7c82a8213L,
            0x7ae9d2d1ba352298L, 0x7b2023c31461359fL, 0x7b542cb3d9798307L, 0x7b8937e0cfd7e3c8L,
            0x7bbf85d903cddcbaL, 0x7bf3b3a7a260a9f4L, 0x7c28a0918af8d472L, 0x7c5ec8b5edb7098eL,
            0x7c933d71b49265f9L, 0x7cc80cce21b6ff77L, 0x7cfe1001aa24bf55L, 0x7d32ca010a56f795L,
            0x7d677c814cecb57aL, 0x7d9d5ba1a027e2d9L, 0x7dd259450418edc7L, 0x7e06ef96451f2939L,
            0x7e3cab7bd666f388L, 0x7e71eb2d66005835L, 0x7ea665f8bf806e42L, 0x7edbff76ef6089d2L,
            0x7f117faa559c5623L, 0x7f45df94eb036bacL, 0x7f7b577a25c44697L, 0x7fb116ac579aac1fL,
            0x7fe55c576d815726L, 0x7ff0000000000000L };
    // (tail of rawBitsFor1_2e0To309 — expected bit patterns for "1.2eN" parses)

    /**
     * Parses {@code originalDoubleString} and checks both the exact IEEE-754
     * bit pattern of the result and its toString round-trip (compared
     * case-insensitively, since exponent letter case may vary).
     */
    private void doTestCompareRawBits(String originalDoubleString, long expectedRawBits,
            String expectedString) {
        double result;
        long rawBits;
        String convertedString;
        result = Double.parseDouble(originalDoubleString);
        rawBits = Double.doubleToLongBits(result);
        convertedString = new Double(result).toString();
        assertEquals(expectedRawBits, rawBits);
        assertEquals(expectedString.toLowerCase(Locale.US),
                convertedString.toLowerCase(Locale.US));
    }

    /**
     * Asserts that {@code dd} renders as {@code answer} via the static
     * Double.toString, via a boxed Double's doubleValue, and via
     * Double#toString.
     */
    private void test_toString(double dd, String answer) {
        assertEquals(answer, Double.toString(dd));
        Double d = new Double(dd);
        assertEquals(answer, Double.toString(d.doubleValue()));
        assertEquals(answer, d.toString());
    }

    /**
     *
java.lang.Double#Double(double) */ public void test_ConstructorD() { Double d = new Double(39089.88888888888888888888888888888888); assertEquals("Created incorrect double", 39089.88888888888888888888888888888888, d .doubleValue(), 0D); } /** * java.lang.Double#Double(java.lang.String) */ public void test_ConstructorLjava_lang_String() { Double d = new Double("39089.88888888888888888888888888888888"); assertEquals("Created incorrect double", 39089.88888888888888888888888888888888, d .doubleValue(), 0D); // Regression test for HARMONY-489 try { d = new Double("1E+-20"); fail("new Double(\"1E+-20\") should throw exception"); } catch (NumberFormatException e) { // expected } // Regression test for HARMONY-329 d = Double.parseDouble("-1.233999999999999965116738099630936817275852021384209929081813042837802886790127428328465579708849276001782791006814286802871737087810957327493372866733334925806221045495205250590286471187577636646208155890426896101636282423463443661040209738873506655844025580428394216030152374941053494694642722606658935546875E-112"); if (!System.getProperty("os.arch").equals("armv7")) { assertEquals("Failed to parse long string", -1.234E-112D, d.doubleValue(), 0D); } } /** * java.lang.Double#byteValue() */ public void test_byteValue() { Double d = new Double(1923311.47712); assertEquals("Returned incorrect byte value", (byte) -17, d.byteValue()); } /** * java.lang.Double#compareTo(java.lang.Double) * java.lang.Double#compare(double, double) */ public void test_compare() { if (System.getProperty("os.arch").equals("armv7")) { return; } double[] values = new double[] { Double.NEGATIVE_INFINITY, -Double.MAX_VALUE, -2d, -Double.MIN_VALUE, -0d, 0d, Double.MIN_VALUE, 2d, Double.MAX_VALUE, Double.POSITIVE_INFINITY, Double.NaN }; for (int i = 0; i < values.length; i++) { double d1 = values[i]; assertTrue("compare() should be equal: " + d1, Double.compare(d1, d1) == 0); Double D1 = new Double(d1); assertTrue("compareTo() should be equal: " + d1, D1.compareTo(D1) == 
0); for (int j = i + 1; j < values.length; j++) { double d2 = values[j]; assertTrue("compare() " + d1 + " should be less " + d2, Double.compare(d1, d2) == -1); assertTrue("compare() " + d2 + " should be greater " + d1, Double.compare(d2, d1) == 1); Double D2 = new Double(d2); assertTrue("compareTo() " + d1 + " should be less " + d2, D1.compareTo(D2) == -1); assertTrue("compareTo() " + d2 + " should be greater " + d1, D2.compareTo(D1) == 1); } } try { new Double(0.0D).compareTo(null); fail("No NPE"); } catch (NullPointerException e) { } } /** * java.lang.Double#doubleToLongBits(double) */ public void test_doubleToLongBitsD() { // Test for method long java.lang.Double.doubleToLongBits(double) Double d = new Double(Double.MAX_VALUE); long lbits = Double.doubleToLongBits(d.doubleValue()); double r = Double.longBitsToDouble(lbits); assertTrue("Bit conversion failed", d.doubleValue() == r); } /** * java.lang.Double#doubleToRawLongBits(double) */ public void test_doubleToRawLongBitsD() { long l = 0x7ff80000000004d2L; double d = Double.longBitsToDouble(l); assertTrue("Wrong raw bits", Double.doubleToRawLongBits(d) == l); } /** * java.lang.Double#doubleValue() */ public void test_doubleValue() { assertEquals("Incorrect double value returned", 999999999999999.9999999999999, new Double(999999999999999.9999999999999).doubleValue(), 0D); } /** * java.lang.Double#floatValue() */ public void test_floatValue() { // Test for method float java.lang.Double.floatValue() assertTrue( "Incorrect float value returned ", Math .abs(new Double(999999999999999.9999999999999d).floatValue() - 999999999999999.9999999999999f) < 1); } /** * java.lang.Double#hashCode() */ public void test_hashCode() { // Test for method int java.lang.Double.hashCode() for (int i = -1000; i < 1000; i++) { Double d = new Double(i); Double dd = new Double(i); assertTrue("Should not be identical ", d != dd); assertTrue("Should be equals 1 ", d.equals(dd)); assertTrue("Should be equals 2 ", dd.equals(d)); 
assertTrue("Should have identical values ", dd.doubleValue() == d.doubleValue());
        assertTrue("Invalid hash for equal but not identical doubles ",
                d.hashCode() == dd.hashCode());
    }
    // NOTE(review): "hasCode" below is a typo for "hashCode"; it is runtime
    // assertion text, so it is deliberately left unchanged here.
    assertEquals("Magic assumption hasCode (0.0) = 0 failed", 0, new Double(0.0).hashCode());
}

/**
 * java.lang.Double#intValue()
 */
public void test_intValue() {
    // Test for method int java.lang.Double.intValue()
    Double d = new Double(1923311.47712);
    assertEquals("Returned incorrect int value", 1923311, d.intValue());
}

/**
 * java.lang.Double#isInfinite()
 */
public void test_isInfinite() {
    // Test for method boolean java.lang.Double.isInfinite()
    assertTrue("NEGATIVE_INFINITY returned false",
            new Double(Double.NEGATIVE_INFINITY).isInfinite());
    assertTrue("POSITIVE_INFINITY returned false",
            new Double(Double.POSITIVE_INFINITY).isInfinite());
    assertTrue("Non infinite number returned true", !(new Double(1000).isInfinite()));
}

/**
 * java.lang.Double#isInfinite(double)
 */
public void test_isInfiniteD() {
    // Test for method boolean java.lang.Double.isInfinite(double)
    assertTrue(Double.isInfinite(Double.NEGATIVE_INFINITY));
    assertTrue(Double.isInfinite(Double.POSITIVE_INFINITY));
    assertFalse(Double.isInfinite(Double.MAX_VALUE));
    assertFalse(Double.isInfinite(Double.MIN_VALUE));
    assertFalse(Double.isInfinite(Double.NaN));
    assertFalse(Double.isInfinite(0.0));
}

/**
 * java.lang.Double#isFinite(double)
 */
public void test_isFiniteD() {
    // Test for method boolean java.lang.Double.isFinite(double)
    // isFinite is the exact complement of (isInfinite || isNaN).
    assertFalse(Double.isFinite(Double.NEGATIVE_INFINITY));
    assertFalse(Double.isFinite(Double.POSITIVE_INFINITY));
    assertTrue(Double.isFinite(Double.MAX_VALUE));
    assertTrue(Double.isFinite(Double.MIN_VALUE));
    assertFalse(Double.isFinite(Double.NaN));
    assertTrue(Double.isFinite(0.0));
}

/**
 * java.lang.Double#isNaN()
 */
public void test_isNaN() {
    // Test for method boolean java.lang.Double.isNaN()
    // 0.0 / 0.0 evaluates to NaN at compile time.
    Double d = new Double(0.0 / 0.0);
    assertTrue("NAN returned false", d.isNaN());
    d = new Double(0);
    assertTrue("Non NAN returned true", !d.isNaN());
}

/**
 * java.lang.Double#isNaN(double)
 */
public void test_isNaND() {
    // Test for method boolean java.lang.Double.isNaN(double)
    Double d = new Double(0.0 / 0.0);
    assertTrue("NAN check failed", Double.isNaN(d.doubleValue()));
}

/**
 * java.lang.Double#longBitsToDouble(long)
 */
public void test_longBitsToDoubleJ() {
    // Test for method double java.lang.Double.longBitsToDouble(long)
    Double d = new Double(Double.MAX_VALUE);
    long lbits = Double.doubleToLongBits(d.doubleValue());
    double r = Double.longBitsToDouble(lbits);
    assertTrue("Bit conversion failed", d.doubleValue() == r);
}

/**
 * java.lang.Double#longValue()
 */
public void test_longValue() {
    // Test for method long java.lang.Double.longValue()
    Double d = new Double(1923311.47712);
    assertEquals("Returned incorrect long value", 1923311, d.longValue());
}

/**
 * java.lang.Double#parseDouble(java.lang.String)
 */
public void test_parseDoubleLjava_lang_String() {
    if (System.getProperty("os.arch").equals("armv7")) {
        return;
    }
    // Values below the subnormal rounding threshold parse to 0.0; values at or
    // above it round up to Double.MIN_VALUE (half-way case of 4.9e-324).
    assertEquals("Incorrect double returned, expected zero.", 0.0,
            Double.parseDouble("2.4703282292062327208828439643411e-324"), 0.0);
    assertTrue("Incorrect double returned, expected minimum double.",
            Double.parseDouble("2.4703282292062327208828439643412e-324") == Double.MIN_VALUE);
    // Smoke-parse every decimal exponent in the representable range; any
    // failure here surfaces as an unexpected exception.
    for (int i = 324; i > 0; i--) {
        Double.parseDouble("3.4e-" + i);
    }
    for (int i = 0; i <= 309; i++) {
        Double.parseDouble("1.2e" + i);
    }
    /*
     * The first two cases and the last four cases have to placed outside
     * the loop due to the difference in the expected output string.
     */
    // Exact raw-bit comparisons against precomputed tables (fields declared
    // elsewhere in this class): rawBitsFor3_4en324ToN1 / rawBitsFor1_2e0To309.
    doTestCompareRawBits("3.4e-324", rawBitsFor3_4en324ToN1[0], "4.9e-324");
    doTestCompareRawBits("3.4e-323", rawBitsFor3_4en324ToN1[1], "3.5e-323");
    for (int i = 322; i > 3; i--) {
        String testString, expectedString;
        testString = expectedString = "3.4e-" + i;
        doTestCompareRawBits(testString, rawBitsFor3_4en324ToN1[324 - i], expectedString);
    }
    doTestCompareRawBits("3.4e-3", rawBitsFor3_4en324ToN1[321], "0.0034");
    doTestCompareRawBits("3.4e-2", rawBitsFor3_4en324ToN1[322], "0.034");
    doTestCompareRawBits("3.4e-1", rawBitsFor3_4en324ToN1[323], "0.34");
    doTestCompareRawBits("3.4e-0", rawBitsFor3_4en324ToN1[324], "3.4");
    doTestCompareRawBits("1.2e0", rawBitsFor1_2e0To309[0], "1.2");
    doTestCompareRawBits("1.2e1", rawBitsFor1_2e0To309[1], "12.0");
    doTestCompareRawBits("1.2e2", rawBitsFor1_2e0To309[2], "120.0");
    doTestCompareRawBits("1.2e3", rawBitsFor1_2e0To309[3], "1200.0");
    doTestCompareRawBits("1.2e4", rawBitsFor1_2e0To309[4], "12000.0");
    doTestCompareRawBits("1.2e5", rawBitsFor1_2e0To309[5], "120000.0");
    doTestCompareRawBits("1.2e6", rawBitsFor1_2e0To309[6], "1200000.0");
    for (int i = 7; i <= 308; i++) {
        String testString, expectedString;
        testString = expectedString = "1.2e" + i;
        doTestCompareRawBits(testString, rawBitsFor1_2e0To309[i], expectedString);
    }
    doTestCompareRawBits("1.2e309", rawBitsFor1_2e0To309[309], "Infinity");
    // Inputs with far more digits than a double can hold must round correctly.
    doTestCompareRawBits(
            "111222333444555666777888999000111222333444555666777888999000111222333444555666777888999000111222333444555666777888999000111222333444555666777888999000111222333444555666777888999000111222333444555666777888999000111222333444555666777888999000111222333444555666777888999000111222333444555666777888999000.92233720368547758079223372036854775807",
            0x7e054218c295e43fL, "1.1122233344455567E299");
    doTestCompareRawBits(
            "-111222333444555666777888999000111222333444555666777888999000111222333444555666777888999000111222333444555666777888999000111222333444555666777888999000111222333444555666777888999000111222333444555666777888999000111222333444555666777888999000111222333444555666777888999000111222333444555666777888999000.92233720368547758079223372036854775807",
            0xfe054218c295e43fL, "-1.1122233344455567E299");
    // Near-ulp neighbours: the middle value of each triple must round to the
    // same bits as its smaller neighbour.
    doTestCompareRawBits("1.234123412431233E107", 0x562ae7a25fe706ebL, "1.234123412431233E107");
    doTestCompareRawBits("1.2341234124312331E107", 0x562ae7a25fe706ecL, "1.2341234124312331E107");
    doTestCompareRawBits("1.2341234124312332E107", 0x562ae7a25fe706ecL, "1.2341234124312331E107");
    doTestCompareRawBits("-1.234123412431233E107", 0xd62ae7a25fe706ebL, "-1.234123412431233E107");
    doTestCompareRawBits("-1.2341234124312331E107", 0xd62ae7a25fe706ecL, "-1.2341234124312331E107");
    doTestCompareRawBits("-1.2341234124312332E107", 0xd62ae7a25fe706ecL, "-1.2341234124312331E107");
    // Java spec requires Double.toString to have exponent corresponding to the
    // binary value, in this case is 22, since the binary value is very slightly
    // less than 10^23. Therefore the correct decimal representation is
    // 9.999999999999999e22.
    // See ag/831934
    doTestCompareRawBits("1e23", 0x44b52d02c7e14af6L, "9.999999999999999e22");
    doTestCompareRawBits("1e22", 0x4480F0CF064DD592L, "1.0e22");
    /*
     * These particular tests verify that the extreme boundary conditions
     * are converted correctly.
     */
    doTestCompareRawBits("0.0e-309", 0L, "0.0");
    doTestCompareRawBits("-0.0e-309", 0x8000000000000000L, "-0.0");
    doTestCompareRawBits("0.0e309", 0L, "0.0");
    doTestCompareRawBits("-0.0e309", 0x8000000000000000L, "-0.0");
    doTestCompareRawBits("0.1e309", 0x7fe1ccf385ebc8a0L, "1.0e308");
    doTestCompareRawBits("0.2e309", 0x7ff0000000000000L, "Infinity");
    doTestCompareRawBits("65e-325", 1L, "4.9e-324");
    doTestCompareRawBits("1000e-326", 2L, "1.0e-323");
    doTestCompareRawBits("4.0e-306", 0x86789e3750f791L, "4.0e-306");
    doTestCompareRawBits("2.22507e-308", 0xffffe2e8159d0L, "2.22507e-308");
    doTestCompareRawBits(
            "111222333444555666777888999000111228999000.92233720368547758079223372036854775807",
            0x48746da623f1dd8bL, "1.1122233344455567E41");
    doTestCompareRawBits(
            "-111222333444555666777888999000111228999000.92233720368547758079223372036854775807",
            0xc8746da623f1dd8bL, "-1.1122233344455567E41");
    doTestCompareRawBits(
            "1234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890.987654321098765432109876543210987654321098765432109876543210987654321098765432109876543210987654321098765432109876543210987654321098765432109876543210987654321098765432109876543210987654321098765432109876543210987654321098765432109876543210987654321098765432109876543210987654321098765432109876543210",
            0x54820fe0ba17f469L, "1.2345678901234567E99");
    doTestCompareRawBits(
            "-1234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890.987654321098765432109876543210987654321098765432109876543210987654321098765432109876543210987654321098765432109876543210987654321098765432109876543210987654321098765432109876543210987654321098765432109876543210987654321098765432109876543210987654321098765432109876543210987654321098765432109876543210",
            0xd4820fe0ba17f469L, "-1.2345678901234567E99");
    doTestCompareRawBits(
            "179769313486231570000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000.01",
            0x7fefffffffffffffL, "1.7976931348623157E308");
    doTestCompareRawBits(
            "-179769313486231570000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000.01",
            0xffefffffffffffffL, "-1.7976931348623157E308");
    // Anything past the overflow threshold must saturate to +/-Infinity.
    doTestCompareRawBits(
            "1112223334445556667778889990001112223334445556667778889990001112223334445556667778889990001112223334445556667778889990001112223334445556667778889990001112223334445556667778889990001112223334445556667778889990001112223334445556667778889990001112223334445556667778889990001112223334445556667778889990001234567890",
            0x7ff0000000000000L, "Infinity");
    doTestCompareRawBits(
            "-1112223334445556667778889990001112223334445556667778889990001112223334445556667778889990001112223334445556667778889990001112223334445556667778889990001112223334445556667778889990001112223334445556667778889990001112223334445556667778889990001112223334445556667778889990001112223334445556667778889990001234567890",
            0xfff0000000000000L, "-Infinity");
    doTestCompareRawBits(
            "179769313486231590000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000.01",
            0x7ff0000000000000L, "Infinity");
    doTestCompareRawBits(
            "-179769313486231590000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000.01",
            0xfff0000000000000L, "-Infinity");
    // Very small magnitudes written in plain (non-exponent) decimal notation.
    doTestCompareRawBits(
            "0.00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000017976931348623157",
            0x2b392a32afcc661eL, "1.7976931348623157E-100");
    doTestCompareRawBits(
            "-0.00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000017976931348623157",
            0xab392a32afcc661eL, "-1.7976931348623157E-100");
    doTestCompareRawBits(
            "0.0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000017976931348623157",
            0x1b3432f0cb68e61L, "1.7976931348623157E-300");
    doTestCompareRawBits(
            "-0.0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000017976931348623157",
            0x81b3432f0cb68e61L, "-1.7976931348623157E-300");
    // Subnormal range: precision degrades as the value approaches MIN_VALUE.
    doTestCompareRawBits(
            "0.00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000017976931348623157",
            0x2117b590b942L, "1.79769313486234E-310");
    doTestCompareRawBits(
            "-0.00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000017976931348623157",
            0x80002117b590b942L, "-1.79769313486234E-310");
    doTestCompareRawBits(
            "0.000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000017976931348623157",
            0xe37L, "1.798E-320");
    doTestCompareRawBits(
            "-0.000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000017976931348623157",
            0x8000000000000e37L, "-1.798E-320");
    doTestCompareRawBits(
            "0.00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001",
            0x2L, "1.0E-323");
    doTestCompareRawBits(
            "-0.00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001",
            0x8000000000000002L, "-1.0E-323");
    // Smallest positive subnormal: a long noisy digit tail must still round
    // to exactly one ulp (raw bits 0x1 / sign-bit + 0x1).
    doTestCompareRawBits(
            "0.0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000055595409854908458349204328908234982349050934129878452378432452458968024357823490509341298784523784324524589680243578234905093412987845237843245245896802435782349050934129878452378432452458968024357868024357823490509341298784523784324524589680243578234905093412987845237843245245896802435786802435782349050934129878452378432452458968024357823490509341298784523784324524589680243578",
            0x1L, "4.9E-324");
    doTestCompareRawBits(
            "-0.0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000055595409854908458349204328908234982349050934129878452378432452458968024357823490509341298784523784324524589680243578234905093412987845237843245245896802435782349050934129878452378432452458968024357868024357823490509341298784523784324524589680243578234905093412987845237843245245896802435786802435782349050934129878452378432452458968024357823490509341298784523784324524589680243578",
            0x8000000000000001L, "-4.9E-324");
}

/**
 * java.lang.Double#parseDouble(java.lang.String)
 */
public void test_parseDouble_LString_Illegal() {
    // Malformed hex float syntax must be rejected with NumberFormatException.
    try {
        Double.parseDouble("0.0p0D");
        fail("Should throw NumberFormatException.");
    } catch (NumberFormatException e) {
        // expected
    }

    try {
        Double.parseDouble("+0x.p1d");
        fail("Should throw NumberFormatException.");
    } catch (NumberFormatException e) {
        // expected
    }

    try {
        Double.parseDouble("0Xg.gp1D");
        fail("Should throw NumberFormatException.");
    } catch (NumberFormatException e) {
        // expected
    }
try {
        // Hex float without an exponent value is malformed.
        Double.parseDouble("-0x1.1p");
        fail("Should throw NumberFormatException.");
    } catch (NumberFormatException e) {
        // expected
    }

    try {
        // Embedded whitespace is not permitted inside the number.
        Double.parseDouble("+0x 1.1 p2d");
        fail("Should throw NumberFormatException.");
    } catch (NumberFormatException e) {
        // expected
    }

    try {
        // Missing the leading "0" of the "0x" prefix.
        Double.parseDouble("x1.1p2d");
        fail("Should throw NumberFormatException.");
    } catch (NumberFormatException e) {
        // expected
    }

    try {
        // Sign must precede the "0x" prefix, not follow it.
        Double.parseDouble(" 0x-2.1p2");
        fail("Should throw NumberFormatException.");
    } catch (NumberFormatException e) {
        // expected
    }

    try {
        // Non-decimal exponent digits are malformed.
        Double.parseDouble(" 0x2.1pad");
        fail("Should throw NumberFormatException.");
    } catch (NumberFormatException e) {
        // expected
    }

    try {
        // Whitespace between "p" and the exponent is malformed.
        Double.parseDouble(" 0x111.222p 22d");
        fail("Should throw NumberFormatException.");
    } catch (NumberFormatException e) {
        // expected
    }
}

/**
 * java.lang.Double#parseDouble(java.lang.String)
 */
public void test_parseDouble_LString_FromHexString() {
    // Valid hexadecimal floating-point inputs, including leading/trailing
    // whitespace, redundant zeros, and extreme exponents.
    double actual;
    double expected;

    actual = Double.parseDouble("0x0.0p0D");
    assertEquals("Returned incorrect value", 0.0d, actual, 0.0D);

    actual = Double.parseDouble("0xa.ap+9d");
    assertEquals("Returned incorrect value", 5440.0d, actual, 0.0D);

    actual = Double.parseDouble("+0Xb.10ap8");
    assertEquals("Returned incorrect value", 2832.625d, actual, 0.0D);

    actual = Double.parseDouble("-0X.a0P2D");
    assertEquals("Returned incorrect value", -2.5d, actual, 0.0D);

    actual = Double.parseDouble("\r 0x22.1p2d \t");
    assertEquals("Returned incorrect value", 136.25d, actual, 0.0D);

    actual = Double.parseDouble("0x1.0p-1");
    assertEquals("Returned incorrect value", 0.5, actual, 0.0D);

    actual = Double.parseDouble("0x00000000000000000000000000000000001.0p-1");
    assertEquals("Returned incorrect value", 0.5, actual, 0.0D);

    actual = Double.parseDouble("0x1.0p-00000000000000000000000000001");
    assertEquals("Returned incorrect value", 0.5, actual, 0.0D);

    actual = Double.parseDouble("0x.100000000000000000000000000000000p1");
    assertEquals("Returned incorrect value", 0.125, actual, 0.0D);

    actual = Double.parseDouble("0x0.0p999999999999999999999999999999999999999999999999999999999999999");
    assertEquals("Returned incorrect value", 0.0, actual, 0.0D);

    actual = Double.parseDouble("0xf1.0p9999999999999999999999999999999999999999999999999999999999999999");
    assertEquals("Returned incorrect value", Double.POSITIVE_INFINITY, actual, 0.0D);

    actual = Double.parseDouble("0xffffffffffffffffffffffffffffffffffff.ffffffffffffffffffffffffffffffffffffffffffffffp1");
    expected = Double.longBitsToDouble(0x4900000000000000L);
    assertEquals("Returned incorrect value", expected, actual, 0.0D);

    actual = Double.parseDouble("0x0.000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001p1600");
    expected = Double.longBitsToDouble(0x7f30000000000000L);
    assertEquals("Returned incorrect value", expected, actual, 0.0D);

    actual = Double.parseDouble("0x0.0p-999999999999999999999999999999999999999999999999999999");
    assertEquals("Returned incorrect value", 0.0, actual, 0.0D);

    actual = Double.parseDouble("0xf1.0p-9999999999999999999999999999999999999999999999999999999999999999");
    assertEquals("Returned incorrect value", 0.0, actual, 0.0D);

    actual = Double.parseDouble("0x10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000p-1600");
    expected = Double.longBitsToDouble(0xf0000000000000L);
    assertEquals("Returned incorrect value", expected, actual, 0.0D);

    // Exponents at and beyond Long.MAX_VALUE must saturate, not overflow.
    actual = Double.parseDouble("0x1.p9223372036854775807");
    assertEquals("Returned incorrect value", Double.POSITIVE_INFINITY, actual, 0.0D);

    actual = Double.parseDouble("0x1.p9223372036854775808");
    assertEquals("Returned incorrect value", Double.POSITIVE_INFINITY, actual, 0.0D);

    actual = Double.parseDouble("0x10.p9223372036854775808");
    assertEquals("Returned incorrect value", Double.POSITIVE_INFINITY, actual, 0.0D);

    actual =
Double.parseDouble("0xabcd.ffffffffp+2000");
    assertEquals("Returned incorrect value", Double.POSITIVE_INFINITY, actual, 0.0D);

    // Exponents at and beyond Long.MIN_VALUE must underflow to zero.
    actual = Double.parseDouble("0x1.p-9223372036854775808");
    assertEquals("Returned incorrect value", 0.0, actual, 0.0D);

    actual = Double.parseDouble("0x1.p-9223372036854775809");
    assertEquals("Returned incorrect value", 0.0, actual, 0.0D);

    actual = Double.parseDouble("0x.1p-9223372036854775809");
    assertEquals("Returned incorrect value", 0.0, actual, 0.0D);

    actual = Double.parseDouble("0xabcd.ffffffffffffffp-2000");
    assertEquals("Returned incorrect value", 0.0, actual, 0.0D);
}

/**
 * java.lang.Double#parseDouble(java.lang.String)
 */
public void test_parseDouble_LString_NormalPositiveExponent() {
    // Generated hex-float inputs "0x<part>.<part>0123456789abcdefp<part>" with
    // part = i * 11; expected raw bits were precomputed for each index.
    long[] expecteds = {
            0x3f323456789abcdfL, 0x40e111012345678aL, 0x41a1110091a2b3c5L, 0x4259998091a2b3c5L,
            0x4311110048d159e2L, 0x43c5554048d159e2L, 0x4479998048d159e2L, 0x452dddc048d159e2L,
            0x45e111002468acf1L, 0x469333202468acf1L, 0x4751011001234568L, 0x4802112101234568L,
            0x48b3213201234568L, 0x4964314301234568L, 0x4a15415401234568L, 0x4ac6516501234568L,
            0x4b77617601234568L, 0x4c28718701234568L, 0x4cd9819801234568L, 0x4d9049048091a2b4L,
            0x4e4101100091a2b4L, 0x4ef189188091a2b4L, 0x4fa211210091a2b4L, 0x505299298091a2b4L,
            0x510321320091a2b4L, 0x51b3a93a8091a2b4L, 0x526431430091a2b4L, 0x5314b94b8091a2b4L,
            0x53c841840091a2b4L, 0x5478c98c8091a2b4L, 0x552981980091a2b4L, 0x55da09a08091a2b4L,
            0x568a91a90091a2b4L, 0x573b19b18091a2b4L, 0x57eba1ba0091a2b4L, 0x589c29c28091a2b4L,
            0x594cb1cb0091a2b4L, 0x5a001d01c048d15aL, 0x5ab061060048d15aL, 0x5b60a50a4048d15aL,
            0x5c1101100048d15aL, 0x5cc145144048d15aL, 0x5d7189188048d15aL, 0x5e21cd1cc048d15aL,
            0x5ed211210048d15aL, 0x5f8255254048d15aL, 0x603419418048d15aL, 0x60e45d45c048d15aL,
            0x6194a14a0048d15aL, 0x6244e54e4048d15aL, 0x62f541540048d15aL, 0x63a585584048d15aL,
            0x6455c95c8048d15aL, 0x65060d60c048d15aL, 0x65b651650048d15aL, 0x666815814048d15aL,
            0x671859858048d15aL, 0x67c89d89c048d15aL, 0x6878e18e0048d15aL, 0x692925924048d15aL,
            0x69d981980048d15aL, 0x6a89c59c4048d15aL, 0x6b3a09a08048d15aL, 0x6bea4da4c048d15aL,
            0x6c9c11c10048d15aL, 0x6d4c55c54048d15aL, 0x6dfc99c98048d15aL, 0x6eacddcdc048d15aL,
            0x6f5d21d20048d15aL, 0x700d65d64048d15aL, 0x70bdc1dc0048d15aL, 0x716e05e04048d15aL,
            0x721e49e48048d15aL, 0x72d00700602468adL, 0x73802902802468adL, 0x74304b04a02468adL,
            0x74e06d06c02468adL, 0x75908f08e02468adL, 0x7640b10b002468adL, 0x76f0d30d202468adL,
            0x77a10110002468adL, 0x78512312202468adL, 0x79020520402468adL, 0x79b22722602468adL,
            0x7a624924802468adL, 0x7b126b26a02468adL, 0x7bc28d28c02468adL, 0x7c72af2ae02468adL,
            0x7d22d12d002468adL, 0x7dd2f32f202468adL, 0x7e832132002468adL, 0x7f40011001012345L,
            0x7ff0000000000000L, 0x7ff0000000000000L, 0x7ff0000000000000L, 0x7ff0000000000000L,
            0x7ff0000000000000L, 0x7ff0000000000000L, 0x7ff0000000000000L, 0x7ff0000000000000L,
            0x7ff0000000000000L, 0x7ff0000000000000L, 0x7ff0000000000000L, 0x7ff0000000000000L,
            0x7ff0000000000000L, 0x7ff0000000000000L, 0x7ff0000000000000L, 0x7ff0000000000000L,
            0x7ff0000000000000L, 0x7ff0000000000000L, 0x7ff0000000000000L, 0x7ff0000000000000L,
            0x7ff0000000000000L };
    for (int i = 0; i < expecteds.length; i++) {
        int part = i * 11;
        String inputString = "0x" + part + "." + part + "0123456789abcdefp" + part;
        double actual = Double.parseDouble(inputString);
        double expected = Double.longBitsToDouble(expecteds[i]);
        String expectedString = "0x" + Long.toHexString(Double.doubleToLongBits(expected));
        String actualString = "0x" + Long.toHexString(Double.doubleToLongBits(actual));
        String errorMsg = i + "th input string is:<" + inputString
                + ">.The expected result should be:<" + expectedString
                + ">, but was: <" + actualString + ">. ";
        assertEquals(errorMsg, expected, actual, 0.0D);
    }
}

/**
 * java.lang.Double#parseDouble(java.lang.String)
 */
public void test_parseDouble_LString_NormalNegativeExponent() {
    // Same generated inputs as the positive-exponent test, but with "p-";
    // the tail of the table underflows to subnormals and finally to zero.
    long[] expecteds = {
            0x3f323456789abcdfL, 0x3f8111012345678aL, 0x3ee1110091a2b3c5L, 0x3e39998091a2b3c5L,
            0x3d91110048d159e2L, 0x3ce5554048d159e2L, 0x3c39998048d159e2L, 0x3b8dddc048d159e2L,
            0x3ae111002468acf1L, 0x3a3333202468acf1L, 0x3991011001234568L, 0x38e2112101234568L,
            0x3833213201234568L, 0x3784314301234568L, 0x36d5415401234568L, 0x3626516501234568L,
            0x3577617601234568L, 0x34c8718701234568L, 0x3419819801234568L, 0x337049048091a2b4L,
            0x32c101100091a2b4L, 0x321189188091a2b4L, 0x316211210091a2b4L, 0x30b299298091a2b4L,
            0x300321320091a2b4L, 0x2f53a93a8091a2b4L, 0x2ea431430091a2b4L, 0x2df4b94b8091a2b4L,
            0x2d4841840091a2b4L, 0x2c98c98c8091a2b4L, 0x2be981980091a2b4L, 0x2b3a09a08091a2b4L,
            0x2a8a91a90091a2b4L, 0x29db19b18091a2b4L, 0x292ba1ba0091a2b4L, 0x287c29c28091a2b4L,
            0x27ccb1cb0091a2b4L, 0x27201d01c048d15aL, 0x267061060048d15aL, 0x25c0a50a4048d15aL,
            0x251101100048d15aL, 0x246145144048d15aL, 0x23b189188048d15aL, 0x2301cd1cc048d15aL,
            0x225211210048d15aL, 0x21a255254048d15aL, 0x20f419418048d15aL, 0x20445d45c048d15aL,
            0x1f94a14a0048d15aL, 0x1ee4e54e4048d15aL, 0x1e3541540048d15aL, 0x1d8585584048d15aL,
            0x1cd5c95c8048d15aL, 0x1c260d60c048d15aL, 0x1b7651650048d15aL, 0x1ac815814048d15aL,
            0x1a1859858048d15aL, 0x19689d89c048d15aL, 0x18b8e18e0048d15aL, 0x180925924048d15aL,
            0x175981980048d15aL, 0x16a9c59c4048d15aL, 0x15fa09a08048d15aL, 0x154a4da4c048d15aL,
            0x149c11c10048d15aL, 0x13ec55c54048d15aL, 0x133c99c98048d15aL, 0x128cddcdc048d15aL,
            0x11dd21d20048d15aL, 0x112d65d64048d15aL, 0x107dc1dc0048d15aL, 0xfce05e04048d15aL,
            0xf1e49e48048d15aL, 0xe700700602468adL, 0xdc02902802468adL, 0xd104b04a02468adL,
            0xc606d06c02468adL, 0xbb08f08e02468adL, 0xb00b10b002468adL, 0xa50d30d202468adL,
            0x9a10110002468adL, 0x8f12312202468adL, 0x8420520402468adL, 0x7922722602468adL,
            0x6e24924802468adL, 0x6326b26a02468adL, 0x5828d28c02468adL, 0x4d2af2ae02468adL,
            0x422d12d002468adL, 0x372f32f202468adL, 0x2c32132002468adL, 0x220011001012345L,
            0x170121012012345L, 0xc0231023012345L, 0x10341034012345L, 0x208a208a024L,
            0x41584158L, 0x83388L, 0x108L, 0x0L, 0x0L, 0x0L, 0x0L, 0x0L, 0x0L, 0x0L, 0x0L,
            0x0L, 0x0L, 0x0L, 0x0L, 0x0L, 0x0L };
    for (int i = 0; i < expecteds.length; i++) {
        int part = i * 11;
        String inputString = "0x" + part + "." + part + "0123456789abcdefp-" + part;
        double actual = Double.parseDouble(inputString);
        double expected = Double.longBitsToDouble(expecteds[i]);
        String expectedString = "0x" + Long.toHexString(Double.doubleToLongBits(expected));
        String actualString = "0x" + Long.toHexString(Double.doubleToLongBits(actual));
        String errorMsg = i + "th input string is:<" + inputString
                + ">.The expected result should be:<" + expectedString
                + ">, but was: <" + actualString + ">. ";
        assertEquals(errorMsg, expected, actual, 0.0D);
    }
}

/**
 * java.lang.Double#parseDouble(java.lang.String)
 */
public void test_parseDouble_LString_MaxNormalBoundary() {
    // Rounding behaviour right at the MAX_VALUE <-> Infinity boundary for
    // both signs: sticky digits below the half-way point keep MAX_VALUE,
    // at or above it the result overflows to Infinity.
    long[] expecteds = {
            0x7fefffffffffffffL, 0x7fefffffffffffffL, 0x7fefffffffffffffL, 0x7fefffffffffffffL,
            0x7fefffffffffffffL, 0x7fefffffffffffffL, 0x7fefffffffffffffL,
            0x7ff0000000000000L, 0x7ff0000000000000L, 0x7ff0000000000000L, 0x7ff0000000000000L,
            0x7ff0000000000000L, 0x7ff0000000000000L, 0x7ff0000000000000L, 0x7ff0000000000000L,
            0xffefffffffffffffL, 0xffefffffffffffffL, 0xffefffffffffffffL, 0xffefffffffffffffL,
            0xffefffffffffffffL, 0xffefffffffffffffL, 0xffefffffffffffffL,
            0xfff0000000000000L, 0xfff0000000000000L, 0xfff0000000000000L, 0xfff0000000000000L,
            0xfff0000000000000L, 0xfff0000000000000L, 0xfff0000000000000L, 0xfff0000000000000L };
    String[] inputs = {
            "0x1.fffffffffffffp1023",
            "0x1.fffffffffffff000000000000000000000000001p1023",
            "0x1.fffffffffffff1p1023",
            "0x1.fffffffffffff100000000000000000000000001p1023",
            "0x1.fffffffffffff1fffffffffffffffffffffffffffffffffffffffffffffp1023",
            "0x1.fffffffffffff7p1023",
"0x1.fffffffffffff700000000000000000000000001p1023",
            "0x1.fffffffffffff8p1023",
            "0x1.fffffffffffff800000000000000000000000001p1023",
            "0x1.fffffffffffff8fffffffffffffffffffffffffffffffffffffffffffffp1023",
            "0x1.fffffffffffff9p1023",
            "0x1.fffffffffffff900000000000000000000000001p1023",
            "0x1.ffffffffffffffp1023",
            "0x1.ffffffffffffff00000000000000000000000001p1023",
            "0x1.fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffp1023",
            "-0x1.fffffffffffffp1023",
            "-0x1.fffffffffffff000000000000000000000000001p1023",
            "-0x1.fffffffffffff1p1023",
            "-0x1.fffffffffffff100000000000000000000000001p1023",
            "-0x1.fffffffffffff1fffffffffffffffffffffffffffffffffffffffffffffp1023",
            "-0x1.fffffffffffff7p1023",
            "-0x1.fffffffffffff700000000000000000000000001p1023",
            "-0x1.fffffffffffff8p1023",
            "-0x1.fffffffffffff800000000000000000000000001p1023",
            "-0x1.fffffffffffff8fffffffffffffffffffffffffffffffffffffffffffffp1023",
            "-0x1.fffffffffffff9p1023",
            "-0x1.fffffffffffff900000000000000000000000001p1023",
            "-0x1.ffffffffffffffp1023",
            "-0x1.ffffffffffffff00000000000000000000000001p1023",
            "-0x1.fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffp1023" };
    for (int i = 0; i < inputs.length; i++) {
        double actual = Double.parseDouble(inputs[i]);
        double expected = Double.longBitsToDouble(expecteds[i]);
        String expectedString = "0x" + Long.toHexString(Double.doubleToLongBits(expected));
        String actualString = "0x" + Long.toHexString(Double.doubleToLongBits(actual));
        String errorMsg = i + "th input string is:<" + inputs[i]
                + ">.The expected result should be:<" + expectedString
                + ">, but was: <" + actualString + ">. ";
        assertEquals(errorMsg, expected, actual, 0.0D);
    }
}

/**
 * java.lang.Double#parseDouble(java.lang.String)
 */
public void test_parseDouble_LString_MinNormalBoundary() {
    // Rounding right at the smallest-normal boundary (2^-1022): sticky bits
    // decide between the exact boundary value and the next representable ulp.
    long[] expecteds = {
            0x10000000000000L, 0x10000000000000L, 0x10000000000000L, 0x10000000000000L,
            0x10000000000000L, 0x10000000000000L, 0x10000000000000L, 0x10000000000000L,
            0x10000000000001L, 0x10000000000001L, 0x10000000000001L, 0x10000000000001L,
            0x10000000000001L, 0x10000000000001L, 0x10000000000001L,
            0x8010000000000000L, 0x8010000000000000L, 0x8010000000000000L, 0x8010000000000000L,
            0x8010000000000000L, 0x8010000000000000L, 0x8010000000000000L, 0x8010000000000000L,
            0x8010000000000001L, 0x8010000000000001L, 0x8010000000000001L, 0x8010000000000001L,
            0x8010000000000001L, 0x8010000000000001L, 0x8010000000000001L };
    String[] inputs = {
            "0x1.0p-1022",
            "0x1.00000000000001p-1022",
            "0x1.000000000000010000000000000000001p-1022",
            "0x1.00000000000001fffffffffffffffffffffffffffffffffp-1022",
            "0x1.00000000000007p-1022",
            "0x1.000000000000070000000000000000001p-1022",
            "0x1.00000000000007fffffffffffffffffffffffffffffffffp-1022",
            "0x1.00000000000008p-1022",
            "0x1.000000000000080000000000000000001p-1022",
            "0x1.00000000000008fffffffffffffffffffffffffffffffffp-1022",
            "0x1.00000000000009p-1022",
            "0x1.000000000000090000000000000000001p-1022",
            "0x1.00000000000009fffffffffffffffffffffffffffffffffp-1022",
            "0x1.0000000000000fp-1022",
            "0x1.0000000000000ffffffffffffffffffffffffffffffffffp-1022",
            "-0x1.0p-1022",
            "-0x1.00000000000001p-1022",
            "-0x1.000000000000010000000000000000001p-1022",
            "-0x1.00000000000001fffffffffffffffffffffffffffffffffp-1022",
            "-0x1.00000000000007p-1022",
            "-0x1.000000000000070000000000000000001p-1022",
            "-0x1.00000000000007fffffffffffffffffffffffffffffffffp-1022",
            "-0x1.00000000000008p-1022",
            "-0x1.000000000000080000000000000000001p-1022",
            "-0x1.00000000000008fffffffffffffffffffffffffffffffffp-1022",
            "-0x1.00000000000009p-1022",
            "-0x1.000000000000090000000000000000001p-1022",
            "-0x1.00000000000009fffffffffffffffffffffffffffffffffp-1022",
            "-0x1.0000000000000fp-1022",
            "-0x1.0000000000000ffffffffffffffffffffffffffffffffffp-1022" };
    for (int i = 0; i < inputs.length; i++) {
        double actual = Double.parseDouble(inputs[i]);
        double expected = Double.longBitsToDouble(expecteds[i]);
        String expectedString = "0x" + Long.toHexString(Double.doubleToLongBits(expected));
        String actualString = "0x" + Long.toHexString(Double.doubleToLongBits(actual));
        String errorMsg = i + "th input string is:<" + inputs[i]
                + ">.The expected result should be:<" + expectedString
                + ">, but was: <" + actualString + ">. ";
        assertEquals(errorMsg, expected, actual, 0.0D);
    }
}

/**
 * java.lang.Double#parseDouble(java.lang.String)
 */
public void test_parseDouble_LString_MaxSubNormalBoundary() {
    // Rounding at the largest-subnormal <-> smallest-normal boundary.
    long[] expecteds = {
            0xfffffffffffffL, 0xfffffffffffffL, 0xfffffffffffffL, 0xfffffffffffffL,
            0xfffffffffffffL, 0xfffffffffffffL, 0xfffffffffffffL,
            0x10000000000000L, 0x10000000000000L, 0x10000000000000L, 0x10000000000000L,
            0x10000000000000L, 0x10000000000000L, 0x10000000000000L, 0x10000000000000L,
            0x800fffffffffffffL, 0x800fffffffffffffL, 0x800fffffffffffffL, 0x800fffffffffffffL,
            0x800fffffffffffffL, 0x800fffffffffffffL, 0x800fffffffffffffL,
            0x8010000000000000L, 0x8010000000000000L, 0x8010000000000000L, 0x8010000000000000L,
            0x8010000000000000L, 0x8010000000000000L, 0x8010000000000000L, 0x8010000000000000L };
    String[] inputs = {
            "0x0.fffffffffffffp-1022",
            "0x0.fffffffffffff00000000000000000000000000000000001p-1022",
            "0x0.fffffffffffff1p-1022",
            "0x0.fffffffffffff10000000000000000000000000000000001p-1022",
            "0x0.fffffffffffff1ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffp-1022",
            "0x0.fffffffffffff7p-1022",
            "0x0.fffffffffffff7ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffp-1022",
            "0x0.fffffffffffff8p-1022",
            "0x0.fffffffffffff80000000000000000000000000000000001p-1022",
            "0x0.fffffffffffff8ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffp-1022",
"0x0.fffffffffffff9p-1022", "0x0.fffffffffffff9ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffp-1022", "0x0.ffffffffffffffp-1022", "0x0.ffffffffffffff0000000000000000000000000000000001p-1022", "0x0.ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffp-1022", "-0x0.fffffffffffffp-1022", "-0x0.fffffffffffff00000000000000000000000000000000001p-1022", "-0x0.fffffffffffff1p-1022", "-0x0.fffffffffffff10000000000000000000000000000000001p-1022", "-0x0.fffffffffffff1ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffp-1022", "-0x0.fffffffffffff7p-1022", "-0x0.fffffffffffff7ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffp-1022", "-0x0.fffffffffffff8p-1022", "-0x0.fffffffffffff80000000000000000000000000000000001p-1022", "-0x0.fffffffffffff8ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffp-1022", "-0x0.fffffffffffff9p-1022", "-0x0.fffffffffffff9ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffp-1022", "-0x0.ffffffffffffffp-1022", "-0x0.ffffffffffffff0000000000000000000000000000000001p-1022", "-0x0.ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffp-1022" }; for (int i = 0; i < inputs.length; i++) { double actual = Double.parseDouble(inputs[i]); double expected = Double.longBitsToDouble(expecteds[i]); String expectedString = "0x" + Long.toHexString(Double.doubleToLongBits(expected)); String actualString = "0x" + Long.toHexString(Double.doubleToLongBits(actual)); String errorMsg = i + "th input string is:<" + inputs[i] + ">.The expected result should be:<" + expectedString + ">, but was: <" + actualString + ">. 
"; assertEquals(errorMsg, expected, actual, 0.0D); } } /** * java.lang.Double#parseDouble(java.lang.String) */ public void test_parseDouble_LString_MinSubNormalBoundary() { long[] expecteds = { 0x1L, 0x1L, 0x2L, 0x1L, 0x1L, 0x1L, 0x2L, 0x2L, 0x2L, 0x2L, 0x2L, 0x2L, 0x2L, 0x2L, 0x2L, 0x8000000000000001L, 0x8000000000000001L, 0x8000000000000002L, 0x8000000000000001L, 0x8000000000000001L, 0x8000000000000001L, 0x8000000000000002L, 0x8000000000000002L, 0x8000000000000002L, 0x8000000000000002L, 0x8000000000000002L, 0x8000000000000002L, 0x8000000000000002L, 0x8000000000000002L, 0x8000000000000002L }; String[] inputs = { "0x0.0000000000001p-1022", "0x0.00000000000010000000000000000001p-1022", "0x0.0000000000001fffffffffffffffffffffffffffffffffp-1022", "0x0.00000000000017p-1022", "0x0.000000000000170000000000000000001p-1022", "0x0.00000000000017fffffffffffffffffffffffffffffffffp-1022", "0x0.00000000000018p-1022", "0x0.000000000000180000000000000000001p-1022", "0x0.00000000000018fffffffffffffffffffffffffffffffffp-1022", "0x0.00000000000019p-1022", "0x0.000000000000190000000000000000001p-1022", "0x0.00000000000019fffffffffffffffffffffffffffffffffp-1022", "0x0.0000000000001fp-1022", "0x0.0000000000001f0000000000000000001p-1022", "0x0.0000000000001ffffffffffffffffffffffffffffffffffp-1022", "-0x0.0000000000001p-1022", "-0x0.00000000000010000000000000000001p-1022", "-0x0.0000000000001fffffffffffffffffffffffffffffffffp-1022", "-0x0.00000000000017p-1022", "-0x0.000000000000170000000000000000001p-1022", "-0x0.00000000000017fffffffffffffffffffffffffffffffffp-1022", "-0x0.00000000000018p-1022", "-0x0.000000000000180000000000000000001p-1022", "-0x0.00000000000018fffffffffffffffffffffffffffffffffp-1022", "-0x0.00000000000019p-1022", "-0x0.000000000000190000000000000000001p-1022", "-0x0.00000000000019fffffffffffffffffffffffffffffffffp-1022", "-0x0.0000000000001fp-1022", "-0x0.0000000000001f0000000000000000001p-1022", "-0x0.0000000000001ffffffffffffffffffffffffffffffffffp-1022" }; for 
(int i = 0; i < inputs.length; i++) { double actual = Double.parseDouble(inputs[i]); double expected = Double.longBitsToDouble(expecteds[i]); String expectedString = "0x" + Long.toHexString(Double.doubleToLongBits(expected)); String actualString = "0x" + Long.toHexString(Double.doubleToLongBits(actual)); String errorMsg = i + "th input string is:<" + inputs[i] + ">.The expected result should be:<" + expectedString + ">, but was: <" + actualString + ">. "; assertEquals(errorMsg, expected, actual, 0.0D); } } /** * java.lang.Double#parseDouble(java.lang.String) */ public void test_parseDouble_LString_ZeroBoundary() { long[] expecteds = { 0x0L, 0x0L, 0x0L, 0x1L, 0x1L, 0x1L, 0x1L, 0x1L, 0x1L, 0x8000000000000000L, 0x8000000000000000L, 0x8000000000000000L, 0x8000000000000001L, 0x8000000000000001L, 0x8000000000000001L, 0x8000000000000001L, 0x8000000000000001L, 0x8000000000000001L }; String[] inputs = { "0x0.00000000000004p-1022", "0x0.00000000000007ffffffffffffffffffffffp-1022", "0x0.00000000000008p-1022", "0x0.000000000000080000000000000000001p-1022", "0x0.00000000000008fffffffffffffffffffffffffffffffp-1022", "0x0.00000000000009p-1022", "0x0.000000000000090000000000000000001p-1022", "0x0.00000000000009fffffffffffffffffffffffffffffffffp-1022", "0x0.0000000000000fffffffffffffffffffffffffffffffffffp-1022", "-0x0.00000000000004p-1022", "-0x0.00000000000007ffffffffffffffffffffffp-1022", "-0x0.00000000000008p-1022", "-0x0.000000000000080000000000000000001p-1022", "-0x0.00000000000008fffffffffffffffffffffffffffffffp-1022", "-0x0.00000000000009p-1022", "-0x0.000000000000090000000000000000001p-1022", "-0x0.00000000000009fffffffffffffffffffffffffffffffffp-1022", "-0x0.0000000000000fffffffffffffffffffffffffffffffffffp-1022" }; for (int i = 0; i < inputs.length; i++) { double actual = Double.parseDouble(inputs[i]); double expected = Double.longBitsToDouble(expecteds[i]); String expectedString = "0x" + Long.toHexString(Double.doubleToLongBits(expected)); String actualString = "0x" + 
Long.toHexString(Double.doubleToLongBits(actual)); String errorMsg = i + "th input string is:<" + inputs[i] + ">.The expected result should be:<" + expectedString + ">, but was: <" + actualString + ">. "; assertEquals(errorMsg, expected, actual, 0.0D); } } /** * java.lang.Double#shortValue() */ public void test_shortValue() { // Test for method short java.lang.Double.shortValue() Double d = new Double(1923311.47712); assertEquals("Returned incorrect short value", 22767, d.shortValue()); } /** * java.lang.Double#toString() */ public void test_toString() { // Test for method java.lang.String java.lang.Double.toString() test_toString(1.7976931348623157E308, "1.7976931348623157E308"); test_toString(5.0E-4, "5.0E-4"); } /** * java.lang.Double#toString(double) */ public void test_toStringD() { // Test for method java.lang.String java.lang.Double.toString(double) test_toString(1.7976931348623157E308, "1.7976931348623157E308"); test_toString(1.0 / 0.0, "Infinity"); test_toString(0.0 / 0.0, "NaN"); test_toString(-1.0 / 0.0, "-Infinity"); double d; d = Double.longBitsToDouble(0x470fffffffffffffL); test_toString(d, "2.0769187434139308E34"); d = Double.longBitsToDouble(0x4710000000000000L); test_toString(d, "2.076918743413931E34"); d = Double.longBitsToDouble(0x470000000000000aL); test_toString(d, "1.0384593717069678E34"); d = Double.longBitsToDouble(0x470000000000000bL); test_toString(d, "1.038459371706968E34"); d = Double.longBitsToDouble(0x4700000000000017L); test_toString(d, "1.0384593717069708E34"); d = Double.longBitsToDouble(0x4700000000000018L); test_toString(d, "1.038459371706971E34"); d = Double.longBitsToDouble(0x4700000000000024L); test_toString(d, "1.0384593717069738E34"); d = Double.longBitsToDouble(0x4700000000000025L); test_toString(d, "1.038459371706974E34"); d = Double.longBitsToDouble(0x4700000000000031L); test_toString(d, "1.0384593717069768E34"); d = Double.longBitsToDouble(0x4700000000000032L); test_toString(d, "1.038459371706977E34"); d = 
Double.longBitsToDouble(0x470000000000003eL); test_toString(d, "1.0384593717069798E34"); d = Double.longBitsToDouble(0x470000000000003fL); test_toString(d, "1.03845937170698E34"); d = Double.longBitsToDouble(0x7e00000000000003L); test_toString(d, "8.371160993642719E298"); d = Double.longBitsToDouble(0x7e00000000000004L); test_toString(d, "8.37116099364272E298"); d = Double.longBitsToDouble(0x7e00000000000008L); test_toString(d, "8.371160993642728E298"); d = Double.longBitsToDouble(0x7e00000000000009L); test_toString(d, "8.37116099364273E298"); d = Double.longBitsToDouble(0x7e00000000000013L); test_toString(d, "8.371160993642749E298"); d = Double.longBitsToDouble(0x7e00000000000014L); test_toString(d, "8.37116099364275E298"); d = Double.longBitsToDouble(0x7e00000000000023L); test_toString(d, "8.371160993642779E298"); d = Double.longBitsToDouble(0x7e00000000000024L); test_toString(d, "8.37116099364278E298"); d = Double.longBitsToDouble(0x7e0000000000002eL); test_toString(d, "8.371160993642799E298"); d = Double.longBitsToDouble(0x7e0000000000002fL); test_toString(d, "8.3711609936428E298"); d = Double.longBitsToDouble(0xda00000000000001L); test_toString(d, "-3.3846065602060736E125"); d = Double.longBitsToDouble(0xda00000000000002L); test_toString(d, "-3.384606560206074E125"); d = Double.longBitsToDouble(0xda00000000000005L); test_toString(d, "-3.3846065602060766E125"); d = Double.longBitsToDouble(0xda00000000000006L); test_toString(d, "-3.384606560206077E125"); d = Double.longBitsToDouble(0xda00000000000009L); test_toString(d, "-3.3846065602060796E125"); d = Double.longBitsToDouble(0xda0000000000000aL); test_toString(d, "-3.38460656020608E125"); d = Double.longBitsToDouble(0xda0000000000000dL); test_toString(d, "-3.3846065602060826E125"); d = Double.longBitsToDouble(0xda0000000000000eL); test_toString(d, "-3.384606560206083E125"); } /** * java.lang.Double#valueOf(java.lang.String) */ public void test_valueOfLjava_lang_String() { // Test for method java.lang.Double // 
java.lang.Double.valueOf(java.lang.String) assertTrue("Incorrect double returned", Math.abs(Double.valueOf("999999999999.999") .doubleValue() - 999999999999.999d) < 1); try { Double.valueOf(null); fail("Expected Double.valueOf(null) to throw NPE."); } catch (NullPointerException ex) { // expected } try { Double.valueOf(""); fail("Expected Double.valueOf(\"\") to throw NFE"); } catch (NumberFormatException e) { // expected } Double pi = Double.valueOf("3.141592654"); assertEquals(3.141592654, pi.doubleValue(), 0D); Double posZero = Double.valueOf("+0.0"); Double negZero = Double.valueOf("-0.0"); assertFalse("Doubletest0", posZero.equals(negZero)); // Tests for double values by name. Double expectedNaN = new Double(Double.NaN); Double posNaN = Double.valueOf("NaN"); assertTrue("Doubletest1", posNaN.equals(expectedNaN)); Double posNaNSigned = Double.valueOf("+NaN"); assertTrue("Doubletest2", posNaNSigned.equals(expectedNaN)); Double negNaNSigned = Double.valueOf("-NaN"); assertTrue("Doubletest3", negNaNSigned.equals(expectedNaN)); Double posInfinite = Double.valueOf("Infinity"); assertTrue("Doubletest4", posInfinite.equals(new Double(Double.POSITIVE_INFINITY))); Double posInfiniteSigned = Double.valueOf("+Infinity"); assertTrue("Doubletest5", posInfiniteSigned .equals(new Double(Double.POSITIVE_INFINITY))); Double negInfiniteSigned = Double.valueOf("-Infinity"); assertTrue("Doubletest6", negInfiniteSigned .equals(new Double(Double.NEGATIVE_INFINITY))); } /** * java.lang.Double#compareTo(java.lang.Double) * java.lang.Double#compare(double, double) */ public void test_compareToLjava_lang_Double() { if (System.getProperty("os.arch").equals("armv7")) { return; } // A selection of double values in ascending order. 
double[] values = new double[] { Double.NEGATIVE_INFINITY, -Double.MAX_VALUE, -2d, -Double.MIN_VALUE, -0d, 0d, Double.MIN_VALUE, 2d, Double.MAX_VALUE, Double.POSITIVE_INFINITY, Double.NaN }; for (int i = 0; i < values.length; i++) { double d1 = values[i]; // Test that each value compares equal to itself; and each object is // equal to another object like itself. assertTrue("Assert 0: compare() should be equal: " + d1, Double.compare(d1, d1) == 0); Double objDouble = new Double(d1); assertTrue("Assert 1: compareTo() should be equal: " + d1, objDouble .compareTo(objDouble) == 0); // Test that the Double-defined order is respected for (int j = i + 1; j < values.length; j++) { double d2 = values[j]; assertTrue("Assert 2: compare() " + d1 + " should be less " + d2, Double .compare(d1, d2) == -1); assertTrue("Assert 3: compare() " + d2 + " should be greater " + d1, Double .compare(d2, d1) == 1); Double D2 = new Double(d2); assertTrue("Assert 4: compareTo() " + d1 + " should be less " + d2, objDouble .compareTo(D2) == -1); assertTrue("Assert 5: compareTo() " + d2 + " should be greater " + d1, D2 .compareTo(objDouble) == 1); } } try { new Double(0.0D).compareTo(null); fail("No NPE"); } catch (NullPointerException e) { } } /** * java.lang.Double#equals(java.lang.Object) */ @SuppressWarnings("EqualsNaN") public void test_equalsLjava_lang_Object() { Double d1 = new Double(87654321.12345d); Double d2 = new Double(87654321.12345d); Double d3 = new Double(0.0002f); assertTrue("Assert 0: Equality test failed", d1.equals(d2) && !(d1.equals(d3))); assertTrue("Assert 2: NaN should not be == Nan", Double.NaN != Double.NaN); assertTrue("Assert 3: NaN should not be == Nan", new Double(Double.NaN) .equals(new Double(Double.NaN))); assertTrue("Assert 4: -0d should be == 0d", 0d == -0d); assertTrue("Assert 5: -0d should not be equals() 0d", !new Double(0d) .equals(new Double(-0d))); Double dmax = new Double(Double.MAX_VALUE); Double dmax1 = new Double(Double.MAX_VALUE); 
assertTrue("Equality test failed", dmax.equals(dmax1) && !(dmax.equals(new Object()))); } /** * java.lang.Double#toHexString(double) */ public void test_toHexStringF() { // the follow values come from the Double Javadoc/Spec assertEquals("0x0.0p0", Double.toHexString(0.0D)); assertEquals("-0x0.0p0", Double.toHexString(-0.0D)); assertEquals("0x1.0p0", Double.toHexString(1.0D)); assertEquals("-0x1.0p0", Double.toHexString(-1.0D)); assertEquals("0x1.0p1", Double.toHexString(2.0D)); assertEquals("0x1.8p1", Double.toHexString(3.0D)); assertEquals("0x1.0p-1", Double.toHexString(0.5D)); assertEquals("0x1.0p-2", Double.toHexString(0.25D)); assertEquals("0x1.fffffffffffffp1023", Double.toHexString(Double.MAX_VALUE)); assertEquals("0x0.0000000000001p-1022", Double.toHexString(Double.MIN_VALUE)); // test edge cases assertEquals("NaN", Double.toHexString(Double.NaN)); assertEquals("-Infinity", Double.toHexString(Double.NEGATIVE_INFINITY)); assertEquals("Infinity", Double.toHexString(Double.POSITIVE_INFINITY)); // test various numbers assertEquals("-0x1.da8p6", Double.toHexString(-118.625D)); assertEquals("0x1.2957874cccccdp23", Double.toHexString(9743299.65D)); assertEquals("0x1.2957874cccccdp23", Double.toHexString(9743299.65000D)); assertEquals("0x1.2957874cccf63p23", Double.toHexString(9743299.650001234D)); assertEquals("0x1.700d1061d3333p33", Double.toHexString(12349743299.65000D)); // test HARMONY-2132 assertEquals("0x1.01p10", Double.toHexString(0x1.01p10)); } /** * java.lang.Double#valueOf(double) */ public void test_valueOfD() { assertEquals(new Double(Double.MIN_VALUE), Double.valueOf(Double.MIN_VALUE)); assertEquals(new Double(Double.MAX_VALUE), Double.valueOf(Double.MAX_VALUE)); assertEquals(new Double(0), Double.valueOf(0)); int s = -128; while (s < 128) { assertEquals(new Double(s), Double.valueOf(s)); assertEquals(new Double(s + 0.1D), Double.valueOf(s + 0.1D)); s++; } } /** * {@link java.lang.Double#MAX_EXPONENT} * @since 1.6 */ public void test_MAX_EXPONENT() { 
assertTrue("Wrong value of java.lang.Double.MAX_EXPONENT", Double.MAX_EXPONENT == 1023); assertTrue("Wrong value of java.lang.Double.MAX_EXPONENT", Double.MAX_EXPONENT == Math.getExponent(Double.MAX_VALUE)); } /** * {@link java.lang.Double#MIN_EXPONENT} * @since 1.6 */ public void test_MIN_EXPONENT() { assertTrue("Wrong value of java.lang.Double.MIN_EXPONENT", Double.MIN_EXPONENT == -1022); assertTrue("Wrong value of java.lang.Double.MIN_EXPONENT", Double.MIN_EXPONENT == Math.getExponent(Double.MIN_NORMAL)); } /** * {@link java.lang.Double#MIN_NORMAL} * @since 1.6 */ public void test_MIN_NORMAL() { assertTrue("Wrong value of java.lang.Double.MIN_NORMAL", Double.MIN_NORMAL == 0x1.0p-1022); assertTrue("Wrong value of java.lang.Double.MIN_NORMAL", Double.MIN_NORMAL == Double .longBitsToDouble(0x0010000000000000L)); assertTrue("Wrong value of java.lang.Double.MIN_NORMAL", Double.MIN_NORMAL == 2.2250738585072014E-308); } }
package ca.uhn.fhir.test.utilities;

/*-
 * #%L
 * HAPI FHIR Test Utilities
 * %%
 * Copyright (C) 2014 - 2022 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.context.BaseRuntimeChildDefinition;
import ca.uhn.fhir.context.BaseRuntimeElementCompositeDefinition;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseReference;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.ICompositeType;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;

import javax.annotation.Nullable;
import java.util.function.Consumer;

import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.matchesPattern;

/**
 * This is an experiment to see if we can make test data creation for storage unit tests a bit more readable.
 * <p>
 * The {@code with*} methods return {@link Consumer} "modifiers" that mutate a resource
 * reflectively via the FHIR model (so they work across FHIR versions); the
 * {@code create*}/{@code build*} methods assemble a resource and apply the modifiers.
 * Implementors supply {@link #doCreateResource}, {@link #doUpdateResource} and
 * {@link #getFhirContext()}.
 */
@SuppressWarnings({"unchecked", "ConstantConditions"})
public interface ITestDataBuilder {

	/**
	 * Set Patient.active = true
	 */
	default Consumer<IBaseResource> withActiveTrue() {
		return t -> __setPrimitiveChild(getFhirContext(), t, "active", "boolean", "true");
	}

	/**
	 * Set Patient.active = false
	 */
	default Consumer<IBaseResource> withActiveFalse() {
		return t -> __setPrimitiveChild(getFhirContext(), t, "active", "boolean", "false");
	}

	// Adds a new HumanName to [Resource].name with only the family part populated.
	default Consumer<IBaseResource> withFamily(String theFamily) {
		return t -> {
			// Build the primitive family value, wrap it in a HumanName, attach to the resource.
			IPrimitiveType<?> family = (IPrimitiveType<?>) getFhirContext().getElementDefinition("string").newInstance();
			family.setValueAsString(theFamily);

			BaseRuntimeElementCompositeDefinition<?> humanNameDef = (BaseRuntimeElementCompositeDefinition<?>) getFhirContext().getElementDefinition("HumanName");
			ICompositeType humanName = (ICompositeType) humanNameDef.newInstance();
			humanNameDef.getChildByName("family").getMutator().addValue(humanName, family);

			RuntimeResourceDefinition resourceDef = getFhirContext().getResourceDefinition(t.getClass());
			resourceDef.getChildByName("name").getMutator().addValue(t, humanName);
		};
	}

	/**
	 * Set Patient.birthdate
	 */
	default Consumer<IBaseResource> withBirthdate(String theBirthdate) {
		// NOTE(review): the "dateTime" type hint passed here is never used by
		// __setPrimitiveChild (see below) — the actual element type comes from the model.
		return t -> __setPrimitiveChild(getFhirContext(), t, "birthDate", "dateTime", theBirthdate);
	}

	/**
	 * Set Observation.status
	 */
	default Consumer<IBaseResource> withStatus(String theStatus) {
		return t -> __setPrimitiveChild(getFhirContext(), t, "status", "code", theStatus);
	}

	/**
	 * Set Observation.effectiveDate
	 */
	default Consumer<IBaseResource> withEffectiveDate(String theDate) {
		return t -> __setPrimitiveChild(getFhirContext(), t, "effectiveDateTime", "dateTime", theDate);
	}

	/**
	 * Set [Resource].identifier.system and [Resource].identifier.value
	 */
	default Consumer<IBaseResource> withIdentifier(String theSystem, String theValue) {
		return t -> {
			IPrimitiveType<?> system = (IPrimitiveType<?>) getFhirContext().getElementDefinition("uri").newInstance();
			system.setValueAsString(theSystem);

			IPrimitiveType<?> value = (IPrimitiveType<?>) getFhirContext().getElementDefinition("string").newInstance();
			value.setValueAsString(theValue);

			BaseRuntimeElementCompositeDefinition<?> identifierDef = (BaseRuntimeElementCompositeDefinition<?>) getFhirContext().getElementDefinition("Identifier");
			ICompositeType identifier = (ICompositeType) identifierDef.newInstance();
			identifierDef.getChildByName("system").getMutator().addValue(identifier, system);
			identifierDef.getChildByName("value").getMutator().addValue(identifier, value);

			RuntimeResourceDefinition resourceDef = getFhirContext().getResourceDefinition(t.getClass());
			resourceDef.getChildByName("identifier").getMutator().addValue(t, identifier);
		};
	}

	/**
	 * Set Organization.name
	 */
	default Consumer<IBaseResource> withName(String theStatus) {
		return t -> __setPrimitiveChild(getFhirContext(), t, "name", "string", theStatus);
	}

	// Sets an explicit resource id. Restricted to alphanumerics/hyphen so the id is
	// safe to use in URLs; createResource() treats a non-blank id as "update, not create".
	default Consumer<IBaseResource> withId(String theId) {
		return t -> {
			assertThat(theId, matchesPattern("[a-zA-Z0-9-]+"));
			t.setId(theId);
		};
	}

	// Copies an existing id (version stripped) onto the resource.
	default Consumer<IBaseResource> withId(IIdType theId) {
		return t -> t.setId(theId.toUnqualifiedVersionless());
	}

	// Adds a Meta.tag with the given system/code; display is set to the code as well.
	default Consumer<IBaseResource> withTag(String theSystem, String theCode) {
		return t -> t.getMeta().addTag().setSystem(theSystem).setCode(theCode).setDisplay(theCode);
	}

	default IIdType createObservation(Consumer<IBaseResource>... theModifiers) {
		return createResource("Observation", theModifiers);
	}

	default IBaseResource buildPatient(Consumer<IBaseResource>... theModifiers) {
		return buildResource("Patient", theModifiers);
	}

	default IIdType createPatient(Consumer<IBaseResource>... theModifiers) {
		return createResource("Patient", theModifiers);
	}

	default IIdType createOrganization(Consumer<IBaseResource>... theModifiers) {
		return createResource("Organization", theModifiers);
	}

	// Builds the resource, then updates it if a modifier assigned an id, otherwise creates it.
	default IIdType createResource(String theResourceType, Consumer<IBaseResource>... theModifiers) {
		IBaseResource resource = buildResource(theResourceType, theModifiers);

		if (isNotBlank(resource.getIdElement().getValue())) {
			return doUpdateResource(resource);
		} else {
			return doCreateResource(resource);
		}
	}

	// Instantiates an empty resource of the given type and applies the modifiers in order.
	default IBaseResource buildResource(String theResourceType, Consumer<IBaseResource>... theModifiers) {
		IBaseResource resource = getFhirContext().getResourceDefinition(theResourceType).newInstance();
		for (Consumer<IBaseResource> next : theModifiers) {
			next.accept(resource);
		}
		return resource;
	}

	// Sets [Resource].subject to a Reference to the given id; no-op when theSubject is null.
	default Consumer<IBaseResource> withSubject(@Nullable IIdType theSubject) {
		return t -> {
			if (theSubject != null) {
				IBaseReference reference = (IBaseReference) getFhirContext().getElementDefinition("Reference").newInstance();
				reference.setReference(theSubject.getValue());

				RuntimeResourceDefinition resourceDef = getFhirContext().getResourceDefinition(t.getClass());
				resourceDef.getChildByName("subject").getMutator().addValue(t, reference);
			}
		};
	}

	// Adds a CodeableConcept with a single coding (system/code) to [Resource].code.
	// A null system or code is passed through to the terser as-is.
	default Consumer<IBaseResource> withObservationCode(@Nullable String theSystem, @Nullable String theCode) {
		return t -> {
			ICompositeType codeableConcept = (ICompositeType) getFhirContext().getElementDefinition("CodeableConcept").newInstance();
			IBase coding = getFhirContext().newTerser().addElement(codeableConcept, "coding");
			getFhirContext().newTerser().addElement(coding, "system", theSystem);
			getFhirContext().newTerser().addElement(coding, "code", theCode);

			RuntimeResourceDefinition resourceDef = getFhirContext().getResourceDefinition(t.getClass());
			resourceDef.getChildByName("code").getMutator().addValue(t, codeableConcept);
		};
	}

	// Adds a Reference to Observation.hasMember; no-op when theHasMember is null.
	default Consumer<IBaseResource> withObservationHasMember(@Nullable IIdType theHasMember) {
		return t -> {
			if (theHasMember != null) {
				IBaseReference reference = (IBaseReference) getFhirContext().getElementDefinition("Reference").newInstance();
				reference.setReference(theHasMember.getValue());

				RuntimeResourceDefinition resourceDef = getFhirContext().getResourceDefinition(t.getClass());
				resourceDef.getChildByName("hasMember").getMutator().addValue(t, reference);
			}
		};
	}

	// Sets [Resource].managingOrganization to a Reference; no-op when the id is null.
	default Consumer<IBaseResource> withOrganization(@Nullable IIdType theHasMember) {
		return t -> {
			if (theHasMember != null) {
				IBaseReference reference = (IBaseReference) getFhirContext().getElementDefinition("Reference").newInstance();
				reference.setReference(theHasMember.getValue());

				RuntimeResourceDefinition resourceDef = getFhirContext().getResourceDefinition(t.getClass());
				resourceDef.getChildByName("managingOrganization").getMutator().addValue(t, reference);
			}
		};
	}

	/**
	 * Users of this API must implement this method
	 */
	IIdType doCreateResource(IBaseResource theResource);

	/**
	 * Users of this API must implement this method
	 */
	IIdType doUpdateResource(IBaseResource theResource);

	/**
	 * Users of this API must implement this method
	 */
	FhirContext getFhirContext();

	/**
	 * Name chosen to avoid potential for conflict. This is an internal API to this interface.
	 * <p>
	 * Sets a primitive child element on the target resource. Note that
	 * {@code theElementType} is currently unused: the element's runtime type is
	 * resolved from the model via {@code getChildByName(theElementName)} (the local
	 * variable is named {@code booleanType} for historical reasons — it holds any
	 * primitive type, not just boolean).
	 */
	static void __setPrimitiveChild(FhirContext theFhirContext, IBaseResource theTarget, String theElementName, String theElementType, String theValue) {
		RuntimeResourceDefinition def = theFhirContext.getResourceDefinition(theTarget.getClass());
		BaseRuntimeChildDefinition activeChild = def.getChildByName(theElementName);
		IPrimitiveType<?> booleanType = (IPrimitiveType<?>) activeChild.getChildByName(theElementName).newInstance();
		booleanType.setValueAsString(theValue);
		activeChild.getMutator().addValue(theTarget, booleanType);
	}
}
/* * Copyright 2015 Adaptris Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.adaptris.core.jms; import static com.adaptris.core.jms.JmsUtils.closeQuietly; import static com.adaptris.interlok.junit.scaffolding.jms.JmsConfig.DEFAULT_PAYLOAD; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.util.Map; import java.util.concurrent.TimeUnit; import javax.jms.Destination; import javax.jms.Message; import javax.jms.MessageListener; import javax.jms.Queue; import javax.jms.QueueReceiver; import javax.jms.Session; import javax.jms.TextMessage; import javax.jms.Topic; import javax.jms.TopicSubscriber; import org.apache.activemq.ActiveMQConnection; import org.apache.activemq.ActiveMQQueueSender; import org.apache.activemq.ActiveMQSession; import org.apache.activemq.ActiveMQTopicPublisher; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.adaptris.core.AdaptrisMessage; import com.adaptris.core.Service; import com.adaptris.core.ServiceList; import com.adaptris.core.StandaloneConsumer; import com.adaptris.core.StandaloneProducer; import com.adaptris.core.StandaloneRequestor; import com.adaptris.core.jms.activemq.EmbeddedActiveMq; import 
com.adaptris.core.stubs.MockMessageListener; import com.adaptris.interlok.junit.scaffolding.services.ExampleServiceCase; import com.adaptris.util.TimeInterval; public abstract class BasicJmsProducerCase extends com.adaptris.interlok.junit.scaffolding.jms.JmsProducerCase { protected abstract DefinedJmsProducer createProducer(String dest); protected abstract JmsConsumerImpl createConsumer(String dest); protected abstract Loopback createLoopback(EmbeddedActiveMq mq, String dest); private static final Logger logger = LoggerFactory.getLogger(BasicJmsProducerCase.class); private static EmbeddedActiveMq activeMqBroker; @BeforeClass public static void setUpAll() throws Exception { activeMqBroker = new EmbeddedActiveMq(); activeMqBroker.start(); } @AfterClass public static void tearDownAll() throws Exception { if(activeMqBroker != null) activeMqBroker.destroy(); } // INTERLOK-2121 @Test public void testProducerSession_Invalided() throws Exception { DefinedJmsProducer producer = createProducer(getName()); StandaloneProducer standaloneProducer = new StandaloneProducer(activeMqBroker.getJmsConnection(), producer); try { start(standaloneProducer); AdaptrisMessage msg = createMessage(); standaloneProducer.doService(msg); assertNotNull(producer.producerSession()); stop(standaloneProducer); assertNull(producer.producerSession()); } finally { stop(standaloneProducer); } } @Test public void testProduce_CaptureOutgoingMessageDetails() throws Exception { DefinedJmsProducer producer = createProducer(getName()); producer.setCaptureOutgoingMessageDetails(true); StandaloneProducer standaloneProducer = new StandaloneProducer(activeMqBroker.getJmsConnection(), producer); try { AdaptrisMessage msg = createMessage(); ExampleServiceCase.execute(standaloneProducer, msg); Map<Object, Object> objectMetadata = msg.getObjectHeaders(); assertTrue(objectMetadata.containsKey(Message.class.getCanonicalName() + "." 
+ JmsConstants.JMS_MESSAGE_ID)); assertTrue(objectMetadata.containsKey(Message.class.getCanonicalName() + "." + JmsConstants.JMS_DESTINATION)); assertTrue(objectMetadata.containsKey(Message.class.getCanonicalName() + "." + JmsConstants.JMS_PRIORITY)); assertTrue(objectMetadata.containsKey(Message.class.getCanonicalName() + "." + JmsConstants.JMS_TIMESTAMP)); } finally { stop(standaloneProducer); } } @Test public void testProduceAndConsume_IntegerAcknowledgementMode_IntegerDeliveryMode() throws Exception { JmsConsumerImpl consumer = createConsumer(getName()); consumer.setAcknowledgeMode(String.valueOf(AcknowledgeMode.Mode.AUTO_ACKNOWLEDGE.acknowledgeMode())); StandaloneConsumer standaloneConsumer = new StandaloneConsumer(activeMqBroker.getJmsConnection(), consumer); MockMessageListener jms = new MockMessageListener(); standaloneConsumer.registerAdaptrisMessageListener(jms); DefinedJmsProducer producer = createProducer(getName()); producer.setDeliveryMode(String.valueOf(com.adaptris.core.jms.DeliveryMode.Mode.PERSISTENT.deliveryMode())); StandaloneProducer standaloneProducer = new StandaloneProducer(activeMqBroker.getJmsConnection(), producer); execute(standaloneConsumer, standaloneProducer, createMessage(), jms); assertMessages(jms, 1); } @Test public void testSetProducerSessionFactory() throws Exception { DefinedJmsProducer producer = createProducer(getName()); assertEquals(DefaultProducerSessionFactory.class, producer.getSessionFactory().getClass()); try { producer.setSessionFactory(null); fail(); } catch (IllegalArgumentException e) { } TimedInactivityProducerSessionFactory psf = new TimedInactivityProducerSessionFactory(); producer.setSessionFactory(psf); assertEquals(psf, producer.getSessionFactory()); } @Test public void testDefaultSessionFactory() throws Exception { JmsConsumerImpl consumer = createConsumer(getName()); consumer.setAcknowledgeMode("AUTO_ACKNOWLEDGE"); StandaloneConsumer standaloneConsumer = new 
StandaloneConsumer(activeMqBroker.getJmsConnection(), consumer); MockMessageListener jms = new MockMessageListener(); DefinedJmsProducer producer = createProducer(getName()); producer.setSessionFactory(new DefaultProducerSessionFactory()); StandaloneProducer sp = new StandaloneProducer(activeMqBroker.getJmsConnection(), producer); standaloneConsumer.registerAdaptrisMessageListener(jms); try { start(standaloneConsumer, sp); sp.doService(createMessage()); sp.doService(createMessage()); waitForMessages(jms, 2); assertMessages(jms, 2); } finally { stop(sp, standaloneConsumer); } } @Test public void testResolvableEndpoint() throws Exception { String resolveString = "%message{endpoint}"; AdaptrisMessage adaptrisMessage1 = createMessage(); adaptrisMessage1.addMessageHeader("endpoint", getName()); AdaptrisMessage adaptrisMessage2 = createMessage(); adaptrisMessage2.addMessageHeader("endpoint", getName()); JmsConsumerImpl consumer = createConsumer(getName()); consumer.setAcknowledgeMode("AUTO_ACKNOWLEDGE"); StandaloneConsumer standaloneConsumer = new StandaloneConsumer(activeMqBroker.getJmsConnection(), consumer); MockMessageListener jms = new MockMessageListener(); DefinedJmsProducer producer = createProducer(resolveString); producer.setSessionFactory(new DefaultProducerSessionFactory()); StandaloneProducer sp = new StandaloneProducer(activeMqBroker.getJmsConnection(), producer); standaloneConsumer.registerAdaptrisMessageListener(jms); try { start(standaloneConsumer, sp); sp.doService(adaptrisMessage1); sp.doService(adaptrisMessage2); waitForMessages(jms, 2); assertMessages(jms, 2); } finally { stop(sp, standaloneConsumer); } } @Test public void testPerMessageSession() throws Exception { JmsConsumerImpl consumer = createConsumer(getName()); consumer.setAcknowledgeMode("AUTO_ACKNOWLEDGE"); StandaloneConsumer standaloneConsumer = new StandaloneConsumer(activeMqBroker.getJmsConnection(), consumer); MockMessageListener jms = new MockMessageListener(); 
    standaloneConsumer.registerAdaptrisMessageListener(jms);
    DefinedJmsProducer producer = createProducer(getName());
    producer.setSessionFactory(new PerMessageProducerSessionFactory());
    StandaloneProducer standaloneProducer = new StandaloneProducer(activeMqBroker.getJmsConnection(), producer);
    try {
      start(standaloneConsumer, standaloneProducer);
      standaloneProducer.doService(createMessage());
      // Should create a new Session now.
      standaloneProducer.doService(createMessage());
      waitForMessages(jms, 2);
      assertMessages(jms, 2);
    } finally {
      stop(standaloneProducer, standaloneConsumer);
    }
  }

  /**
   * Configures a 10ms inactivity window, then waits 200ms so the factory reports
   * that a new session is required before the second produce.
   */
  @Test
  public void testTimedInactivitySession() throws Exception {
    JmsConsumerImpl consumer = createConsumer(getName());
    consumer.setAcknowledgeMode("AUTO_ACKNOWLEDGE");
    StandaloneConsumer standaloneConsumer = new StandaloneConsumer(activeMqBroker.getJmsConnection(), consumer);
    MockMessageListener jms = new MockMessageListener();
    DefinedJmsProducer producer = createProducer(getName());
    TimedInactivityProducerSessionFactory psf =
        new TimedInactivityProducerSessionFactory(new TimeInterval(10L, TimeUnit.MILLISECONDS));
    producer.setSessionFactory(psf);
    standaloneConsumer.registerAdaptrisMessageListener(jms);
    StandaloneProducer standaloneProducer = new StandaloneProducer(activeMqBroker.getJmsConnection(), producer);
    try {
      start(standaloneConsumer, standaloneProducer);
      standaloneProducer.doService(createMessage());
      Thread.sleep(200);
      assertTrue(psf.newSessionRequired());
      // Should create a new Session now.
      standaloneProducer.doService(createMessage());
      waitForMessages(jms, 2);
      assertMessages(jms, 2);
    } finally {
      stop(standaloneProducer, standaloneConsumer);
    }
  }

  /**
   * With the factory's default inactivity interval, a 200ms pause must NOT
   * invalidate the session.
   * NOTE(review): assumes the factory's default interval is well above 200ms —
   * confirm against TimedInactivityProducerSessionFactory's default.
   */
  @Test
  public void testTimedInactivitySession_SessionStillValid() throws Exception {
    JmsConsumerImpl consumer = createConsumer(getName());
    consumer.setAcknowledgeMode("AUTO_ACKNOWLEDGE");
    StandaloneConsumer standaloneConsumer = new StandaloneConsumer(activeMqBroker.getJmsConnection(), consumer);
    MockMessageListener jms = new MockMessageListener();
    DefinedJmsProducer producer = createProducer(getName());
    TimedInactivityProducerSessionFactory psf = new TimedInactivityProducerSessionFactory();
    producer.setSessionFactory(psf);
    standaloneConsumer.registerAdaptrisMessageListener(jms);
    StandaloneProducer standaloneProducer = new StandaloneProducer(activeMqBroker.getJmsConnection(), producer);
    try {
      start(standaloneConsumer, standaloneProducer);
      standaloneProducer.doService(createMessage());
      Thread.sleep(200);
      assertFalse(psf.newSessionRequired());
      // Still should be a valid session; and could produce regardless.
      standaloneProducer.doService(createMessage());
      waitForMessages(jms, 2);
      assertMessages(jms, 2);
    } finally {
      stop(standaloneProducer, standaloneConsumer);
    }
  }

  /**
   * With a message-count threshold of 1, the factory must demand a new session
   * after two messages have been produced.
   */
  @Test
  public void testMessageCountSession() throws Exception {
    JmsConsumerImpl consumer = createConsumer(getName());
    consumer.setAcknowledgeMode("AUTO_ACKNOWLEDGE");
    StandaloneConsumer standaloneConsumer = new StandaloneConsumer(activeMqBroker.getJmsConnection(), consumer);
    MockMessageListener jms = new MockMessageListener();
    DefinedJmsProducer producer = createProducer(getName());
    MessageCountProducerSessionFactory psf = new MessageCountProducerSessionFactory(1);
    producer.setSessionFactory(psf);
    standaloneConsumer.registerAdaptrisMessageListener(jms);
    StandaloneProducer standaloneProducer = new StandaloneProducer(activeMqBroker.getJmsConnection(), producer);
    try {
      start(standaloneConsumer, standaloneProducer);
      standaloneProducer.doService(createMessage());
      standaloneProducer.doService(createMessage());
      assertTrue(psf.newSessionRequired());
      // Should create a new Session now.
      standaloneProducer.doService(createMessage());
      waitForMessages(jms, 3);
      assertMessages(jms, 3);
    } finally {
      stop(standaloneProducer, standaloneConsumer);
    }
  }

  /**
   * With the factory's default message-count threshold, a single produce must
   * NOT invalidate the session.
   */
  @Test
  public void testMessageCountSession_SessionStillValid() throws Exception {
    JmsConsumerImpl consumer = createConsumer(getName());
    consumer.setAcknowledgeMode("AUTO_ACKNOWLEDGE");
    StandaloneConsumer standaloneConsumer = new StandaloneConsumer(activeMqBroker.getJmsConnection(), consumer);
    MockMessageListener jms = new MockMessageListener();
    DefinedJmsProducer producer = createProducer(getName());
    MessageCountProducerSessionFactory psf = new MessageCountProducerSessionFactory();
    producer.setSessionFactory(psf);
    standaloneConsumer.registerAdaptrisMessageListener(jms);
    StandaloneProducer standaloneProducer = new StandaloneProducer(activeMqBroker.getJmsConnection(), producer);
    try {
      start(standaloneConsumer, standaloneProducer);
      standaloneProducer.doService(createMessage());
      assertFalse(psf.newSessionRequired());
      standaloneProducer.doService(createMessage());
      waitForMessages(jms, 2);
      assertMessages(jms, 2);
    } finally {
      stop(standaloneProducer, standaloneConsumer);
    }
  }

  /**
   * Sets the size threshold just below one payload so the second produce trips
   * the cumulative-size limit and forces a new session.
   */
  @Test
  public void testMessageSizeSession() throws Exception {
    JmsConsumerImpl consumer = createConsumer(getName());
    consumer.setAcknowledgeMode("AUTO_ACKNOWLEDGE");
    StandaloneConsumer standaloneConsumer = new StandaloneConsumer(activeMqBroker.getJmsConnection(), consumer);
    MockMessageListener jms = new MockMessageListener();
    DefinedJmsProducer producer = createProducer(getName());
    MessageSizeProducerSessionFactory psf =
        new MessageSizeProducerSessionFactory(Integer.valueOf(DEFAULT_PAYLOAD.length() - 1).longValue());
    producer.setSessionFactory(psf);
    standaloneConsumer.registerAdaptrisMessageListener(jms);
    StandaloneProducer standaloneProducer = new StandaloneProducer(activeMqBroker.getJmsConnection(), producer);
    try {
      start(standaloneConsumer, standaloneProducer);
      standaloneProducer.doService(createMessage());
      standaloneProducer.doService(createMessage());
      assertTrue(psf.newSessionRequired());
      // Should create a new Session now.
      standaloneProducer.doService(createMessage());
      waitForMessages(jms, 3);
      assertMessages(jms, 3);
    } finally {
      stop(standaloneProducer, standaloneConsumer);
    }
  }

  /**
   * With the factory's default size threshold, a single produce must NOT
   * invalidate the session.
   */
  @Test
  public void testMessageSizeSession_SessionStillValid() throws Exception {
    JmsConsumerImpl consumer = createConsumer(getName());
    consumer.setAcknowledgeMode("AUTO_ACKNOWLEDGE");
    StandaloneConsumer standaloneConsumer = new StandaloneConsumer(activeMqBroker.getJmsConnection(), consumer);
    MockMessageListener jms = new MockMessageListener();
    DefinedJmsProducer producer = createProducer(getName());
    MessageSizeProducerSessionFactory psf = new MessageSizeProducerSessionFactory();
    producer.setSessionFactory(psf);
    standaloneConsumer.registerAdaptrisMessageListener(jms);
    StandaloneProducer standaloneProducer = new StandaloneProducer(activeMqBroker.getJmsConnection(), producer);
    try {
      start(standaloneConsumer, standaloneProducer);
      standaloneProducer.doService(createMessage());
      assertFalse(psf.newSessionRequired());
      standaloneProducer.doService(createMessage());
      waitForMessages(jms, 2);
      assertMessages(jms, 2);
    } finally {
      stop(standaloneProducer, standaloneConsumer);
    }
  }

  /**
   * Metadata-keyed factory: a new session is required only when the message
   * carries the configured metadata key with a "true" value (msg4); absent
   * (msg2) or "false" (msg3) values keep the current session.
   */
  @Test
  public void testMetadataSession() throws Exception {
    JmsConsumerImpl consumer = createConsumer(getName());
    consumer.setAcknowledgeMode("AUTO_ACKNOWLEDGE");
    StandaloneConsumer standaloneConsumer = new StandaloneConsumer(activeMqBroker.getJmsConnection(), consumer);
    MockMessageListener jms = new MockMessageListener();
    DefinedJmsProducer producer = createProducer(getName());
    MetadataProducerSessionFactory psf = new MetadataProducerSessionFactory(getName());
    producer.setSessionFactory(psf);
    standaloneConsumer.registerAdaptrisMessageListener(jms);
    StandaloneProducer standaloneProducer = new StandaloneProducer(activeMqBroker.getJmsConnection(), producer);
    try {
      start(standaloneConsumer, standaloneProducer);
      AdaptrisMessage msg1 = createMessage();
      AdaptrisMessage msg2 = createMessage();
      AdaptrisMessage msg3 = createMessage();
      msg3.addMetadata(getName(), Boolean.FALSE.toString());
      AdaptrisMessage msg4 = createMessage();
      msg4.addMetadata(getName(), Boolean.TRUE.toString());
      standaloneProducer.doService(msg1);
      assertFalse(psf.newSessionRequired(msg2));
      standaloneProducer.doService(msg2);
      assertFalse(psf.newSessionRequired(msg3));
      standaloneProducer.doService(msg3);
      assertTrue(psf.newSessionRequired(msg4));
      standaloneProducer.doService(msg4);
      waitForMessages(jms, 4);
      assertMessages(jms, 4);
    } finally {
      stop(standaloneProducer, standaloneConsumer);
    }
  }

  /**
   * Two producers in one ServiceList sharing a connection: two input messages
   * yield four deliveries (each message produced by both producers).
   */
  @Test
  public void testMultipleProducersWithSession() throws Exception {
    JmsConsumerImpl consumer = createConsumer(getName());
    consumer.setAcknowledgeMode("AUTO_ACKNOWLEDGE");
    StandaloneConsumer standaloneConsumer = new StandaloneConsumer(activeMqBroker.getJmsConnection(), consumer);
    MockMessageListener jms = new MockMessageListener();
    standaloneConsumer.registerAdaptrisMessageListener(jms);
    ServiceList serviceList = new ServiceList(new Service[]
    {
        new StandaloneProducer(activeMqBroker.getJmsConnection(), createProducer(getName())),
        new StandaloneProducer(activeMqBroker.getJmsConnection(), createProducer(getName()))
    });
    try {
      start(standaloneConsumer, serviceList);
      AdaptrisMessage msg1 = createMessage();
      AdaptrisMessage msg2 = createMessage();
      serviceList.doService(msg1);
      serviceList.doService(msg2);
      waitForMessages(jms, 4);
      assertMessages(jms, 4);
    } finally {
      stop(serviceList, standaloneConsumer);
    }
  }

  /**
   * Two request/reply services against a Loopback echo; each request's payload
   * should come back upper-cased within the 1 second timeout.
   */
  @Test
  public void testMultipleRequestorWithSession() throws Exception {
    ServiceList serviceList = new ServiceList(new Service[]
    {
        new StandaloneRequestor(activeMqBroker.getJmsConnection(), createProducer(getName()),
            new TimeInterval(1L, TimeUnit.SECONDS)),
        new StandaloneRequestor(activeMqBroker.getJmsConnection(), createProducer(getName()),
            new TimeInterval(1L, TimeUnit.SECONDS))
    });
    Loopback echo = createLoopback(activeMqBroker, getName());
    try {
      echo.start();
      start(serviceList);
      AdaptrisMessage msg1 = createMessage();
      AdaptrisMessage msg2 = createMessage();
      serviceList.doService(msg1);
      serviceList.doService(msg2);
      // The loopback replies with the upper-cased payload.
      assertEquals(DEFAULT_PAYLOAD.toUpperCase(), msg1.getContent());
      assertEquals(DEFAULT_PAYLOAD.toUpperCase(), msg2.getContent());
    } finally {
      stop(serviceList);
      echo.stop();
    }
  }

  /**
   * Raw-JMS echo helper: listens on a queue or topic and replies to each
   * message's JMSReplyTo destination with the upper-cased text (or the default
   * payload upper-cased for non-text messages). Subclasses supply the
   * queue/topic-specific listen and reply plumbing.
   */
  protected static abstract class Loopback implements MessageListener {
    protected String listenQueueOrTopic;
    protected EmbeddedActiveMq broker;
    protected ActiveMQSession session;
    protected Message lastMsg = null;
    protected ActiveMQConnection conn;
    private boolean isTextMessage = true;

    Loopback(EmbeddedActiveMq mq, String dest) {
      listenQueueOrTopic = dest;
      broker = mq;
    }

    Loopback(EmbeddedActiveMq mq, String dest, boolean isText) {
      listenQueueOrTopic = dest;
      broker = mq;
      isTextMessage = isText;
    }

    // Opens a non-transacted AUTO_ACKNOWLEDGE session and begins listening.
    public void start() throws Exception {
      conn = broker.createConnection();
      session = (ActiveMQSession) conn.createSession(false, Session.AUTO_ACKNOWLEDGE);
      startListener(listenQueueOrTopic);
      conn.start();
    }

    public void stop() throws Exception {
      stopListener();
      closeQuietly(session);
      closeQuietly(conn, true);
    }

    @Override
    public void onMessage(Message m) {
      try {
        logger.debug("Got Message " + m.getJMSMessageID());
        TextMessage reply = session.createTextMessage();
        if (isTextMessage) {
          reply.setText(((TextMessage) m).getText().toUpperCase());
        } else {
          reply.setText(DEFAULT_PAYLOAD.toUpperCase());
        }
        try {
          Destination replyTo = m.getJMSReplyTo();
          if (replyTo != null) {
            reply(reply, replyTo);
          }
        } catch (Exception e) {
          // best-effort reply; failures are deliberately ignored.
          ;
        }
        lastMsg = m;
      } catch (Exception e) {
        logger.error("Got exception ", e);
      }
    }

    Message getLastMessage() {
      return lastMsg;
    }

    // Polls in 100ms steps until a message arrives or the timeout elapses.
    void waitFor(long timeout) {
      int count = 0;
      while (getLastMessage() == null && count <= timeout) {
        try {
          Thread.sleep(100);
          count += 100;
        } catch (InterruptedException e) {
        }
      }
    }

    abstract void startListener(String listenOn) throws Exception;

    abstract void stopListener() throws Exception;

    abstract void reply(Message reply, Destination replyTo) throws Exception;
  }

  /** Topic-based loopback: subscribes to a topic and publishes replies. */
  protected static class TopicLoopback extends Loopback {
    private TopicSubscriber subscriber;

    TopicLoopback(EmbeddedActiveMq mq, String dest) {
      super(mq, dest);
    }

    TopicLoopback(EmbeddedActiveMq mq, String dest, boolean b) {
      super(mq, dest, b);
    }

    @Override
    void reply(Message reply, Destination replyTo) throws Exception {
      if (replyTo != null) {
        ActiveMQTopicPublisher pub = (ActiveMQTopicPublisher) session.createPublisher((Topic) replyTo);
        pub.publish(reply);
        pub.close();
      }
    }

    @Override
    void startListener(String listenOn) throws Exception {
      Topic d = session.createTopic(listenOn);
      subscriber = session.createSubscriber(d);
      subscriber.setMessageListener(this);
    }

    @Override
    void stopListener() throws Exception {
      closeQuietly(subscriber);
    }
  }

  /** Queue-based loopback: receives from a queue and sends replies. */
  protected static class QueueLoopback extends Loopback {
    private QueueReceiver receiver;

    QueueLoopback(EmbeddedActiveMq mq, String dest) {
      super(mq, dest);
    }

    QueueLoopback(EmbeddedActiveMq mq, String dest, boolean b) {
      super(mq, dest, b);
    }

    @Override
    void reply(Message reply, Destination replyTo) throws Exception {
      if (replyTo != null) {
        ActiveMQQueueSender pub = (ActiveMQQueueSender) session.createSender((Queue) replyTo);
        pub.send(reply);
        pub.close();
      }
    }

    @Override
    void startListener(String listenOn) throws Exception {
      Queue d = session.createQueue(listenOn);
      receiver = session.createReceiver(d);
      receiver.setMessageListener(this);
    }

    @Override
    void stopListener() throws Exception {
      closeQuietly(receiver);
    }
  }
}
/* * Copyright (c) 2018, Cameron <https://github.com/noremac201>, SoyChai <https://github.com/SoyChai> * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 */
package net.runelite.client.plugins.experiencedrop;

import com.google.inject.Provides;
import java.util.Arrays;
import java.util.EnumMap;
import java.util.Map;
import java.util.stream.IntStream;
import javax.inject.Inject;
import net.runelite.api.Client;
import net.runelite.api.EnumComposition;
import net.runelite.api.EnumID;
import static net.runelite.api.ScriptID.XPDROPS_SETDROPSIZE;
import static net.runelite.api.ScriptID.XPDROP_DISABLED;
import net.runelite.api.Skill;
import net.runelite.api.SpriteID;
import net.runelite.api.Varbits;
import net.runelite.api.events.GameTick;
import net.runelite.api.events.ScriptPreFired;
import net.runelite.api.events.StatChanged;
import net.runelite.api.widgets.Widget;
import static net.runelite.api.widgets.WidgetInfo.TO_CHILD;
import static net.runelite.api.widgets.WidgetInfo.TO_GROUP;
import net.runelite.client.config.ConfigManager;
import net.runelite.client.eventbus.Subscribe;
import net.runelite.client.plugins.Plugin;
import net.runelite.client.plugins.PluginDescriptor;

@PluginDescriptor(
	name = "XP Drop",
	description = "Enable customization of the way XP drops are displayed",
	tags = {"experience", "levels", "tick", "prayer", "xpdrop"}
)
public class XpDropPlugin extends Plugin
{
	@Inject
	private Client client;

	@Inject
	private XpDropConfig config;

	// Game ticks since the last real XP drop; drives the fake-drop cadence.
	private int tickCounter = 0;
	// XP gained in the most recent StatChanged, replayed by fake drops.
	private int previousExpGained;
	// Set when a StatChanged produced a drop this tick; cleared on the next GameTick.
	private boolean hasDropped = false;
	// Latches once a drop matched the active prayer, so follow-up drops keep the color.
	private boolean correctPrayer;
	private Skill lastSkill = null;
	// Last seen XP total per skill, used to compute per-event deltas.
	private Map<Skill, Integer> previousSkillExpTable = new EnumMap<>(Skill.class);

	@Provides
	XpDropConfig provideConfig(ConfigManager configManager)
	{
		return configManager.getConfig(XpDropConfig.class);
	}

	/**
	 * Intercepts the client script that sizes an XP drop so the drop widget can
	 * be recolored/stripped before it renders.
	 */
	@Subscribe
	public void onScriptPreFired(ScriptPreFired scriptPreFired)
	{
		if (scriptPreFired.getScriptId() == XPDROPS_SETDROPSIZE)
		{
			final int[] intStack = client.getIntStack();
			final int intStackSize = client.getIntStackSize();
			// This runs prior to the proc being invoked, so the arguments are still on the stack.
			// Grab the first argument to the script.
			final int widgetId = intStack[intStackSize - 4];
			processXpDrop(widgetId);
		}
	}

	// Recolors the drop text to the configured prayer color when the drop's skill
	// icons match the active offensive prayer, then optionally hides the icons.
	private void processXpDrop(int widgetId)
	{
		// NOTE(review): assumes the widget and its children exist whenever the
		// script fires — confirm getWidget() cannot return null here.
		final Widget xpdrop = client.getWidget(TO_GROUP(widgetId), TO_CHILD(widgetId));
		final Widget[] children = xpdrop.getChildren();
		// child 0 is the xpdrop text, everything else are sprite ids for skills
		final Widget text = children[0];

		PrayerType prayer = getActivePrayerType();
		if (prayer == null)
		{
			hideSkillIcons(xpdrop);
			resetTextColor(text);
			return;
		}

		final IntStream spriteIDs = Arrays.stream(children)
			.skip(1) // skip text
			.mapToInt(Widget::getSpriteId);

		int color = 0;
		// Each case consumes spriteIDs at most once (IntStream is single-use).
		switch (prayer)
		{
			case MELEE:
				if (correctPrayer || spriteIDs.anyMatch(id ->
					id == SpriteID.SKILL_ATTACK || id == SpriteID.SKILL_STRENGTH || id == SpriteID.SKILL_DEFENCE))
				{
					color = config.getMeleePrayerColor().getRGB();
					correctPrayer = true;
				}
				break;
			case RANGE:
				if (correctPrayer || spriteIDs.anyMatch(id -> id == SpriteID.SKILL_RANGED))
				{
					color = config.getRangePrayerColor().getRGB();
					correctPrayer = true;
				}
				break;
			case MAGIC:
				if (correctPrayer || spriteIDs.anyMatch(id -> id == SpriteID.SKILL_MAGIC))
				{
					color = config.getMagePrayerColor().getRGB();
					correctPrayer = true;
				}
				break;
		}

		if (color != 0)
		{
			text.setTextColor(color);
		}
		else
		{
			resetTextColor(text);
		}

		hideSkillIcons(xpdrop);
	}

	// Restores the user's configured default XP drop color.
	private void resetTextColor(Widget widget)
	{
		EnumComposition colorEnum = client.getEnum(EnumID.XPDROP_COLORS);
		int defaultColorId = client.getVar(Varbits.EXPERIENCE_DROP_COLOR);
		int color = colorEnum.getIntValue(defaultColorId);
		widget.setTextColor(color);
	}

	private void hideSkillIcons(Widget xpdrop)
	{
		if (config.hideSkillIcons())
		{
			Widget[] children = xpdrop.getChildren();
			// keep only text
			Arrays.fill(children, 1, children.length, null);
		}
	}

	// Returns the type of the first active XP prayer, or null when none is active.
	private PrayerType getActivePrayerType()
	{
		for (XpPrayer prayer : XpPrayer.values())
		{
			if (client.isPrayerActive(prayer.getPrayer()))
			{
				return prayer.getType();
			}
		}
		return null;
	}

	/**
	 * Emits a "fake" XP drop every fakeXpDropDelay ticks while no real drop has
	 * occurred, replaying the last skill and XP delta.
	 */
	@Subscribe
	public void onGameTick(GameTick tick)
	{
		correctPrayer = false;
		final int fakeTickDelay = config.fakeXpDropDelay();

		if (fakeTickDelay == 0 || lastSkill == null)
		{
			return;
		}

		// If an xp drop was created this tick, reset the counter
		if (hasDropped)
		{
			hasDropped = false;
			tickCounter = 0;
			return;
		}

		if (++tickCounter % fakeTickDelay != 0)
		{
			return;
		}

		client.runScript(XPDROP_DISABLED, lastSkill.ordinal(), previousExpGained);
	}

	// Tracks XP deltas per skill; the first event for a skill only seeds the table.
	@Subscribe
	public void onStatChanged(StatChanged statChanged)
	{
		final Skill skill = statChanged.getSkill();
		final int xp = statChanged.getXp();

		lastSkill = skill;

		Integer previous = previousSkillExpTable.put(skill, xp);
		if (previous != null)
		{
			previousExpGained = xp - previous;
			hasDropped = true;
		}
	}
}
/**
 * This document is a part of the source code and related artifacts
 * for CollectionSpace, an open source collections management system
 * for museums and related institutions:
 *
 * http://www.collectionspace.org
 * http://wiki.collectionspace.org
 *
 * Copyright (c) 2009 Regents of the University of California
 *
 * Licensed under the Educational Community License (ECL), Version 2.0.
 * You may not use this file except in compliance with this License.
 *
 * You may obtain a copy of the ECL 2.0 License at
 * https://source.collectionspace.org/collection-space/LICENSE.txt
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.collectionspace.services.client.test;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

import org.collectionspace.services.OrganizationJAXBSchema;
import org.collectionspace.services.client.AbstractCommonListUtils;
import org.collectionspace.services.client.AuthorityClient;
import org.collectionspace.services.client.CollectionSpaceClient;
import org.collectionspace.services.client.ContactClient;
import org.collectionspace.services.client.ContactClientUtils;
import org.collectionspace.services.client.PayloadOutputPart;
import org.collectionspace.services.client.PoxPayloadIn;
import org.collectionspace.services.client.PoxPayloadOut;
import org.collectionspace.services.contact.AddressGroup;
import org.collectionspace.services.contact.AddressGroupList;
import org.collectionspace.services.contact.ContactsCommon;
import org.collectionspace.services.client.OrgAuthorityClient;
import org.collectionspace.services.client.OrgAuthorityClientUtils;
import org.collectionspace.services.jaxb.AbstractCommonList;
import org.collectionspace.services.organization.OrgauthoritiesCommon;
import org.collectionspace.services.organization.OrganizationsCommon;
import org.collectionspace.services.organization.OrgTermGroup;
import org.collectionspace.services.organization.OrgTermGroupList;
import org.jboss.resteasy.client.ClientResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.Test;

/**
 * OrgAuthorityServiceTest, carries out tests against a
 * deployed and running OrgAuthority Service.
 *
 * $LastChangedRevision$
 * $LastChangedDate$
 */
public class OrgAuthorityServiceTest extends AbstractAuthorityServiceTest<OrgauthoritiesCommon, OrganizationsCommon> {

    /** The logger. */
    private final String CLASS_NAME = OrgAuthorityServiceTest.class.getName();
    private final Logger logger = LoggerFactory.getLogger(CLASS_NAME);

    @Override
    public String getServicePathComponent() {
        return OrgAuthorityClient.SERVICE_PATH_COMPONENT;
    }

    @Override
    protected String getServiceName() {
        return OrgAuthorityClient.SERVICE_NAME;
    }

    // Fixed field values used when building test organization records.
    private final String TEST_SHORT_ID = "TestOrg";
    private final String TEST_ORG_NAME = "Test Org";
    private final String TEST_ORG_MAIN_BODY_NAME = "The real official test organization";
    private final String TEST_ORG_FOUNDING_PLACE = "Anytown, USA";
    // FIXME: Change this to a structured date once this field changes in the schema.
    private final String TEST_ORG_FOUNDING_DATE = "May 26, 1907";

    // Short identifier of the first item created, used by name-based reads.
    private String knownItemResourceShortIdentifer = null;

    /** The known contact resource id. */
    private String knownContactResourceId = null;

    /** The all contact resource ids created.
     */
    private Map<String, String> allContactResourceIdsCreated = new HashMap<String, String>();

    // Records both the CSID and short identifier of the known item in one step.
    protected void setKnownItemResource(String id, String shortIdentifer) {
        knownItemResourceId = id;
        knownItemResourceShortIdentifer = shortIdentifer;
    }

    /* (non-Javadoc)
     * @see org.collectionspace.services.client.test.BaseServiceTest#getClientInstance()
     */
    @Override
    protected CollectionSpaceClient getClientInstance() {
        return new OrgAuthorityClient();
    }

    @Override
    protected PoxPayloadOut createInstance(String identifier) {
        OrgAuthorityClient client = new OrgAuthorityClient();
        String displayName = "displayName-" + identifier;
        PoxPayloadOut multipart = OrgAuthorityClientUtils.createOrgAuthorityInstance(
                displayName, identifier, client.getCommonPartName());
        return multipart;
    }

    @Override
    protected PoxPayloadOut createItemInstance(String parentCsid, String identifier) {
        String headerLabel = new OrgAuthorityClient().getItemCommonPartName();
        String shortId = TEST_SHORT_ID;
        Map<String, String> testOrgMap = new HashMap<String, String>();
        testOrgMap.put(OrganizationJAXBSchema.SHORT_IDENTIFIER, shortId);
        testOrgMap.put(OrganizationJAXBSchema.FOUNDING_DATE, TEST_ORG_FOUNDING_DATE);
        testOrgMap.put(OrganizationJAXBSchema.FOUNDING_PLACE, TEST_ORG_FOUNDING_PLACE);
        List<OrgTermGroup> terms = new ArrayList<OrgTermGroup>();
        OrgTermGroup term = new OrgTermGroup();
        term.setTermDisplayName(TEST_ORG_NAME);
        term.setTermName(TEST_ORG_NAME);
        term.setMainBodyName(TEST_ORG_MAIN_BODY_NAME);
        terms.add(term);
        return OrgAuthorityClientUtils.createOrganizationInstance(identifier, testOrgMap, terms, headerLabel);
    }

    /**
     * Creates the item in authority.
     *
     * @param vcsid the vcsid
     * @param authRefName the auth ref name
     * @return the string
     */
    private String createItemInAuthority(String vcsid, String authRefName) {
        final String testName = "createItemInAuthority";
        if (logger.isDebugEnabled()) {
            logger.debug(testName + ":...");
        }

        // Submit the request to the service and store the response.
        OrgAuthorityClient client = new OrgAuthorityClient();
        String shortId = TEST_SHORT_ID;
        Map<String, String> testOrgMap = new HashMap<String, String>();
        testOrgMap.put(OrganizationJAXBSchema.SHORT_IDENTIFIER, shortId);
        testOrgMap.put(OrganizationJAXBSchema.FOUNDING_DATE, TEST_ORG_FOUNDING_DATE);
        testOrgMap.put(OrganizationJAXBSchema.FOUNDING_PLACE, TEST_ORG_FOUNDING_PLACE);

        List<OrgTermGroup> terms = new ArrayList<OrgTermGroup>();
        OrgTermGroup term = new OrgTermGroup();
        term.setTermDisplayName(TEST_ORG_NAME);
        term.setTermName(TEST_ORG_NAME);
        term.setMainBodyName(TEST_ORG_MAIN_BODY_NAME);
        terms.add(term);

        // Populate a repeatable scalar group (contact names) to exercise repeatable fields.
        Map<String, List<String>> testOrgRepeatablesMap = new HashMap<String, List<String>>();
        List<String> testOrgContactNames = new ArrayList<String>();
        testOrgContactNames.add("joe@example.org");
        testOrgContactNames.add("sally@example.org");
        testOrgRepeatablesMap.put(OrganizationJAXBSchema.CONTACT_NAMES, testOrgContactNames);

        String newID = OrgAuthorityClientUtils.createItemInAuthority(
                vcsid, authRefName, testOrgMap, terms, testOrgRepeatablesMap, client);

        // Store the ID returned from the first item resource created
        // for additional tests below.
        if (knownItemResourceId == null) {
            setKnownItemResource(newID, shortId);
            if (logger.isDebugEnabled()) {
                logger.debug(testName + ": knownItemResourceId=" + knownItemResourceId);
            }
        }

        // Store the IDs from any item resources created
        // by tests, along with the IDs of their parents, so these items
        // can be deleted after all tests have been run.
        allResourceItemIdsCreated.put(newID, vcsid);

        return newID;
    }

    /**
     * Creates the contact.
     *
     * @param testName the test name
     */
    @Test(dataProvider = "testName", groups = {"create"},
            dependsOnMethods = {"createItem"})
    public void createContact(String testName) {
        setupCreate();
        String newID = createContactInItem(knownResourceId, knownItemResourceId);
    }

    /**
     * Creates the contact in item.
     *
     * @param parentcsid the parentcsid
     * @param itemcsid the itemcsid
     * @return the string
     */
    private String createContactInItem(String parentcsid, String itemcsid) {
        final String testName = "createContactInItem";
        if (logger.isDebugEnabled()) {
            logger.debug(getTestBanner(testName, CLASS_NAME));
        }
        setupCreate();

        // Submit the request to the service and store the response.
        OrgAuthorityClient client = new OrgAuthorityClient();
        String identifier = createIdentifier();
        PoxPayloadOut multipart = ContactClientUtils.createContactInstance(parentcsid,
                itemcsid, identifier, new ContactClient().getCommonPartName());

        String newID = null;
        Response res = client.createContact(parentcsid, itemcsid, multipart);
        try {
            assertStatusCode(res, testName);
            newID = OrgAuthorityClientUtils.extractId(res);
        } finally {
            if (res != null) {
                res.close();
            }
        }

        // Store the ID returned from the first contact resource created
        // for additional tests below.
        if (knownContactResourceId == null) {
            knownContactResourceId = newID;
            if (logger.isDebugEnabled()) {
                logger.debug(testName + ": knownContactResourceId=" + knownContactResourceId);
            }
        }

        // Store the IDs from any contact resources created
        // by tests, along with the IDs of their parent items,
        // so these items can be deleted after all tests have been run.
        allContactResourceIdsCreated.put(newID, itemcsid);

        return newID;
    }

    /**
     * Creates the contact list.
     *
     * @param testName the test name
     * @throws Exception the exception
     */
    @Test(dataProvider = "testName", groups = {"createList"},
            dependsOnMethods = {"createItemList"})
    public void createContactList(String testName) throws Exception {
        // Add contacts to the initially-created, known item record.
        for (int j = 0; j < nItemsToCreateInList; j++) {
            createContact(testName);
        }
    }

    // ---------------------------------------------------------------
    // CRUD tests : READ tests
    // ---------------------------------------------------------------
    // Success outcomes

    /* (non-Javadoc)
     * @see org.collectionspace.services.client.test.AbstractServiceTestImpl#read(java.lang.String)
     */
    @Override
    // @Test(dataProvider = "testName", dataProviderClass = AbstractServiceTestImpl.class,
    //     groups = {"read"}, dependsOnGroups = {"create"})
    public void read(String testName) throws Exception {
        readInternal(testName, knownResourceId, null);
    }

    /**
     * Read by name.
     *
     * @param testName the test name
     * @throws Exception the exception
     */
    // @Test(dataProvider = "testName", dataProviderClass = AbstractServiceTestImpl.class,
    //     groups = {"read"}, dependsOnGroups = {"create"})
    public void readByName(String testName) throws Exception {
        readInternal(testName, null, knownResourceShortIdentifer);
    }

    // Reads the authority either by CSID or by short identifier (exactly one must be supplied).
    protected void readInternal(String testName, String CSID, String shortId) {
        // Perform setup.
        setupRead();
        // Submit the request to the service and store the response.
        OrgAuthorityClient client = new OrgAuthorityClient();
        Response res = null;
        if (CSID != null) {
            res = client.read(CSID);
        } else if (shortId != null) {
            res = client.readByName(shortId);
        } else {
            Assert.fail("readInternal: Internal error. One of CSID or shortId must be non-null");
        }
        try {
            assertStatusCode(res, testName);
            //FIXME: remove the following try catch once Aron fixes signatures
            try {
                PoxPayloadIn input = new PoxPayloadIn(res.readEntity(String.class));
                OrgauthoritiesCommon orgAuthority = (OrgauthoritiesCommon) extractPart(input,
                        new OrgAuthorityClient().getCommonPartName(), OrgauthoritiesCommon.class);
                if (logger.isDebugEnabled()) {
                    logger.debug(objectAsXmlString(orgAuthority, OrgauthoritiesCommon.class));
                }
                Assert.assertNotNull(orgAuthority);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        } finally {
            if (res != null) {
                res.close();
            }
        }
    }

    /**
     * Read item in Named Auth.
     *
     * TODO Enable this if we really need this - it is a funky case, where we would have
     * the shortId of the item, but the CSID of the parent authority!? Unlikely.
     *
     * @param testName the test name
     * @throws Exception the exception
    @Test(dataProvider="testName", dataProviderClass=AbstractServiceTestImpl.class,
        groups = {"readItem"}, dependsOnGroups = {"read"})
    public void readItemInNamedAuth(String testName) throws Exception {
        readItemInternal(testName, null, knownResourceShortIdentifer, knownItemResourceId, null);
    }
     */

    /**
     * Read named item.
     *
     * @param testName the test name
     * @throws Exception the exception
     */
    @Test(dataProvider = "testName", groups = {"readItem"},
            dependsOnMethods = {"readItemInNamedAuth"})
    public void readNamedItem(String testName) throws Exception {
        readItemInternal(testName, knownResourceId, null, null, knownItemResourceShortIdentifer);
    }

    /**
     * Read item in Named Auth.
     *
     * @param testName the test name
     * @throws Exception the exception
     */
    @Test(dataProvider = "testName", groups = {"readItem"},
            dependsOnMethods = {"readItem"})
    public void readItemInNamedAuth(String testName) throws Exception {
        readItemInternal(testName, null, knownResourceShortIdentifer, knownItemResourceId, null);
    }

    /**
     * Read Named item in Named Auth.
     *
     * @param testName the test name
     * @throws Exception the exception
     */
    @Test(dataProvider = "testName", groups = {"readItem"},
            dependsOnMethods = {"readItem"})
    public void readNamedItemInNamedAuth(String testName) throws Exception {
        readItemInternal(testName, null, knownResourceShortIdentifer, null, knownItemResourceShortIdentifer);
    }

    // Reads an item addressing both the authority and the item either by CSID or
    // by short identifier, covering all four addressing combinations.
    protected void readItemInternal(String testName, String authCSID, String authShortId,
            String itemCSID, String itemShortId) throws Exception {
        // Perform setup.
        setupRead();
        // Submit the request to the service and store the response.
        OrgAuthorityClient client = new OrgAuthorityClient();
        Response res = null;
        if (authCSID != null) {
            if (itemCSID != null) {
                res = client.readItem(authCSID, itemCSID);
            } else if (itemShortId != null) {
                res = client.readNamedItem(authCSID, itemShortId);
            } else {
                Assert.fail("readInternal: Internal error. One of CSID or shortId must be non-null");
            }
        } else if (authShortId != null) {
            if (itemCSID != null) {
                res = client.readItemInNamedAuthority(authShortId, itemCSID);
            } else if (itemShortId != null) {
                res = client.readNamedItemInNamedAuthority(authShortId, itemShortId);
            } else {
                Assert.fail("readInternal: Internal error. One of CSID or shortId must be non-null");
            }
        } else {
            Assert.fail("readInternal: Internal error. One of authCSID or authShortId must be non-null");
        }
        try {
            assertStatusCode(res, testName);
            // Check whether we've received a organization.
            PoxPayloadIn input = new PoxPayloadIn(res.readEntity(String.class));
            OrganizationsCommon organization = (OrganizationsCommon) extractPart(input,
                    client.getItemCommonPartName(), OrganizationsCommon.class);
            Assert.assertNotNull(organization);
            boolean showFull = true;
            if (showFull && logger.isDebugEnabled()) {
                logger.debug(testName + ": returned payload:");
                logger.debug(objectAsXmlString(organization, OrganizationsCommon.class));
            }
            // Check that the organization item is within the expected OrgAuthority.
            Assert.assertEquals(organization.getInAuthority(), knownResourceId);
            // Verify the number and contents of values in a repeatable field,
            // as created in the instance record used for testing.
            List<String> contactNames = organization.getContactNames().getContactName();
            Assert.assertTrue(contactNames.size() > 0);
            Assert.assertNotNull(contactNames.get(0));
        } finally {
            if (res != null) {
                res.close();
            }
        }
    }

    @Override
    protected void verifyReadItemInstance(OrganizationsCommon item) throws Exception {
        List<String> contactNames = item.getContactNames().getContactName();
        Assert.assertTrue(contactNames.size() > 0);
        Assert.assertNotNull(contactNames.get(0));
    }

    /**
     * Verify illegal item display name.
     *
     * @param testName the test name
     * @throws Exception the exception
     */
    @Test(dataProvider = "testName")
    public void verifyIllegalItemDisplayName(String testName) throws Exception {
        // Perform setup for read.
        setupRead();
        //
        // First read our known resource.
        //
        OrgAuthorityClient client = new OrgAuthorityClient();
        Response res = client.readItem(knownResourceId, knownItemResourceId);
        OrganizationsCommon organization = null;
        try {
            assertStatusCode(res, testName);
            // Check whether organization has expected displayName.
            PoxPayloadIn input = new PoxPayloadIn(res.readEntity(String.class));
            organization = (OrganizationsCommon) extractPart(input,
                    client.getItemCommonPartName(), OrganizationsCommon.class);
            Assert.assertNotNull(organization);
        } finally {
            if (res != null) {
                res.close();
            }
        }
        //
        // Make an invalid UPDATE request, without a display name
        //
        OrgTermGroupList termList = organization.getOrgTermGroupList();
        Assert.assertNotNull(termList);
        List<OrgTermGroup> terms = termList.getOrgTermGroup();
        Assert.assertNotNull(terms);
        Assert.assertTrue(terms.size() > 0);
        terms.get(0).setTermDisplayName(null);
        terms.get(0).setTermName(null);

        setupUpdateWithInvalidBody(); // we expect a failure

        // Submit the updated resource to the service and store the response.
        PoxPayloadOut output = new PoxPayloadOut(OrgAuthorityClient.SERVICE_ITEM_PAYLOAD_NAME);
        PayloadOutputPart commonPart = output.addPart(client.getItemCommonPartName(), organization);
        res = client.updateItem(knownResourceId, knownItemResourceId, output);
        try {
            assertStatusCode(res, testName);
        } finally {
            if (res != null) {
                res.close();
            }
        }
    }

    /**
     * Read contact.
     *
     * @param testName the test name
     * @throws Exception the exception
     */
    @Test(dataProvider = "testName", groups = {"readItem"},
            dependsOnMethods = {"readItem"})
    public void readContact(String testName) throws Exception {
        // Perform setup.
        setupRead();
        // Submit the request to the service and store the response.
        OrgAuthorityClient client = new OrgAuthorityClient();
        Response res = client.readContact(knownResourceId, knownItemResourceId, knownContactResourceId);
        try {
            assertStatusCode(res, testName);
            // Check whether we've received a contact.
            PoxPayloadIn input = new PoxPayloadIn(res.readEntity(String.class));
            ContactsCommon contact = (ContactsCommon) extractPart(input,
                    new ContactClient().getCommonPartName(), ContactsCommon.class);
            Assert.assertNotNull(contact);
            boolean showFull = true;
            if (showFull && logger.isDebugEnabled()) {
                logger.debug(testName + ": returned payload:");
                logger.debug(objectAsXmlString(contact, ContactsCommon.class));
            }
            // The contact must be linked to both the known authority and the known item.
            Assert.assertEquals(contact.getInAuthority(), knownResourceId);
            Assert.assertEquals(contact.getInItem(), knownItemResourceId);
        } finally {
            if (res != null) {
                res.close();
            }
        }
    }

    /**
     * Read contact non existent.
     *
     * @param testName the test name
     */
    @Test(dataProvider = "testName", groups = {"readItem"},
            dependsOnMethods = {"readContact"})
    public void readContactNonExistent(String testName) {
        // Perform setup.
        setupReadNonExistent();
        // Submit the request to the service and store the response.
OrgAuthorityClient client = new OrgAuthorityClient(); Response res = client.readContact(knownResourceId, knownItemResourceId, NON_EXISTENT_ID); try { int statusCode = res.getStatus(); // Check the status code of the response: does it match // the expected response(s)? if (logger.isDebugEnabled()) { logger.debug(testName + ": status = " + statusCode); } Assert.assertTrue(testRequestType.isValidStatusCode(statusCode), invalidStatusCodeMessage(testRequestType, statusCode)); Assert.assertEquals(statusCode, testExpectedStatusCode); } finally { if (res != null) { res.close(); } } } /** * Read item list. */ @Override // @Test(groups = {"readList"}, dependsOnMethods = {"readList"}) public void readItemList(String testName) { readItemList(knownAuthorityWithItems, null); } /** * Read item list by authority name. */ @Override // @Test(dataProvider = "testName", // dependsOnMethods = {"readItem"}) public void readItemListByName(String testName) { readItemList(null, READITEMS_SHORT_IDENTIFIER); } /** * Read item list. * * @param vcsid the vcsid * @param name the name */ private void readItemList(String vcsid, String name) { final String testName = "readItemList"; // Perform setup. setupReadList(); // Submit the request to the service and store the response. OrgAuthorityClient client = new OrgAuthorityClient(); Response res = null; if (vcsid != null) { res = client.readItemList(vcsid, null, null); } else if (name != null) { res = client.readItemListForNamedAuthority(name, null, null); } else { Assert.fail("readItemList passed null csid and name!"); } AbstractCommonList list = null; try { assertStatusCode(res, testName); list = res.readEntity(AbstractCommonList.class); } finally { if (res != null) { res.close(); } } List<AbstractCommonList.ListItem> items = list.getListItem(); int nItemsReturned = items.size(); // There will be 'nItemsToCreateInList' // items created by the createItemList test, // all associated with the same parent resource. 
int nExpectedItems = nItemsToCreateInList; if (logger.isDebugEnabled()) { logger.debug(testName + ": Expected " + nExpectedItems + " items; got: " + nItemsReturned); } Assert.assertEquals(nItemsReturned, nExpectedItems); for (AbstractCommonList.ListItem item : items) { String value = AbstractCommonListUtils.ListItemGetElementValue(item, OrganizationJAXBSchema.REF_NAME); Assert.assertTrue((null != value), "Item refName is null!"); value = AbstractCommonListUtils.ListItemGetElementValue(item, OrganizationJAXBSchema.TERM_DISPLAY_NAME); Assert.assertTrue((null != value), "Item termDisplayName is null!"); } if (logger.isTraceEnabled()) { AbstractCommonListUtils.ListItemsInAbstractCommonList(list, logger, testName); } } /** * Read contact list. */ @Test(groups = {"readList"}, dependsOnMethods = {"org.collectionspace.services.client.test.AbstractAuthorityServiceTest.readItemList"}) public void readContactList() { readContactList(knownResourceId, knownItemResourceId); } /** * Read contact list. * * @param parentcsid the parentcsid * @param itemcsid the itemcsid */ private void readContactList(String parentcsid, String itemcsid) { final String testName = "readContactList"; // Perform setup. setupReadList(); // Submit the request to the service and store the response. OrgAuthorityClient client = new OrgAuthorityClient(); Response res = client.readContactList(parentcsid, itemcsid); AbstractCommonList list = null; try { assertStatusCode(res, testName); list = res.readEntity(AbstractCommonList.class); } finally { if (res != null) { res.close(); } } List<AbstractCommonList.ListItem> listitems = list.getListItem(); int nItemsReturned = listitems.size(); // There will be one item created, associated with a // known parent resource, by the createItem test. // // In addition, there will be 'nItemsToCreateInList' // additional items created by the createItemList test, // all associated with the same parent resource. 
int nExpectedItems = nItemsToCreateInList + 1; if (logger.isDebugEnabled()) { logger.debug(testName + ": Expected " + nExpectedItems + " items; got: " + nItemsReturned); } Assert.assertEquals(nItemsReturned, nExpectedItems); // Optionally output additional data about list members for debugging. boolean iterateThroughList = false; if (iterateThroughList && logger.isDebugEnabled()) { AbstractCommonListUtils.ListItemsInAbstractCommonList(list, logger, testName); } } /** * Update contact. * * @param testName the test name * @throws Exception the exception */ @Test(dataProvider = "testName", groups = {"update"}, dependsOnMethods = {"org.collectionspace.services.client.test.AbstractAuthorityServiceTest.updateItem"}) public void updateContact(String testName) throws Exception { // Perform setup. setupUpdate(); // Retrieve the contents of a resource to update. OrgAuthorityClient client = new OrgAuthorityClient(); Response res = client.readContact(knownResourceId, knownItemResourceId, knownContactResourceId); ContactsCommon contact = null; try { assertStatusCode(res, testName); if (logger.isDebugEnabled()) { logger.debug("got Contact to update with ID: " + knownContactResourceId + " in item: " + knownItemResourceId + " in parent: " + knownResourceId); } PoxPayloadIn input = new PoxPayloadIn(res.readEntity(String.class)); contact = (ContactsCommon) extractPart(input, new ContactClient().getCommonPartName(), ContactsCommon.class); Assert.assertNotNull(contact); } finally { if (res != null) { res.close(); } } // Verify the contents of this resource AddressGroupList addressGroupList = contact.getAddressGroupList(); Assert.assertNotNull(addressGroupList); List<AddressGroup> addressGroups = addressGroupList.getAddressGroup(); Assert.assertNotNull(addressGroups); Assert.assertTrue(addressGroups.size() > 0); String addressPlace1 = addressGroups.get(0).getAddressPlace1(); Assert.assertNotNull(addressPlace1); // Update the contents of this resource. 
addressGroups.get(0).setAddressPlace1("updated-" + addressPlace1); contact.setAddressGroupList(addressGroupList); if (logger.isDebugEnabled()) { logger.debug("to be updated Contact"); logger.debug(objectAsXmlString(contact, ContactsCommon.class)); } // Submit the updated resource to the service and store the response. PoxPayloadOut output = new PoxPayloadOut(ContactClient.SERVICE_PAYLOAD_NAME); PayloadOutputPart commonPart = output.addPart(contact, MediaType.APPLICATION_XML_TYPE); commonPart.setLabel(new ContactClient().getCommonPartName()); res = client.updateContact(knownResourceId, knownItemResourceId, knownContactResourceId, output); try { assertStatusCode(res, testName); // Retrieve the updated resource and verify that its contents exist. PoxPayloadIn input = new PoxPayloadIn(res.readEntity(String.class)); ContactsCommon updatedContact = (ContactsCommon) extractPart(input, new ContactClient().getCommonPartName(), ContactsCommon.class); Assert.assertNotNull(updatedContact); // Verify that the updated resource received the correct data. Assert.assertEquals(updatedContact.getAddressGroupList().getAddressGroup().get(0).getAddressPlace1(), contact.getAddressGroupList().getAddressGroup().get(0).getAddressPlace1(), "Data in updated object did not match submitted data."); } finally { if (res != null) { res.close(); } } } /** * Update non existent contact. * * @param testName the test name * @throws Exception the exception */ @Test(dataProvider = "testName", groups = {"update"}, dependsOnMethods = {"updateContact", "testContactSubmitRequest"}) public void updateNonExistentContact(String testName) throws Exception { // Currently a no-op test } // --------------------------------------------------------------- // CRUD tests : DELETE tests // --------------------------------------------------------------- // Success outcomes // Note: delete sub-resources in ascending hierarchical order, // before deleting their parents. /** * Delete contact. 
* * @param testName the test name * @throws Exception the exception */ @Test(dataProvider = "testName", groups = {"delete"}, dependsOnMethods = {"updateContact"}) public void deleteContact(String testName) throws Exception { // Perform setup. setupDelete(); if (logger.isDebugEnabled()) { logger.debug("parentcsid =" + knownResourceId + " itemcsid = " + knownItemResourceId + " csid = " + knownContactResourceId); } // Submit the request to the service and store the response. OrgAuthorityClient client = new OrgAuthorityClient(); Response res = client.deleteContact(knownResourceId, knownItemResourceId, knownContactResourceId); try { assertStatusCode(res, testName); } finally { if (res != null) { res.close(); } } } @Override public void delete(String testName) throws Exception { // Do nothing. See localDelete(). This ensure proper test order. } @Test(dataProvider = "testName", dependsOnMethods = {"localDeleteItem"}) public void localDelete(String testName) throws Exception { super.delete(testName); } @Override public void deleteItem(String testName) throws Exception { // Do nothing. We need to wait until after the test "localDelete" gets run. When it does, // its dependencies will get run first and then we can call the base class' delete method. } @Test(dataProvider = "testName", groups = {"delete"}, dependsOnMethods = {"verifyIllegalItemDisplayName", "testContactSubmitRequest", "deleteContact"}) public void localDeleteItem(String testName) throws Exception { super.deleteItem(testName); } /** * Delete non existent contact. * * @param testName the test name */ @Test(dataProvider = "testName", groups = {"delete"}, dependsOnMethods = {"deleteContact"}) public void deleteNonExistentContact(String testName) { // Perform setup. setupDeleteNonExistent(); // Submit the request to the service and store the response. 
OrgAuthorityClient client = new OrgAuthorityClient(); Response res = client.deleteContact(knownResourceId, knownItemResourceId, NON_EXISTENT_ID); try { assertStatusCode(res, testName); } finally { if (res != null) { res.close(); } } } /** * Test contact submit request. */ @Test(dependsOnMethods = {"createContact", "readContact", "testItemSubmitRequest"}) public void testContactSubmitRequest() { // Expected status code: 200 OK final int EXPECTED_STATUS = Response.Status.OK.getStatusCode(); // Submit the request to the service and store the response. String method = ServiceRequestType.READ.httpMethodName(); String url = getContactResourceURL(knownResourceId, knownItemResourceId, knownContactResourceId); int statusCode = submitRequest(method, url); // Check the status code of the response: does it match // the expected response(s)? if (logger.isDebugEnabled()) { logger.debug("testContactSubmitRequest: url=" + url + " status=" + statusCode); } Assert.assertEquals(statusCode, EXPECTED_STATUS); } // --------------------------------------------------------------- // Cleanup of resources created during testing // --------------------------------------------------------------- /** * Deletes all resources created by tests, after all tests have been run. * * This cleanup method will always be run, even if one or more tests fail. * For this reason, it attempts to remove all resources created * at any point during testing, even if some of those resources * may be expected to be deleted by certain tests. */ @AfterClass(alwaysRun = true) @Override public void cleanUp() { String noTest = System.getProperty("noTestCleanup"); if (Boolean.TRUE.toString().equalsIgnoreCase(noTest)) { if (logger.isDebugEnabled()) { logger.debug("Skipping Cleanup phase ..."); } return; } if (logger.isDebugEnabled()) { logger.debug("Cleaning up temporary resources created for testing ..."); } String parentResourceId; String itemResourceId; String contactResourceId; // Clean up contact resources. 
parentResourceId = knownResourceId; OrgAuthorityClient client = new OrgAuthorityClient(); for (Map.Entry<String, String> entry : allContactResourceIdsCreated.entrySet()) { contactResourceId = entry.getKey(); itemResourceId = entry.getValue(); // Note: Any non-success responses from the delete operation // below are ignored and not reported. client.deleteContact(parentResourceId, itemResourceId, contactResourceId).close(); } // Clean up item resources. for (Map.Entry<String, String> entry : allResourceItemIdsCreated.entrySet()) { itemResourceId = entry.getKey(); parentResourceId = entry.getValue(); // Note: Any non-success responses from the delete operation // below are ignored and not reported. client.deleteItem(parentResourceId, itemResourceId).close(); } // Clean up parent resources. super.cleanUp(); } // --------------------------------------------------------------- // Utility methods used by tests above // --------------------------------------------------------------- /* (non-Javadoc) * @see org.collectionspace.services.client.test.BaseServiceTest#getServicePathComponent() */ /** * Gets the item service path component. * * @return the item service path component */ public String getItemServicePathComponent() { return AuthorityClient.ITEMS; } /** * Gets the contact service path component. * * @return the contact service path component */ public String getContactServicePathComponent() { return ContactClient.SERVICE_PATH_COMPONENT; } /** * Returns the root URL for the item service. * * This URL consists of a base URL for all services, followed by * a path component for the owning parent, followed by the * path component for the items. * * @param parentResourceIdentifier An identifier (such as a UUID) for the * parent authority resource of the relevant item resource. * * @return The root URL for the item service. 
*/ protected String getItemServiceRootURL(String parentResourceIdentifier) { return getResourceURL(parentResourceIdentifier) + "/" + getItemServicePathComponent(); } /** * Returns the URL of a specific item resource managed by a service, and * designated by an identifier (such as a universally unique ID, or UUID). * * @param parentResourceIdentifier An identifier (such as a UUID) for the * parent authority resource of the relevant item resource. * * @param itemResourceIdentifier An identifier (such as a UUID) for an * item resource. * * @return The URL of a specific item resource managed by a service. */ protected String getItemResourceURL(String parentResourceIdentifier, String itemResourceIdentifier) { return getItemServiceRootURL(parentResourceIdentifier) + "/" + itemResourceIdentifier; } /** * Returns the root URL for the contact service. * * This URL consists of a base URL for all services, followed by * a path component for the owning authority, followed by the * path component for the owning item, followed by the path component * for the contact service. * * @param parentResourceIdentifier An identifier (such as a UUID) for the * parent authority resource of the relevant item resource. * * @param itemResourceIdentifier An identifier (such as a UUID) for an * item resource. * * @return The root URL for the contact service. */ protected String getContactServiceRootURL(String parentResourceIdentifier, String itemResourceIdentifier) { return getItemResourceURL(parentResourceIdentifier, itemResourceIdentifier) + "/" + getContactServicePathComponent(); } /** * Returns the URL of a specific contact resource managed by a service, and * designated by an identifier (such as a universally unique ID, or UUID). * * @param parentResourceIdentifier An identifier (such as a UUID) for the * parent resource of the relevant item resource. * * @param resourceIdentifier An identifier (such as a UUID) for an * item resource. 
* * @return The URL of a specific resource managed by a service. */ protected String getContactResourceURL(String parentResourceIdentifier, String itemResourceIdentifier, String contactResourceIdentifier) { return getContactServiceRootURL(parentResourceIdentifier, itemResourceIdentifier) + "/" + contactResourceIdentifier; } @Override public void authorityTests(String testName) { // TODO Auto-generated method stub } @Override protected String createItemInAuthority(String authorityId) { return createItemInAuthority(authorityId, null /*refname*/); } @Override protected OrganizationsCommon updateItemInstance(OrganizationsCommon organizationsCommon) { OrgTermGroupList termList = organizationsCommon.getOrgTermGroupList(); Assert.assertNotNull(termList); List<OrgTermGroup> terms = termList.getOrgTermGroup(); Assert.assertNotNull(terms); Assert.assertTrue(terms.size() > 0); terms.get(0).setTermDisplayName("updated-" + terms.get(0).getTermDisplayName()); terms.get(0).setTermName("updated-" + terms.get(0).getTermName()); organizationsCommon.setOrgTermGroupList(termList); return organizationsCommon; } @Override protected void compareUpdatedItemInstances(OrganizationsCommon original, OrganizationsCommon updated) throws Exception { OrgTermGroupList originalTermList = original.getOrgTermGroupList(); Assert.assertNotNull(originalTermList); List<OrgTermGroup> originalTerms = originalTermList.getOrgTermGroup(); Assert.assertNotNull(originalTerms); Assert.assertTrue(originalTerms.size() > 0); OrgTermGroupList updatedTermList = updated.getOrgTermGroupList(); Assert.assertNotNull(updatedTermList); List<OrgTermGroup> updatedTerms = updatedTermList.getOrgTermGroup(); Assert.assertNotNull(updatedTerms); Assert.assertTrue(updatedTerms.size() > 0); Assert.assertEquals(updatedTerms.get(0).getTermDisplayName(), originalTerms.get(0).getTermDisplayName(), "Value in updated record did not match submitted data."); } @Override protected PoxPayloadOut createInstance(String commonPartName, String 
identifier) { String shortId = identifier; String displayName = "displayName-" + shortId; //String baseRefName = OrgAuthorityClientUtils.createOrgAuthRefName(shortId, null); PoxPayloadOut result = OrgAuthorityClientUtils.createOrgAuthorityInstance( displayName, shortId, commonPartName); return result; } @Override protected PoxPayloadOut createNonExistenceInstance(String commonPartName, String identifier) { String shortId = identifier; String displayName = "displayName-" + shortId; //String baseRefName = OrgAuthorityClientUtils.createOrgAuthRefName(shortId, null); PoxPayloadOut result = OrgAuthorityClientUtils.createOrgAuthorityInstance( displayName, shortId, commonPartName); return result; } protected PoxPayloadOut createNonExistenceItemInstance(String commonPartName, String identifier) { Map<String, String> nonexOrgMap = new HashMap<String, String>(); nonexOrgMap.put(OrganizationJAXBSchema.SHORT_IDENTIFIER, "nonExistent"); PoxPayloadOut result = OrgAuthorityClientUtils.createOrganizationInstance( knownResourceRefName, nonexOrgMap, OrgAuthorityClientUtils.getTermGroupInstance(TEST_ORG_NAME), commonPartName); return result; } @Override protected OrgauthoritiesCommon updateInstance(OrgauthoritiesCommon orgauthoritiesCommon) { OrgauthoritiesCommon result = new OrgauthoritiesCommon(); result.setDisplayName("updated-" + orgauthoritiesCommon.getDisplayName()); result.setVocabType("updated-" + orgauthoritiesCommon.getVocabType()); return result; } @Override protected void compareUpdatedInstances(OrgauthoritiesCommon original, OrgauthoritiesCommon updated) throws Exception { // Verify that the updated resource received the correct data. Assert.assertEquals(updated.getDisplayName(), original.getDisplayName(), "Display name in updated object did not match submitted data."); } }
package org.apache.lucene.analysis.ngram; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.IOException; import java.io.StringReader; import java.util.Random; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.BaseTokenStreamTestCase; import org.apache.lucene.analysis.MockTokenizer; import org.apache.lucene.analysis.TokenFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.KeywordTokenizer; import org.apache.lucene.analysis.core.LetterTokenizer; import org.apache.lucene.analysis.core.WhitespaceTokenizer; import org.apache.lucene.analysis.shingle.ShingleFilter; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import org.apache.lucene.util.TestUtil; import org.apache.lucene.util.TestUtil; /** * Tests {@link EdgeNGramTokenFilter} for correctness. 
*/ public class EdgeNGramTokenFilterTest extends BaseTokenStreamTestCase { private TokenStream input; @Override public void setUp() throws Exception { super.setUp(); input = whitespaceMockTokenizer("abcde"); } public void testInvalidInput() throws Exception { boolean gotException = false; try { new EdgeNGramTokenFilter(input, 0, 0); } catch (IllegalArgumentException e) { gotException = true; } assertTrue(gotException); } public void testInvalidInput2() throws Exception { boolean gotException = false; try { new EdgeNGramTokenFilter(input, 2, 1); } catch (IllegalArgumentException e) { gotException = true; } assertTrue(gotException); } public void testInvalidInput3() throws Exception { boolean gotException = false; try { new EdgeNGramTokenFilter(input, -1, 2); } catch (IllegalArgumentException e) { gotException = true; } assertTrue(gotException); } public void testFrontUnigram() throws Exception { EdgeNGramTokenFilter tokenizer = new EdgeNGramTokenFilter(input, 1, 1); assertTokenStreamContents(tokenizer, new String[]{"a"}, new int[]{0}, new int[]{5}); } public void testOversizedNgrams() throws Exception { EdgeNGramTokenFilter tokenizer = new EdgeNGramTokenFilter(input, 6, 6); assertTokenStreamContents(tokenizer, new String[0], new int[0], new int[0]); } public void testFrontRangeOfNgrams() throws Exception { EdgeNGramTokenFilter tokenizer = new EdgeNGramTokenFilter(input, 1, 3); assertTokenStreamContents(tokenizer, new String[]{"a","ab","abc"}, new int[]{0,0,0}, new int[]{5,5,5}); } public void testFilterPositions() throws Exception { TokenStream ts = whitespaceMockTokenizer("abcde vwxyz"); EdgeNGramTokenFilter tokenizer = new EdgeNGramTokenFilter(ts, 1, 3); assertTokenStreamContents(tokenizer, new String[]{"a","ab","abc","v","vw","vwx"}, new int[]{0,0,0,6,6,6}, new int[]{5,5,5,11,11,11}, null, new int[]{1,0,0,1,0,0}, null, null, false); } private static class PositionFilter extends TokenFilter { private final PositionIncrementAttribute posIncrAtt = 
addAttribute(PositionIncrementAttribute.class); private boolean started; PositionFilter(final TokenStream input) { super(input); } @Override public final boolean incrementToken() throws IOException { if (input.incrementToken()) { if (started) { posIncrAtt.setPositionIncrement(0); } else { started = true; } return true; } else { return false; } } @Override public void reset() throws IOException { super.reset(); started = false; } } public void testFirstTokenPositionIncrement() throws Exception { TokenStream ts = whitespaceMockTokenizer("a abc"); ts = new PositionFilter(ts); // All but first token will get 0 position increment EdgeNGramTokenFilter filter = new EdgeNGramTokenFilter(ts, 2, 3); // The first token "a" will not be output, since it's smaller than the mingram size of 2. // The second token on input to EdgeNGramTokenFilter will have position increment of 0, // which should be increased to 1, since this is the first output token in the stream. assertTokenStreamContents(filter, new String[] { "ab", "abc" }, new int[] { 2, 2 }, new int[] { 5, 5 }, new int[] { 1, 0 } ); } public void testSmallTokenInStream() throws Exception { input = whitespaceMockTokenizer("abc de fgh"); EdgeNGramTokenFilter tokenizer = new EdgeNGramTokenFilter(input, 3, 3); assertTokenStreamContents(tokenizer, new String[]{"abc","fgh"}, new int[]{0,7}, new int[]{3,10}); } public void testReset() throws Exception { WhitespaceTokenizer tokenizer = new WhitespaceTokenizer(); tokenizer.setReader(new StringReader("abcde")); EdgeNGramTokenFilter filter = new EdgeNGramTokenFilter(tokenizer, 1, 3); assertTokenStreamContents(filter, new String[]{"a","ab","abc"}, new int[]{0,0,0}, new int[]{5,5,5}); tokenizer.setReader(new StringReader("abcde")); assertTokenStreamContents(filter, new String[]{"a","ab","abc"}, new int[]{0,0,0}, new int[]{5,5,5}); } /** blast some random strings through the analyzer */ public void testRandomStrings() throws Exception { for (int i = 0; i < 10; i++) { final int min = 
TestUtil.nextInt(random(), 2, 10); final int max = TestUtil.nextInt(random(), min, 20); Analyzer a = new Analyzer() { @Override protected TokenStreamComponents createComponents(String fieldName) { Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false); return new TokenStreamComponents(tokenizer, new EdgeNGramTokenFilter(tokenizer, min, max)); } }; checkRandomData(random(), a, 100*RANDOM_MULTIPLIER); } } public void testEmptyTerm() throws Exception { Random random = random(); Analyzer a = new Analyzer() { @Override protected TokenStreamComponents createComponents(String fieldName) { Tokenizer tokenizer = new KeywordTokenizer(); return new TokenStreamComponents(tokenizer, new EdgeNGramTokenFilter(tokenizer, 2, 15)); } }; checkAnalysisConsistency(random, a, random.nextBoolean(), ""); } public void testGraphs() throws IOException { TokenStream tk = new LetterTokenizer(); ((Tokenizer)tk).setReader(new StringReader("abc d efgh ij klmno p q")); tk = new ShingleFilter(tk); tk = new EdgeNGramTokenFilter(tk, 7, 10); assertTokenStreamContents(tk, new String[] { "efgh ij", "ij klmn", "ij klmno", "klmno p" }, new int[] { 6,11,11,14 }, new int[] { 13,19,19,21 }, new int[] { 3,1,0,1 }, new int[] { 2,2,2,2 }, 23 ); } public void testSupplementaryCharacters() throws IOException { final String s = TestUtil.randomUnicodeString(random(), 10); final int codePointCount = s.codePointCount(0, s.length()); final int minGram = TestUtil.nextInt(random(), 1, 3); final int maxGram = TestUtil.nextInt(random(), minGram, 10); TokenStream tk = new KeywordTokenizer(); ((Tokenizer)tk).setReader(new StringReader(s)); tk = new EdgeNGramTokenFilter(tk, minGram, maxGram); final CharTermAttribute termAtt = tk.addAttribute(CharTermAttribute.class); final OffsetAttribute offsetAtt = tk.addAttribute(OffsetAttribute.class); tk.reset(); for (int i = minGram; i <= Math.min(codePointCount, maxGram); ++i) { assertTrue(tk.incrementToken()); assertEquals(0, offsetAtt.startOffset()); 
assertEquals(s.length(), offsetAtt.endOffset()); final int end = Character.offsetByCodePoints(s, 0, i); assertEquals(s.substring(0, end), termAtt.toString()); } assertFalse(tk.incrementToken()); } public void test43Tokenizer() { new Lucene43EdgeNGramTokenizer(1, 1); } }
/**
 * Copyright (c) 2013-2021 Nikita Koksharov
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.redisson.transaction;

import org.redisson.RedissonBuckets;
import org.redisson.RedissonKeys;
import org.redisson.RedissonMultiLock;
import org.redisson.api.RFuture;
import org.redisson.api.RKeys;
import org.redisson.api.RLock;
import org.redisson.client.codec.Codec;
import org.redisson.command.CommandAsyncExecutor;
import org.redisson.misc.RPromise;
import org.redisson.misc.RedissonPromise;
import org.redisson.transaction.operation.TransactionalOperation;
import org.redisson.transaction.operation.bucket.BucketSetOperation;
import org.redisson.transaction.operation.bucket.BucketsTrySetOperation;

import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * Transaction-aware variant of {@link RedissonBuckets}. Writes are not sent to
 * Redis immediately: each mutation is recorded as a {@link TransactionalOperation}
 * in {@code operations} and mirrored into the local {@code state} map, so that
 * reads inside the same transaction observe uncommitted changes.
 *
 * @author Nikita Koksharov
 *
 */
public class RedissonTransactionalBuckets extends RedissonBuckets {

    // Sentinel stored in 'state' to mark a key that was set to null (deleted)
    // inside this transaction; distinguishes "locally removed" from "not touched".
    static final Object NULL = new Object();

    // Lock-acquisition timeout (ms) used by executeLocked().
    private long timeout;
    // Set to true once the enclosing transaction committed/rolled back;
    // all further operations must fail (see checkState()).
    private final AtomicBoolean executed;
    // Shared per-transaction log of deferred operations, replayed on commit.
    private final List<TransactionalOperation> operations;
    // Local uncommitted view: key -> value written in this transaction (or NULL sentinel).
    private Map<String, Object> state = new HashMap<>();
    private final String transactionId;

    public RedissonTransactionalBuckets(CommandAsyncExecutor commandExecutor,
            long timeout, List<TransactionalOperation> operations, AtomicBoolean executed, String transactionId) {
        super(commandExecutor);

        this.timeout = timeout;
        this.operations = operations;
        this.executed = executed;
        this.transactionId = transactionId;
    }

    public RedissonTransactionalBuckets(Codec codec, CommandAsyncExecutor commandExecutor,
            long timeout, List<TransactionalOperation> operations, AtomicBoolean executed, String transactionId) {
        super(codec, commandExecutor);

        this.timeout = timeout;
        this.operations = operations;
        this.executed = executed;
        this.transactionId = transactionId;
    }

    /**
     * Reads the requested buckets, serving keys already touched by this
     * transaction from the local {@code state} map and loading only the
     * remaining keys from Redis via the parent implementation.
     */
    @Override
    public <V> RFuture<Map<String, V>> getAsync(String... keys) {
        checkState();

        if (keys.length == 0) {
            return RedissonPromise.newSucceededFuture(Collections.emptyMap());
        }

        Set<String> keysToLoad = new HashSet<>();
        Map<String, V> map = new LinkedHashMap<>();
        for (String key : keys) {
            Object value = state.get(key);
            if (value != null) {
                // Key was written in this transaction; NULL sentinel means
                // it was deleted locally, so it is omitted from the result.
                if (value != NULL) {
                    map.put(key, (V) value);
                }
            } else {
                keysToLoad.add(key);
            }
        }

        if (keysToLoad.isEmpty()) {
            return RedissonPromise.newSucceededFuture(map);
        }

        RPromise<Map<String, V>> result = new RedissonPromise<>();
        super.getAsync(keysToLoad.toArray(new String[keysToLoad.size()])).onComplete((res, e) -> {
            if (e != null) {
                result.tryFailure(e);
                return;
            }

            map.putAll((Map<String, V>) res);
            result.trySuccess(map);
        });
        return result;
    }

    /**
     * Records a deferred set for every entry and updates the local view.
     * The actual Redis writes happen when the transaction commits.
     */
    @Override
    public RFuture<Void> setAsync(Map<String, ?> buckets) {
        checkState();

        RPromise<Void> result = new RedissonPromise<>();
        long currentThreadId = Thread.currentThread().getId();
        executeLocked(result, () -> {
            for (Entry<String, ?> entry : buckets.entrySet()) {
                operations.add(new BucketSetOperation<>(entry.getKey(), getLockName(entry.getKey()), codec, entry.getValue(), transactionId, currentThreadId));
                if (entry.getValue() == null) {
                    state.put(entry.getKey(), NULL);
                } else {
                    state.put(entry.getKey(), entry.getValue());
                }
            }
            result.trySuccess(null);
        }, buckets.keySet());
        return result;
    }

    // Add RKeys.deleteAsync support
    //
//    public RFuture<Long> deleteAsync(String... keys) {
//        checkState();
//        RPromise<Long> result = new RedissonPromise<>();
//        long threadId = Thread.currentThread().getId();
//        executeLocked(result, new Runnable() {
//            @Override
//            public void run() {
//                AtomicLong counter = new AtomicLong();
//                AtomicLong executions = new AtomicLong(keys.length);
//                for (String key : keys) {
//                    Object st = state.get(key);
//                    if (st != null) {
//                        operations.add(new DeleteOperation(key, getLockName(key), transactionId, threadId));
//                        if (st != NULL) {
//                            state.put(key, NULL);
//                            counter.incrementAndGet();
//                        }
//                        if (executions.decrementAndGet() == 0) {
//                            result.trySuccess(counter.get());
//                        }
//                        continue;
//                    }
//
//                    RedissonKeys ks = new RedissonKeys(commandExecutor);
//                    ks.countExistsAsync(key).onComplete((res, e) -> {
//                        if (e != null) {
//                            result.tryFailure(e);
//                            return;
//                        }
//
//                        if (res > 0) {
//                            operations.add(new DeleteOperation(key, getLockName(key), transactionId, threadId));
//                            state.put(key, NULL);
//                            counter.incrementAndGet();
//                        }
//
//                        if (executions.decrementAndGet() == 0) {
//                            result.trySuccess(counter.get());
//                        }
//                    });
//                }
//            }
//        }, Arrays.asList(keys));
//        return result;
//    }

    /**
     * Transactional "set all buckets only if none of them exist".
     * A key already set (non-NULL) inside this transaction makes the
     * whole call fail fast with {@code false}; otherwise existence of the
     * remaining keys is checked in Redis. Note the operation is recorded
     * in every branch, so the commit phase can re-validate it.
     */
    @Override
    public RFuture<Boolean> trySetAsync(Map<String, ?> buckets) {
        checkState();

        RPromise<Boolean> result = new RedissonPromise<>();
        executeLocked(result, () -> {
            Set<String> keysToSet = new HashSet<>();
            for (String key : buckets.keySet()) {
                Object value = state.get(key);
                if (value != null) {
                    if (value != NULL) {
                        // Key already has an uncommitted value -> trySet fails.
                        operations.add(new BucketsTrySetOperation(codec, (Map<String, Object>) buckets, transactionId));
                        result.trySuccess(false);
                        return;
                    }
                } else {
                    keysToSet.add(key);
                }
            }

            if (keysToSet.isEmpty()) {
                // All keys were locally deleted in this transaction -> succeed locally.
                operations.add(new BucketsTrySetOperation(codec, (Map<String, Object>) buckets, transactionId));
                state.putAll(buckets);
                result.trySuccess(true);
                return;
            }

            RKeys keys = new RedissonKeys(commandExecutor);
            String[] ks = keysToSet.toArray(new String[keysToSet.size()]);
            keys.countExistsAsync(ks).onComplete((res, e) -> {
                if (e != null) {
                    result.tryFailure(e);
                    return;
                }

                operations.add(new BucketsTrySetOperation(codec, (Map<String, Object>) buckets, transactionId));
                if (res == 0) {
                    // None of the keys exist in Redis -> apply to local view.
                    state.putAll(buckets);
                    result.trySuccess(true);
                } else {
                    result.trySuccess(false);
                }
            });
        }, buckets.keySet());
        return result;
    }

    /**
     * Runs {@code runnable} after asynchronously acquiring a multi-lock over
     * all affected keys. On lock failure the locks are released and the
     * promise fails; on success the locks are intentionally NOT released
     * here — they are held until the transaction finishes (commit/rollback).
     */
    protected <R> void executeLocked(RPromise<R> promise, Runnable runnable, Collection<String> keys) {
        List<RLock> locks = new ArrayList<>(keys.size());
        for (String key : keys) {
            RLock lock = getLock(key);
            locks.add(lock);
        }
        RedissonMultiLock multiLock = new RedissonMultiLock(locks.toArray(new RLock[locks.size()]));
        long threadId = Thread.currentThread().getId();
        multiLock.lockAsync(timeout, TimeUnit.MILLISECONDS).onComplete((res, e) -> {
            if (e == null) {
                runnable.run();
            } else {
                multiLock.unlockAsync(threadId);
                promise.tryFailure(e);
            }
        });
    }

    // Per-key transactional lock guarding a bucket against concurrent transactions.
    private RLock getLock(String name) {
        return new RedissonTransactionalLock(commandExecutor, getLockName(name), transactionId);
    }

    // Lock key derived from the bucket name.
    private String getLockName(String name) {
        return name + ":transaction_lock";
    }

    // Rejects any operation once the transaction has been committed or rolled back.
    protected void checkState() {
        if (executed.get()) {
            throw new IllegalStateException("Unable to execute operation. Transaction is in finished state!");
        }
    }
}
package com.intellij.openapi.externalSystem.service.notification;

import com.intellij.execution.rmi.RemoteUtil;
import com.intellij.ide.errorTreeView.*;
import com.intellij.notification.Notification;
import com.intellij.notification.NotificationGroup;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.externalSystem.ExternalSystemConfigurableAware;
import com.intellij.openapi.externalSystem.ExternalSystemManager;
import com.intellij.openapi.externalSystem.model.ExternalSystemDataKeys;
import com.intellij.openapi.externalSystem.model.LocationAwareExternalSystemException;
import com.intellij.openapi.externalSystem.model.ProjectSystemId;
import com.intellij.openapi.externalSystem.util.ExternalSystemApiUtil;
import com.intellij.openapi.externalSystem.util.ExternalSystemBundle;
import com.intellij.openapi.externalSystem.util.ExternalSystemUtil;
import com.intellij.openapi.fileEditor.OpenFileDescriptor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.ToolWindow;
import com.intellij.openapi.wm.ToolWindowId;
import com.intellij.openapi.wm.ToolWindowManager;
import com.intellij.pom.Navigatable;
import com.intellij.ui.EditorNotifications;
import com.intellij.ui.content.Content;
import com.intellij.ui.content.ContentFactory;
import com.intellij.ui.content.MessageView;
import com.intellij.util.ObjectUtils;
import com.intellij.util.concurrency.SequentialTaskExecutor;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.ide.PooledThreadExecutor;

import java.util.Iterator;
import java.util.List;
import java.util.Set;

/**
 * This class is responsible for notifying the ide user about external system integration-specific events.
 * <p/>
 * One example use-case is a situation when an error occurs during external project refresh. We need to
 * show corresponding message to the end-user.
 * <p/>
 * Thread-safe.
 *
 * @author Denis Zhdanov, Vladislav Soroka
 * @since 3/21/12 4:04 PM
 */
public class ExternalSystemNotificationManager {
  // Identifies a Messages tool-window tab as belonging to a particular
  // (notification source, external system) pair.
  @NotNull private static final Key<Pair<NotificationSource, ProjectSystemId>> CONTENT_ID_KEY = Key.create("CONTENT_ID");

  // Serializes all notification bookkeeping on a single background queue,
  // so myNotifications/initializedExternalSystem need no extra locking.
  @NotNull private final SequentialTaskExecutor myUpdater = new SequentialTaskExecutor(PooledThreadExecutor.INSTANCE);

  @NotNull private final Project myProject;
  // Live (non-expired) notifications shown so far; mutated only on myUpdater.
  @NotNull private final List<Notification> myNotifications;
  // External systems whose tool-window content has already been initialized.
  @NotNull private final Set<ProjectSystemId> initializedExternalSystem;
  // Per-group/source/category message counters backing getMessageCount().
  @NotNull private final MessageCounter myMessageCounter;

  public ExternalSystemNotificationManager(@NotNull final Project project) {
    myProject = project;
    myNotifications = ContainerUtil.newArrayList();
    initializedExternalSystem = ContainerUtil.newHashSet();
    myMessageCounter = new MessageCounter();
  }

  /** Project-level service accessor. */
  @NotNull
  public static ExternalSystemNotificationManager getInstance(@NotNull Project project) {
    return ServiceManager.getService(project, ExternalSystemNotificationManager.class);
  }

  /**
   * Converts a project-refresh failure into a user-visible notification,
   * extracting file/line/column information when the error is location-aware
   * and letting registered extensions customize the resulting data.
   */
  public void processExternalProjectRefreshError(@NotNull Throwable error,
                                                 @NotNull String externalProjectName,
                                                 @NotNull ProjectSystemId externalSystemId) {
    if (myProject.isDisposed() || !myProject.isOpen()) {
      return;
    }
    ExternalSystemManager<?, ?, ?, ?, ?> manager = ExternalSystemApiUtil.getManager(externalSystemId);
    if (!(manager instanceof ExternalSystemConfigurableAware)) {
      return;
    }

    String title =
      ExternalSystemBundle.message("notification.project.refresh.fail.title", externalSystemId.getReadableName(), externalProjectName);
    String message = ExternalSystemApiUtil.buildErrorMessage(error);
    NotificationCategory notificationCategory = NotificationCategory.ERROR;
    String filePath = null;
    Integer line = null;
    Integer column = null;

    //noinspection ThrowableResultOfMethodCallIgnored
    Throwable unwrapped = RemoteUtil.unwrap(error);
    if (unwrapped instanceof LocationAwareExternalSystemException) {
      LocationAwareExternalSystemException locationAwareExternalSystemException = (LocationAwareExternalSystemException)unwrapped;
      filePath = locationAwareExternalSystemException.getFilePath();
      line = locationAwareExternalSystemException.getLine();
      column = locationAwareExternalSystemException.getColumn();
    }

    NotificationData notificationData =
      new NotificationData(
        title, message, notificationCategory, NotificationSource.PROJECT_SYNC,
        filePath, ObjectUtils.notNull(line, -1), ObjectUtils.notNull(column, -1), false);

    // Only extensions registered for this particular external system may customize.
    for (ExternalSystemNotificationExtension extension : ExternalSystemNotificationExtension.EP_NAME.getExtensions()) {
      if (!externalSystemId.equals(extension.getTargetExternalSystemId())) {
        continue;
      }
      extension.customize(notificationData, myProject, error);
    }

    EditorNotifications.getInstance(myProject).updateAllNotifications();
    showNotification(externalSystemId, notificationData);
  }

  /**
   * Shows the given notification either as a balloon or as an entry in the
   * Messages tool window. Runs asynchronously on the sequential updater queue;
   * lazily initializes the external system's tool-window content on first use.
   */
  public void showNotification(@NotNull final ProjectSystemId externalSystemId, @NotNull final NotificationData notificationData) {
    myUpdater.execute(new Runnable() {
      @Override
      public void run() {
        if (myProject.isDisposed()) return;

        if (!initializedExternalSystem.contains(externalSystemId)) {
          final Application app = ApplicationManager.getApplication();
          Runnable action = new Runnable() {
            @Override
            public void run() {
              app.runWriteAction(new Runnable() {
                @Override
                public void run() {
                  if (myProject.isDisposed()) return;
                  ExternalSystemUtil.ensureToolWindowContentInitialized(myProject, externalSystemId);
                  initializedExternalSystem.add(externalSystemId);
                }
              });
            }
          };
          // Write actions must happen on the dispatch thread; block until done
          // so the tool window exists before the notification is added.
          if (app.isDispatchThread()) {
            action.run();
          } else {
            app.invokeAndWait(action, ModalityState.defaultModalityState());
          }
        }

        final NotificationGroup group = ExternalSystemUtil.getToolWindowElement(
          NotificationGroup.class, myProject, ExternalSystemDataKeys.NOTIFICATION_GROUP, externalSystemId);
        if (group == null) return;

        final Notification notification = group.createNotification(
          notificationData.getTitle(), notificationData.getMessage(),
          notificationData.getNotificationCategory().getNotificationType(), notificationData.getListener());

        myNotifications.add(notification);

        if (notificationData.isBalloonNotification()) {
          applyNotification(notification);
        }
        else {
          addMessage(notification, externalSystemId, notificationData);
        }
      }
    });
  }

  /** Opens (and activates) the Messages view tab for the given source/system pair. */
  public void openMessageView(@NotNull final ProjectSystemId externalSystemId, @NotNull final NotificationSource notificationSource) {
    UIUtil.invokeLaterIfNeeded(new Runnable() {
      @Override
      public void run() {
        prepareMessagesView(externalSystemId, notificationSource, true);
      }
    });
  }

  /** Clears all notifications for the given source/system, regardless of group. */
  public void clearNotifications(@NotNull final NotificationSource notificationSource,
                                 @NotNull final ProjectSystemId externalSystemId) {
    clearNotifications(null, notificationSource, externalSystemId);
  }

  /**
   * Expires matching balloon notifications and removes the corresponding
   * entries/tabs from the Messages tool window.
   *
   * @param groupName group to clear, or {@code null} to clear every group
   */
  public void clearNotifications(@Nullable final String groupName,
                                 @NotNull final NotificationSource notificationSource,
                                 @NotNull final ProjectSystemId externalSystemId) {
    myMessageCounter.remove(groupName, notificationSource, externalSystemId);
    myUpdater.execute(new Runnable() {
      @Override
      public void run() {
        for (Iterator<Notification> iterator = myNotifications.iterator(); iterator.hasNext(); ) {
          Notification notification = iterator.next();
          if (groupName == null || groupName.equals(notification.getGroupId())) {
            notification.expire();
            iterator.remove();
          }
        }

        final ToolWindow toolWindow = ToolWindowManager.getInstance(myProject).getToolWindow(ToolWindowId.MESSAGES_WINDOW);
        if (toolWindow == null) return;

        final Pair<NotificationSource, ProjectSystemId> contentIdPair = Pair.create(notificationSource, externalSystemId);
        final MessageView messageView = ServiceManager.getService(myProject, MessageView.class);
        // Content manipulation must happen on the EDT.
        UIUtil.invokeLaterIfNeeded(new Runnable() {
          @Override
          public void run() {
            for (Content content : messageView.getContentManager().getContents()) {
              if (!content.isPinned() && contentIdPair.equals(content.getUserData(CONTENT_ID_KEY))) {
                if (groupName == null) {
                  // No specific group: drop the whole tab.
                  messageView.getContentManager().removeContent(content, true);
                }
                else {
                  assert content.getComponent() instanceof NewEditableErrorTreeViewPanel;
                  NewEditableErrorTreeViewPanel errorTreeView = (NewEditableErrorTreeViewPanel)content.getComponent();
                  ErrorViewStructure errorViewStructure = errorTreeView.getErrorViewStructure();
                  errorViewStructure.removeGroup(groupName);
                }
              }
            }
          }
        });
      }
    });
  }

  /** Message count across all groups for the given source/category/system. */
  public int getMessageCount(@NotNull final NotificationSource notificationSource,
                             @Nullable final NotificationCategory notificationCategory,
                             @NotNull final ProjectSystemId externalSystemId) {
    return getMessageCount(null, notificationSource, notificationCategory, externalSystemId);
  }

  /** Message count for a particular group ({@code null} group = all groups). */
  public int getMessageCount(@Nullable final String groupName,
                             @NotNull final NotificationSource notificationSource,
                             @Nullable final NotificationCategory notificationCategory,
                             @NotNull final ProjectSystemId externalSystemId) {
    return myMessageCounter.getCount(groupName, notificationSource, notificationCategory, externalSystemId);
  }

  /**
   * Adds the notification as an (optionally navigatable) element of the
   * error tree shown in the Messages tool window.
   */
  private void addMessage(@NotNull final Notification notification,
                          @NotNull final ProjectSystemId externalSystemId,
                          @NotNull final NotificationData notificationData) {
    final VirtualFile virtualFile =
      notificationData.getFilePath() != null ? ExternalSystemUtil.waitForTheFile(notificationData.getFilePath()) : null;
    // Messages are grouped per file when one is known, otherwise per title.
    final String groupName = virtualFile != null ? virtualFile.getPresentableUrl() : notificationData.getTitle();

    myMessageCounter
      .increment(groupName, notificationData.getNotificationSource(), notificationData.getNotificationCategory(), externalSystemId);

    // NotificationData positions are 1-based; OpenFileDescriptor wants 0-based.
    int line = notificationData.getLine() - 1;
    int column = notificationData.getColumn() - 1;
    if (virtualFile == null) line = column = -1;
    final int guiLine = line < 0 ? -1 : line + 1;
    final int guiColumn = column < 0 ? 0 : column + 1;

    final Navigatable navigatable = notificationData.getNavigatable() != null
                                    ? notificationData.getNavigatable()
                                    : virtualFile != null ? new OpenFileDescriptor(myProject, virtualFile, line, column) : null;

    final ErrorTreeElementKind kind =
      ErrorTreeElementKind.convertMessageFromCompilerErrorType(notificationData.getNotificationCategory().getMessageCategory());
    final String[] message = notificationData.getMessage().split("\n");
    final String exportPrefix = NewErrorTreeViewPanel.createExportPrefix(guiLine);
    final String rendererPrefix = NewErrorTreeViewPanel.createRendererPrefix(guiLine, guiColumn);

    UIUtil.invokeLaterIfNeeded(new Runnable() {
      @Override
      public void run() {
        // Only errors/warnings force the tool window to grab focus.
        boolean activate =
          notificationData.getNotificationCategory() == NotificationCategory.ERROR ||
          notificationData.getNotificationCategory() == NotificationCategory.WARNING;
        final NewErrorTreeViewPanel errorTreeView =
          prepareMessagesView(externalSystemId, notificationData.getNotificationSource(), activate);
        final GroupingElement groupingElement = errorTreeView.getErrorViewStructure().getGroupingElement(groupName, null, virtualFile);
        final NavigatableMessageElement navigatableMessageElement;
        if (notificationData.hasLinks()) {
          // Messages with hyperlinks need the editable element so links stay clickable.
          navigatableMessageElement = new EditableNotificationMessageElement(
            notification, kind, groupingElement, message, navigatable, exportPrefix, rendererPrefix);
        }
        else {
          navigatableMessageElement = new NotificationMessageElement(
            kind, groupingElement, message, navigatable, exportPrefix, rendererPrefix);
        }

        errorTreeView.getErrorViewStructure().addNavigatableMessage(groupName, navigatableMessageElement);
        errorTreeView.updateTree();
      }
    });
  }

  /** Shows a balloon notification unless the project was closed meanwhile. */
  private void applyNotification(@NotNull final Notification notification) {
    if (!myProject.isDisposed() && myProject.isOpen()) {
      notification.notify(myProject);
    }
  }

  /**
   * Finds or creates the Messages tool-window tab for the given source/system
   * pair and returns its error-tree panel. Must be called on the EDT.
   */
  @NotNull
  public NewErrorTreeViewPanel prepareMessagesView(@NotNull final ProjectSystemId externalSystemId,
                                                   @NotNull final NotificationSource notificationSource,
                                                   boolean activateView) {
    ApplicationManager.getApplication().assertIsDispatchThread();

    final NewErrorTreeViewPanel errorTreeView;
    final String contentDisplayName = getContentDisplayName(notificationSource, externalSystemId);
    final Pair<NotificationSource, ProjectSystemId> contentIdPair = Pair.create(notificationSource, externalSystemId);
    Content targetContent = findContent(contentIdPair, contentDisplayName);

    final MessageView messageView = ServiceManager.getService(myProject, MessageView.class);
    if (targetContent == null || !contentIdPair.equals(targetContent.getUserData(CONTENT_ID_KEY))) {
      errorTreeView = new NewEditableErrorTreeViewPanel(myProject, null, true, true, null);
      targetContent = ContentFactory.SERVICE.getInstance().createContent(errorTreeView, contentDisplayName, true);
      targetContent.putUserData(CONTENT_ID_KEY, contentIdPair);

      messageView.getContentManager().addContent(targetContent);
      // Dispose the panel together with its tab.
      Disposer.register(targetContent, errorTreeView);
    }
    else {
      assert targetContent.getComponent() instanceof NewEditableErrorTreeViewPanel;
      errorTreeView = (NewEditableErrorTreeViewPanel)targetContent.getComponent();
    }

    messageView.getContentManager().setSelectedContent(targetContent);
    final ToolWindow tw = ToolWindowManager.getInstance(myProject).getToolWindow(ToolWindowId.MESSAGES_WINDOW);
    if (activateView && tw != null && !tw.isActive()) {
      tw.activate(null, false);
    }
    return errorTreeView;
  }

  /** Locates an existing, unpinned tab matching the id pair and display name, if any. */
  @Nullable
  private Content findContent(@NotNull Pair<NotificationSource, ProjectSystemId> contentIdPair, @NotNull String contentDisplayName) {
    Content targetContent = null;
    final MessageView messageView = ServiceManager.getService(myProject, MessageView.class);
    for (Content content : messageView.getContentManager().getContents()) {
      if (contentIdPair.equals(content.getUserData(CONTENT_ID_KEY))
          && StringUtil.equals(content.getDisplayName(), contentDisplayName) && !content.isPinned()) {
        targetContent = content;
      }
    }
    return targetContent;
  }

  /**
   * Builds the tab title for the given notification source; currently only
   * {@code PROJECT_SYNC} is supported.
   */
  @NotNull
  public static String getContentDisplayName(@NotNull final NotificationSource notificationSource,
                                             @NotNull final ProjectSystemId externalSystemId) {
    final String contentDisplayName;
    switch (notificationSource) {
      case PROJECT_SYNC:
        contentDisplayName =
          ExternalSystemBundle.message("notification.messages.project.sync.tab.name", externalSystemId.getReadableName());
        break;
      default:
        throw new AssertionError("unsupported notification source found: " + notificationSource);
    }
    return contentDisplayName;
  }
}
/* * Copyright (C) 2010 Andrew P McSherry * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.andymcsherry.library; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.FloatBuffer; import java.util.ArrayList; import javax.microedition.khronos.egl.EGLConfig; import javax.microedition.khronos.opengles.GL10; import edu.hws.jcm.data.Expression; import edu.hws.jcm.data.Parser; import edu.hws.jcm.data.Variable; import android.app.Activity; import android.content.SharedPreferences; import android.opengl.GLSurfaceView; public class Graph3DRenderer implements GLSurfaceView.Renderer { private Parser p = new Parser(Parser.STANDARD_FUNCTIONS | Parser.OPTIONAL_PARENS | Parser.OPTIONAL_STARS | Parser.OPTIONAL_SPACES | Parser.BRACES | Parser.BRACKETS| Parser.BOOLEANS); private Variable x = new Variable("x"), y = new Variable("y"); private Expression f; public float scale, newScale; private ArrayList<String> functions; private Activity context; private SharedPreferences sp; float[][][][] vertexArray; FloatBuffer[][][] vertexBuffer; FloatBuffer axisBuffer; long milliseconds; boolean dirty, userRotate = false; private float alpha = 0, beta = 0, gamma = 0; public Graph3DRenderer(Activity c) { context = c; scale = 3; } public void onDrawFrame(GL10 gl) { gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT); gl.glMatrixMode(GL10.GL_MODELVIEW); gl.glLoadIdentity(); gl.glTranslatef(0, 0, -3.0f); gl.glRotatef(gamma, 0, 1f, 0); //gl.glRotatef(alpha, 1f, 1f, 0); 
gl.glRotatef(beta,0,0,1f); gl.glEnableClientState(GL10.GL_VERTEX_ARRAY); if(dirty){ setUpArray(); dirty = false; } for(int k = 0; k < functions.size(); k++){ int colors[] = GraphView.colors[k]; gl.glColor4f((float)colors[0]/255, (float)colors[1]/255, (float)colors[2], 1); for(int i = 0; i <= 30; i++){ vertexBuffer[k][i][1].position(0); gl.glVertexPointer(3,GL10.GL_FLOAT,0,vertexBuffer[k][i][0]); gl.glDrawArrays(GL10.GL_LINE_STRIP,0,31); vertexBuffer[k][i][1].position(0); gl.glVertexPointer(3,GL10.GL_FLOAT,0,vertexBuffer[k][i][1]); gl.glDrawArrays(GL10.GL_LINE_STRIP,0,31); } } /*gl.glColor4f(1,1,1,1); axisBuffer.position(0); gl.glVertexPointer(3,GL10.GL_FLOAT,0,axisBuffer); gl.glDrawArrays(GL10.GL_LINE_STRIP,0,31); gl.glRotatef(gamma*2.0f, 0, 0, 1); gl.glTranslatef(0.5f, 0.5f, 0.5f);*/ long newSeconds = System.currentTimeMillis(); if(!userRotate){ //gamma += .04f*(newSeconds-milliseconds); } milliseconds = newSeconds; } public void move(float xMove, float yMove){ float tempAlpha = alpha, tempBeta = beta; tempAlpha += yMove;// * (float)(Math.cos(gamma/180*Math.PI)); tempBeta += yMove;// * (float)(Math.sin(gamma/180*Math.PI)); gamma += xMove + yMove * (float)(Math.abs(Math.cos(gamma/180*Math.PI)/2)); beta = tempBeta; alpha = tempAlpha; } public void setUpArray(){ vertexBuffer = new FloatBuffer[functions.size()][31][2]; for(int k = 0; k < functions.size(); k++){ f = p.parse(functions.get(k)); for(int i = 0; i <= 30; i++){ float[] temp = getDataPoints(true,i); ByteBuffer bb = ByteBuffer.allocateDirect(temp.length*4); bb.order(ByteOrder.nativeOrder()); FloatBuffer fbVertices = bb.asFloatBuffer(); for(int j = 0; j < temp.length; j++){ fbVertices.put(j,temp[j]); } vertexBuffer[k][i][0] = fbVertices; temp = getDataPoints(false,i); bb = ByteBuffer.allocateDirect(temp.length*4); bb.order(ByteOrder.nativeOrder()); fbVertices = bb.asFloatBuffer(); for(int j = 0; j < temp.length; j++){ fbVertices.put(j,temp[j]); } vertexBuffer[k][i][1] = fbVertices; } } ByteBuffer bb = 
ByteBuffer.allocateDirect(72); bb.order(ByteOrder.nativeOrder()); axisBuffer = bb.asFloatBuffer(); float[] temp = new float[]{0,0,0, 0,0,2, 0,0,0, 0,2,0, 0,0,0, 2,0,0}; axisBuffer.put(temp); } public void setScale(float s){ newScale = s; } public void stopRotate(){ userRotate = true; } public void spin(float a){ gamma += a; } public float[] getDataPoints(boolean isX, int num){ float[] vertices = new float[93]; if(isX){ float xVal = (float) (-1.25 + num * .08333333333); x.setVal(xVal*scale); for(int j = 0; j <= 30; j++){ vertices[3*j] = xVal; vertices[3*j+2] = (float)(-1.25 + j*.083333333); y.setVal(vertices[3*j+2]*scale); float z = (float)f.getVal()/scale; vertices[3*j+1] = z; } }else{ float yVal = (float) (-1.25 + num * .0833333333); y.setVal(yVal*scale); for(int j = 0; j <= 30; j++){ vertices[3*j+2] = yVal; vertices[3*j] = (float) (-1.25 + j*.083333333); x.setVal(vertices[3*j]*scale); float z = (float)f.getVal()/scale; vertices[3*j+1] = z; } } return vertices; } public void onSurfaceChanged(GL10 gl, int width, int height) { gl.glViewport(0, 0, width, height); float ratio = (float) width / height; gl.glMatrixMode(GL10.GL_PROJECTION); gl.glLoadIdentity(); gl.glFrustumf(-ratio, ratio, -1, 1, 1, 10); } public void onSurfaceCreated(GL10 gl, EGLConfig config) { p = new Parser(Parser.STANDARD_FUNCTIONS | Parser.OPTIONAL_PARENS | Parser.OPTIONAL_STARS | Parser.OPTIONAL_SPACES | Parser.BRACES | Parser.BRACKETS); x = new Variable("x"); y = new Variable("y"); p.add(x); p.add(y); AndyMath.setUpParser(p); milliseconds = System.currentTimeMillis(); sp = context.getSharedPreferences("functions",0); functions = new ArrayList<String>(); for(int i = 0; i < 6; i ++){ String s = sp.getString("3d" + (i+1), "thisshouldntparse"); if(AndyMath.isValid(s,new String[]{"x","y"})){ functions.add(s); } } setUpArray(); gl.glDisable(GL10.GL_DITHER); gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT, GL10.GL_FASTEST); gl.glClearColor(0,0,0,1); gl.glEnable(GL10.GL_CULL_FACE); 
gl.glShadeModel(GL10.GL_SMOOTH); gl.glEnable(GL10.GL_DEPTH_TEST); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.cache.local;

import org.apache.ignite.internal.processors.cache.CacheLockCandidates;
import org.apache.ignite.internal.processors.cache.CacheObject;
import org.apache.ignite.internal.processors.cache.GridCacheContext;
import org.apache.ignite.internal.processors.cache.GridCacheEntryRemovedException;
import org.apache.ignite.internal.processors.cache.GridCacheMapEntry;
import org.apache.ignite.internal.processors.cache.GridCacheMvcc;
import org.apache.ignite.internal.processors.cache.GridCacheMvccCandidate;
import org.apache.ignite.internal.processors.cache.KeyCacheObject;
import org.apache.ignite.internal.processors.cache.transactions.IgniteInternalTx;
import org.apache.ignite.internal.processors.cache.version.GridCacheVersion;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.jetbrains.annotations.Nullable;

/**
 * Cache entry for local caches.
 */
@SuppressWarnings({"TooBroadScope"})
public class GridLocalCacheEntry extends GridCacheMapEntry {
    /**
     * @param ctx Cache registry.
     * @param key Cache key.
     */
    GridLocalCacheEntry(
        GridCacheContext ctx,
        KeyCacheObject key
    ) {
        super(ctx, key);
    }

    /** {@inheritDoc} */
    @Override public boolean isLocal() {
        return true;
    }

    /**
     * Add local candidate.
     *
     * @param threadId Owning thread ID.
     * @param ver Lock version.
     * @param serOrder Version for serializable transactions ordering.
     * @param serReadVer Optional read entry version for optimistic serializable transaction.
     * @param timeout Timeout to acquire lock.
     * @param reenter Reentry flag.
     * @param tx Transaction flag.
     * @param implicitSingle Implicit transaction flag.
     * @param read Read lock flag.
     * @return New candidate.
     * @throws GridCacheEntryRemovedException If entry has been removed.
     */
    @Nullable GridCacheMvccCandidate addLocal(
        long threadId,
        GridCacheVersion ver,
        @Nullable GridCacheVersion serOrder,
        @Nullable GridCacheVersion serReadVer,
        long timeout,
        boolean reenter,
        boolean tx,
        boolean implicitSingle,
        boolean read
    ) throws GridCacheEntryRemovedException {
        assert serReadVer == null || serOrder != null;

        CacheObject val;
        GridCacheMvccCandidate cand;
        CacheLockCandidates prev;
        CacheLockCandidates owner = null;

        lockEntry();

        try {
            checkObsolete();

            if (serReadVer != null) {
                // Optimistic serializable tx: abort early if the entry version
                // no longer matches the version observed at read time.
                if (!checkSerializableReadVersion(serReadVer))
                    return null;
            }

            GridCacheMvcc mvcc = mvccExtras();

            if (mvcc == null) {
                mvcc = new GridCacheMvcc(cctx);

                mvccExtras(mvcc);
            }

            prev = mvcc.localOwners();

            cand = mvcc.addLocal(
                this,
                /*nearNodeId*/null,
                /*nearVer*/null,
                threadId,
                ver,
                timeout,
                serOrder,
                reenter,
                tx,
                implicitSingle,
                /*dht-local*/false,
                read
            );

            // Drop the MVCC extras once no candidates remain to keep the entry small.
            if (mvcc.isEmpty())
                mvccExtras(null);
            else
                owner = mvcc.localOwners();

            val = this.val;
        }
        finally {
            unlockEntry();
        }

        // Link a fresh (non-reentry) candidate into the per-thread chain;
        // done outside the entry lock.
        if (cand != null && !cand.reentry())
            cctx.mvcc().addNext(cctx, cand);

        // Notify listeners about a possible ownership change (outside the lock).
        checkOwnerChanged(prev, owner, val);

        return cand;
    }

    /**
     * @param cand Candidate.
     */
    void readyLocal(GridCacheMvccCandidate cand) {
        CacheObject val;
        CacheLockCandidates prev = null;
        CacheLockCandidates owner = null;

        lockEntry();

        try {
            GridCacheMvcc mvcc = mvccExtras();

            if (mvcc != null) {
                prev = mvcc.localOwners();

                owner = mvcc.readyLocal(cand);

                if (mvcc.isEmpty())
                    mvccExtras(null);
            }

            val = this.val;
        }
        finally {
            unlockEntry();
        }

        checkOwnerChanged(prev, owner, val);
    }

    /** {@inheritDoc} */
    @Override public boolean tmLock(IgniteInternalTx tx,
        long timeout,
        @Nullable GridCacheVersion serOrder,
        GridCacheVersion serReadVer,
        boolean read)
        throws GridCacheEntryRemovedException {
        // Transaction lock: add a candidate and immediately mark it ready.
        GridCacheMvccCandidate cand = addLocal(
            tx.threadId(),
            tx.xidVersion(),
            serOrder,
            serReadVer,
            timeout,
            /*reenter*/false,
            /*tx*/true,
            tx.implicitSingle(),
            read
        );

        if (cand != null) {
            readyLocal(cand);

            return true;
        }

        return false;
    }

    /**
     * Rechecks if lock should be reassigned.
     */
    public void recheck() {
        CacheObject val;
        CacheLockCandidates prev = null;
        CacheLockCandidates owner = null;

        lockEntry();

        try {
            GridCacheMvcc mvcc = mvccExtras();

            if (mvcc != null) {
                prev = mvcc.allOwners();

                owner = mvcc.recheck();

                if (mvcc.isEmpty())
                    mvccExtras(null);
            }

            val = this.val;
        }
        finally {
            unlockEntry();
        }

        checkOwnerChanged(prev, owner, val);
    }

    /** {@inheritDoc} */
    @Override protected void checkThreadChain(GridCacheMvccCandidate owner) {
        // Must not be invoked while holding this entry's lock: it may lock
        // other entries of the same thread chain below.
        assert !lockedByCurrentThread();

        assert owner != null;
        assert owner.owner() || owner.used() : "Neither owner or used flags are set on ready local candidate: " +
            owner;

        if (owner.next() != null) {
            for (GridCacheMvccCandidate cand = owner.next(); cand != null; cand = cand.next()) {
                assert cand.local();

                // Allow next lock in the thread to proceed.
                if (!cand.used()) {
                    GridCacheContext cctx0 = cand.parent().context();

                    GridLocalCacheEntry e =
                        (GridLocalCacheEntry)cctx0.cache().peekEx(cand.parent().key());

                    // At this point candidate may have been removed and entry destroyed,
                    // so we check for null.
                    if (e != null)
                        e.recheck();

                    break;
                }
            }
        }
    }

    /**
     * Releases local lock.
     */
    void releaseLocal() {
        releaseLocal(Thread.currentThread().getId());
    }

    /**
     * Releases local lock.
     *
     * @param threadId Thread ID.
     */
    private void releaseLocal(long threadId) {
        CacheObject val;
        CacheLockCandidates prev = null;
        CacheLockCandidates owner = null;

        lockEntry();

        try {
            GridCacheMvcc mvcc = mvccExtras();

            if (mvcc != null) {
                prev = mvcc.localOwners();

                mvcc.releaseLocal(threadId);

                if (mvcc.isEmpty())
                    mvccExtras(null);
                else
                    owner = mvcc.allOwners();
            }

            val = this.val;
        }
        finally {
            unlockEntry();
        }

        if (prev != null) {
            for (int i = 0; i < prev.size(); i++) {
                GridCacheMvccCandidate cand = prev.candidate(i);

                // A previous owner that is no longer an owner was unlocked:
                // wake up the next candidate in its thread chain.
                boolean unlocked = owner == null || !owner.hasCandidate(cand.version());

                if (unlocked)
                    checkThreadChain(cand);
            }
        }

        checkOwnerChanged(prev, owner, val);
    }

    /** {@inheritDoc} */
    @Override public boolean removeLock(GridCacheVersion ver) throws GridCacheEntryRemovedException {
        CacheObject val;
        CacheLockCandidates prev = null;
        CacheLockCandidates owner = null;

        GridCacheMvccCandidate doomed;

        GridCacheVersion deferredDelVer;

        lockEntry();

        try {
            GridCacheVersion obsoleteVer = obsoleteVersionExtras();

            // Entry obsoleted by a different version means it was truly removed.
            if (obsoleteVer != null && !obsoleteVer.equals(ver))
                checkObsolete();

            GridCacheMvcc mvcc = mvccExtras();

            doomed = mvcc == null ? null : mvcc.candidate(ver);

            if (doomed != null) {
                prev = mvcc.allOwners();

                mvcc.remove(ver);

                if (mvcc.isEmpty())
                    mvccExtras(null);
                else
                    owner = mvcc.allOwners();
            }

            val = this.val;

            deferredDelVer = this.ver;
        }
        finally {
            unlockEntry();
        }

        if (val == null) {
            // Null value with deferred-delete enabled: schedule the tombstone cleanup.
            boolean deferred = cctx.deferredDelete() && !detached() && !isInternal();

            if (deferred) {
                if (deferredDelVer != null)
                    cctx.onDeferredDelete(this, deferredDelVer);
            }
        }

        if (doomed != null)
            checkThreadChain(doomed);

        checkOwnerChanged(prev, owner, val);

        return doomed != null;
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        lockEntry();

        try {
            return S.toString(GridLocalCacheEntry.class, this, super.toString());
        }
        finally {
            unlockEntry();
        }
    }
}
/**************************************************************** * Licensed to the Apache Software Foundation (ASF) under one * * or more contributor license agreements. See the NOTICE file * * distributed with this work for additional information * * regarding copyright ownership. The ASF licenses this file * * to you under the Apache License, Version 2.0 (the * * "License"); you may not use this file except in compliance * * with the License. You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, * * software distributed under the License is distributed on an * * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * * KIND, either express or implied. See the License for the * * specific language governing permissions and limitations * * under the License. * ****************************************************************/ package org.apache.james.transport.mailets; import java.util.Collection; import java.util.HashSet; import javax.mail.MessagingException; import javax.mail.internet.InternetAddress; import org.apache.mailet.Mail; import org.apache.mailet.MailAddress; /** * <p> * A mailet providing configurable redirection services. * </p> * <p> * Can produce listserver, forward and notify behaviour, with the original * message intact, attached, appended or left out altogether. * </p> * <p> * It differs from {@link Resend} because (i) some defaults are different, * notably for the following parameters: <i>&lt;recipients&gt;</i>, * <i>&lt;to&gt;</i>, <i>&lt;reversePath&gt;</i> and <i>&lt;inline&gt;</i>; (ii) * because it allows the use of the <i>&lt;static&gt;</i> parameter;.<br> * Use <code>Resend</code> if you need full control, <code>Redirect</code> if * the more automatic behaviour of some parameters is appropriate. * </p> * <p> * This built in functionality is controlled by the configuration as laid out * below. 
In the table please note that the parameters controlling message * headers accept the <b>&quot;unaltered&quot;</b> value, whose meaning is to * keep the associated header unchanged and, unless stated differently, * corresponds to the assumed default if the parameter is missing. * </p> * <p> * The configuration parameters are: * </p> * <table width="75%" border="1" cellspacing="2" cellpadding="2"> * <tr valign=top> * <td width="20%">&lt;recipients&gt;</td> * <td width="80%"> * A comma delimited list of addresses for recipients of this message; it will * use the &quot;to&quot; list if not specified, and &quot;unaltered&quot; if * none of the lists is specified.<br> * These addresses will only appear in the To: header if no &quot;to&quot; list * is supplied.<br> * Such addresses can contain &quot;full names&quot;, like <i>Mr. John D. Smith * &lt;john.smith@xyz.com&gt;</i>.<br> * The list can include constants &quot;sender&quot;, &quot;from&quot;, * &quot;replyTo&quot;, &quot;postmaster&quot;, &quot;reversePath&quot;, * &quot;recipients&quot;, &quot;to&quot;, &quot;null&quot; and * &quot;unaltered&quot;; &quot;replyTo&quot; uses the ReplyTo header if * available, otherwise the From header if available, otherwise the Sender * header if available, otherwise the return-path; &quot;from&quot; is made * equivalent to &quot;sender&quot;, and &quot;to&quot; is made equivalent to * &quot;recipients&quot;; &quot;null&quot; is ignored.</td> * </tr> * <tr valign=top> * <td width="20%">&lt;to&gt;</td> * <td width="80%"> * A comma delimited list of addresses to appear in the To: header; the email * will be delivered to any of these addresses if it is also in the recipients * list.<br> * The recipients list will be used if this list is not supplied; if none of the * lists is specified it will be &quot;unaltered&quot;.<br> * Such addresses can contain &quot;full names&quot;, like <i>Mr. John D. 
Smith * &lt;john.smith@xyz.com&gt;</i>.<br> * The list can include constants &quot;sender&quot;, &quot;from&quot;, * &quot;replyTo&quot;, &quot;postmaster&quot;, &quot;reversePath&quot;, * &quot;recipients&quot;, &quot;to&quot;, &quot;null&quot; and * &quot;unaltered&quot;; &quot;from&quot; uses the From header if available, * otherwise the Sender header if available, otherwise the return-path; * &quot;replyTo&quot; uses the ReplyTo header if available, otherwise the From * header if available, otherwise the Sender header if available, otherwise the * return-path; &quot;recipients&quot; is made equivalent to &quot;to&quot;; if * &quot;null&quot; is specified alone it will remove this header.</td> * </tr> * <tr valign=top> * <td width="20%">&lt;sender&gt;</td> * <td width="80%"> * A single email address to appear in the From: and Return-Path: headers and * become the sender.<br> * It can include constants &quot;sender&quot;, &quot;postmaster&quot; and * &quot;unaltered&quot;; &quot;sender&quot; is equivalent to * &quot;unaltered&quot;.<br> * Default: &quot;unaltered&quot;.</td> * </tr> * <tr valign=top> * <td width="20%">&lt;message&gt;</td> * <td width="80%"> * A text message to insert into the body of the email.<br> * Default: no message is inserted.</td> * </tr> * <tr valign=top> * <td width="20%">&lt;inline&gt;</td> * <td width="80%"> * <p> * One of the following items: * </p> * <ul> * <li>unaltered &nbsp;&nbsp;&nbsp;&nbsp;The original message is the new * message, for forwarding/aliasing</li> * <li>heads&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;The * headers of the original message are appended to the message</li> * <li>body&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;The * body of the original is appended to the new message</li> * <li> * all&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp * ;&nbsp;&nbsp;&nbsp;Both headers and body are appended</li> * 
<li>none&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; * Neither body nor headers are appended</li> * </ul> * Default: &quot;body&quot;.</td> * </tr> * <tr valign=top> * <td width="20%">&lt;attachment&gt;</td> * <td width="80%"> * <p> * One of the following items: * </p> * <ul> * <li>heads&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;The headers of the original are * attached as text</li> * <li>body&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;The body of the original is * attached as text</li> * <li>all&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;Both * headers and body are attached as a single text file</li> * <li>none&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;Nothing is attached</li> * <li>message &nbsp;The original message is attached as type message/rfc822, * this means that it can, in many cases, be opened, resent, fw'd, replied to * etc by email client software.</li> * </ul> * Default: &quot;none&quot;.</td> * </tr> * <tr valign=top> * <td width="20%">&lt;passThrough&gt;</td> * <td width="80%"> * true or false, if true the original message continues in the mailet processor * after this mailet is finished. 
False causes the original to be stopped.<br>
 * Default: false.</td>
 * </tr>
 * <tr valign=top>
 * <td width="20%">&lt;fakeDomainCheck&gt;</td>
 * <td width="80%">
 * true or false, if true will check if the sender domain is valid.<br>
 * Default: true.</td>
 * </tr>
 * <tr valign=top>
 * <td width="20%">&lt;attachError&gt;</td>
 * <td width="80%">
 * true or false, if true any error message available to the mailet is appended
 * to the message body (except in the case of inline == unaltered).<br>
 * Default: false.</td>
 * </tr>
 * <tr valign=top>
 * <td width="20%">&lt;replyTo&gt;</td>
 * <td width="80%">
 * A single email address to appear in the Reply-To: header.<br>
 * It can include constants &quot;sender&quot;, &quot;postmaster&quot;,
 * &quot;null&quot; and &quot;unaltered&quot;; if &quot;null&quot; is specified
 * it will remove this header.<br>
 * Default: &quot;unaltered&quot;.</td>
 * </tr>
 * <tr valign=top>
 * <td width="20%">&lt;reversePath&gt;</td>
 * <td width="80%">
 * A single email address to appear in the Return-Path: header.<br>
 * It can include constants &quot;sender&quot;, &quot;postmaster&quot; and
 * &quot;null&quot;; if &quot;null&quot; is specified then it will set it to <>,
 * meaning &quot;null return path&quot;.<br>
 * Notice: the &quot;unaltered&quot; value is <i>not allowed</i>.<br>
 * Default: the value of the <i>&lt;sender&gt;</i> parameter, if set, otherwise
 * remains unaltered.</td>
 * </tr>
 * <tr valign=top>
 * <td width="20%">&lt;subject&gt;</td>
 * <td width="80%">
 * An optional string to use as the subject.<br>
 * Default: keep the original message subject.</td>
 * </tr>
 * <tr valign=top>
 * <td width="20%">&lt;prefix&gt;</td>
 * <td width="80%">
 * An optional subject prefix prepended to the original message subject, or to a
 * new subject specified with the <i>&lt;subject&gt;</i> parameter.<br>
 * For example: <i>[Undeliverable mail]</i>.<br>
 * Default: &quot;&quot;.</td>
 * </tr>
 * <tr valign=top>
 * <td width="20%">&lt;isReply&gt;</td>
 * <td width="80%">
 *
true or false, if true the IN_REPLY_TO header will be set to the id of the * current message.<br> * Default: false.</td> * </tr> * <tr valign=top> * <td width="20%">&lt;debug&gt;</td> * <td width="80%"> * true or false. If this is true it tells the mailet to write some debugging * information to the mailet log.<br> * Default: false.</td> * </tr> * <tr valign=top> * <td width="20%">&lt;static&gt;</td> * <td width="80%"> * true or false. If this is true it tells the mailet that it can reuse all the * initial parameters (to, from, etc) without re-calculating their values. This * will boost performance where a redirect task doesn't contain any dynamic * values. If this is false, it tells the mailet to recalculate the values for * each e-mail processed.<br> * Default: false.</td> * </tr> * </table> * * <p> * Example: * </p> * * <pre> * <code> * &lt;mailet match=&quot;RecipientIs=test@localhost&quot; class=&quot;Redirect&quot;&gt; * &lt;recipients&gt;x@localhost, y@localhost, z@localhost&lt;/recipients&gt; * &lt;to&gt;list@localhost&lt;/to&gt; * &lt;sender&gt;owner@localhost&lt;/sender&gt; * &lt;message&gt;sent on from James&lt;/message&gt; * &lt;inline&gt;unaltered&lt;/inline&gt; * &lt;passThrough&gt;FALSE&lt;/passThrough&gt; * &lt;replyTo&gt;postmaster&lt;/replyTo&gt; * &lt;prefix xml:space="preserve"&gt;[test mailing] &lt;/prefix&gt; * &lt;!-- note the xml:space="preserve" to preserve whitespace --&gt; * &lt;static&gt;TRUE&lt;/static&gt; * &lt;/mailet&gt; * </code> * </pre> * * <p> * and: * </p> * * <pre> * <code> * &lt;mailet match=&quot;All&quot; class=&quot;Redirect&quot;&gt; * &lt;recipients&gt;x@localhost&lt;/recipients&gt; * &lt;sender&gt;postmaster&lt;/sender&gt; * &lt;message xml:space="preserve"&gt;Message marked as spam:&lt;/message&gt; * &lt;inline&gt;heads&lt;/inline&gt; * &lt;attachment&gt;message&lt;/attachment&gt; * &lt;passThrough&gt;FALSE&lt;/passThrough&gt; * &lt;attachError&gt;TRUE&lt;/attachError&gt; * &lt;replyTo&gt;postmaster&lt;/replyTo&gt; * 
&lt;prefix&gt;[spam notification]&lt;/prefix&gt;
 * &lt;static&gt;TRUE&lt;/static&gt;
 * &lt;/mailet&gt;
 * </code>
 * </pre>
 * <p>
 * <i>replyto</i> can be used instead of <i>replyTo</i>; such name is kept for
 * backward compatibility.
 * </p>
 */
public class Redirect extends AbstractRedirect {

    /**
     * Returns a string describing this mailet.
     *
     * @return a string describing this mailet
     */
    public String getMailetInfo() {
        return "Redirect Mailet";
    }

    /** Gets the expected init parameters. */
    protected String[] getAllowedInitParameters() {
        // Both "replyTo" and the legacy "replyto" spellings are accepted.
        String[] allowedArray = { "static", "debug", "passThrough", "fakeDomainCheck", "inline", "attachment", "message", "recipients", "to", "replyTo", "replyto", "reversePath", "sender", "subject", "prefix", "attachError", "isReply" };
        return allowedArray;
    }

    /**
     * @return the <code>static</code> init parameter
     */
    protected boolean isStatic() {
        // Field is inherited from AbstractRedirect.
        return isStatic;
    }

    /**
     * @return the <code>inline</code> init parameter
     */
    protected int getInLineType() throws MessagingException {
        // Redirect defaults <inline> to "body".
        return getTypeCode(getInitParameter("inline", "body"));
    }

    /**
     * @return the <code>recipients</code> init parameter or the postmaster
     *         address or <code>SpecialAddress.SENDER</code> or
     *         <code>SpecialAddress.REVERSE_PATH</code> or
     *         <code>SpecialAddress.UNALTERED</code> or the <code>to</code> init
     *         parameter if missing or <code>null</code> if also the latter is
     *         missing
     */
    protected Collection<MailAddress> getRecipients() throws MessagingException {
        Collection<MailAddress> newRecipients = new HashSet<MailAddress>();
        // Fall back to the <to> parameter when <recipients> is absent.
        String addressList = getInitParameter("recipients", getInitParameter("to"));

        // if nothing was specified, return <code>null</code> meaning no change
        if (addressList == null) {
            return null;
        }

        try {
            // Lenient parsing (strict == false) tolerates "full name" address forms.
            InternetAddress[] iaarray = InternetAddress.parse(addressList, false);
            for (int i = 0; i < iaarray.length; i++) {
                String addressString = iaarray[i].getAddress();
                // Constants such as "sender" / "postmaster" map to special addresses.
                MailAddress specialAddress = getSpecialAddress(addressString, new String[] { "postmaster", "sender", "from", "replyTo", "reversePath", "unaltered", "recipients", "to", "null" });
                if (specialAddress != null) {
                    newRecipients.add(specialAddress);
                } else {
                    newRecipients.add(new MailAddress(iaarray[i]));
                }
            }
        } catch (Exception e) {
            throw new MessagingException("Exception thrown in getRecipients() parsing: " + addressList, e);
        }
        if (newRecipients.size() == 0) {
            throw new MessagingException("Failed to initialize \"recipients\" list; empty <recipients> init parameter found.");
        }

        return newRecipients;
    }

    /**
     * @return the <code>to</code> init parameter or the postmaster address or
     *         <code>SpecialAddress.SENDER</code> or
     *         <code>SpecialAddress.REVERSE_PATH</code> or
     *         <code>SpecialAddress.UNALTERED</code> or the
     *         <code>recipients</code> init parameter if missing or
     *         <code>null</code> if also the latter is missing
     */
    protected InternetAddress[] getTo() throws MessagingException {
        InternetAddress[] iaarray = null;
        // Fall back to the <recipients> parameter when <to> is absent.
        String addressList = getInitParameter("to", getInitParameter("recipients"));

        // if nothing was specified, return null meaning no change
        if (addressList == null) {
            return null;
        }

        try {
            iaarray = InternetAddress.parse(addressList, false);
            for (int i = 0; i < iaarray.length; ++i) {
                String addressString = iaarray[i].getAddress();
                MailAddress specialAddress = getSpecialAddress(addressString, new String[] { "postmaster", "sender", "from", "replyTo", "reversePath", "unaltered", "recipients", "to", "null" });
                if (specialAddress != null) {
                    // Replace the parsed address with its "special" counterpart.
                    iaarray[i] = specialAddress.toInternetAddress();
                }
            }
        } catch (Exception e) {
            throw new MessagingException("Exception thrown in getTo() parsing: " + addressList, e);
        }
        if (iaarray.length == 0) {
            throw new MessagingException("Failed to initialize \"to\" list; empty <to> init parameter found.");
        }

        return iaarray;
    }

    /**
     * @return the <code>reversePath</code> init parameter or the postmaster
     *         address or <code>SpecialAddress.SENDER</code> or
     *         <code>SpecialAddress.NULL</code> or <code>null</code> if missing
     */
    protected MailAddress getReversePath() throws MessagingException {
        String addressString = getInitParameter("reversePath");
        if (addressString != null) {
            MailAddress specialAddress = getSpecialAddress(addressString, new String[] { "postmaster", "sender", "null" });
            if (specialAddress != null) {
                return specialAddress;
            }

            try {
                return new MailAddress(addressString);
            } catch (Exception e) {
                throw new MessagingException("Exception thrown in getReversePath() parsing: " + addressString, e);
            }
        }

        return null;
    }

    /**
     * @return {@link AbstractRedirect#getReversePath()}; if null return
     *         {@link AbstractRedirect#getSender(Mail)}, meaning the new
     *         requested sender if any
     */
    protected MailAddress getReversePath(Mail originalMail) throws MessagingException {
        MailAddress reversePath = super.getReversePath(originalMail);
        if (reversePath == null) {
            // Default the reverse path to the configured sender, if any.
            reversePath = getSender(originalMail);
        }

        return reversePath;
    }
}
/* Copyright (c) 2007-2009, Yusuke Yamamoto All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the Yusuke Yamamoto nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY Yusuke Yamamoto ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL Yusuke Yamamoto BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 */
package com.icooding.weibo.http;

import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.zip.GZIPInputStream;

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;

import org.apache.log4j.Logger;
import org.w3c.dom.Document;
import org.xml.sax.SAXException;

import com.icooding.weibo.model.Configuration;
import com.icooding.weibo.model.WeiboException;
import com.icooding.weibo.org.json.JSONArray;
import com.icooding.weibo.org.json.JSONException;
import com.icooding.weibo.org.json.JSONObject;

/**
 * A data class representing HTTP Response
 *
 * @author Yusuke Yamamoto - yusuke at mac.com
 */
public class Response {
    // Debug flag is read once at class-load time.
    private final static boolean DEBUG = Configuration.getDebug();

    static Logger log = Logger.getLogger(Response.class.getName());

    // DocumentBuilder is not thread-safe, so one instance is kept per thread.
    private static ThreadLocal<DocumentBuilder> builders = new ThreadLocal<DocumentBuilder>() {
        @Override
        protected DocumentBuilder initialValue() {
            try {
                return DocumentBuilderFactory.newInstance()
                        .newDocumentBuilder();
            } catch (ParserConfigurationException ex) {
                throw new ExceptionInInitializerError(ex);
            }
        }
    };

    private int statusCode;
    // Lazily parsed / cached views of the response body.
    private Document responseAsDocument = null;
    private String responseAsString = null;
    // Raw (possibly gzip-decoded) body stream; null when the no-arg constructor is used.
    private InputStream is;
    private HttpURLConnection con;
    // Set once the stream has been fully read by asString().
    private boolean streamConsumed = false;

    public Response() {
    }

    public Response(HttpURLConnection con) throws IOException {
        this.con = con;
        this.statusCode = con.getResponseCode();
        // Prefer the error stream so HTTP error bodies remain readable;
        // fall back to the regular input stream for success responses.
        if (null == (is = con.getErrorStream())) {
            is = con.getInputStream();
        }
        if (null != is && "gzip".equals(con.getContentEncoding())) {
            // the response is gzipped
            is = new GZIPInputStream(is);
        }
    }

    // for test purpose
    /*package*/ Response(String content) {
        this.responseAsString = content;
    }
    public int getStatusCode() {
        return statusCode;
    }

    /** Returns the named HTTP response header, or null when no connection is attached. */
    public String getResponseHeader(String name) {
        if (con != null)
            return con.getHeaderField(name);
        else
            return null;
    }

    /**
     * Returns the response stream.<br>
     * This method cannot be called after calling asString() or asDocument()<br>
     * It is suggested to call disconnect() after consuming the stream.
     *
     * Disconnects the internal HttpURLConnection silently.
     * @return response body stream
     * @see #disconnect()
     */
    public InputStream asStream() {
        if (streamConsumed) {
            throw new IllegalStateException("Stream has already been consumed.");
        }
        return is;
    }

    /**
     * Returns the response body as string.<br>
     * Disconnects the internal HttpURLConnection silently.
     * @return response body
     * @throws WeiboException on I/O failure while reading the body
     */
    public String asString() throws WeiboException {
        if (null == responseAsString) {
            BufferedReader br;
            try {
                InputStream stream = asStream();
                if (null == stream) {
                    return null;
                }
                br = new BufferedReader(new InputStreamReader(stream, "UTF-8"));
                StringBuffer buf = new StringBuffer();
                String line;
                while (null != (line = br.readLine())) {
                    buf.append(line).append("\n");
                }
                this.responseAsString = buf.toString();
                if (Configuration.isDalvik()) {
                    // NOTE(review): numeric character references are only unescaped
                    // on Dalvik/Android builds — confirm this asymmetry is intended.
                    this.responseAsString = unescape(responseAsString);
                }
                log(responseAsString);
                // NOTE(review): neither stream nor br is closed if an exception is
                // thrown above, and br itself is never closed (only the underlying
                // stream) — consider try-with-resources / finally.
                stream.close();
                con.disconnect();
                streamConsumed = true;
            } catch (NullPointerException npe) {
                // don't remember in which case npe can be thrown
                throw new WeiboException(npe.getMessage(), npe);
            } catch (IOException ioe) {
                throw new WeiboException(ioe.getMessage(), ioe);
            }
        }
        return responseAsString;
    }

    /**
     * Returns the response body as org.w3c.dom.Document.<br>
     * Disconnects the internal HttpURLConnection silently.
     * @return response body as org.w3c.dom.Document
     * @throws WeiboException if the body is not well-formed XML or reading fails
     */
    public Document asDocument() throws WeiboException {
        if (null == responseAsDocument) {
            try {
                // it should be faster to read the inputstream directly.
// but makes it difficult to troubleshoot this.responseAsDocument = builders.get().parse(new ByteArrayInputStream(asString().getBytes("UTF-8"))); } catch (SAXException saxe) { throw new WeiboException("The response body was not well-formed:\n" + responseAsString, saxe); } catch (IOException ioe) { throw new WeiboException("There's something with the connection.", ioe); } } return responseAsDocument; } /** * Returns the response body as sinat4j.org.json.JSONObject.<br> * Disconnects the internal HttpURLConnection silently. * @return response body as sinat4j.org.json.JSONObject * @throws WeiboException */ public JSONObject asJSONObject() throws WeiboException { try { return new JSONObject(asString()); } catch (JSONException jsone) { throw new WeiboException(jsone.getMessage() + ":" + this.responseAsString, jsone); } } /** * Returns the response body as sinat4j.org.json.JSONArray.<br> * Disconnects the internal HttpURLConnection silently. * @return response body as sinat4j.org.json.JSONArray * @throws WeiboException */ public JSONArray asJSONArray() throws WeiboException { try { return new JSONArray(asString()); } catch (Exception jsone) { throw new WeiboException(jsone.getMessage() + ":" + this.responseAsString, jsone); } } public InputStreamReader asReader() { try { return new InputStreamReader(is, "UTF-8"); } catch (java.io.UnsupportedEncodingException uee) { return new InputStreamReader(is); } } public void disconnect(){ con.disconnect(); } private static Pattern escaped = Pattern.compile("&#([0-9]{3,5});"); /** * Unescape UTF-8 escaped characters to string. * @author pengjianq...@gmail.com * * @param original The string to be unescaped. 
* @return The unescaped string */ public static String unescape(String original) { Matcher mm = escaped.matcher(original); StringBuffer unescaped = new StringBuffer(); while (mm.find()) { mm.appendReplacement(unescaped, Character.toString( (char) Integer.parseInt(mm.group(1), 10))); } mm.appendTail(unescaped); return unescaped.toString(); } @Override public String toString() { if(null != responseAsString){ return responseAsString; } return "Response{" + "statusCode=" + statusCode + ", response=" + responseAsDocument + ", responseString='" + responseAsString + '\'' + ", is=" + is + ", con=" + con + '}'; } private void log(String message) { if (DEBUG) { log.debug("[" + new java.util.Date() + "]" + message); } } private void log(String message, String message2) { if (DEBUG) { log(message + message2); } } public String getResponseAsString() { return responseAsString; } public void setResponseAsString(String responseAsString) { this.responseAsString = responseAsString; } public void setStatusCode(int statusCode) { this.statusCode = statusCode; } }
/* * Copyright 2014 The Board of Trustees of The Leland Stanford Junior University. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.github.susom.database; import java.io.InputStream; import java.io.Reader; import java.io.StringReader; import java.math.BigDecimal; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.Statement; import java.time.LocalDate; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.annotation.Nonnull; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.github.susom.database.MixedParameterSql.RewriteArg; /** * This is the key class for configuring (query parameters) and executing a database query. * * @author garricko */ public class SqlInsertImpl implements SqlInsert { private static final Logger log = LoggerFactory.getLogger(Database.class); private final Connection connection; private final DatabaseMock mock; private final StatementAdaptor adaptor; private final String sql; private final Options options; private List<Batch> batched; private List<Object> parameterList; // !null ==> traditional ? 
args private Map<String, Object> parameterMap; // !null ==> named :abc args private String pkArgName; private int pkPos; private String pkSeqName; private Long pkLong; public SqlInsertImpl(Connection connection, DatabaseMock mock, String sql, Options options) { this.connection = connection; this.mock = mock; this.sql = sql; this.options = options; adaptor = new StatementAdaptor(options); } @Nonnull @Override public SqlInsert argBoolean(Boolean arg) { return positionalArg(adaptor.nullString(booleanToString(arg))); } @Nonnull @Override public SqlInsert argBoolean(@Nonnull String argName, Boolean arg) { return namedArg(argName, adaptor.nullString(booleanToString(arg))); } @Override @Nonnull public SqlInsert argInteger(Integer arg) { return positionalArg(adaptor.nullNumeric(arg)); } @Override @Nonnull public SqlInsert argInteger(@Nonnull String argName, Integer arg) { return namedArg(argName, adaptor.nullNumeric(arg)); } @Override @Nonnull public SqlInsert argLong(Long arg) { return positionalArg(adaptor.nullNumeric(arg)); } @Override @Nonnull public SqlInsert argLong(@Nonnull String argName, Long arg) { return namedArg(argName, adaptor.nullNumeric(arg)); } @Override @Nonnull public SqlInsert argFloat(Float arg) { return positionalArg(adaptor.nullNumeric(arg)); } @Override @Nonnull public SqlInsert argFloat(@Nonnull String argName, Float arg) { return namedArg(argName, adaptor.nullNumeric(arg)); } @Override @Nonnull public SqlInsert argDouble(Double arg) { return positionalArg(adaptor.nullNumeric(arg)); } @Override @Nonnull public SqlInsert argDouble(@Nonnull String argName, Double arg) { return namedArg(argName, adaptor.nullNumeric(arg)); } @Override @Nonnull public SqlInsert argBigDecimal(BigDecimal arg) { return positionalArg(adaptor.nullNumeric(arg)); } @Override @Nonnull public SqlInsert argBigDecimal(@Nonnull String argName, BigDecimal arg) { return namedArg(argName, adaptor.nullNumeric(arg)); } @Override @Nonnull public SqlInsert argString(String arg) { return 
positionalArg(adaptor.nullString(arg)); } @Override @Nonnull public SqlInsert argString(@Nonnull String argName, String arg) { return namedArg(argName, adaptor.nullString(arg)); } @Override @Nonnull public SqlInsert argDate(Date arg) { return positionalArg(adaptor.nullDate(arg)); } @Override @Nonnull public SqlInsert argDate(@Nonnull String argName, Date arg) { return namedArg(argName, adaptor.nullDate(arg)); } @Override @Nonnull public SqlInsert argLocalDate(@Nonnull String argName, LocalDate arg) { return namedArg(argName, adaptor.nullLocalDate(arg)); } @Override @Nonnull public SqlInsert argLocalDate(LocalDate arg) { return positionalArg(adaptor.nullLocalDate(arg)); } @Nonnull @Override public SqlInsert argDateNowPerApp() { return positionalArg(adaptor.nullDate(options.currentDate())); } @Override @Nonnull public SqlInsert argDateNowPerApp(@Nonnull String argName) { return namedArg(argName, adaptor.nullDate(options.currentDate())); } @Nonnull @Override public SqlInsert argDateNowPerDb() { if (options.useDatePerAppOnly()) { return positionalArg(adaptor.nullDate(options.currentDate())); } return positionalArg(new RewriteArg(options.flavor().dbTimeMillis())); } @Override @Nonnull public SqlInsert argDateNowPerDb(@Nonnull String argName) { if (options.useDatePerAppOnly()) { return namedArg(argName, adaptor.nullDate(options.currentDate())); } return namedArg(argName, new RewriteArg(options.flavor().dbTimeMillis())); } @Override @Nonnull public SqlInsert argBlobBytes(byte[] arg) { return positionalArg(adaptor.nullBytes(arg)); } @Override @Nonnull public SqlInsert argBlobBytes(@Nonnull String argName, byte[] arg) { return namedArg(argName, adaptor.nullBytes(arg)); } @Override @Nonnull public SqlInsert argBlobStream(InputStream arg) { return positionalArg(adaptor.nullInputStream(arg)); } @Override @Nonnull public SqlInsert argBlobStream(@Nonnull String argName, InputStream arg) { return namedArg(argName, adaptor.nullInputStream(arg)); } @Override @Nonnull public 
// NOTE(review): the annotations/modifiers of this first method begin before this chunk;
// only the remainder of its declaration is visible here. The enclosing class implements
// SqlInsert (builder-style: arg*() methods accumulate parameters, insert*() executes).
SqlInsert argClobString(String arg) {
  // Wrap the String in a Reader so the adaptor can bind it as a CLOB; null stays null.
  return positionalArg(adaptor.nullClobReader(arg == null ? null : new InternalStringReader(arg)));
}

@Override
@Nonnull
public SqlInsert argClobString(@Nonnull String argName, String arg) {
  // Named (:argName) variant of the CLOB-from-String argument.
  return namedArg(argName, adaptor.nullClobReader(arg == null ? null : new InternalStringReader(arg)));
}

@Override
@Nonnull
public SqlInsert argClobReader(Reader arg) {
  // Positional CLOB argument streamed from a caller-supplied Reader.
  return positionalArg(adaptor.nullClobReader(arg));
}

@Override
@Nonnull
public SqlInsert argClobReader(@Nonnull String argName, Reader arg) {
  // Named CLOB argument streamed from a caller-supplied Reader.
  return namedArg(argName, adaptor.nullClobReader(arg));
}

@Nonnull
@Override
public SqlInsert withArgs(SqlArgs args) {
  // SqlArgs is itself an Apply; delegate to apply().
  return apply(args);
}

@Nonnull
@Override
public SqlInsert apply(Apply apply) {
  // Let the caller-provided functional mutate this builder, then return it for chaining.
  apply.apply(this);
  return this;
}

@Override
public SqlInsert batch() {
  // Snapshot the currently accumulated parameters as one batch row, then start fresh
  // collections for the next row. Calling with no accumulated parameters is a no-op.
  if ((parameterList != null && !parameterList.isEmpty()) || (parameterMap != null && !parameterMap.isEmpty())) {
    if (batched == null) {
      batched = new ArrayList<>();
    }
    batched.add(new Batch(parameterList, parameterMap));
    parameterList = new ArrayList<>();
    parameterMap = new HashMap<>();
  }
  return this;
}

@Override
public int insert() {
  // Zero means "do not enforce an expected affected-row count".
  return updateInternal(0);
}

@Override
public void insert(int expectedRowsUpdated) {
  // Throws WrongNumberOfRowsException (via updateInternal) if the count differs.
  updateInternal(expectedRowsUpdated);
}

@Override
public void insertBatch() {
  // Execute the batch and verify every row reported exactly one affected row.
  int[] result = updateBatch();
  for (int r : result) {
    // Tolerate SUCCESS_NO_INFO for older versions of Oracle
    if (r != 1 && r != Statement.SUCCESS_NO_INFO) {
      throw new DatabaseException("Batch did not return the expected result: " + Arrays.toString(result));
    }
  }
}

@Override
public int[] insertBatchUnchecked() {
  // Caller takes responsibility for interpreting the per-row results.
  return updateBatch();
}

@Override
public Long insertReturningPkSeq(String primaryKeyColumnName) {
  // Requires argPkSeq() so we know which sequence produces the key.
  if (!hasPk()) {
    throw new DatabaseException("Call argPkSeq() before insertReturningPkSeq()");
  }
  if (options.flavor().supportsInsertReturning()) {
    // Database can hand the generated key back from the insert itself.
    return updateInternal(1, primaryKeyColumnName);
  } else {
    // Simulate by issuing a select for the next sequence value, inserting, and returning it
    Long pk = new SqlSelectImpl(connection, mock, options.flavor().sequenceSelectNextVal(pkSeqName), options).queryLongOrNull();
    if (pk == null) {
      throw new DatabaseException("Unable to retrieve next sequence value from " + pkSeqName);
    }
    // Substitute the fetched value for the sequence placeholder (named or positional).
    if (pkArgName != null) {
      namedArg(pkArgName, adaptor.nullNumeric(pk));
    } else {
      parameterList.set(pkPos, adaptor.nullNumeric(pk));
    }
    updateInternal(1);
    return pk;
  }
}

@Override
public <T> T insertReturning(String tableName, String primaryKeyColumnName, RowsHandler<T> handler, String... otherColumnNames) {
  // Insert one row and read back the pk plus any requested columns via the handler.
  if (!hasPk()) {
    throw new DatabaseException("Identify a primary key with argPk*() before insertReturning()");
  }
  if (options.flavor().supportsInsertReturning()) {
    return updateInternal(1, primaryKeyColumnName, handler, otherColumnNames);
  } else if (pkSeqName != null) {
    // Simulate by issuing a select for the next sequence value, inserting, and returning it
    Long pk = new SqlSelectImpl(connection, mock, options.flavor().sequenceSelectNextVal(pkSeqName), options)
        .queryLongOrNull();
    if (pk == null) {
      throw new DatabaseException("Unable to retrieve next sequence value from " + pkSeqName);
    }
    if (pkArgName != null) {
      namedArg(pkArgName, adaptor.nullNumeric(pk));
    } else {
      parameterList.set(pkPos, adaptor.nullNumeric(pk));
    }
    updateInternal(1);
    // Read the row back by primary key so the handler sees the requested columns.
    StringBuilder sql = new StringBuilder();
    sql.append("select ").append(primaryKeyColumnName);
    for (String colName : otherColumnNames) {
      sql.append(", ").append(colName);
    }
    sql.append(" from ").append(tableName).append(" where ").append(primaryKeyColumnName).append("=?");
    return new SqlSelectImpl(connection, mock, sql.toString(), options).argLong(pk).query(handler);
  } else if (pkLong != null) {
    // Insert the value, then do a select based on the primary key
    updateInternal(1);
    StringBuilder sql = new StringBuilder();
    sql.append("select ").append(primaryKeyColumnName);
    for (String colName : otherColumnNames) {
      sql.append(", ").append(colName);
    }
    sql.append(" from ").append(tableName).append(" where ").append(primaryKeyColumnName).append("=?");
    return new SqlSelectImpl(connection, mock, sql.toString(), options).argLong(pkLong).query(handler);
  } else {
    // Should never happen if our safety checks worked
    throw new DatabaseException("Internal error");
  }
}

@Nonnull
@Override
public SqlInsert argPkSeq(@Nonnull String sequenceName) {
  // Only one argPk*() per statement; subsequent calls are only legal across batch rows,
  // and then must use the same sequence at the same positional slot.
  if (hasPk() && batched == null) {
    throw new DatabaseException("Only call one argPk*() method");
  }
  if (hasPk() && (!pkSeqName.equals(sequenceName) || pkPos != parameterList.size())) {
    throw new DatabaseException("The argPkSeq() calls must be in the same position across batch records");
  }
  pkSeqName = sequenceName;
  // RewriteArg splices the flavor-specific "seq.nextval" expression into the SQL text.
  SqlInsert sqlInsert = positionalArg(new RewriteArg(options.flavor().sequenceNextVal(sequenceName)));
  pkPos = parameterList.size() - 1;
  return sqlInsert;
}

@Override
@Nonnull
public SqlInsert argPkSeq(@Nonnull String argName, @Nonnull String sequenceName) {
  if (hasPk() && batched == null) {
    throw new DatabaseException("Only call one argPk*() method");
  }
  if (hasPk() && !argName.equals(pkArgName)) {
    throw new DatabaseException("The primary key argument name must match across batch rows");
  }
  pkArgName = argName;
  pkSeqName = sequenceName;
  return namedArg(argName, new RewriteArg(options.flavor().sequenceNextVal(sequenceName)));
}

@Override
@Nonnull
public SqlInsert argPkLong(String argName, Long arg) {
  // Caller supplies the primary key value directly (named parameter form).
  if (hasPk() && batched == null) {
    throw new DatabaseException("Only call one argPk*() method");
  }
  if (hasPk() && !argName.equals(pkArgName)) {
    throw new DatabaseException("The primary key argument name must match across batch rows");
  }
  pkArgName = argName;
  pkLong = arg;
  return namedArg(argName, adaptor.nullNumeric(arg));
}

@Override
@Nonnull
public SqlInsert argPkLong(Long arg) {
  // Caller supplies the primary key value directly (positional parameter form).
  if (hasPk() && batched == null) {
    throw new DatabaseException("Only call one argPk*() method");
  }
  if (hasPk() && pkPos != parameterList.size()) {
    throw new DatabaseException("The argPkLong() calls must be in the same position across batch records");
  }
  pkLong = arg;
  SqlInsert sqlInsert = positionalArg(adaptor.nullNumeric(arg));
  pkPos = parameterList.size() - 1;
  return sqlInsert;
}

// True once any of the argPk*() variants has registered a primary key.
private boolean hasPk() {
  return pkArgName != null || pkSeqName != null || pkLong != null;
}

// Execute all accumulated batch rows in one JDBC batch (or against the mock when
// no connection is present). Returns the per-row affected counts.
private int[] updateBatch() {
  batch(); // flush any trailing un-batched parameters into a final row
  if (batched == null || batched.size() == 0) {
    throw new DatabaseException("Batch insert requires parameters");
  }

  PreparedStatement ps = null;
  Metric metric = new Metric(log.isDebugEnabled());
  String executeSql = sql;
  Object[] firstRowParameters = null;
  List<Object[]> parameters = new ArrayList<>();
  boolean isSuccess = false;
  String errorCode = null;
  Exception logEx = null;
  try {
    // Resolve named/positional parameters per row; every row must yield the same SQL
    // text or the single prepared statement would be wrong for some rows.
    for (Batch batch : batched) {
      MixedParameterSql mpSql = new MixedParameterSql(sql, batch.parameterList, batch.parameterMap);
      if (firstRowParameters == null) {
        executeSql = mpSql.getSqlToExecute();
        firstRowParameters = mpSql.getArgs();
      } else {
        if (!executeSql.equals(mpSql.getSqlToExecute())) {
          throw new DatabaseException("All rows in a batch must use parameters in the same way. \nSQL1: " + executeSql + "\nSQL2: " + mpSql.getSqlToExecute());
        }
      }
      parameters.add(mpSql.getArgs());
    }

    if (connection != null) {
      ps = connection.prepareStatement(executeSql);
      for (Object[] params : parameters) {
        adaptor.addParameters(ps, params);
        ps.addBatch();
      }
      metric.checkpoint("prep");
      int[] numAffectedRows = ps.executeBatch();
      metric.checkpoint("execBatch", parameters.size());
      isSuccess = true;
      return numAffectedRows;
    } else {
      // Mock path for tests: ask the mock per row, defaulting to 1 affected row.
      int[] result = new int[parameters.size()];
      for (int i = 0; i < parameters.size(); i++) {
        Object[] params = parameters.get(i);
        Integer numAffectedRows = mock.insert(executeSql, DebugSql.printDebugOnlySqlString(executeSql, params, options));
        if (numAffectedRows == null) {
          // No mock behavior provided, be nice and assume the expected value
          log.debug("Setting numAffectedRows to expected");
          numAffectedRows = 1;
        }
        result[i] = numAffectedRows;
      }
      metric.checkpoint("stubBatch", parameters.size());
      isSuccess = true;
      return result;
    }
  } catch (WrongNumberOfRowsException e) {
    throw e;
  } catch (Exception e) {
    errorCode = options.generateErrorCode();
    logEx = e;
    throw DatabaseException.wrap(DebugSql.exceptionMessage(executeSql, firstRowParameters, errorCode, options), e);
  } finally {
    adaptor.closeQuietly(ps, log);
    metric.done("close");
    if (isSuccess) {
      DebugSql.logSuccess("Insert", log, metric, executeSql, firstRowParameters, options);
    } else {
      DebugSql.logError("Insert", log, metric, errorCode, executeSql, firstRowParameters, options, logEx);
    }
  }
}

// Execute a single (non-batch) insert, optionally enforcing an expected row count
// (expectedNumAffectedRows > 0). Returns the actual affected-row count.
private int updateInternal(int expectedNumAffectedRows) {
  if (batched != null) {
    throw new DatabaseException("Call insertBatch() if you are using the batch() feature");
  }

  PreparedStatement ps = null;
  Metric metric = new Metric(log.isDebugEnabled());
  String executeSql = sql;
  Object[] parameters = null;
  boolean isSuccess = false;
  String errorCode = null;
  Exception logEx = null;
  try {
    MixedParameterSql mpSql = new MixedParameterSql(sql, parameterList, parameterMap);
    executeSql = mpSql.getSqlToExecute();
    parameters = mpSql.getArgs();

    if (connection != null) {
      ps = connection.prepareStatement(executeSql);
      adaptor.addParameters(ps, parameters);
      metric.checkpoint("prep");
      int numAffectedRows = ps.executeUpdate();
      metric.checkpoint("exec", numAffectedRows);
      if (expectedNumAffectedRows > 0 && numAffectedRows != expectedNumAffectedRows) {
        errorCode = options.generateErrorCode();
        throw new WrongNumberOfRowsException("The number of affected rows was " + numAffectedRows + ", but " + expectedNumAffectedRows + " were expected." + "\n" + DebugSql.exceptionMessage(executeSql, parameters, errorCode, options));
      }
      isSuccess = true;
      return numAffectedRows;
    } else {
      // Mock path: default to the expected count when the mock gives no answer.
      Integer numAffectedRows = mock.insert(executeSql, DebugSql.printDebugOnlySqlString(executeSql, parameters, options));
      if (numAffectedRows == null) {
        // No mock behavior provided, be nice and assume the expected value
        log.debug("Setting numAffectedRows to expected");
        numAffectedRows = expectedNumAffectedRows;
      }
      metric.checkpoint("stub", numAffectedRows);
      isSuccess = true;
      return numAffectedRows;
    }
  } catch (WrongNumberOfRowsException e) {
    throw e;
  } catch (Exception e) {
    errorCode = options.generateErrorCode();
    logEx = e;
    throw DatabaseException.wrap(DebugSql.exceptionMessage(executeSql, parameters, errorCode, options), e);
  } finally {
    adaptor.closeQuietly(ps, log);
    metric.done("close");
    if (isSuccess) {
      DebugSql.logSuccess("Insert", log, metric, executeSql, parameters, options);
    } else {
      DebugSql.logError("Insert", log, metric, errorCode, executeSql, parameters, options, logEx);
    }
  }
}

// Single insert that returns the database-generated primary key (pkToReturn column)
// via JDBC getGeneratedKeys(); may return null if no key row came back.
private Long updateInternal(int expectedNumAffectedRows, @Nonnull String pkToReturn) {
  if (batched != null) {
    throw new DatabaseException("Call insertBatch() if you are using the batch() feature");
  }

  PreparedStatement ps = null;
  ResultSet rs = null;
  Metric metric = new Metric(log.isDebugEnabled());
  String executeSql = sql;
  Object[] parameters = null;
  boolean isSuccess = false;
  String errorCode = null;
  Exception logEx = null;
  try {
    MixedParameterSql mpSql = new MixedParameterSql(sql, parameterList, parameterMap);
    executeSql = mpSql.getSqlToExecute();
    parameters = mpSql.getArgs();

    if (connection != null) {
      // Ask the driver to return the generated key column.
      ps = connection.prepareStatement(executeSql, new String[] { pkToReturn });
      adaptor.addParameters(ps, parameters);
      metric.checkpoint("prep");
      int numAffectedRows = ps.executeUpdate();
      metric.checkpoint("exec", numAffectedRows);
      if (expectedNumAffectedRows > 0 && numAffectedRows != expectedNumAffectedRows) {
        errorCode = options.generateErrorCode();
        throw new WrongNumberOfRowsException("The number of affected rows was " + numAffectedRows + ", but " + expectedNumAffectedRows + " were expected." + "\n" + DebugSql.exceptionMessage(executeSql, parameters, errorCode, options));
      }
      rs = ps.getGeneratedKeys();
      Long pk = null;
      if (rs != null && rs.next()) {
        pk = rs.getLong(1);
      }
      isSuccess = true;
      return pk;
    } else {
      String debugSql = DebugSql.printDebugOnlySqlString(executeSql, parameters, options);
      Long pk = mock.insertReturningPk(executeSql, debugSql);
      if (pk == null) {
        // No mock behavior provided, default to something that could conceivably work
        log.debug("Setting pk to hash of debugSql");
        pk = (long) debugSql.hashCode();
      }
      metric.checkpoint("stub");
      isSuccess = true;
      return pk;
    }
  } catch (WrongNumberOfRowsException e) {
    throw e;
  } catch (Exception e) {
    errorCode = options.generateErrorCode();
    logEx = e;
    throw DatabaseException.wrap(DebugSql.exceptionMessage(executeSql, parameters, errorCode, options), e);
  } finally {
    adaptor.closeQuietly(rs, log);
    adaptor.closeQuietly(ps, log);
    metric.done("close");
    if (isSuccess) {
      DebugSql.logSuccess("Insert", log, metric, executeSql, parameters, options);
    } else {
      DebugSql.logError("Insert", log, metric, errorCode, executeSql, parameters, options, logEx);
    }
  }
}

// Single insert returning the pk column plus otherCols through the caller's
// RowsHandler, using JDBC generated-keys support.
private <T> T updateInternal(int expectedNumAffectedRows, @Nonnull String pkToReturn, RowsHandler<T> handler, String... otherCols) {
  if (batched != null) {
    throw new DatabaseException("Call insertBatch() if you are using the batch() feature");
  }

  PreparedStatement ps = null;
  ResultSet rs = null;
  Metric metric = new Metric(log.isDebugEnabled());
  String executeSql = sql;
  Object[] parameters = null;
  boolean isSuccess = false;
  String errorCode = null;
  Exception logEx = null;
  try {
    MixedParameterSql mpSql = new MixedParameterSql(sql, parameterList, parameterMap);
    executeSql = mpSql.getSqlToExecute();
    parameters = mpSql.getArgs();

    // Generated-key columns: pk first, then any additional requested columns.
    String[] returnCols = new String[otherCols.length + 1];
    returnCols[0] = pkToReturn;
    System.arraycopy(otherCols, 0, returnCols, 1, otherCols.length);

    if (connection != null) {
      ps = connection.prepareStatement(executeSql, returnCols);
      adaptor.addParameters(ps, parameters);
      metric.checkpoint("prep");
      int numAffectedRows = ps.executeUpdate();
      metric.checkpoint("exec", numAffectedRows);
      if (expectedNumAffectedRows > 0 && numAffectedRows != expectedNumAffectedRows) {
        errorCode = options.generateErrorCode();
        throw new WrongNumberOfRowsException("The number of affected rows was " + numAffectedRows + ", but " + expectedNumAffectedRows + " were expected." + "\n" + DebugSql.exceptionMessage(executeSql, parameters, errorCode, options));
      }
      rs = ps.getGeneratedKeys();
      final ResultSet finalRs = rs;
      T result = handler.process(new RowsAdaptor(finalRs, options));
      metric.checkpoint("read");
      isSuccess = true;
      return result;
    } else {
      RowStub stub = mock.insertReturning(executeSql, DebugSql.printDebugOnlySqlString(executeSql, parameters, options));
      if (stub == null) {
        stub = new RowStub();
      }
      metric.checkpoint("stub");
      T result = handler.process(stub.toRows());
      metric.checkpoint("read");
      isSuccess = true;
      return result;
    }
  } catch (WrongNumberOfRowsException e) {
    throw e;
  } catch (Exception e) {
    errorCode = options.generateErrorCode();
    logEx = e;
    throw DatabaseException.wrap(DebugSql.exceptionMessage(executeSql, parameters, errorCode, options), e);
  } finally {
    adaptor.closeQuietly(rs, log);
    adaptor.closeQuietly(ps, log);
    metric.done("close");
    if (isSuccess) {
      DebugSql.logSuccess("Insert", log, metric, executeSql, parameters, options);
    } else {
      DebugSql.logError("Insert", log, metric, errorCode, executeSql, parameters, options, logEx);
    }
  }
}

// Append a traditional '?' positional argument; the list is created lazily.
private SqlInsert positionalArg(Object arg) {
  if (parameterList == null) {
    parameterList = new ArrayList<>();
  }
  parameterList.add(arg);
  return this;
}

// Register a named (:abc) argument; a leading ':' on the name is tolerated and stripped.
private SqlInsert namedArg(String argName, Object arg) {
  if (parameterMap == null) {
    parameterMap = new HashMap<>();
  }
  if (argName.startsWith(":")) {
    argName = argName.substring(1);
  }
  parameterMap.put(argName, arg);
  return this;
}

// Y/N encoding for Boolean columns; presumably used by argBoolean*() methods
// outside this chunk — not referenced within the visible span.
private String booleanToString(Boolean b) {
  return b == null ? null : b ? "Y" : "N";
}

// One snapshot of accumulated parameters, representing a single batch row.
private class Batch {
  private List<Object> parameterList; // !null ==> traditional ? args
  private Map<String, Object> parameterMap; // !null ==> named :abc args

  public Batch(List<Object> parameterList, Map<String, Object> parameterMap) {
    this.parameterList = parameterList;
    this.parameterMap = parameterMap;
  }
}
}
//
//  ========================================================================
//  Copyright (c) 1995-2014 Mort Bay Consulting Pty. Ltd.
//  ------------------------------------------------------------------------
//  All rights reserved. This program and the accompanying materials
//  are made available under the terms of the Eclipse Public License v1.0
//  and Apache License v2.0 which accompanies this distribution.
//
//      The Eclipse Public License is available at
//      http://www.eclipse.org/legal/epl-v10.html
//
//      The Apache License v2.0 is available at
//      http://www.opensource.org/licenses/apache2.0.php
//
//  You may elect to redistribute this code under either of these licenses.
//  ========================================================================
//

package org.eclipse.jetty.servlet;

import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;

import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.net.HttpURLConnection;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;

import javax.servlet.AsyncContext;
import javax.servlet.AsyncEvent;
import javax.servlet.AsyncListener;
import javax.servlet.Servlet;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.eclipse.jetty.server.Connector;
import org.eclipse.jetty.server.Handler;
import org.eclipse.jetty.server.Request;
import org.eclipse.jetty.server.RequestLog;
import org.eclipse.jetty.server.Response;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.server.handler.ContextHandlerCollection;
import org.eclipse.jetty.server.handler.DefaultHandler;
import org.eclipse.jetty.server.handler.ErrorHandler;
import org.eclipse.jetty.server.handler.HandlerCollection;
import org.eclipse.jetty.server.handler.RequestLogHandler;
import org.eclipse.jetty.toolchain.test.IO;
import org.eclipse.jetty.util.component.AbstractLifeCycle;
import org.eclipse.jetty.util.log.Log;
import org.eclipse.jetty.util.log.Logger;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;

/**
 * Servlet equivalent of the jetty-server's RequestLogHandlerTest, but with more ErrorHandler twists.
 * <p>
 * Each parameterized case deploys one test servlet, issues a single GET, and asserts that the
 * request log captured exactly one entry with the expected method/URI/protocol/status.
 */
@RunWith(Parameterized.class)
@Ignore
public class ServletRequestLogTest
{
    private static final Logger LOG = Log.getLogger(ServletRequestLogTest.class);

    /**
     * RequestLog implementation that captures formatted entries in memory so tests can assert on them.
     */
    public static class CaptureLog extends AbstractLifeCycle implements RequestLog
    {
        public List<String> captured = new ArrayList<>();

        @Override
        public void log(Request request, Response response)
        {
            captured.add(String.format("%s %s %s %03d",request.getMethod(),request.getUri().toString(),request.getProtocol(),response.getStatus()));
        }
    }

    /**
     * Base for test servlets; toString() is the simple class name so the
     * Parameterized test names ({0}) read nicely.
     */
    @SuppressWarnings("serial")
    private static abstract class AbstractTestServlet extends HttpServlet
    {
        @Override
        public String toString()
        {
            return this.getClass().getSimpleName();
        }
    }

    /** Plain 200 response. */
    @SuppressWarnings("serial")
    private static class HelloServlet extends AbstractTestServlet
    {
        @Override
        protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
        {
            response.setContentType("text/plain");
            response.getWriter().print("Hello World");
        }
    }

    /** Explicit sendError(500). */
    @SuppressWarnings("serial")
    private static class ResponseSendErrorServlet extends AbstractTestServlet
    {
        @Override
        protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
        {
            response.sendError(500, "Whoops");
        }
    }

    /** Uncaught ServletException -> container error handling. */
    @SuppressWarnings("serial")
    private static class ServletExceptionServlet extends AbstractTestServlet
    {
        @Override
        protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
        {
            throw new ServletException("Whoops");
        }
    }

    /** Uncaught IOException -> container error handling. */
    @SuppressWarnings("serial")
    private static class IOExceptionServlet extends AbstractTestServlet
    {
        @Override
        protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
        {
            throw new IOException("Whoops");
        }
    }

    /** Uncaught RuntimeException -> container error handling. */
    @SuppressWarnings("serial")
    private static class RuntimeExceptionServlet extends AbstractTestServlet
    {
        @Override
        protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
        {
            throw new RuntimeException("Whoops");
        }
    }

    /** Starts async, lets it time out, and completes from the timeout callback. */
    @SuppressWarnings("serial")
    private static class AsyncOnTimeoutCompleteServlet extends AbstractTestServlet implements AsyncListener
    {
        @Override
        protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
        {
            AsyncContext ac = request.startAsync();
            ac.setTimeout(1000);
            ac.addListener(this);
        }

        @Override
        public void onTimeout(AsyncEvent event) throws IOException
        {
            event.getAsyncContext().complete();
        }

        @Override
        public void onStartAsync(AsyncEvent event) throws IOException
        {
        }

        @Override
        public void onError(AsyncEvent event) throws IOException
        {
        }

        @Override
        public void onComplete(AsyncEvent event) throws IOException
        {
        }
    }

    /**
     * Starts async, lets it time out, and re-dispatches; the "deep" attribute
     * prevents starting async a second time on the re-dispatched request.
     */
    @SuppressWarnings("serial")
    private static class AsyncOnTimeoutDispatchServlet extends AbstractTestServlet implements AsyncListener
    {
        @Override
        protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
        {
            if (request.getAttribute("deep") == null)
            {
                AsyncContext ac = request.startAsync();
                ac.setTimeout(1000);
                ac.addListener(this);
                request.setAttribute("deep",true);
            }
        }

        @Override
        public void onTimeout(AsyncEvent event) throws IOException
        {
            event.getAsyncContext().dispatch();
        }

        @Override
        public void onStartAsync(AsyncEvent event) throws IOException
        {
        }

        @Override
        public void onError(AsyncEvent event) throws IOException
        {
        }

        @Override
        public void onComplete(AsyncEvent event) throws IOException
        {
        }
    }

    /** Completes and then throws from onStartAsync, expecting a 500 to be logged. */
    @SuppressWarnings("serial")
    private static class AsyncOnStartIOExceptionServlet extends AbstractTestServlet implements AsyncListener
    {
        @Override
        protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
        {
            AsyncContext ac = request.startAsync();
            ac.setTimeout(1000);
            ac.addListener(this);
        }

        @Override
        public void onTimeout(AsyncEvent event) throws IOException
        {
        }

        @Override
        public void onStartAsync(AsyncEvent event) throws IOException
        {
            event.getAsyncContext().complete();
            throw new IOException("Whoops");
        }

        @Override
        public void onError(AsyncEvent event) throws IOException
        {
            LOG.warn("onError() -> {}",event);
        }

        @Override
        public void onComplete(AsyncEvent event) throws IOException
        {
        }
    }

    /**
     * Custom error page servlet that deliberately overwrites the error status with 200,
     * to verify what status the request log actually records.
     */
    @SuppressWarnings("serial")
    public static class CustomErrorServlet extends HttpServlet
    {
        @Override
        protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
        {
            // collect error details
            String reason = (response instanceof Response)?((Response)response).getReason():null;
            int status = response.getStatus();

            // intentionally set response status to OK (this is a test to see what is actually logged)
            response.setStatus(200);
            response.setContentType("text/plain");
            PrintWriter out = response.getWriter();
            out.printf("Error %d: %s%n",status,reason);
        }
    }

    @Parameters(name="{0}")
    public static List<Object[]> data()
    {
        List<Object[]> data = new ArrayList<>();

        data.add(new Object[] { new HelloServlet(), "/test/", "GET /test/ HTTP/1.1 200" });
        data.add(new Object[] { new AsyncOnTimeoutCompleteServlet(), "/test/", "GET /test/ HTTP/1.1 200" });
        data.add(new Object[] { new AsyncOnTimeoutDispatchServlet(), "/test/", "GET /test/ HTTP/1.1 200" });
        data.add(new Object[] { new AsyncOnStartIOExceptionServlet(), "/test/", "GET /test/ HTTP/1.1 500" });
        data.add(new Object[] { new ResponseSendErrorServlet(), "/test/", "GET /test/ HTTP/1.1 500" });
        data.add(new Object[] { new ServletExceptionServlet(), "/test/", "GET /test/ HTTP/1.1 500" });
        data.add(new Object[] { new IOExceptionServlet(), "/test/", "GET /test/ HTTP/1.1 500" });
        data.add(new Object[] { new RuntimeExceptionServlet(), "/test/", "GET /test/ HTTP/1.1 500" });

        return data;
    }

    @Parameter(0)
    public Servlet testServlet;

    @Parameter(1)
    public String requestPath;

    @Parameter(2)
    public String expectedLogEntry;

    /**
     * Test a RequestLogHandler at the end of a HandlerCollection.
     * This handler chain is setup to look like Jetty versions up to 9.2.
     * Default configuration.
     */
    @Test(timeout=4000)
    public void testLogHandlerCollection() throws Exception
    {
        Server server = new Server();
        ServerConnector connector = new ServerConnector(server);
        connector.setPort(0);
        server.setConnectors(new Connector[] { connector });

        // First the behavior as defined in etc/jetty.xml
        // id="Handlers"
        HandlerCollection handlers = new HandlerCollection();
        // id="Contexts"
        ContextHandlerCollection contexts = new ContextHandlerCollection();
        // id="DefaultHandler"
        DefaultHandler defaultHandler = new DefaultHandler();
        handlers.setHandlers(new Handler[] { contexts, defaultHandler });
        server.setHandler(handlers);

        // Next the behavior as defined by etc/jetty-requestlog.xml
        // the id="RequestLog"
        RequestLogHandler requestLog = new RequestLogHandler();
        CaptureLog captureLog = new CaptureLog();
        requestLog.setRequestLog(captureLog);
        handlers.addHandler(requestLog);

        // Lastly, the behavior as defined by deployment of a webapp
        // Add the Servlet Context
        ServletContextHandler app = new ServletContextHandler(ServletContextHandler.SESSIONS);
        app.setContextPath("/");
        contexts.addHandler(app);

        // Add the test servlet
        ServletHolder testHolder = new ServletHolder(testServlet);
        app.addServlet(testHolder,"/test");

        performRequestAndAssertLog(server, connector, requestPath, captureLog);
    }

    /**
     * Test a RequestLogHandler at the end of a HandlerCollection.
     * and also with the default ErrorHandler as server bean in place.
     */
    @Test(timeout=4000)
    public void testLogHandlerCollection_ErrorHandler_ServerBean() throws Exception
    {
        Server server = new Server();
        ServerConnector connector = new ServerConnector(server);
        connector.setPort(0);
        server.setConnectors(new Connector[] { connector });

        // The server-wide default ErrorHandler under test
        ErrorHandler errorHandler = new ErrorHandler();
        server.addBean(errorHandler);

        // First the behavior as defined in etc/jetty.xml
        // id="Handlers"
        HandlerCollection handlers = new HandlerCollection();
        // id="Contexts"
        ContextHandlerCollection contexts = new ContextHandlerCollection();
        // id="DefaultHandler"
        DefaultHandler defaultHandler = new DefaultHandler();
        handlers.setHandlers(new Handler[] { contexts, defaultHandler });
        server.setHandler(handlers);

        // Next the behavior as defined by etc/jetty-requestlog.xml
        // the id="RequestLog"
        RequestLogHandler requestLog = new RequestLogHandler();
        CaptureLog captureLog = new CaptureLog();
        requestLog.setRequestLog(captureLog);
        handlers.addHandler(requestLog);

        // Lastly, the behavior as defined by deployment of a webapp
        // Add the Servlet Context
        ServletContextHandler app = new ServletContextHandler(ServletContextHandler.SESSIONS);
        app.setContextPath("/");
        contexts.addHandler(app);

        // Add the test servlet
        ServletHolder testHolder = new ServletHolder(testServlet);
        app.addServlet(testHolder,"/test");

        performRequestAndAssertLog(server, connector, requestPath, captureLog);
    }

    /**
     * Test a RequestLogHandler at the end of a HandlerCollection
     * using servlet specific error page mapping.
     */
    @Test(timeout=4000)
    public void testLogHandlerCollection_SimpleErrorPageMapping() throws Exception
    {
        Server server = new Server();
        ServerConnector connector = new ServerConnector(server);
        connector.setPort(0);
        server.setConnectors(new Connector[] { connector });

        // First the behavior as defined in etc/jetty.xml
        // id="Handlers"
        HandlerCollection handlers = new HandlerCollection();
        // id="Contexts"
        ContextHandlerCollection contexts = new ContextHandlerCollection();
        // id="DefaultHandler"
        DefaultHandler defaultHandler = new DefaultHandler();
        handlers.setHandlers(new Handler[] { contexts, defaultHandler });
        server.setHandler(handlers);

        // Next the behavior as defined by etc/jetty-requestlog.xml
        // the id="RequestLog"
        RequestLogHandler requestLog = new RequestLogHandler();
        CaptureLog captureLog = new CaptureLog();
        requestLog.setRequestLog(captureLog);
        handlers.addHandler(requestLog);

        // Lastly, the behavior as defined by deployment of a webapp
        // Add the Servlet Context
        ServletContextHandler app = new ServletContextHandler(ServletContextHandler.SESSIONS);
        app.setContextPath("/");
        contexts.addHandler(app);

        // Add the test servlet
        ServletHolder testHolder = new ServletHolder(testServlet);
        app.addServlet(testHolder,"/test");
        app.addServlet(CustomErrorServlet.class,"/errorpage");

        // Add error page mapping
        ErrorPageErrorHandler errorMapper = new ErrorPageErrorHandler();
        errorMapper.addErrorPage(500,"/errorpage");
        app.setErrorHandler(errorMapper);

        performRequestAndAssertLog(server, connector, requestPath, captureLog);
    }

    /**
     * Test an alternate (proposed) setup for using RequestLogHandler in a wrapped style
     */
    @Test(timeout=4000)
    public void testLogHandlerWrapped() throws Exception
    {
        Server server = new Server();
        ServerConnector connector = new ServerConnector(server);
        connector.setPort(0);
        server.setConnectors(new Connector[] { connector });

        // First the behavior as defined in etc/jetty.xml (as is)
        // id="Handlers"
        HandlerCollection handlers = new HandlerCollection();
        // id="Contexts"
        ContextHandlerCollection contexts = new ContextHandlerCollection();
        // id="DefaultHandler"
        DefaultHandler defaultHandler = new DefaultHandler();
        handlers.setHandlers(new Handler[] { contexts, defaultHandler });
        server.setHandler(handlers);

        // Next the proposed behavioral change to etc/jetty-requestlog.xml
        // the id="RequestLog" wraps the existing server handler chain
        RequestLogHandler requestLog = new RequestLogHandler();
        CaptureLog captureLog = new CaptureLog();
        requestLog.setRequestLog(captureLog);
        Handler origServerHandler = server.getHandler();
        requestLog.setHandler(origServerHandler);
        server.setHandler(requestLog);

        // Lastly, the behavior as defined by deployment of a webapp
        // Add the Servlet Context
        ServletContextHandler app = new ServletContextHandler(ServletContextHandler.SESSIONS);
        app.setContextPath("/");
        contexts.addHandler(app);

        // Add the test servlet
        ServletHolder testHolder = new ServletHolder(testServlet);
        app.addServlet(testHolder,"/test");
        app.addServlet(CustomErrorServlet.class,"/errorpage");

        // Add error page mapping
        ErrorPageErrorHandler errorMapper = new ErrorPageErrorHandler();
        errorMapper.addErrorPage(500,"/errorpage");
        app.setErrorHandler(errorMapper);

        // NOTE: this test always hits "/test" regardless of the parameterized requestPath
        performRequestAndAssertLog(server, connector, "/test", captureLog);
    }

    /**
     * Shared request/assert boilerplate for all test variants: start the server, issue a
     * single GET to the given path, drain the response body on 200, then assert the
     * captured request log, stopping the server in all cases.
     *
     * @param server the fully configured (but not yet started) server
     * @param connector the connector to read host/port from after start
     * @param path the request path to GET
     * @param captureLog the log to assert against expectedLogEntry
     */
    private void performRequestAndAssertLog(Server server, ServerConnector connector, String path, CaptureLog captureLog) throws Exception
    {
        try
        {
            server.start();

            String host = connector.getHost();
            if (host == null)
            {
                host = "localhost";
            }
            int port = connector.getLocalPort();
            URI serverUri = new URI("http",null,host,port,path,null,null);

            // Make call to test handler
            HttpURLConnection connection = (HttpURLConnection)serverUri.toURL().openConnection();
            try
            {
                connection.setAllowUserInteraction(false);

                // log response status code
                int statusCode = connection.getResponseCode();
                LOG.debug("Response Status Code: {}",statusCode);

                if (statusCode == 200)
                {
                    // collect response message and log it
                    String content = getResponseContent(connection);
                    LOG.debug("Response Content: {}",content);
                }
            }
            finally
            {
                connection.disconnect();
            }

            assertRequestLog(captureLog);
        }
        finally
        {
            server.stop();
        }
    }

    /**
     * Assert that exactly one entry was captured and that it matches expectedLogEntry,
     * dumping any extra entries at WARN to ease diagnosis.
     */
    private void assertRequestLog(CaptureLog captureLog)
    {
        int captureCount = captureLog.captured.size();

        if (captureCount != 1)
        {
            LOG.warn("Capture Log size is {}, expected to be 1",captureCount);
            if (captureCount > 1)
            {
                for (int i = 0; i < captureCount; i++)
                {
                    LOG.warn("[{}] {}",i,captureLog.captured.get(i));
                }
            }
            assertThat("Capture Log Entry Count",captureLog.captured.size(),is(1));
        }

        String actual = captureLog.captured.get(0);
        assertThat("Capture Log",actual,is(expectedLogEntry));
    }

    /**
     * Read the full response body as a String using the platform charset
     * (matches the original behavior of this test).
     */
    private String getResponseContent(HttpURLConnection connection) throws IOException
    {
        try (InputStream in = connection.getInputStream(); InputStreamReader reader = new InputStreamReader(in))
        {
            StringWriter writer = new StringWriter();
            IO.copy(reader,writer);
            return writer.toString();
        }
    }
}
package com.davidtpate.github.explore;

import com.davidtpate.github.explore.exception.HaltProcessingException;
import com.davidtpate.github.explore.model.Field;
import com.davidtpate.github.explore.model.GithubExploreMessage;
import com.davidtpate.github.explore.model.Repository;
import com.davidtpate.github.explore.util.Strings;
import com.davidtpate.github.explore.util.Util;

import java.io.*;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Locale;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Parses a raw GitHub "Explore" digest email into a {@link GithubExploreMessage}.
 * Only the plain-text MIME part is processed; the HTML part halts processing.
 * The parser walks the message line by line, switching state (header / plain-text
 * body / HTML body) as MIME part boundaries are encountered, and extracts the
 * "people you follow", "staff picks" and "trending" repository lists.
 */
public class GithubExploreMessageParser extends MessageParser<GithubExploreMessage> {
    /**
     * We want to restrict the messages we look at to only those that are claimed to be from Github.
     */
    private static final String GITHUB_FROM = "GitHub <noreply@github.com>";
    /**
     * We want to restrict processing to emails that have close enough subject, otherwise the processing is pointless.
     */
    private static final String GITHUB_SUBJECT = "GitHub explore";
    /**
     * Beginning part of the Plaintext section for "People you follow"
     */
    private static final String PEOPLE_YOU_FOLLOW_PREFIX = "Stars from people you follow";
    /**
     * Beginning part of the Plaintext section for "Trending"
     */
    private static final String POPULAR_PREFIX = "Trending Repositories";
    /**
     * Beginning part of the Plaintext section for "Stars from GitHub Staff"
     */
    private static final String GITHUB_STAFF_PREFIX = "Stars from GitHub Staff";

    GithubExploreMessage.Builder mMessageBuilder = new GithubExploreMessage.Builder();
    // Re-created for every repository in parseRepository(...): a single shared builder
    // leaked optional state (e.g. the language/type of the previous repository) into
    // repositories that did not set that field themselves.
    Repository.Builder mRepositoryBuilder = new Repository.Builder();

    /**
     * Pattern for matching the Plaintext repository list items. Matches strings like "1." and "12.", the period is required.
     */
    private Pattern mPlainTextListItemPattern = Pattern.compile("^[\\d]+\\..*");
    /**
     * Pattern for extracting the details of the first line of the repository. Matches strings like "1. https://github.com/person/example " and "2. https://github.com/person/example2 Java".
     * It separates the string into 3 pieces. The url, the repository name, and the repository type (if available).
     * For example, given the following: "2. https://github.com/person/example2 Java" it would resolve the parts as follows:
     * url: https://github.com/person/example2
     * name: person/example2
     * type: Java
     */
    private Pattern mPlainTextRepositoryItemPattern = Pattern.compile("^[\\d]+\\. (https://github.com/(.*/.*)) (.*)?$");

    /**
     * Parses the email file at the given path.
     *
     * @param path filesystem path to the raw email; must be non-blank and exist
     * @return the parsed message (possibly partially populated if processing halted)
     * @throws FileNotFoundException    if no file exists at {@code path}
     * @throws IllegalArgumentException if {@code path} is null or blank
     */
    public GithubExploreMessage parse(String path) throws FileNotFoundException, IllegalArgumentException {
        // If we don't have a path to anything, no point in continuing.
        if (Strings.isEmpty(path)) {
            throw new IllegalArgumentException("Path is Null or Blank");
        }
        File file = new File(path);
        // new File(...) never returns null, so only the existence check is meaningful.
        if (!file.exists()) {
            throw new FileNotFoundException("File Null or Not Found");
        }
        return parseMessage(new FileReader(path));
    }

    /**
     * Drives the line-by-line state machine over the message. State lives in the
     * inherited {@code readerLocation} field (set by {@link #handleMimePart}).
     */
    private GithubExploreMessage parseMessage(FileReader fileReader) {
        // This can only be called internally so fileReader should never be null, but just in case.
        if (fileReader == null) {
            return null;
        }
        BufferedReader reader = new BufferedReader(fileReader);
        String line;
        boolean haltProcessing = false;
        try {
            while ((line = reader.readLine()) != null && !haltProcessing) {
                switch (readerLocation) {
                    case HEADER:
                        // If we are leaving the header and we encounter a new mime part figure out what it is.
                        if (line.startsWith(MIME_PREFIX)) {
                            handleMimePart(reader);
                        } else {
                            try {
                                handleHeaderField(Field.parseField(line));
                            } catch (HaltProcessingException e) {
                                // Message is not a GitHub Explore email; stop parsing entirely.
                                haltProcessing = true;
                            } catch (ParseException e) {
                                // Best-effort: a malformed Date header should not abort parsing.
                                e.printStackTrace();
                            }
                        }
                        break;
                    case PLAINTEXT_BODY:
                        if (line.startsWith(MIME_PREFIX)) {
                            handleMimePart(reader);
                        } else {
                            // Find the repositories in the list and add them by section type.
                            if (line.startsWith(PEOPLE_YOU_FOLLOW_PREFIX)) {
                                parseRepositoryList(reader, RepositoryListType.SOCIAL);
                            } else if (line.startsWith(GITHUB_STAFF_PREFIX)) {
                                parseRepositoryList(reader, RepositoryListType.STAFF);
                            } else if (line.startsWith(POPULAR_PREFIX)) {
                                parseRepositoryList(reader, RepositoryListType.POPULAR);
                            }
                        }
                        break;
                    case HTML_BODY:
                        if (line.startsWith(MIME_PREFIX)) {
                            handleMimePart(reader);
                        } else {
                            // For this example we're only processing PlainText, so stop here.
                            haltProcessing = true;
                        }
                        break;
                    default:
                        haltProcessing = true;
                        break;
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            Util.closeQuietly(reader);
        }
        return mMessageBuilder.build();
    }

    /**
     * Consumes one numbered repository list, adding each repository to the message
     * builder under the given section type. Stops at the next MIME part or when a
     * different section header is encountered (sections may appear out of order).
     */
    private void parseRepositoryList(BufferedReader reader, RepositoryListType repositoryListType) throws IOException {
        String line;
        boolean stopProcessing = false;
        // Bubble up the exceptions, no need to handle them down here.
        while ((line = reader.readLine()) != null && !stopProcessing) {
            if (line.startsWith(MIME_PREFIX)) {
                // If we hit the next MIME part, stop processing.
                handleMimePart(reader);
                stopProcessing = true;
            } else if (line.startsWith(PEOPLE_YOU_FOLLOW_PREFIX)) {
                parseRepositoryList(reader, RepositoryListType.SOCIAL);
                stopProcessing = true;
            } else if (line.startsWith(GITHUB_STAFF_PREFIX)) {
                parseRepositoryList(reader, RepositoryListType.STAFF);
                stopProcessing = true;
            } else if (line.startsWith(POPULAR_PREFIX)) {
                parseRepositoryList(reader, RepositoryListType.POPULAR);
            }
            // If the line begins with a number followed directly by a period assume it is a repository.
            else if (mPlainTextListItemPattern.matcher(line).matches()) {
                switch (repositoryListType) {
                    case SOCIAL:
                        mMessageBuilder.socialRepository(parseRepository(line, reader));
                        break;
                    case STAFF:
                        mMessageBuilder.staffRepository(parseRepository(line, reader));
                        break;
                    case POPULAR:
                        mMessageBuilder.popularRepository(parseRepository(line, reader));
                        break;
                    default:
                        break;
                }
            }
        }
    }

    /**
     * Parses a single repository entry: the numbered URL line plus the description
     * on the following line.
     *
     * @param lastLine the already-read list-item line ("N. https://github.com/...")
     * @param reader   positioned just after {@code lastLine}; one more line is consumed
     */
    private Repository parseRepository(String lastLine, BufferedReader reader) throws IOException {
        // Start from a clean builder so fields from the previous repository
        // (notably the optional type) cannot leak into this one.
        mRepositoryBuilder = new Repository.Builder();
        String line = lastLine;
        Matcher matcher = mPlainTextRepositoryItemPattern.matcher(line);
        if (matcher.find()) {
            mRepositoryBuilder.url(matcher.group(1));
            mRepositoryBuilder.name(matcher.group(2));
            String type = matcher.group(3);
            if (Strings.notEmpty(type)) {
                mRepositoryBuilder.type(type);
            }
        }
        // Grab the description which is always on the next line. Also, bubble up the exceptions.
        line = reader.readLine();
        mRepositoryBuilder.description(line);
        return mRepositoryBuilder.build();
    }

    /**
     * Reads the headers of a newly encountered MIME part and, based on its
     * Content-Type, switches {@code readerLocation} to the matching body state.
     * Stops at the first blank line (end of headers) or unrecognized header.
     */
    private void handleMimePart(BufferedReader reader) throws IOException {
        String headerLine;
        boolean stopParsingHeaders = false;
        while ((headerLine = reader.readLine()) != null && !stopParsingHeaders) {
            // If we've reached whitespace, then we are done with the headers.
            if (Strings.isEmpty(headerLine)) {
                // If we have reached a new MIME part but have no headers then we are likely at the end of the file.
                stopParsingHeaders = true;
                break;
            }
            Field field = Field.parseField(headerLine);
            if (field == null) {
                break;
            }
            Header header = Header.findHeader(field.getName());
            if (header == null) {
                break;
            }
            // If we have the Content Type header, use it to figure out what the MIME area contains.
            if (header == Header.CONTENT_TYPE) {
                if (field.getValue().equals(CONTENT_TYPE_PLAIN)) {
                    readerLocation = ReaderLocation.PLAINTEXT_BODY;
                } else if (field.getValue().equals(CONTENT_TYPE_HTML)) {
                    readerLocation = ReaderLocation.HTML_BODY;
                }
            }
        }
    }

    /**
     * Records one top-level message header into the builder.
     *
     * @throws HaltProcessingException if the From/Subject prove this is not a GitHub Explore email
     * @throws ParseException          if the Date header cannot be parsed
     */
    private void handleHeaderField(Field field) throws HaltProcessingException, ParseException {
        // If we don't have a field, no point continuing.
        if (field == null) {
            return;
        }
        Header header = Header.findHeader(field.getName());
        // If we didn't resolve the header, then we don't need to pay attention to it.
        if (header == null) {
            return;
        }
        String value = field.getValue();
        switch (header) {
            case TO:
                mMessageBuilder.to(value);
                break;
            case FROM:
                if (!GITHUB_FROM.equalsIgnoreCase(value)) {
                    throw new HaltProcessingException();
                }
                mMessageBuilder.from(value);
                break;
            case SUBJECT:
                if (!value.startsWith(GITHUB_SUBJECT)) {
                    throw new HaltProcessingException();
                }
                mMessageBuilder.subject(value);
                break;
            case DATE:
                // Locale.US pins the English month/day names of RFC-822 dates;
                // the default-locale formatter failed on non-English JVMs.
                SimpleDateFormat simpleDateFormat = new SimpleDateFormat("EEE, d MMM yyyy HH:mm:ss Z", Locale.US);
                mMessageBuilder.date(simpleDateFormat.parse(value));
                break;
            default:
                break;
        }
    }

    /** The three repository sections of the Explore digest. */
    public enum RepositoryListType {
        SOCIAL, POPULAR, STAFF;
    }

    /** The message headers this parser cares about, keyed by their raw header names. */
    public enum Header {
        TO("Delivered-To"),
        FROM("From"),
        SUBJECT("Subject"),
        DATE("Date"),
        CONTENT_TYPE("Content-Type");

        protected String mHeader;

        Header(String mHeader) {
            this.mHeader = mHeader;
        }

        /** Case-insensitive lookup by raw header name; returns null when unknown. */
        public static Header findHeader(String value) {
            // The list is short for now, so don't need anything fancy at this point.
            for (Header header : Header.values()) {
                if (header.getHeader().equalsIgnoreCase(value)) {
                    return header;
                }
            }
            return null;
        }

        public String getHeader() {
            return mHeader;
        }
    }
}
/*******************************************************************************
 * Copyright (c) 2015-2018 Skymind, Inc.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Apache License, Version 2.0 which is available at
 * https://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 ******************************************************************************/

package org.deeplearning4j.nn.conf.layers.samediff;

import lombok.Data;
import org.deeplearning4j.nn.api.MaskState;
import org.deeplearning4j.nn.api.TrainingConfig;
import org.deeplearning4j.nn.conf.GradientNormalization;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.graph.GraphVertex;
import org.deeplearning4j.nn.conf.inputs.InputType;
import org.deeplearning4j.nn.conf.inputs.InvalidInputTypeException;
import org.deeplearning4j.nn.conf.memory.MemoryReport;
import org.deeplearning4j.nn.graph.ComputationGraph;
import org.deeplearning4j.nn.layers.samediff.SameDiffGraphVertex;
import org.nd4j.autodiff.samediff.SDVariable;
import org.nd4j.autodiff.samediff.SameDiff;
import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.learning.config.IUpdater;
import org.nd4j.linalg.learning.regularization.Regularization;
import org.nd4j.linalg.primitives.Pair;
import org.nd4j.linalg.util.ArrayUtil;

import java.util.List;
import java.util.Map;

/**
 * A SameDiff-based GraphVertex. May have multiple inputs, but only one output. Supports trainable parameters.<br>
 * To implement a SameDiff vertex, implement the following methods:<br>
 * - defineVertex: used to specify the vertex forward pass<br>
 * - defineParametersAndInputs: used to specify the parameters and the number of inputs to the vertex<br>
 * - initializeParameters: used to initialize (assign initial values to) the parameters
 *
 * @author Alex Black
 * @see SameDiffLayer
 * @see SameDiffOutputLayer
 */
@Data
public abstract class SameDiffVertex extends GraphVertex implements TrainingConfig {

    private SDVertexParams vertexParams;
    private String name;

    protected List<Regularization> regularization;
    protected List<Regularization> regularizationBias;
    protected IUpdater updater;
    protected IUpdater biasUpdater;
    protected GradientNormalization gradientNormalization;
    // NaN means "unset" - see applyGlobalConfig, which fills it from the global config.
    protected double gradientNormalizationThreshold = Double.NaN;
    protected DataType dataType;

    /**
     * Define the vertex
     * @param sameDiff   SameDiff instance
     * @param layerInput Input to the layer - keys as defined by {@link #defineParametersAndInputs(SDVertexParams)}
     * @param paramTable Parameter table - keys as defined by {@link #defineParametersAndInputs(SDVertexParams)}
     * @param maskVars   Masks of input, if available - keys as defined by {@link #defineParametersAndInputs(SDVertexParams)}
     * @return The final layer variable corresponding to the activations/output from the forward pass
     */
    public abstract SDVariable defineVertex(SameDiff sameDiff, Map<String, SDVariable> layerInput,
                                            Map<String, SDVariable> paramTable, Map<String, SDVariable> maskVars);

    /**
     * Define the parameters - and inputs - for the network.
     * Use {@link SDVertexParams#addWeightParam(String, long...)} and
     * {@link SDVertexParams#addBiasParam(String, long...)}.
     * Note also you must define (and optionally name) the inputs to the vertex. This is required so that
     * DL4J knows how many inputs exists for the vertex.
     *
     * @param params Object used to set parameters for this layer
     */
    public abstract void defineParametersAndInputs(SDVertexParams params);

    /**
     * Set the initial parameter values for this layer, if required
     * @param params Parameter arrays that may be initialized
     */
    public abstract void initializeParameters(Map<String, INDArray> params);

    /**
     * Lazily creates and populates the vertex parameter definition via
     * {@link #defineParametersAndInputs(SDVertexParams)} on first access.
     */
    public SDVertexParams getVertexParams() {
        if (vertexParams == null) {
            vertexParams = new SDVertexParams();
            defineParametersAndInputs(vertexParams);
        }
        return vertexParams;
    }

    @Override
    public GraphVertex clone() {
        throw new UnsupportedOperationException("Not yet implemented");
    }

    /**
     * Total number of trainable parameters, computed as the sum of the products
     * of all declared parameter shapes.
     */
    @Override
    public long numParams(boolean backprop) {
        SDLayerParams params = getVertexParams();
        long count = 0;
        for (long[] l : params.getParamShapes().values()) {
            count += ArrayUtil.prodLong(l);
        }
        // Return the long directly: the previous "(int) count" cast silently
        // truncated parameter counts above Integer.MAX_VALUE.
        return count;
    }

    @Override
    public int minVertexInputs() {
        return 1;
    }

    /** -1 indicates no upper bound on the number of inputs. */
    @Override
    public int maxVertexInputs() {
        return -1;
    }

    @Override
    public org.deeplearning4j.nn.graph.vertex.GraphVertex instantiate(ComputationGraph graph, String name, int idx,
                                                                     INDArray paramsView, boolean initializeParams,
                                                                     DataType networkDatatype) {
        this.name = name;
        return new SameDiffGraphVertex(this, graph, name, idx, paramsView, initializeParams, networkDatatype);
    }

    @Override
    public InputType getOutputType(int layerIndex, InputType... vertexInputs) throws InvalidInputTypeException {
        throw new UnsupportedOperationException("Not yet implemented");
    }

    public Pair<INDArray, MaskState> feedForwardMaskArrays(INDArray[] maskArrays, MaskState currentMaskState,
                                                           int minibatchSize) {
        throw new UnsupportedOperationException("Not yet supported");
    }

    /**
     * Validate input arrays to confirm that they fulfill the assumptions of the layer. If they don't, throw an exception.
     * @param input inputs to the layer
     */
    public void validateInput(INDArray[] input){/* no-op */}

    @Override
    public MemoryReport getMemoryReport(InputType... inputTypes) {
        return null;
    }

    /** Reshape order ('c' or 'f') used when flattening/unflattening the named parameter. */
    public char paramReshapeOrder(String paramName) {
        return 'c';
    }

    /**
     * Fills any unset training configuration (regularization, updaters, gradient
     * normalization) from the global network configuration, then delegates to
     * {@link #applyGlobalConfigToLayer(NeuralNetConfiguration.Builder)}.
     */
    public void applyGlobalConfig(NeuralNetConfiguration.Builder b) {
        if(regularization == null || regularization.isEmpty()){
            regularization = b.getRegularization();
        }
        if(regularizationBias == null || regularizationBias.isEmpty()){
            regularizationBias = b.getRegularizationBias();
        }
        if (updater == null) {
            updater = b.getIUpdater();
        }
        if (biasUpdater == null) {
            biasUpdater = b.getBiasUpdater();
        }
        if (gradientNormalization == null) {
            gradientNormalization = b.getGradientNormalization();
        }
        if (Double.isNaN(gradientNormalizationThreshold)) {
            gradientNormalizationThreshold = b.getGradientNormalizationThreshold();
        }
        applyGlobalConfigToLayer(b);
    }

    public void applyGlobalConfigToLayer(NeuralNetConfiguration.Builder globalConfig) {
        //Default implementation: no op
    }

    @Override
    public String getLayerName() {
        return name;
    }

    /**
     * Returns the regularization list for the given parameter (weight vs bias),
     * or null if no regularization is configured at all.
     * @throws IllegalStateException if the parameter name is unknown
     */
    @Override
    public List<Regularization> getRegularizationByParam(String paramName){
        if((regularization == null || regularization.isEmpty()) && (regularizationBias == null || regularizationBias.isEmpty())){
            return null;
        }
        if (getVertexParams().isWeightParam(paramName)) {
            return regularization;
        }
        if (getVertexParams().isBiasParam(paramName)) {
            return regularizationBias;
        }
        throw new IllegalStateException("Unknown parameter name: " + paramName + " - not in weights ("
                + getVertexParams().getWeightParameterKeys() + ") or biases ("
                + getVertexParams().getBiasParameterKeys() + ")");
    }

    @Override
    public boolean isPretrainParam(String paramName) {
        return false;
    }

    /**
     * Returns the updater for the given parameter; bias parameters fall back to
     * the weight updater when no bias updater is configured.
     * @throws IllegalStateException if the parameter name is unknown
     */
    @Override
    public IUpdater getUpdaterByParam(String paramName) {
        if (getVertexParams().isWeightParam(paramName)) {
            return updater;
        }
        if (getVertexParams().isBiasParam(paramName)) {
            if (biasUpdater == null) {
                return updater;
            }
            return biasUpdater;
        }
        throw new IllegalStateException("Unknown parameter name: " + paramName + " - not in weights ("
                + getVertexParams().getWeightParameterKeys() + ") or biases ("
                + getVertexParams().getBiasParameterKeys() + ")");
    }

    @Override
    public GradientNormalization getGradientNormalization() {
        return gradientNormalization;
    }

    @Override
    public double getGradientNormalizationThreshold() {
        return gradientNormalizationThreshold;
    }

    @Override
    public void setDataType(DataType dataType) {
        this.dataType = dataType;
    }
}
package org.continuousassurance.swamp.session;

import org.apache.http.cookie.Cookie;
import org.continuousassurance.swamp.exceptions.NoJSONReturnedException;
import org.continuousassurance.swamp.session.handlers.UserHandler;
import org.continuousassurance.swamp.session.util.SWAMPServiceEnvironment;

import java.io.Serializable;
import java.util.HashMap;

/**
 * This represents the session between the client and the server (as opposed to the local runtime
 * environment that is in {@link SWAMPServiceEnvironment}).
 * <p>Created by Jeff Gaynor<br>
 * on 11/18/14 at 3:17 PM
 */
public class Session implements Serializable, Cloneable {
    //   private static final long serialVersionUID = -6470090944414208496L;

    public boolean isRequireSecureCookies() {
        return requireSecureCookies;
    }

    public void setRequireSecureCookies(boolean requireSecureCookies) {
        this.requireSecureCookies = requireSecureCookies;
    }

    boolean requireSecureCookies = true;

    public static final String USERNAME_KEY = "username";
    public static final String PASSWORD_KEY = "password";
    public static final String ENDPOINT_LOGIN = "login";
    public static final String ENDPOINT_LOGOUT = "logout";
    public static final String SWAMP_REG_SESSION = "swamp_reg_session";
    public static final String SWAMP_CSA_SESSION = "swamp_csa_session";

    private String userUID = null;
    /**
     * Active session flag, initially false.
     */
    private boolean sessionActive = false;
    private SWAMPHttpClient client;
    private String host;
    private String sessionID;
    private String sessionKey;
    private String csaSessionKey;

    /**
     * The constructor. You must set the {@link SWAMPHttpClient} before using this session.
     *
     * @param host String with the host name.
     */
    public Session(String host) {
        setHost(host);
        //  client = new SWAMPHttpClient(this.host);
        sessionActive = false;
    }

    public String getHost() {
        return host;
    }

    /** Normalizes the host so it always ends with a trailing slash (when non-null). */
    public void setHost(String host) {
        if (host != null && !host.endsWith("/")) {
            this.host = host + "/";
        } else {
            this.host = host;
        }
    }

    /**
     * Copy constructor: effectively clone this session. This is useful when talking to various SWAMP components which
     * share session state (key and id) but reside on different hosts. Set the host and point at this (active)
     * session.
     *
     * @param host         The name of the host.
     * @param otherSession The other session object that we are copying.
     */
    public Session(String host, Session otherSession) {
        this(host);
        setState(otherSession);
    }

    @Override
    protected Object clone() throws CloneNotSupportedException {
        Session session = new Session(getHost(), this);
        return session;
    }

    /**
     * Takes the state from another session and imports it to this session.
     *
     * @param otherSession the session whose state is copied into this one
     */
    protected void setState(Session otherSession) {
        setHost(otherSession.getHost());
        this.setUserUID(otherSession.getUserUID());
        this.setSessionKey(otherSession.getSessionKey());
        this.setSessionID(otherSession.getSessionID());
        this.setClient(otherSession.getClient());
        this.setCsaSessionKey(otherSession.getCsaSessionKey());
    }

    /**
     * Best-effort logout. Posts to the logout endpoint and marks the session
     * inactive. The server replies with a non-JSON "SESSION_DESTROYED" body,
     * which surfaces as a {@link NoJSONReturnedException} and is expected.
     */
    public void logout() {
        // only need to logout if the session is active
        if (sessionActive) {
            try {
                // Use getClient() (lazy init) rather than the raw field, which may be null.
                getClient().rawPost(createURL(ENDPOINT_LOGOUT), null);
            } catch (NoJSONReturnedException x) {
                // Expected when the server destroyed the session; other cases are
                // tolerated too since logout is best-effort.
                // (The original code had a stray ';' after the SESSION_DESTROYED
                // check, making it a no-op, and never deactivated on a clean response.)
            }
            // Mark inactive regardless of how the server answered.
            sessionActive = false;
        }
    }

    /**
     * Logs on with the given credentials, capturing the session cookies and the
     * user UID from the response.
     *
     * @throws IllegalStateException if a session is already active
     * @throws SecurityException     if secure cookies are required but an insecure one is returned
     */
    public void logon(String username, String password) {
        if (sessionActive) {
            throw new IllegalStateException("Error: There is already an active logon. Please log out then try again.");
        }
        HashMap<String, Object> map = new HashMap<String, Object>();
        map.put(USERNAME_KEY, username);
        map.put(PASSWORD_KEY, password);
        MyResponse myResponse = getClient().rawPost(createURL(ENDPOINT_LOGIN), map);
        for (Cookie cookie : myResponse.cookies) {
            if (isRequireSecureCookies() && !cookie.isSecure()) {
                throw new SecurityException("Error: cookie named \"" + cookie.getName() + "\" is not secure. Logon aborted");
            }
            //DebugUtil.say(this, "response cookie:" + cookie.getName() + "+" + cookie.getValue());
            if (cookie.getName().equals(SWAMP_REG_SESSION)) {
                setSessionKey(cookie.getValue());
            } else {
                if (cookie.getName().equals(SWAMP_CSA_SESSION)) {
                    setCsaSessionKey(cookie.getValue());
                } else {
                    // Any other cookie is treated as the session id.
                    setSessionID(cookie.getValue());
                }
            }
        }
        setUserUID(myResponse.json.getString(UserHandler.USER_UID_KEY));
        sessionActive = true;
    }

    /** Lazily creates the HTTP client for this session's host. */
    public SWAMPHttpClient getClient() {
        if (client == null) {
            client = new SWAMPHttpClient(this.host);
        }
        return client;
    }

    public void setClient(SWAMPHttpClient client) {
        this.client = client;
    }

    /**
     * Given an endpoint for this host, return a valid url.
     *
     * @param endpoint relative endpoint name, e.g. {@link #ENDPOINT_LOGIN}
     * @return the absolute URL (host already ends with "/", see setHost)
     */
    public String createURL(String endpoint) {
        if (host == null || host.length() == 0) {
            throw new IllegalStateException("Error: no host set for this session");
        }
        return host + endpoint;
    }

    // NOTE(review): this reports true as soon as a client object exists (getClient()
    // creates one lazily), not whether a logon actually succeeded - callers wanting
    // the latter probably need the sessionActive flag; left unchanged for compatibility.
    public boolean isLoggedOn() {
        return client != null;
    }

    public boolean isValid() {
        return sessionID != null && sessionKey != null;
    }

    public String getSessionID() {
        return sessionID;
    }

    public void setSessionID(String sessionID) {
        this.sessionID = sessionID;
    }

    public String getSessionKey() {
        return sessionKey;
    }

    public void setSessionKey(String sessionKey) {
        this.sessionKey = sessionKey;
    }

    public String getCsaSessionKey() {
        return csaSessionKey;
    }

    public void setCsaSessionKey(String csaSessionKey) {
        this.csaSessionKey = csaSessionKey;
    }

    public String getUserUID() {
        return userUID;
    }

    public void setUserUID(String userUID) {
        this.userUID = userUID;
    }
}
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.server.coordinator;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import io.druid.concurrent.Execs;
import io.druid.curator.CuratorTestBase;
import io.druid.jackson.DefaultObjectMapper;
import io.druid.server.coordination.DataSegmentChangeCallback;
import io.druid.server.coordination.DataSegmentChangeHandler;
import io.druid.server.coordination.DataSegmentChangeRequest;
import io.druid.server.coordination.SegmentChangeRequestDrop;
import io.druid.server.coordination.SegmentChangeRequestLoad;
import io.druid.timeline.DataSegment;
import io.druid.timeline.partition.NoneShardSpec;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.recipes.cache.PathChildrenCache;
import org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent;
import org.apache.curator.framework.recipes.cache.PathChildrenCacheListener;
import org.apache.curator.utils.ZKPaths;
import org.joda.time.Duration;
import org.joda.time.Interval;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Exercises {@link LoadQueuePeon} against the embedded test ZooKeeper/Curator
 * provided by {@link CuratorTestBase}: the peon writes load/drop change requests
 * under a ZK path; the test observes them with a {@link PathChildrenCache} and
 * simulates a historical node completing each request by deleting the znode.
 */
public class LoadQueuePeonTest extends CuratorTestBase
{
  private static final String LOAD_QUEUE_PATH = "/druid/loadqueue/localhost:1234";

  private final ObjectMapper jsonMapper = new DefaultObjectMapper();

  private LoadQueuePeon loadQueuePeon;
  private PathChildrenCache loadQueueCache;

  @Before
  public void setUp() throws Exception
  {
    setupServerAndCurator();
    curator.start();
    curator.create().creatingParentsIfNeeded().forPath(LOAD_QUEUE_PATH);
    loadQueueCache = new PathChildrenCache(
        curator,
        LOAD_QUEUE_PATH,
        true,
        true,
        Execs.singleThreaded("load_queue_cache-%d")
    );
  }

  /**
   * Queues 5 drops and 5 loads, then verifies, one segment at a time, that the
   * peon writes each change request to ZK and reacts correctly when the request
   * znode is deleted (simulating completion by a historical).
   *
   * The latch arrays are indexed by two shared counters: requestSignalIdx for
   * "request appeared in ZK" latches and segmentSignalIdx for "callback fired"
   * latches; both wrap from 4 back to 0 between the drop phase and the load phase.
   */
  @Test
  public void testMultipleLoadDropSegments() throws Exception
  {
    final AtomicInteger requestSignalIdx = new AtomicInteger(0);
    final AtomicInteger segmentSignalIdx = new AtomicInteger(0);

    loadQueuePeon = new LoadQueuePeon(
        curator,
        LOAD_QUEUE_PATH,
        jsonMapper,
        Execs.scheduledSingleThreaded("test_load_queue_peon_scheduled-%d"),
        Execs.singleThreaded("test_load_queue_peon-%d"),
        new TestDruidCoordinatorConfig(null, null, null, null, null, false, false)
    );

    final CountDownLatch[] loadRequestSignal = new CountDownLatch[5];
    final CountDownLatch[] dropRequestSignal = new CountDownLatch[5];
    final CountDownLatch[] segmentLoadedSignal = new CountDownLatch[5];
    final CountDownLatch[] segmentDroppedSignal = new CountDownLatch[5];

    for (int i = 0; i < 5; ++i) {
      loadRequestSignal[i] = new CountDownLatch(1);
      dropRequestSignal[i] = new CountDownLatch(1);
      segmentLoadedSignal[i] = new CountDownLatch(1);
      segmentDroppedSignal[i] = new CountDownLatch(1);
    }

    // Releases the per-request latch for whichever request index is current.
    final DataSegmentChangeHandler handler = new DataSegmentChangeHandler()
    {
      @Override
      public void addSegment(DataSegment segment, DataSegmentChangeCallback callback)
      {
        loadRequestSignal[requestSignalIdx.get()].countDown();
      }

      @Override
      public void removeSegment(DataSegment segment, DataSegmentChangeCallback callback)
      {
        dropRequestSignal[requestSignalIdx.get()].countDown();
      }
    };

    final List<DataSegment> segmentToDrop = Lists.transform(
        ImmutableList.<String>of(
            "2014-10-26T00:00:00Z/P1D",
            "2014-10-25T00:00:00Z/P1D",
            "2014-10-24T00:00:00Z/P1D",
            "2014-10-23T00:00:00Z/P1D",
            "2014-10-22T00:00:00Z/P1D"
        ), new Function<String, DataSegment>()
        {
          @Override
          public DataSegment apply(String intervalStr)
          {
            return dataSegmentWithInterval(intervalStr);
          }
        }
    );

    final List<DataSegment> segmentToLoad = Lists.transform(
        ImmutableList.<String>of(
            "2014-10-31T00:00:00Z/P1D",
            "2014-10-30T00:00:00Z/P1D",
            "2014-10-29T00:00:00Z/P1D",
            "2014-10-28T00:00:00Z/P1D",
            "2014-10-27T00:00:00Z/P1D"
        ), new Function<String, DataSegment>()
        {
          @Override
          public DataSegment apply(String intervalStr)
          {
            return dataSegmentWithInterval(intervalStr);
          }
        }
    );

    // Deserialize each change request the peon writes and dispatch it to the handler.
    loadQueueCache.getListenable().addListener(
        new PathChildrenCacheListener()
        {
          @Override
          public void childEvent(
              CuratorFramework client, PathChildrenCacheEvent event
          ) throws Exception
          {
            if (event.getType() == PathChildrenCacheEvent.Type.CHILD_ADDED) {
              DataSegmentChangeRequest request = jsonMapper.readValue(
                  event.getData().getData(), DataSegmentChangeRequest.class
              );
              request.go(handler, null);
            }
          }
        }
    );
    loadQueueCache.start();

    for (DataSegment segment : segmentToDrop) {
      loadQueuePeon.dropSegment(
          segment,
          new LoadPeonCallback()
          {
            @Override
            public void execute()
            {
              segmentDroppedSignal[segmentSignalIdx.get()].countDown();
            }
          }
      );
    }

    for (DataSegment segment : segmentToLoad) {
      loadQueuePeon.loadSegment(
          segment,
          new LoadPeonCallback()
          {
            @Override
            public void execute()
            {
              segmentLoadedSignal[segmentSignalIdx.get()].countDown();
            }
          }
      );
    }

    // 5 load segments of size 1200 each => 6000 queued bytes; drops don't add to the size.
    Assert.assertEquals(6000, loadQueuePeon.getLoadQueueSize());
    Assert.assertEquals(5, loadQueuePeon.getSegmentsToLoad().size());
    Assert.assertEquals(5, loadQueuePeon.getSegmentsToDrop().size());

    for (DataSegment segment : segmentToDrop) {
      String dropRequestPath = ZKPaths.makePath(LOAD_QUEUE_PATH, segment.getIdentifier());
      // Wait for the peon to publish the drop request, then verify its payload in ZK.
      Assert.assertTrue(timing.forWaiting().awaitLatch(dropRequestSignal[requestSignalIdx.get()]));
      Assert.assertNotNull(curator.checkExists().forPath(dropRequestPath));
      Assert.assertEquals(
          segment,
          ((SegmentChangeRequestDrop) jsonMapper.readValue(
              curator.getData()
                     .decompressed()
                     .forPath(dropRequestPath), DataSegmentChangeRequest.class
          )).getSegment()
      );

      // Wrap the request index back to 0 so the load phase reuses indices 0..4.
      if (requestSignalIdx.get() == 4) {
        requestSignalIdx.set(0);
      } else {
        requestSignalIdx.incrementAndGet();
      }

      // simulate completion of drop request by historical
      curator.delete().guaranteed().forPath(dropRequestPath);

      Assert.assertTrue(timing.forWaiting().awaitLatch(segmentDroppedSignal[segmentSignalIdx.get()]));

      int expectedNumSegmentToDrop = 5 - segmentSignalIdx.get() - 1;
      Assert.assertEquals(expectedNumSegmentToDrop, loadQueuePeon.getSegmentsToDrop().size());

      // Same wrap-around for the callback index before the load phase.
      if (segmentSignalIdx.get() == 4) {
        segmentSignalIdx.set(0);
      } else {
        segmentSignalIdx.incrementAndGet();
      }
    }

    for (DataSegment segment : segmentToLoad) {
      String loadRequestPath = ZKPaths.makePath(LOAD_QUEUE_PATH, segment.getIdentifier());
      Assert.assertTrue(timing.forWaiting().awaitLatch(loadRequestSignal[requestSignalIdx.get()]));
      Assert.assertNotNull(curator.checkExists().forPath(loadRequestPath));
      Assert.assertEquals(
          segment,
          ((SegmentChangeRequestLoad) jsonMapper.readValue(
              curator.getData()
                     .decompressed()
                     .forPath(loadRequestPath), DataSegmentChangeRequest.class
          )).getSegment()
      );

      requestSignalIdx.incrementAndGet();

      // simulate completion of load request by historical
      curator.delete().guaranteed().forPath(loadRequestPath);

      Assert.assertTrue(timing.forWaiting().awaitLatch(segmentLoadedSignal[segmentSignalIdx.get()]));

      // Queue size shrinks by one 1200-byte segment per completed load.
      int expectedNumSegmentToLoad = 5 - segmentSignalIdx.get() - 1;
      Assert.assertEquals(1200 * expectedNumSegmentToLoad, loadQueuePeon.getLoadQueueSize());
      Assert.assertEquals(expectedNumSegmentToLoad, loadQueuePeon.getSegmentsToLoad().size());

      segmentSignalIdx.incrementAndGet();
    }
  }

  /**
   * With a 1 ms load timeout, the peon must fail the assignment: the callback
   * still fires, and the segment is removed from the queue even though the load
   * request was never "completed" by a historical.
   */
  @Test
  public void testFailAssign() throws Exception
  {
    final DataSegment segment = dataSegmentWithInterval("2014-10-22T00:00:00Z/P1D");
    final CountDownLatch loadRequestSignal = new CountDownLatch(1);
    final CountDownLatch segmentLoadedSignal = new CountDownLatch(1);

    loadQueuePeon = new LoadQueuePeon(
        curator,
        LOAD_QUEUE_PATH,
        jsonMapper,
        Execs.scheduledSingleThreaded("test_load_queue_peon_scheduled-%d"),
        Execs.singleThreaded("test_load_queue_peon-%d"),
        // set time-out to 1 ms so that LoadQueuePeon will fail the assignment quickly
        new TestDruidCoordinatorConfig(null, null, null, new Duration(1), null, false, false)
    );

    loadQueueCache.getListenable().addListener(
        new PathChildrenCacheListener()
        {
          @Override
          public void childEvent(
              CuratorFramework client, PathChildrenCacheEvent event
          ) throws Exception
          {
            if (event.getType() == PathChildrenCacheEvent.Type.CHILD_ADDED) {
              loadRequestSignal.countDown();
            }
          }
        }
    );
    loadQueueCache.start();

    loadQueuePeon.loadSegment(
        segment,
        new LoadPeonCallback()
        {
          @Override
          public void execute()
          {
            segmentLoadedSignal.countDown();
          }
        }
    );

    String loadRequestPath = ZKPaths.makePath(LOAD_QUEUE_PATH, segment.getIdentifier());
    Assert.assertTrue(timing.forWaiting().awaitLatch(loadRequestSignal));
    Assert.assertNotNull(curator.checkExists().forPath(loadRequestPath));
    Assert.assertEquals(
        segment,
        ((SegmentChangeRequestLoad) jsonMapper.readValue(
            curator.getData()
                   .decompressed()
                   .forPath(loadRequestPath), DataSegmentChangeRequest.class
        )).getSegment()
    );

    // don't simulate completion of load request here
    Assert.assertTrue(timing.forWaiting().awaitLatch(segmentLoadedSignal));
    Assert.assertEquals(0, loadQueuePeon.getSegmentsToLoad().size());
    Assert.assertEquals(0L, loadQueuePeon.getLoadQueueSize());
  }

  /** Builds a fixed-shape test segment (size 1200) over the given interval string. */
  private DataSegment dataSegmentWithInterval(String intervalStr)
  {
    return DataSegment.builder()
                      .dataSource("test_load_queue_peon")
                      .interval(new Interval(intervalStr))
                      .loadSpec(ImmutableMap.<String, Object>of())
                      .version("2015-05-27T03:38:35.683Z")
                      .dimensions(ImmutableList.<String>of())
                      .metrics(ImmutableList.<String>of())
                      .shardSpec(new NoneShardSpec())
                      .binaryVersion(9)
                      .size(1200)
                      .build();
  }

  @After
  public void tearDown() throws Exception
  {
    loadQueueCache.close();
    loadQueuePeon.stop();
    tearDownServerAndCurator();
  }
}
/* * Copyright 2012 GitHub Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.github.mobile.ui.ref; import static com.github.mobile.Intents.EXTRA_BASE; import static com.github.mobile.Intents.EXTRA_HEAD; import static com.github.mobile.Intents.EXTRA_PATH; import static com.github.mobile.Intents.EXTRA_REPOSITORY; import static com.github.mobile.util.PreferenceUtils.RENDER_MARKDOWN; import static com.github.mobile.util.PreferenceUtils.WRAP; import android.content.Intent; import android.os.Bundle; import android.support.v4.app.LoaderManager.LoaderCallbacks; import android.support.v4.content.Loader; import android.text.TextUtils; import android.util.Log; import android.webkit.WebView; import android.widget.ProgressBar; import com.actionbarsherlock.app.ActionBar; import com.actionbarsherlock.view.Menu; import com.actionbarsherlock.view.MenuItem; import com.github.kevinsawicki.wishlist.ViewUtils; import com.github.mobile.Intents.Builder; import com.github.mobile.R; import com.github.mobile.core.code.RefreshBlobTask; import com.github.mobile.core.commit.CommitUtils; import com.github.mobile.ui.BaseActivity; import com.github.mobile.ui.MarkdownLoader; import com.github.mobile.util.AvatarLoader; import com.github.mobile.util.HttpImageGetter; import com.github.mobile.util.MarkdownUtils; import com.github.mobile.util.PreferenceUtils; import com.github.mobile.util.ShareUtils; import com.github.mobile.util.SourceEditor; import com.github.mobile.util.ToastUtils; import 
com.google.inject.Inject;

import org.eclipse.egit.github.core.Blob;
import org.eclipse.egit.github.core.IRepositoryIdProvider;
import org.eclipse.egit.github.core.Repository;
import org.eclipse.egit.github.core.util.EncodingUtils;

/**
 * Activity to view a file on a branch
 */
public class BranchFileViewActivity extends BaseActivity implements
        LoaderCallbacks<CharSequence> {

    private static final String TAG = "BranchFileViewActivity";

    // Loader-args keys for the markdown loader
    private static final String ARG_TEXT = "text";

    private static final String ARG_REPO = "repo";

    /**
     * Create intent to show file in commit
     *
     * @param repository
     * @param branch
     * @param file path of the file within the repository
     * @param blobSha SHA of the blob to fetch and display
     * @return intent
     */
    public static Intent createIntent(Repository repository, String branch,
            String file, String blobSha) {
        Builder builder = new Builder("branch.file.VIEW");
        builder.repo(repository);
        builder.add(EXTRA_BASE, blobSha);
        builder.add(EXTRA_PATH, file);
        builder.add(EXTRA_HEAD, branch);
        return builder.toIntent();
    }

    private Repository repo;

    // blob SHA, passed via EXTRA_BASE
    private String sha;

    private String path;

    // file name portion of path
    private String file;

    private String branch;

    private boolean isMarkdownFile;

    // cached rendered markdown; null until the loader has run once
    private String renderedMarkdown;

    private Blob blob;

    private ProgressBar loadingBar;

    private WebView codeView;

    private SourceEditor editor;

    // menu item is held so it can be enabled once the blob has loaded
    private MenuItem markdownItem;

    @Inject
    private AvatarLoader avatars;

    @Inject
    private HttpImageGetter imageGetter;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.commit_file_view);

        repo = getSerializableExtra(EXTRA_REPOSITORY);
        sha = getStringExtra(EXTRA_BASE);
        path = getStringExtra(EXTRA_PATH);
        branch = getStringExtra(EXTRA_HEAD);

        loadingBar = finder.find(R.id.pb_loading);
        codeView = finder.find(R.id.wv_code);

        file = CommitUtils.getName(path);
        isMarkdownFile = MarkdownUtils.isMarkdown(file);

        editor = new SourceEditor(codeView);
        editor.setWrap(PreferenceUtils.getCodePreferences(this).getBoolean(
                WRAP, false));

        ActionBar actionBar = getSupportActionBar();
        actionBar.setTitle(file);
        actionBar.setSubtitle(branch);
        avatars.bind(actionBar, repo.getOwner());

        loadContent();
    }

    @Override
    public boolean onCreateOptionsMenu(final Menu optionsMenu) {
        getSupportMenuInflater().inflate(R.menu.file_view, optionsMenu);

        // reflect the persisted wrap preference in the menu label
        MenuItem wrapItem = optionsMenu.findItem(R.id.m_wrap);
        if (PreferenceUtils.getCodePreferences(this).getBoolean(WRAP, false))
            wrapItem.setTitle(R.string.disable_wrapping);
        else
            wrapItem.setTitle(R.string.enable_wrapping);

        markdownItem = optionsMenu.findItem(R.id.m_render_markdown);
        if (isMarkdownFile) {
            // disabled until the blob arrives; see loadContent()
            markdownItem.setEnabled(blob != null);
            markdownItem.setVisible(true);
            if (PreferenceUtils.getCodePreferences(this).getBoolean(
                    RENDER_MARKDOWN, true))
                markdownItem.setTitle(R.string.show_raw_markdown);
            else
                markdownItem.setTitle(R.string.render_markdown);
        }

        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
        case R.id.m_wrap:
            // toggle wrapping, update the label and persist the new setting
            if (editor.getWrap())
                item.setTitle(R.string.enable_wrapping);
            else
                item.setTitle(R.string.disable_wrapping);
            editor.toggleWrap();
            PreferenceUtils.save(PreferenceUtils.getCodePreferences(this)
                    .edit().putBoolean(WRAP, editor.getWrap()));
            return true;

        case R.id.m_share:
            shareFile();
            return true;

        case R.id.m_render_markdown:
            if (editor.isMarkdown()) {
                // switch back to raw source view
                item.setTitle(R.string.render_markdown);
                editor.toggleMarkdown();
                editor.setSource(file, blob);
            } else {
                // switch to rendered view, reusing the cached render if present
                item.setTitle(R.string.show_raw_markdown);
                editor.toggleMarkdown();
                if (renderedMarkdown != null)
                    editor.setSource(file, renderedMarkdown, false);
                else
                    loadMarkdown();
            }
            PreferenceUtils.save(PreferenceUtils.getCodePreferences(this)
                    .edit().putBoolean(RENDER_MARKDOWN, editor.isMarkdown()));
            return true;

        default:
            return super.onOptionsItemSelected(item);
        }
    }

    @Override
    public Loader<CharSequence> onCreateLoader(int loader, Bundle args) {
        final String raw = args.getString(ARG_TEXT);
        final IRepositoryIdProvider repo = (IRepositoryIdProvider) args
                .getSerializable(ARG_REPO);
        return new MarkdownLoader(this, repo, raw, imageGetter, false);
    }

    @Override
    public void onLoadFinished(Loader<CharSequence> loader,
            CharSequence rendered) {
        if (rendered == null)
            ToastUtils.show(this, R.string.error_rendering_markdown);

        ViewUtils.setGone(loadingBar, true);
        ViewUtils.setGone(codeView, false);

        if (!TextUtils.isEmpty(rendered)) {
            // cache the render so toggling back doesn't re-run the loader
            renderedMarkdown = rendered.toString();
            if (markdownItem != null)
                markdownItem.setEnabled(true);
            editor.setMarkdown(true).setSource(file, renderedMarkdown, false);
        }
    }

    @Override
    public void onLoaderReset(Loader<CharSequence> loader) {
    }

    // Share a github.com link to this file at this branch
    private void shareFile() {
        String id = repo.generateId();
        startActivity(ShareUtils.create(path + " at " + branch + " on " + id,
                "https://github.com/" + id + "/blob/" + branch + '/' + path));
    }

    // Decode the blob's base64 content and hand it to the markdown loader
    private void loadMarkdown() {
        ViewUtils.setGone(loadingBar, false);
        ViewUtils.setGone(codeView, true);

        String markdown = new String(
                EncodingUtils.fromBase64(blob.getContent()));
        Bundle args = new Bundle();
        args.putCharSequence(ARG_TEXT, markdown);
        args.putSerializable(ARG_REPO, repo);
        getSupportLoaderManager().restartLoader(0, args, this);
    }

    // Fetch the blob, then render as markdown or show raw source per preference
    private void loadContent() {
        ViewUtils.setGone(loadingBar, false);
        ViewUtils.setGone(codeView, true);

        new RefreshBlobTask(repo, sha, this) {

            @Override
            protected void onSuccess(Blob blob) throws Exception {
                super.onSuccess(blob);

                BranchFileViewActivity.this.blob = blob;
                if (markdownItem != null)
                    markdownItem.setEnabled(true);

                if (isMarkdownFile
                        && PreferenceUtils.getCodePreferences(
                                BranchFileViewActivity.this).getBoolean(
                                RENDER_MARKDOWN, true))
                    loadMarkdown();
                else {
                    ViewUtils.setGone(loadingBar, true);
                    ViewUtils.setGone(codeView, false);

                    editor.setMarkdown(false).setSource(file, blob);
                }
            }

            @Override
            protected void onException(Exception e) throws RuntimeException {
                super.onException(e);

                Log.d(TAG, "Loading file contents failed", e);

                ViewUtils.setGone(loadingBar, true);
                ViewUtils.setGone(codeView, false);

                ToastUtils.show(BranchFileViewActivity.this, e,
                        R.string.error_file_load);
            }
        }.execute();
    }
}
/**
 * Copyright 2014 Google Inc.
 * Copyright 2014 Andreas Schildbach
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.matthewmitchell.nubitsj.wallet;

import com.matthewmitchell.nubitsj.core.Utils;
import com.matthewmitchell.nubitsj.crypto.*;
import com.matthewmitchell.nubitsj.store.UnreadableWalletException;
import com.google.common.base.Charsets;
import com.google.common.base.Splitter;
import org.spongycastle.crypto.params.KeyParameter;

import javax.annotation.Nullable;
import java.security.SecureRandom;
import java.util.List;

import static com.google.common.base.Preconditions.checkArgument;
import static com.matthewmitchell.nubitsj.core.Utils.HEX;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;

/**
 * Holds the seed bytes for the BIP32 deterministic wallet algorithm, inside a
 * {@link DeterministicKeyChain}. The purpose of this wrapper is to simplify the encryption
 * code.
 */
public class DeterministicSeed implements EncryptableItem {
    // It would take more than 10^12 years to brute-force a 128 bit seed using $1B worth of computing equipment.
    public static final int DEFAULT_SEED_ENTROPY_BITS = 128;
    public static final int MAX_SEED_ENTROPY_BITS = 512;

    // Raw BIP32 seed bytes; null when this object is encrypted
    @Nullable private final byte[] seed;
    // only one of mnemonicCode/encryptedMnemonicCode will be set
    @Nullable private final List<String> mnemonicCode;
    @Nullable private final EncryptedData encryptedMnemonicCode;
    // Encrypted form of the seed; may be null even when the mnemonic is encrypted
    @Nullable private EncryptedData encryptedSeed;
    private final long creationTimeSeconds;

    // Convenience constructor: splits a space-separated mnemonic string into words.
    public DeterministicSeed(String mnemonicCode, byte[] seed, String passphrase, long creationTimeSeconds) throws UnreadableWalletException {
        this(decodeMnemonicCode(mnemonicCode), seed, passphrase, creationTimeSeconds);
    }

    // Constructs an unencrypted seed from precomputed seed bytes and mnemonic words.
    public DeterministicSeed(byte[] seed, List<String> mnemonic, long creationTimeSeconds) {
        this.seed = checkNotNull(seed);
        this.mnemonicCode = checkNotNull(mnemonic);
        this.encryptedMnemonicCode = null;
        this.creationTimeSeconds = creationTimeSeconds;
    }

    // Constructs an encrypted seed; the plaintext seed and mnemonic are deliberately absent.
    public DeterministicSeed(EncryptedData encryptedMnemonic, @Nullable EncryptedData encryptedSeed, long creationTimeSeconds) {
        this.seed = null;
        this.mnemonicCode = null;
        this.encryptedMnemonicCode = checkNotNull(encryptedMnemonic);
        this.encryptedSeed = encryptedSeed;
        this.creationTimeSeconds = creationTimeSeconds;
    }

    /**
     * Constructs a seed from a BIP 39 mnemonic code. See {@link com.matthewmitchell.nubitsj.crypto.MnemonicCode} for more
     * details on this scheme.
     * @param mnemonicCode A list of words.
     * @param seed The derived seed, or pass null to derive it from mnemonicCode (slow)
     * @param passphrase A user supplied passphrase, or an empty string if there is no passphrase
     * @param creationTimeSeconds When the seed was originally created, UNIX time.
     */
    public DeterministicSeed(List<String> mnemonicCode, @Nullable byte[] seed, String passphrase, long creationTimeSeconds) {
        this((seed != null ? seed : MnemonicCode.toSeed(mnemonicCode, checkNotNull(passphrase))),
                mnemonicCode,
                creationTimeSeconds);
    }

    /**
     * Constructs a seed from a BIP 39 mnemonic code. See {@link com.matthewmitchell.nubitsj.crypto.MnemonicCode} for more
     * details on this scheme.
     * @param random Entropy source
     * @param bits number of bits, must be divisible by 32
     * @param passphrase A user supplied passphrase, or an empty string if there is no passphrase
     * @param creationTimeSeconds When the seed was originally created, UNIX time.
     */
    public DeterministicSeed(SecureRandom random, int bits, String passphrase, long creationTimeSeconds) {
        this(getEntropy(random, bits), checkNotNull(passphrase), creationTimeSeconds);
    }

    /**
     * Constructs a seed from a BIP 39 mnemonic code. See {@link com.matthewmitchell.nubitsj.crypto.MnemonicCode} for more
     * details on this scheme.
     * @param entropy entropy bits, length must be divisible by 32
     * @param passphrase A user supplied passphrase, or an empty string if there is no passphrase
     * @param creationTimeSeconds When the seed was originally created, UNIX time.
     */
    public DeterministicSeed(byte[] entropy, String passphrase, long creationTimeSeconds) {
        // entropy.length % 4 == 0 bytes <=> bit length divisible by 32
        checkArgument(entropy.length % 4 == 0, "entropy size in bits not divisible by 32");
        checkArgument(entropy.length * 8 >= DEFAULT_SEED_ENTROPY_BITS, "entropy size too small");
        checkNotNull(passphrase);

        try {
            this.mnemonicCode = MnemonicCode.INSTANCE.toMnemonic(entropy);
        } catch (MnemonicException.MnemonicLengthException e) {
            // cannot happen
            throw new RuntimeException(e);
        }
        this.seed = MnemonicCode.toSeed(mnemonicCode, passphrase);
        this.encryptedMnemonicCode = null;
        this.creationTimeSeconds = creationTimeSeconds;
    }

    // Draws bits/8 random bytes from the supplied source.
    private static byte[] getEntropy(SecureRandom random, int bits) {
        checkArgument(bits <= MAX_SEED_ENTROPY_BITS, "requested entropy size too large");

        byte[] seed = new byte[bits / 8];
        random.nextBytes(seed);
        return seed;
    }

    @Override
    public boolean isEncrypted() {
        // exactly one of the two must be set; encrypted iff the encrypted form is present
        checkState(mnemonicCode != null || encryptedMnemonicCode != null);
        return encryptedMnemonicCode != null;
    }

    @Override
    public String toString() {
        // never print seed material when encrypted
        return isEncrypted()
              ? "DeterministicSeed [encrypted]"
              : "DeterministicSeed " + toHexString() + " " + Utils.join(mnemonicCode);
    }

    /** Returns the seed as hex or null if encrypted. */
    @Nullable
    public String toHexString() {
        return seed != null ? HEX.encode(seed) : null;
    }

    /** Returns the UTF-8 bytes of the space-joined mnemonic, or null if encrypted. */
    @Nullable
    @Override
    public byte[] getSecretBytes() {
        return getMnemonicAsBytes();
    }

    /** Returns the raw seed bytes, or null if encrypted. */
    @Nullable
    public byte[] getSeedBytes() {
        return seed;
    }

    @Nullable
    @Override
    public EncryptedData getEncryptedData() {
        return encryptedMnemonicCode;
    }

    @Override
    public Protos.Wallet.EncryptionType getEncryptionType() {
        return Protos.Wallet.EncryptionType.ENCRYPTED_SCRYPT_AES;
    }

    @Nullable
    public EncryptedData getEncryptedSeedData() {
        return encryptedSeed;
    }

    @Override
    public long getCreationTimeSeconds() {
        return creationTimeSeconds;
    }

    /** Returns an encrypted copy of this seed; this instance must be unencrypted. */
    public DeterministicSeed encrypt(KeyCrypter keyCrypter, KeyParameter aesKey) {
        checkState(encryptedMnemonicCode == null, "Trying to encrypt seed twice");
        checkState(mnemonicCode != null, "Mnemonic missing so cannot encrypt");
        EncryptedData encryptedMnemonic = keyCrypter.encrypt(getMnemonicAsBytes(), aesKey);
        EncryptedData encryptedSeed = keyCrypter.encrypt(seed, aesKey);
        return new DeterministicSeed(encryptedMnemonic, encryptedSeed, creationTimeSeconds);
    }

    private byte[] getMnemonicAsBytes() {
        return Utils.join(mnemonicCode).getBytes(Charsets.UTF_8);
    }

    /**
     * Returns a decrypted copy of this seed; this instance must be encrypted.
     * If no encrypted seed bytes were stored, the seed is re-derived from the
     * decrypted mnemonic and the supplied passphrase.
     */
    public DeterministicSeed decrypt(KeyCrypter crypter, String passphrase, KeyParameter aesKey) {
        checkState(isEncrypted());
        checkNotNull(encryptedMnemonicCode);
        List<String> mnemonic = decodeMnemonicCode(crypter.decrypt(encryptedMnemonicCode, aesKey));
        byte[] seed = encryptedSeed == null ? null : crypter.decrypt(encryptedSeed, aesKey);
        return new DeterministicSeed(mnemonic, seed, passphrase, creationTimeSeconds);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        DeterministicSeed seed = (DeterministicSeed) o;

        if (creationTimeSeconds != seed.creationTimeSeconds) return false;
        // compare whichever representation (encrypted or plain) this instance holds
        if (encryptedMnemonicCode != null) {
            if (seed.encryptedMnemonicCode == null) return false;
            if (!encryptedMnemonicCode.equals(seed.encryptedMnemonicCode)) return false;
        } else {
            if (!mnemonicCode.equals(seed.mnemonicCode)) return false;
        }

        return true;
    }

    @Override
    public int hashCode() {
        int result = encryptedMnemonicCode != null ? encryptedMnemonicCode.hashCode() : mnemonicCode.hashCode();
        result = 31 * result + (int) (creationTimeSeconds ^ (creationTimeSeconds >>> 32));
        return result;
    }

    /**
     * Check if our mnemonic is a valid mnemonic phrase for our word list.
     * Does nothing if we are encrypted.
     *
     * @throws com.matthewmitchell.nubitsj.crypto.MnemonicException if check fails
     */
    public void check() throws MnemonicException {
        if (mnemonicCode != null)
            MnemonicCode.INSTANCE.check(mnemonicCode);
    }

    // Recovers the original entropy from the mnemonic words.
    byte[] getEntropyBytes() throws MnemonicException {
        return MnemonicCode.INSTANCE.toEntropy(mnemonicCode);
    }

    /** Get the mnemonic code, or null if unknown. */
    @Nullable
    public List<String> getMnemonicCode() {
        return mnemonicCode;
    }

    private static List<String> decodeMnemonicCode(byte[] mnemonicCode) {
        return decodeMnemonicCode(Utils.toString(mnemonicCode, "UTF-8"));
    }

    private static List<String> decodeMnemonicCode(String mnemonicCode) {
        return Splitter.on(" ").splitToList(mnemonicCode);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.wicket.spring.injection.annot;

import org.apache.wicket.Page;
import org.apache.wicket.behavior.Behavior;
import org.apache.wicket.proxy.ILazyInitProxy;
import org.apache.wicket.spring.Bean;
import org.apache.wicket.spring.SpringBeanLocator;
import org.apache.wicket.spring.test.ApplicationContextMock;
import org.apache.wicket.util.tester.DummyHomePage;
import org.apache.wicket.util.tester.WicketTester;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;

/**
 * Test for SpringBean.
*
 * @author Andrea Del Bene
 */
public class SpringBeanTest
{
	private WicketTester tester;
	private ApplicationContextMock ctx;

	/**
	 * Sets up a fresh tester and mock Spring context, wired together through a
	 * {@link SpringComponentInjector}, before each test.
	 *
	 * @throws Exception
	 */
	@BeforeEach
	public void before() throws Exception
	{
		tester = new WicketTester();
		ctx = new ApplicationContextMock();

		SpringComponentInjector springInjector = new SpringComponentInjector(
			tester.getApplication(), ctx);
		tester.getApplication().getComponentInstantiationListeners().add(springInjector);
	}

	/**
	 * When the bean exists in the context it must be injected into both required
	 * and not-required fields, and both pages must see the same instance.
	 *
	 * @throws Exception
	 */
	@Test
	public void beanExists() throws Exception
	{
		// add dependency bean
		ctx.putBean("bean", new Bean());

		AnnotatedBeanRequired page;

		// first test with standard behavior (required = true)
		tester.startPage(page = new AnnotatedBeanRequired());
		assertNotNull(page.getBean());

		// now test with required = false
		AnnotatedBeanNotRequired notRequiredpage;
		tester.startPage(notRequiredpage = new AnnotatedBeanNotRequired());
		assertNotNull(notRequiredpage.getBean());

		// both pages must share the same bean instance
		assertSame(page.getBean(), notRequiredpage.getBean());
	}

	/**
	 * When no matching bean exists: required = true fails fast with
	 * {@link IllegalStateException}, required = false leaves the field null.
	 *
	 * @throws Exception
	 */
	@Test
	public void beanNotExists() throws Exception
	{
		// with required = true we get IllegalStateException
		assertThrows(IllegalStateException.class,
			() -> tester.startPage(new AnnotatedBeanRequired()));

		// with required = false everything is fine
		AnnotatedBeanNotRequired page;
		tester.startPage(page = new AnnotatedBeanNotRequired());
		assertNull(page.getBean());

		// with name = "bean", required = false everything is fine as well
		AnnotatedBeanWithSameNameRequired page2;
		tester.startPage(page2 = new AnnotatedBeanWithSameNameRequired());
		assertNull(page2.getBean());
	}

	/**
	 * With two beans of the same type in the context, injection without an
	 * explicit name is ambiguous and fails; naming the bean selects it.
	 *
	 * @throws Exception
	 */
	@Test
	public void beanExistsDifferentName() throws Exception
	{
		// add dependency beans of the same type
		ctx.putBean("mrBean", new Bean());
		ctx.putBean("theBean", new Bean());

		// with no name specified we get IllegalStateException
		assertThrows(IllegalStateException.class,
			() -> tester.startPage(new AnnotatedBeanRequired()));

		// we must inject bean with name "mrBean"
		AnnotatedBeanNotRequiredDifferentName page;
		tester.startPage(page = new AnnotatedBeanNotRequiredDifferentName());

		SpringBeanLocator locator = (SpringBeanLocator)((ILazyInitProxy)page.getBean())
			.getObjectLocator();
		assertEquals("mrBean", locator.getBeanName());
	}

	/**
	 * https://issues.apache.org/jira/browse/WICKET-4149
	 */
	@Test
	public void beanInjectedInBehavior()
	{
		ctx.putBean("mrBean", new Bean());

		Page page = tester.startPage(new AnnotatedFieldInBehaviorPage());
		TestBehavior behavior = page.getBehaviors(TestBehavior.class).get(0);
		assertNotNull(behavior.getBean());
	}
}

/** Page whose @SpringBean field uses the default required = true. */
class AnnotatedBeanRequired extends DummyHomePage
{
	@SpringBean
	private Bean bean;

	public Bean getBean()
	{
		return bean;
	}
}

/** Page whose @SpringBean field names the bean explicitly but is not required. */
class AnnotatedBeanWithSameNameRequired extends DummyHomePage
{
	@SpringBean(name = "bean", required = false)
	private Bean bean;

	public Bean getBean()
	{
		return bean;
	}
}

/** Page whose @SpringBean field is not required and unnamed. */
class AnnotatedBeanNotRequired extends DummyHomePage
{
	@SpringBean(required = false)
	private Bean bean;

	public Bean getBean()
	{
		return bean;
	}
}

/** Page whose @SpringBean field selects the "mrBean" bean by name. */
class AnnotatedBeanNotRequiredDifferentName extends DummyHomePage
{
	@SpringBean(required = false, name = "mrBean")
	private Bean bean;

	public Bean getBean()
	{
		return bean;
	}
}

/**
 * A behavior which will be automatically processed for @SpringBean annotation
 */
class TestBehavior extends Behavior
{
	private static final long serialVersionUID = 1L;

	@SpringBean()
	private Bean bean;

	public Bean getBean()
	{
		return bean;
	}
}

/**
 * A test page with a behavior which will be processed for @SpringBean annotations
 */
class AnnotatedFieldInBehaviorPage extends DummyHomePage
{
	private static final long serialVersionUID = 1L;

	public AnnotatedFieldInBehaviorPage()
	{
		add(new TestBehavior());
	}
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.query;

import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder;
import org.elasticsearch.script.ScriptService;

import java.util.Collection;
import java.util.Map;

/**
 * A static factory for simple "import static" usage.
 * Each method returns a fresh builder; none of these methods share state.
 */
public abstract class QueryBuilders {

    /**
     * A query that match on all documents.
     */
    public static MatchAllQueryBuilder matchAllQuery() {
        return new MatchAllQueryBuilder();
    }

    /**
     * Creates a match query with type "BOOLEAN" for the provided field name and text.
     *
     * @param name The field name.
     * @param text The query text (to be analyzed).
     */
    public static MatchQueryBuilder matchQuery(String name, Object text) {
        return new MatchQueryBuilder(name, text).type(MatchQueryBuilder.Type.BOOLEAN);
    }

    /**
     * Creates a common query for the provided field name and text.
     *
     * @param name The field name.
     * @param text The query text (to be analyzed).
     */
    public static CommonTermsQueryBuilder commonTermsQuery(String name, Object text) {
        return new CommonTermsQueryBuilder(name, text);
    }

    /**
     * Creates a match query with type "BOOLEAN" for the provided field name and text.
     *
     * @param fieldNames The field names.
     * @param text       The query text (to be analyzed).
     */
    public static MultiMatchQueryBuilder multiMatchQuery(Object text, String... fieldNames) {
        return new MultiMatchQueryBuilder(text, fieldNames); // BOOLEAN is the default
    }

    /**
     * Creates a text query with type "PHRASE" for the provided field name and text.
     *
     * @param name The field name.
     * @param text The query text (to be analyzed).
     */
    public static MatchQueryBuilder matchPhraseQuery(String name, Object text) {
        return new MatchQueryBuilder(name, text).type(MatchQueryBuilder.Type.PHRASE);
    }

    /**
     * Creates a match query with type "PHRASE_PREFIX" for the provided field name and text.
     *
     * @param name The field name.
     * @param text The query text (to be analyzed).
     */
    public static MatchQueryBuilder matchPhrasePrefixQuery(String name, Object text) {
        return new MatchQueryBuilder(name, text).type(MatchQueryBuilder.Type.PHRASE_PREFIX);
    }

    /**
     * A query that generates the union of documents produced by its sub-queries, and that scores each document
     * with the maximum score for that document as produced by any sub-query, plus a tie breaking increment for any
     * additional matching sub-queries.
     */
    public static DisMaxQueryBuilder disMaxQuery() {
        return new DisMaxQueryBuilder();
    }

    /**
     * Constructs a query that will match only specific ids within types.
     *
     * @param types The mapping/doc type
     */
    public static IdsQueryBuilder idsQuery(@Nullable String... types) {
        return new IdsQueryBuilder(types);
    }

    /**
     * A Query that matches documents containing a term.
     *
     * @param name  The name of the field
     * @param value The value of the term
     */
    public static TermQueryBuilder termQuery(String name, String value) {
        return new TermQueryBuilder(name, value);
    }

    /**
     * A Query that matches documents containing a term.
     *
     * @param name  The name of the field
     * @param value The value of the term
     */
    public static TermQueryBuilder termQuery(String name, int value) {
        return new TermQueryBuilder(name, value);
    }

    /**
     * A Query that matches documents containing a term.
     *
     * @param name  The name of the field
     * @param value The value of the term
     */
    public static TermQueryBuilder termQuery(String name, long value) {
        return new TermQueryBuilder(name, value);
    }

    /**
     * A Query that matches documents containing a term.
     *
     * @param name  The name of the field
     * @param value The value of the term
     */
    public static TermQueryBuilder termQuery(String name, float value) {
        return new TermQueryBuilder(name, value);
    }

    /**
     * A Query that matches documents containing a term.
     *
     * @param name  The name of the field
     * @param value The value of the term
     */
    public static TermQueryBuilder termQuery(String name, double value) {
        return new TermQueryBuilder(name, value);
    }

    /**
     * A Query that matches documents containing a term.
     *
     * @param name  The name of the field
     * @param value The value of the term
     */
    public static TermQueryBuilder termQuery(String name, boolean value) {
        return new TermQueryBuilder(name, value);
    }

    /**
     * A Query that matches documents containing a term.
     *
     * @param name  The name of the field
     * @param value The value of the term
     */
    public static TermQueryBuilder termQuery(String name, Object value) {
        return new TermQueryBuilder(name, value);
    }

    /**
     * A Query that matches documents using fuzzy query.
     *
     * @param name  The name of the field
     * @param value The value of the term
     */
    public static FuzzyQueryBuilder fuzzyQuery(String name, String value) {
        return new FuzzyQueryBuilder(name, value);
    }

    /**
     * A Query that matches documents using fuzzy query.
     *
     * @param name  The name of the field
     * @param value The value of the term
     */
    public static FuzzyQueryBuilder fuzzyQuery(String name, Object value) {
        return new FuzzyQueryBuilder(name, value);
    }

    /**
     * A Query that matches documents containing terms with a specified prefix.
     *
     * @param name   The name of the field
     * @param prefix The prefix query
     */
    public static PrefixQueryBuilder prefixQuery(String name, String prefix) {
        return new PrefixQueryBuilder(name, prefix);
    }

    /**
     * A Query that matches documents within an range of terms.
     *
     * @param name The field name
     */
    public static RangeQueryBuilder rangeQuery(String name) {
        return new RangeQueryBuilder(name);
    }

    /**
     * Implements the wildcard search query. Supported wildcards are <tt>*</tt>, which
     * matches any character sequence (including the empty one), and <tt>?</tt>,
     * which matches any single character. Note this query can be slow, as it
     * needs to iterate over many terms. In order to prevent extremely slow WildcardQueries,
     * a Wildcard term should not start with one of the wildcards <tt>*</tt> or
     * <tt>?</tt>.
     *
     * @param name  The field name
     * @param query The wildcard query string
     */
    public static WildcardQueryBuilder wildcardQuery(String name, String query) {
        return new WildcardQueryBuilder(name, query);
    }

    /**
     * A Query that matches documents containing terms with a specified regular expression.
     *
     * @param name   The name of the field
     * @param regexp The regular expression
     */
    public static RegexpQueryBuilder regexpQuery(String name, String regexp) {
        return new RegexpQueryBuilder(name, regexp);
    }

    /**
     * A query that parses a query string and runs it. There are two modes that this operates. The first,
     * when no field is added (using {@link QueryStringQueryBuilder#field(String)}, will run the query once and non prefixed fields
     * will use the {@link QueryStringQueryBuilder#defaultField(String)} set. The second, when one or more fields are added
     * (using {@link QueryStringQueryBuilder#field(String)}), will run the parsed query against the provided fields, and combine
     * them either using DisMax or a plain boolean query (see {@link QueryStringQueryBuilder#useDisMax(boolean)}).
     *
     * @param queryString The query string to run
     */
    public static QueryStringQueryBuilder queryStringQuery(String queryString) {
        return new QueryStringQueryBuilder(queryString);
    }

    /**
     * A query that acts similar to a query_string query, but won't throw
     * exceptions for any weird string syntax. See
     * {@link org.apache.lucene.queryparser.XSimpleQueryParser} for the full
     * supported syntax.
     */
    public static SimpleQueryStringBuilder simpleQueryStringQuery(String queryString) {
        return new SimpleQueryStringBuilder(queryString);
    }

    /**
     * The BoostingQuery class can be used to effectively demote results that match a given query.
     * Unlike the "NOT" clause, this still selects documents that contain undesirable terms,
     * but reduces their overall score:
     */
    public static BoostingQueryBuilder boostingQuery() {
        return new BoostingQueryBuilder();
    }

    /**
     * A Query that matches documents matching boolean combinations of other queries.
     */
    public static BoolQueryBuilder boolQuery() {
        return new BoolQueryBuilder();
    }

    /** A span query matching a single term (String value). */
    public static SpanTermQueryBuilder spanTermQuery(String name, String value) {
        return new SpanTermQueryBuilder(name, value);
    }

    /** A span query matching a single term (int value). */
    public static SpanTermQueryBuilder spanTermQuery(String name, int value) {
        return new SpanTermQueryBuilder(name, value);
    }

    /** A span query matching a single term (long value). */
    public static SpanTermQueryBuilder spanTermQuery(String name, long value) {
        return new SpanTermQueryBuilder(name, value);
    }

    /** A span query matching a single term (float value). */
    public static SpanTermQueryBuilder spanTermQuery(String name, float value) {
        return new SpanTermQueryBuilder(name, value);
    }

    /** A span query matching a single term (double value). */
    public static SpanTermQueryBuilder spanTermQuery(String name, double value) {
        return new SpanTermQueryBuilder(name, value);
    }

    /** A span query matching spans near the beginning of a field (within {@code end} positions). */
    public static SpanFirstQueryBuilder spanFirstQuery(SpanQueryBuilder match, int end) {
        return new SpanFirstQueryBuilder(match, end);
    }

    /** A span query matching spans which occur near one another. */
    public static SpanNearQueryBuilder spanNearQuery() {
        return new SpanNearQueryBuilder();
    }

    /** A span query removing matches that overlap with another span query. */
    public static SpanNotQueryBuilder spanNotQuery() {
        return new SpanNotQueryBuilder();
    }

    /** A span query matching the union of its clauses. */
    public static SpanOrQueryBuilder spanOrQuery() {
        return new SpanOrQueryBuilder();
    }

    /**
     * Creates a {@link SpanQueryBuilder} which allows having a sub query
     * which implements {@link MultiTermQueryBuilder}. This is useful for
     * having e.g. wildcard or fuzzy queries inside spans.
     *
     * @param multiTermQueryBuilder The {@link MultiTermQueryBuilder} that
     *                              backs the created builder.
     * @return the span wrapper around the given multi-term query
     */
    public static SpanMultiTermQueryBuilder spanMultiTermQueryBuilder(MultiTermQueryBuilder multiTermQueryBuilder) {
        return new SpanMultiTermQueryBuilder(multiTermQueryBuilder);
    }

    /** Wraps a span query so it appears to match on a different field. */
    public static FieldMaskingSpanQueryBuilder fieldMaskingSpanQuery(SpanQueryBuilder query, String field) {
        return new FieldMaskingSpanQueryBuilder(query, field);
    }

    /**
     * A query that applies a filter to the results of another query.
     *
     * @param queryBuilder  The query to apply the filter to
     * @param filterBuilder The filter to apply on the query
     */
    public static FilteredQueryBuilder filteredQuery(@Nullable QueryBuilder queryBuilder, @Nullable FilterBuilder filterBuilder) {
        return new FilteredQueryBuilder(queryBuilder, filterBuilder);
    }

    /**
     * A query that wraps a filter and simply returns a constant score equal to the
     * query boost for every document in the filter.
     *
     * @param filterBuilder The filter to wrap in a constant score query
     */
    public static ConstantScoreQueryBuilder constantScoreQuery(FilterBuilder filterBuilder) {
        return new ConstantScoreQueryBuilder(filterBuilder);
    }

    /**
     * A query that wraps another query and simply returns a constant score equal to the
     * query boost for every document in the query.
     *
     * @param queryBuilder The query to wrap in a constant score query
     */
    public static ConstantScoreQueryBuilder constantScoreQuery(QueryBuilder queryBuilder) {
        return new ConstantScoreQueryBuilder(queryBuilder);
    }

    /**
     * A query that allows to define a custom scoring function.
     *
     * @param queryBuilder The query to custom score
     */
    public static FunctionScoreQueryBuilder functionScoreQuery(QueryBuilder queryBuilder) {
        return new FunctionScoreQueryBuilder(queryBuilder);
    }

    /**
     * A query that allows to define a custom scoring function.
     */
    public static FunctionScoreQueryBuilder functionScoreQuery() {
        return new FunctionScoreQueryBuilder();
    }

    /**
     * A query that allows to define a custom scoring function.
     *
     * @param function The function builder used to custom score
     */
    public static FunctionScoreQueryBuilder functionScoreQuery(ScoreFunctionBuilder function) {
        return new FunctionScoreQueryBuilder(function);
    }

    /**
     * A query that allows to define a custom scoring function.
* * @param queryBuilder The query to custom score * @param function The function builder used to custom score */ public static FunctionScoreQueryBuilder functionScoreQuery(QueryBuilder queryBuilder, ScoreFunctionBuilder function) { return (new FunctionScoreQueryBuilder(queryBuilder)).add(function); } /** * A query that allows to define a custom scoring function. * * @param filterBuilder The query to custom score * @param function The function builder used to custom score */ public static FunctionScoreQueryBuilder functionScoreQuery(FilterBuilder filterBuilder, ScoreFunctionBuilder function) { return (new FunctionScoreQueryBuilder(filterBuilder)).add(function); } /** * A query that allows to define a custom scoring function. * * @param filterBuilder The filterBuilder to custom score */ public static FunctionScoreQueryBuilder functionScoreQuery(FilterBuilder filterBuilder) { return new FunctionScoreQueryBuilder(filterBuilder); } /** * A more like this query that finds documents that are "like" the provided {@link MoreLikeThisQueryBuilder#likeText(String)} * which is checked against the fields the query is constructed with. * * @param fields The fields to run the query against */ public static MoreLikeThisQueryBuilder moreLikeThisQuery(String... fields) { return new MoreLikeThisQueryBuilder(fields); } /** * A more like this query that finds documents that are "like" the provided {@link MoreLikeThisQueryBuilder#likeText(String)} * which is checked against the "_all" field. */ public static MoreLikeThisQueryBuilder moreLikeThisQuery() { return new MoreLikeThisQueryBuilder(); } /** * A fuzzy like this query that finds documents that are "like" the provided {@link FuzzyLikeThisQueryBuilder#likeText(String)} * which is checked against the fields the query is constructed with. * * @param fields The fields to run the query against */ public static FuzzyLikeThisQueryBuilder fuzzyLikeThisQuery(String... 
fields) { return new FuzzyLikeThisQueryBuilder(fields); } /** * A fuzzy like this query that finds documents that are "like" the provided {@link FuzzyLikeThisQueryBuilder#likeText(String)} * which is checked against the "_all" field. */ public static FuzzyLikeThisQueryBuilder fuzzyLikeThisQuery() { return new FuzzyLikeThisQueryBuilder(); } /** * A fuzzy like this query that finds documents that are "like" the provided {@link FuzzyLikeThisFieldQueryBuilder#likeText(String)}. */ public static FuzzyLikeThisFieldQueryBuilder fuzzyLikeThisFieldQuery(String name) { return new FuzzyLikeThisFieldQueryBuilder(name); } /** * Constructs a new scoring child query, with the child type and the query to run on the child documents. The * results of this query are the parent docs that those child docs matched. * * @param type The child type. * @param query The query. */ public static TopChildrenQueryBuilder topChildrenQuery(String type, QueryBuilder query) { return new TopChildrenQueryBuilder(type, query); } /** * Constructs a new NON scoring child query, with the child type and the query to run on the child documents. The * results of this query are the parent docs that those child docs matched. * * @param type The child type. * @param query The query. */ public static HasChildQueryBuilder hasChildQuery(String type, QueryBuilder query) { return new HasChildQueryBuilder(type, query); } /** * Constructs a new NON scoring parent query, with the parent type and the query to run on the parent documents. The * results of this query are the children docs that those parent docs matched. * * @param type The parent type. * @param query The query. 
*/ public static HasParentQueryBuilder hasParentQuery(String type, QueryBuilder query) { return new HasParentQueryBuilder(type, query); } public static NestedQueryBuilder nestedQuery(String path, QueryBuilder query) { return new NestedQueryBuilder(path, query); } public static NestedQueryBuilder nestedQuery(String path, FilterBuilder filter) { return new NestedQueryBuilder(path, filter); } /** * A filer for a field based on several terms matching on any of them. * * @param name The field name * @param values The terms */ public static TermsQueryBuilder termsQuery(String name, String... values) { return new TermsQueryBuilder(name, values); } /** * A filer for a field based on several terms matching on any of them. * * @param name The field name * @param values The terms */ public static TermsQueryBuilder termsQuery(String name, int... values) { return new TermsQueryBuilder(name, values); } /** * A filer for a field based on several terms matching on any of them. * * @param name The field name * @param values The terms */ public static TermsQueryBuilder termsQuery(String name, long... values) { return new TermsQueryBuilder(name, values); } /** * A filer for a field based on several terms matching on any of them. * * @param name The field name * @param values The terms */ public static TermsQueryBuilder termsQuery(String name, float... values) { return new TermsQueryBuilder(name, values); } /** * A filer for a field based on several terms matching on any of them. * * @param name The field name * @param values The terms */ public static TermsQueryBuilder termsQuery(String name, double... values) { return new TermsQueryBuilder(name, values); } /** * A filer for a field based on several terms matching on any of them. * * @param name The field name * @param values The terms */ public static TermsQueryBuilder termsQuery(String name, Object... values) { return new TermsQueryBuilder(name, values); } /** * A filer for a field based on several terms matching on any of them. 
* * @param name The field name * @param values The terms */ public static TermsQueryBuilder termsQuery(String name, Collection<?> values) { return new TermsQueryBuilder(name, values); } /** * A query that will execute the wrapped query only for the specified indices, and "match_all" when * it does not match those indices. */ public static IndicesQueryBuilder indicesQuery(QueryBuilder queryBuilder, String... indices) { return new IndicesQueryBuilder(queryBuilder, indices); } /** * A Query builder which allows building a query thanks to a JSON string or binary data. */ public static WrapperQueryBuilder wrapperQuery(String source) { return new WrapperQueryBuilder(source); } /** * A Query builder which allows building a query thanks to a JSON string or binary data. */ public static WrapperQueryBuilder wrapperQuery(byte[] source, int offset, int length) { return new WrapperQueryBuilder(source, offset, length); } /** * Query that matches Documents based on the relationship between the given shape and * indexed shapes * * @param name The shape field name * @param shape Shape to use in the Query */ public static GeoShapeQueryBuilder geoShapeQuery(String name, ShapeBuilder shape) { return new GeoShapeQueryBuilder(name, shape); } public static GeoShapeQueryBuilder geoShapeQuery(String name, String indexedShapeId, String indexedShapeType) { return new GeoShapeQueryBuilder(name, indexedShapeId, indexedShapeType); } /** * Facilitates creating template query requests using an inline script */ public static TemplateQueryBuilder templateQuery(String template, Map<String, Object> vars) { return new TemplateQueryBuilder(template, vars); } /** * Facilitates creating template query requests */ public static TemplateQueryBuilder templateQuery(String template, ScriptService.ScriptType templateType, Map<String, Object> vars) { return new TemplateQueryBuilder(template, templateType, vars); } private QueryBuilders() { } }
/* * MIT License * * Copyright (c) 2017 Jan Heinrich Reimer * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/
package com.heinrichreimersoftware.materialdrawerdemo;

import android.content.Intent;
import android.content.res.Configuration;
import android.graphics.drawable.BitmapDrawable;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.content.ContextCompat;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.ActionBarDrawerToggle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.Toast;

import com.heinrichreimersoftware.materialdrawer.DrawerView;
import com.heinrichreimersoftware.materialdrawer.structure.DrawerHeaderItem;
import com.heinrichreimersoftware.materialdrawer.structure.DrawerItem;
import com.heinrichreimersoftware.materialdrawer.structure.DrawerProfile;

/**
 * Demo activity for the material-drawer library.
 * <p>
 * Populates a {@link DrawerView} hosted inside a {@link DrawerLayout} with sample
 * items, fixed items and profiles, and wires up click/switch listeners that show
 * toasts. Also hooks an {@link ActionBarDrawerToggle} to the toolbar.
 */
public class MainActivity extends AppCompatActivity {

    private DrawerView drawer;
    private ActionBarDrawerToggle drawerToggle;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        DrawerLayout drawerLayout = (DrawerLayout) findViewById(R.id.drawerLayout);
        drawer = (DrawerView) findViewById(R.id.drawer);

        Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);

        drawerToggle = new ActionBarDrawerToggle(
                this,
                drawerLayout,
                toolbar,
                R.string.drawer_open,
                R.string.drawer_close
        ) {
            // FIX: both callbacks are now annotated with @Override and forward to
            // super. The original neither declared @Override (so a signature typo
            // would silently create a dead method) nor called super, which skips
            // ActionBarDrawerToggle's own handler and desyncs the
            // hamburger/arrow indicator animation.
            @Override
            public void onDrawerClosed(View view) {
                super.onDrawerClosed(view);
                invalidateOptionsMenu();
            }

            @Override
            public void onDrawerOpened(View drawerView) {
                super.onDrawerOpened(drawerView);
                invalidateOptionsMenu();
            }
        };

        drawerLayout.setStatusBarBackgroundColor(ContextCompat.getColor(this, R.color.color_primary_dark));
        drawerLayout.addDrawerListener(drawerToggle);
        drawerLayout.closeDrawer(drawer);

        // Sample scrollable items: plain text, icon, divider, rounded avatar,
        // section header, single-line, and a small-avatar three-line item.
        drawer.addItem(new DrawerItem()
                .setTextPrimary(getString(R.string.lorem_ipsum_short))
                .setTextSecondary(getString(R.string.lorem_ipsum_long))
        );
        drawer.addItem(new DrawerItem()
                .setImage(ContextCompat.getDrawable(this, R.drawable.ic_email))
                .setTextPrimary(getString(R.string.lorem_ipsum_short))
                .setTextSecondary(getString(R.string.lorem_ipsum_long))
        );
        drawer.addDivider();
        drawer.addItem(new DrawerItem()
                .setRoundedImage((BitmapDrawable) ContextCompat.getDrawable(this, R.drawable.cat_1))
                .setTextPrimary(getString(R.string.lorem_ipsum_short))
                .setTextSecondary(getString(R.string.lorem_ipsum_long))
        );
        drawer.addItem(new DrawerHeaderItem().setTitle(getString(R.string.lorem_ipsum_short)));
        drawer.addItem(new DrawerItem()
                .setTextPrimary(getString(R.string.lorem_ipsum_short))
        );
        drawer.addItem(new DrawerItem()
                .setRoundedImage((BitmapDrawable) ContextCompat.getDrawable(this, R.drawable.cat_2), DrawerItem.SMALL_AVATAR)
                .setTextPrimary(getString(R.string.lorem_ipsum_short))
                .setTextSecondary(getString(R.string.lorem_ipsum_long), DrawerItem.THREE_LINE)
        );
        drawer.selectItem(1);
        drawer.setOnItemClickListener(new DrawerItem.OnItemClickListener() {
            @Override
            public void onClick(DrawerItem item, long id, int position) {
                drawer.selectItem(position);
                Toast.makeText(MainActivity.this, "Clicked item #" + position, Toast.LENGTH_SHORT).show();
            }
        });

        // Fixed (pinned) items at the bottom of the drawer.
        drawer.addFixedItem(new DrawerItem()
                .setRoundedImage((BitmapDrawable) ContextCompat.getDrawable(this, R.drawable.cat_2), DrawerItem.SMALL_AVATAR)
                .setTextPrimary(getString(R.string.lorem_ipsum_short))
        );
        drawer.addFixedItem(new DrawerItem()
                .setImage(ContextCompat.getDrawable(this, R.drawable.ic_flag))
                .setTextPrimary(getString(R.string.lorem_ipsum_short))
        );
        drawer.setOnFixedItemClickListener(new DrawerItem.OnItemClickListener() {
            @Override
            public void onClick(DrawerItem item, long id, int position) {
                drawer.selectFixedItem(position);
                Toast.makeText(MainActivity.this, "Clicked fixed item #" + position, Toast.LENGTH_SHORT).show();
            }
        });

        // Sample profiles for the drawer header, plus click/switch listeners.
        drawer.addProfile(new DrawerProfile()
                .setId(1)
                .setRoundedAvatar((BitmapDrawable) ContextCompat.getDrawable(this, R.drawable.cat_1))
                .setBackground(ContextCompat.getDrawable(this, R.drawable.cat_wide_1))
                .setName(getString(R.string.lorem_ipsum_short))
                .setDescription(getString(R.string.lorem_ipsum_medium))
        );
        drawer.addProfile(new DrawerProfile()
                .setId(2)
                .setRoundedAvatar((BitmapDrawable) ContextCompat.getDrawable(this, R.drawable.cat_2))
                .setBackground(ContextCompat.getDrawable(this, R.drawable.cat_wide_1))
                .setName(getString(R.string.lorem_ipsum_short))
        );
        drawer.addProfile(new DrawerProfile()
                .setId(3)
                .setRoundedAvatar((BitmapDrawable) ContextCompat.getDrawable(this, R.drawable.cat_1))
                .setBackground(ContextCompat.getDrawable(this, R.drawable.cat_wide_2))
                .setName(getString(R.string.lorem_ipsum_short))
                .setDescription(getString(R.string.lorem_ipsum_medium))
        );
        drawer.setOnProfileClickListener(new DrawerProfile.OnProfileClickListener() {
            @Override
            public void onClick(DrawerProfile profile, long id) {
                Toast.makeText(MainActivity.this, "Clicked profile *" + id, Toast.LENGTH_SHORT).show();
            }
        });
        drawer.setOnProfileSwitchListener(new DrawerProfile.OnProfileSwitchListener() {
            @Override
            public void onSwitch(DrawerProfile oldProfile, long oldId, DrawerProfile newProfile, long newId) {
                Toast.makeText(MainActivity.this, "Switched from profile *" + oldId + " to profile *" + newId, Toast.LENGTH_SHORT).show();
            }
        });
    }

    /** Launches the FrameLayout-based demo ({@code MainActivity2}); wired from XML via android:onClick. */
    public void openDrawerFrameLayout(View view) {
        Intent intent = new Intent(this, MainActivity2.class);
        startActivity(intent);
    }

    /** Launches the DrawerActivity-based demo ({@code MainActivity3}); wired from XML via android:onClick. */
    public void openDrawerActivity(View view) {
        Intent intent = new Intent(this, MainActivity3.class);
        startActivity(intent);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.menu_main, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Give the drawer toggle first chance at the home/up button.
        if (drawerToggle.onOptionsItemSelected(item)) {
            return true;
        }
        switch (item.getItemId()) {
            case R.id.action_github:
                String url = "https://github.com/HeinrichReimer/material-drawer";
                Intent i = new Intent(Intent.ACTION_VIEW);
                i.setData(Uri.parse(url));
                startActivity(i);
                break;
        }
        return super.onOptionsItemSelected(item);
    }

    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        drawerToggle.onConfigurationChanged(newConfig);
    }

    @Override
    protected void onPostCreate(Bundle savedInstanceState) {
        super.onPostCreate(savedInstanceState);
        // Sync the indicator after restoreInstanceState has happened.
        drawerToggle.syncState();
    }
}
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package dev.kumpulatd.logic; import dev.kumpulatd.objects.Enemy; import dev.kumpulatd.objects.Freshman; import dev.kumpulatd.objects.GoalLocation; import static dev.kumpulatd.logic.TestingHelper.testIfClose; import java.util.ArrayList; import java.util.List; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import static org.junit.Assert.*; /** * * @author kummi */ public class TestingHelperTest { PathFinding path; PathFinder finder; GoalLocation location; List<Enemy> enemies; TestingHelper test; /** * */ public TestingHelperTest() { } /** * */ @BeforeClass public static void setUpClass() { } /** * */ @AfterClass public static void tearDownClass() { } /** * */ @Before public void setUp() { path = new PathFinding(); finder = new PathFinder(); location = new GoalLocation(10, 10); enemies = new ArrayList<>(); test = new TestingHelper(); enemies.add(new Freshman(2, 2, null)); } /** * */ @After public void tearDown() { } /** * */ @Test public void move1() { path.addPoint(1, 1); path.addPoint(10, 10); assertTrue(testIfClose(enemies.get(0), path)); finder.testForPathFinding(enemies, location, path); assertEquals(1, enemies.get(0).currentTarget()); } /** * */ @Test public void move2() { path.addPoint(1, 2); path.addPoint(10, 10); assertTrue(testIfClose(enemies.get(0), path)); finder.testForPathFinding(enemies, location, path); assertEquals(1, enemies.get(0).currentTarget()); } // @Test // public void move3() { // path.addPoint(1, 3); // assertTrue(testIfClose(group.getMembers().get(0), path)); // } /** * */ @Test public void move4() { path.addPoint(2, 1); path.addPoint(10, 10); assertTrue(testIfClose(enemies.get(0), path)); finder.testForPathFinding(enemies, location, path); assertEquals(1, 
enemies.get(0).currentTarget()); } /** * */ @Test public void move5() { path.addPoint(2, 2); path.addPoint(10, 10); assertTrue(testIfClose(enemies.get(0), path)); finder.testForPathFinding(enemies, location, path); assertEquals(1, enemies.get(0).currentTarget()); } /** * */ @Test public void move6() { path.addPoint(2, 3); path.addPoint(10, 10); assertTrue(testIfClose(enemies.get(0), path)); finder.testForPathFinding(enemies, location, path); assertEquals(1, enemies.get(0).currentTarget()); } // @Test // public void move7() { // path.addPoint(3, 1); // assertTrue(testIfClose(group.getMembers().get(0), path)); // } /** * */ @Test public void move8() { path.addPoint(3, 2); path.addPoint(10, 10); assertTrue(testIfClose(enemies.get(0), path)); finder.testForPathFinding(enemies, location, path); assertEquals(1, enemies.get(0).currentTarget()); } /** * */ @Test public void move9() { path.addPoint(3, 3); path.addPoint(10, 10); assertTrue(testIfClose(enemies.get(0), path)); finder.testForPathFinding(enemies, location, path); assertEquals(1, enemies.get(0).currentTarget()); } /** * */ @Test public void moveFail() { path.addPoint(4, 4); path.addPoint(10, 10); assertFalse(testIfClose(enemies.get(0), path)); assertEquals(0, enemies.get(0).currentTarget()); } /** * */ // @Test // public void endGame() { // Window window = new Window(); // GameView view = new GameView(window, "kumpula"); // loseGame(view); // assertNotNull(window.getFrame()); // // } /** * */ // @Test // public void endGame1() { // Window window = new Window(); // GameView view = new GameView(window, "kumpula"); // winGame(view); // assertEquals(window.getFrame(), null); // // } // TODO add test methods here. // The methods must be annotated with annotation @Test. For example: // // @Test // public void hello() {} }
package com.mapswithme.maps; import android.annotation.SuppressLint; import android.content.ContentResolver; import android.content.Intent; import android.graphics.Color; import android.location.Location; import android.net.Uri; import android.os.Bundle; import android.support.annotation.CallSuper; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.support.annotation.StringRes; import android.text.TextUtils; import android.view.View; import android.widget.Button; import android.widget.CheckBox; import android.widget.ProgressBar; import android.widget.TextView; import android.widget.Toast; import com.mapswithme.maps.MwmActivity.MapTask; import com.mapswithme.maps.MwmActivity.OpenUrlTask; import com.mapswithme.maps.api.Const; import com.mapswithme.maps.api.ParsedMwmRequest; import com.mapswithme.maps.base.BaseMwmFragmentActivity; import com.mapswithme.maps.bookmarks.data.BookmarkManager; import com.mapswithme.maps.downloader.CountryItem; import com.mapswithme.maps.downloader.MapManager; import com.mapswithme.maps.location.LocationHelper; import com.mapswithme.maps.location.LocationListener; import com.mapswithme.maps.search.SearchEngine; import com.mapswithme.util.ConnectionState; import com.mapswithme.util.Constants; import com.mapswithme.util.StringUtils; import com.mapswithme.util.UiUtils; import com.mapswithme.util.Utils; import com.mapswithme.util.concurrency.ThreadPool; import com.mapswithme.util.log.Logger; import com.mapswithme.util.log.LoggerFactory; import com.mapswithme.util.statistics.Statistics; import java.io.File; import java.io.FileOutputStream; import java.io.InputStream; import java.io.OutputStream; import java.util.List; @SuppressLint("StringFormatMatches") public class DownloadResourcesLegacyActivity extends BaseMwmFragmentActivity { private static final Logger LOGGER = LoggerFactory.INSTANCE.getLogger(LoggerFactory.Type.DOWNLOADER); private static final String TAG = 
DownloadResourcesLegacyActivity.class.getName(); static final String EXTRA_COUNTRY = "country"; static final String EXTRA_AUTODOWNLOAD = "autodownload"; // Error codes, should match the same codes in JNI private static final int ERR_DOWNLOAD_SUCCESS = 0; private static final int ERR_NOT_ENOUGH_MEMORY = -1; private static final int ERR_NOT_ENOUGH_FREE_SPACE = -2; private static final int ERR_STORAGE_DISCONNECTED = -3; private static final int ERR_DOWNLOAD_ERROR = -4; private static final int ERR_NO_MORE_FILES = -5; private static final int ERR_FILE_IN_PROGRESS = -6; private TextView mTvMessage; private TextView mTvLocation; private ProgressBar mProgress; private Button mBtnDownload; private CheckBox mChbDownloadCountry; private String mCurrentCountry; private MapTask mMapTaskToForward; private boolean mIsReadingAttachment; private boolean mAreResourcesDownloaded; private static final int DOWNLOAD = 0; private static final int PAUSE = 1; private static final int RESUME = 2; private static final int TRY_AGAIN = 3; private static final int PROCEED_TO_MAP = 4; private static final int BTN_COUNT = 5; private View.OnClickListener mBtnListeners[]; private String mBtnNames[]; private int mCountryDownloadListenerSlot; @SuppressWarnings("unused") private interface Listener { void onProgress(int percent); void onFinish(int errorCode); } private final IntentProcessor[] mIntentProcessors = { new GeoIntentProcessor(), new HttpGe0IntentProcessor(), new Ge0IntentProcessor(), new MapsWithMeIntentProcessor(), new GoogleMapsIntentProcessor(), new LeadUrlIntentProcessor(), new OpenCountryTaskProcessor(), new KmzKmlProcessor(), new ShowOnMapProcessor(), new BuildRouteProcessor() }; private final LocationListener mLocationListener = new LocationListener.Simple() { @Override public void onLocationUpdated(Location location) { if (mCurrentCountry != null) return; final double lat = location.getLatitude(); final double lon = location.getLongitude(); mCurrentCountry = 
MapManager.nativeFindCountry(lat, lon); if (TextUtils.isEmpty(mCurrentCountry)) { mCurrentCountry = null; return; } int status = MapManager.nativeGetStatus(mCurrentCountry); String name = MapManager.nativeGetName(mCurrentCountry); UiUtils.show(mTvLocation); if (status == CountryItem.STATUS_DONE) mTvLocation.setText(String.format(getString(R.string.download_location_map_up_to_date), name)); else { final CheckBox checkBox = (CheckBox) findViewById(R.id.chb__download_country); UiUtils.show(checkBox); String locationText; String checkBoxText; if (status == CountryItem.STATUS_UPDATABLE) { locationText = getString(R.string.download_location_update_map_proposal); checkBoxText = String.format(getString(R.string.update_country_ask), name); } else { locationText = getString(R.string.download_location_map_proposal); checkBoxText = String.format(getString(R.string.download_country_ask), name); } mTvLocation.setText(locationText); checkBox.setText(checkBoxText); } LocationHelper.INSTANCE.removeListener(this); } }; private final Listener mResourcesDownloadListener = new Listener() { @Override public void onProgress(final int percent) { if (!isFinishing()) mProgress.setProgress(percent); } @Override public void onFinish(final int errorCode) { if (isFinishing()) return; if (errorCode == ERR_DOWNLOAD_SUCCESS) { final int res = nativeStartNextFileDownload(mResourcesDownloadListener); if (res == ERR_NO_MORE_FILES) finishFilesDownload(res); } else finishFilesDownload(errorCode); } }; private final MapManager.StorageCallback mCountryDownloadListener = new MapManager.StorageCallback() { @Override public void onStatusChanged(List<MapManager.StorageCallbackData> data) { for (MapManager.StorageCallbackData item : data) { if (!item.isLeafNode) continue; switch (item.newStatus) { case CountryItem.STATUS_DONE: mAreResourcesDownloaded = true; showMap(); return; case CountryItem.STATUS_FAILED: MapManager.showError(DownloadResourcesLegacyActivity.this, item, null); return; } } } @Override public 
void onProgress(String countryId, long localSize, long remoteSize) { mProgress.setProgress((int)localSize); } }; @CallSuper @Override protected void onCreate(@Nullable Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_download_resources); initViewsAndListeners(); if (prepareFilesDownload(false)) { Utils.keepScreenOn(true, getWindow()); suggestRemoveLiteOrSamsung(); setAction(DOWNLOAD); if (ConnectionState.isWifiConnected()) onDownloadClicked(); return; } dispatchIntent(); showMap(); } @Override protected void onDestroy() { super.onDestroy(); Utils.keepScreenOn(false, getWindow()); if (mCountryDownloadListenerSlot != 0) { MapManager.nativeUnsubscribe(mCountryDownloadListenerSlot); mCountryDownloadListenerSlot = 0; } } @CallSuper @Override protected void onResume() { super.onResume(); if (!isFinishing()) LocationHelper.INSTANCE.addListener(mLocationListener, true); } @Override protected void onPause() { super.onPause(); LocationHelper.INSTANCE.removeListener(mLocationListener); } private void suggestRemoveLiteOrSamsung() { if (Utils.isPackageInstalled(Constants.Package.MWM_LITE_PACKAGE) || Utils.isPackageInstalled(Constants.Package.MWM_SAMSUNG_PACKAGE)) Toast.makeText(this, R.string.suggest_uninstall_lite, Toast.LENGTH_LONG).show(); } private void setDownloadMessage(int bytesToDownload) { mTvMessage.setText(getString(R.string.download_resources, StringUtils.getFileSizeString(bytesToDownload))); } private boolean prepareFilesDownload(boolean showMap) { final int bytes = nativeGetBytesToDownload(); if (bytes == 0) { mAreResourcesDownloaded = true; if (showMap) showMap(); return false; } if (bytes > 0) { setDownloadMessage(bytes); mProgress.setMax(bytes); mProgress.setProgress(0); } else finishFilesDownload(bytes); return true; } private void initViewsAndListeners() { mTvMessage = (TextView) findViewById(R.id.tv__download_message); mProgress = (ProgressBar) findViewById(R.id.pb__download_resources); mBtnDownload = (Button) 
findViewById(R.id.btn__download_resources); mChbDownloadCountry = (CheckBox) findViewById(R.id.chb__download_country); mTvLocation = (TextView) findViewById(R.id.tv__location); mBtnListeners = new View.OnClickListener[BTN_COUNT]; mBtnNames = new String[BTN_COUNT]; mBtnListeners[DOWNLOAD] = new View.OnClickListener() { @Override public void onClick(View v) { onDownloadClicked(); } }; mBtnNames[DOWNLOAD] = getString(R.string.download); mBtnListeners[PAUSE] = new View.OnClickListener() { @Override public void onClick(View v) { onPauseClicked(); } }; mBtnNames[PAUSE] = getString(R.string.pause); mBtnListeners[RESUME] = new View.OnClickListener() { @Override public void onClick(View v) { onResumeClicked(); } }; mBtnNames[RESUME] = getString(R.string.continue_download); mBtnListeners[TRY_AGAIN] = new View.OnClickListener() { @Override public void onClick(View v) { onTryAgainClicked(); } }; mBtnNames[TRY_AGAIN] = getString(R.string.try_again); mBtnListeners[PROCEED_TO_MAP] = new View.OnClickListener() { @Override public void onClick(View v) { onProceedToMapClicked(); } }; mBtnNames[PROCEED_TO_MAP] = getString(R.string.download_resources_continue); } private void setAction(int action) { mBtnDownload.setOnClickListener(mBtnListeners[action]); mBtnDownload.setText(mBtnNames[action]); } private void doDownload() { if (nativeStartNextFileDownload(mResourcesDownloadListener) == ERR_NO_MORE_FILES) finishFilesDownload(ERR_NO_MORE_FILES); } private void onDownloadClicked() { setAction(PAUSE); doDownload(); } private void onPauseClicked() { setAction(RESUME); nativeCancelCurrentFile(); } private void onResumeClicked() { setAction(PAUSE); doDownload(); } private void onTryAgainClicked() { if (prepareFilesDownload(true)) { setAction(PAUSE); doDownload(); } } private void onProceedToMapClicked() { mAreResourcesDownloaded = true; showMap(); } private static @StringRes int getErrorMessage(int res) { switch (res) { case ERR_NOT_ENOUGH_FREE_SPACE: return 
R.string.not_enough_free_space_on_sdcard; case ERR_STORAGE_DISCONNECTED: return R.string.disconnect_usb_cable; case ERR_DOWNLOAD_ERROR: return (ConnectionState.isConnected() ? R.string.download_has_failed : R.string.common_check_internet_connection_dialog); default: return R.string.not_enough_memory; } } private void showMap() { if (mIsReadingAttachment || !mAreResourcesDownloaded) return; final Intent intent = new Intent(this, MwmActivity.class); // Disable animation because MwmActivity should appear exactly over this one intent.addFlags(Intent.FLAG_ACTIVITY_NO_ANIMATION | Intent.FLAG_ACTIVITY_CLEAR_TOP); // Add saved task to forward to map activity. if (mMapTaskToForward != null) { intent.putExtra(MwmActivity.EXTRA_TASK, mMapTaskToForward); intent.putExtra(MwmActivity.EXTRA_LAUNCH_BY_DEEP_LINK, mMapTaskToForward instanceof OpenUrlTask); mMapTaskToForward = null; } startActivity(intent); finish(); } private void finishFilesDownload(int result) { if (result == ERR_NO_MORE_FILES) { // World and WorldCoasts has been downloaded, we should register maps again to correctly add them to the model and generate indexes etc. 
// TODO fix the hack when separate download of World-s will be removed or refactored
Framework.nativeDeregisterMaps();
Framework.nativeRegisterMaps();

if (mCurrentCountry != null && mChbDownloadCountry.isChecked()) {
  // The user also asked for the detected country: start that download and let them proceed to the map meanwhile.
  CountryItem item = CountryItem.fill(mCurrentCountry);

  UiUtils.hide(mChbDownloadCountry, mTvLocation);
  mTvMessage.setText(getString(R.string.downloading_country_can_proceed, item.name));
  mProgress.setMax((int) item.totalSize);
  mProgress.setProgress(0);

  mCountryDownloadListenerSlot = MapManager.nativeSubscribe(mCountryDownloadListener);
  MapManager.nativeDownload(mCurrentCountry);
  setAction(PROCEED_TO_MAP);
} else {
  mAreResourcesDownloaded = true;
  showMap();
}
} else {
  // Show the error in red and offer a retry.
  mTvMessage.setText(getErrorMessage(result));
  mTvMessage.setTextColor(Color.RED);
  setAction(TRY_AGAIN);
}
}

// Offers the intent forwarded by SplashActivity to each registered processor in order;
// returns true as soon as one of them handles it.
private boolean dispatchIntent() {
  final Intent intent = getIntent();
  if (intent == null)
    return false;

  final Intent extra = intent.getParcelableExtra(SplashActivity.EXTRA_INTENT);
  if (extra == null)
    return false;

  for (final IntentProcessor ip : mIntentProcessors)
    if (ip.isSupported(extra) && ip.process(extra))
      return true;

  return false;
}

// Handles geo: URIs by forwarding them to the map as an OpenUrlTask.
private class GeoIntentProcessor implements IntentProcessor {
  @Override
  public boolean isSupported(Intent intent) {
    return (intent.getData() != null && "geo".equals(intent.getScheme()));
  }

  @Override
  public boolean process(Intent intent) {
    final String url = intent.getData().toString();
    LOGGER.i(TAG, "Query = " + url);
    mMapTaskToForward = new OpenUrlTask(url);
    org.alohalytics.Statistics.logEvent("GeoIntentProcessor::process", url);
    return true;
  }
}

// Handles ge0: (MAPS.ME short link) URIs.
private class Ge0IntentProcessor implements IntentProcessor {
  @Override
  public boolean isSupported(Intent intent) {
    return (intent.getData() != null && "ge0".equals(intent.getScheme()));
  }

  @Override
  public boolean process(Intent intent) {
    final String url = intent.getData().toString();
    LOGGER.i(TAG, "URL = " + url);
    mMapTaskToForward = new OpenUrlTask(url);
    org.alohalytics.Statistics.logEvent("Ge0IntentProcessor::process", url);
    return true;
  }
}

// Handles http://ge0.me/... links by rewriting them to the ge0:/ scheme.
private class HttpGe0IntentProcessor implements IntentProcessor {
  @Override
  public boolean isSupported(Intent intent) {
    if ("http".equalsIgnoreCase(intent.getScheme())) {
      final Uri data = intent.getData();
      if (data != null)
        return "ge0.me".equals(data.getHost());
    }

    return false;
  }

  @Override
  public boolean process(Intent intent) {
    final Uri data = intent.getData();
    LOGGER.i(TAG, "URL = " + data.toString());

    final String ge0Url = "ge0:/" + data.getPath();
    mMapTaskToForward = new OpenUrlTask(ge0Url);
    org.alohalytics.Statistics.logEvent("HttpGe0IntentProcessor::process", ge0Url);
    return true;
  }
}

/**
 * Use this to invoke API task.
 */
private class MapsWithMeIntentProcessor implements IntentProcessor {
  @Override
  public boolean isSupported(Intent intent) {
    return Const.ACTION_MWM_REQUEST.equals(intent.getAction());
  }

  @Override
  public boolean process(final Intent intent) {
    final String apiUrl = intent.getStringExtra(Const.EXTRA_URL);
    org.alohalytics.Statistics.logEvent("MapsWithMeIntentProcessor::process", apiUrl == null ? "null" : apiUrl);

    if (apiUrl != null) {
      SearchEngine.nativeCancelInteractiveSearch();

      // Parse and remember the API request; in pick-point mode no URL task is forwarded.
      final ParsedMwmRequest request = ParsedMwmRequest.extractFromIntent(intent);
      ParsedMwmRequest.setCurrentRequest(request);
      Statistics.INSTANCE.trackApiCall(request);

      if (!ParsedMwmRequest.isPickPointMode())
        mMapTaskToForward = new OpenUrlTask(apiUrl);
      return true;
    }

    return false;
  }
}

// Handles maps.google.com links.
private class GoogleMapsIntentProcessor implements IntentProcessor {
  @Override
  public boolean isSupported(Intent intent) {
    final Uri data = intent.getData();
    return (data != null && "maps.google.com".equals(data.getHost()));
  }

  @Override
  public boolean process(Intent intent) {
    final String url = intent.getData().toString();
    LOGGER.i(TAG, "URL = " + url);
    mMapTaskToForward = new OpenUrlTask(url);
    org.alohalytics.Statistics.logEvent("GoogleMapsIntentProcessor::process", url);
    return true;
  }
}

// Handles mapsme://lead/... and mapswithme://lead/... marketing deep links.
private class LeadUrlIntentProcessor implements IntentProcessor {
  @Override
  public boolean isSupported(Intent intent) {
    final Uri data = intent.getData();

    if (data == null)
      return false;

    String scheme = intent.getScheme();
    String host = data.getHost();
    if (TextUtils.isEmpty(scheme) || TextUtils.isEmpty(host))
      return false;

    return (scheme.equals("mapsme") || scheme.equals("mapswithme")) && "lead".equals(host);
  }

  @Override
  public boolean process(Intent intent) {
    final String url = intent.getData().toString();
    LOGGER.i(TAG, "URL = " + url);
    mMapTaskToForward = new OpenUrlTask(url);
    org.alohalytics.Statistics.logEvent("LeadUrlIntentProcessor::process", url);
    return true;
  }
}

// Handles "open this country" intents, typically coming from a download notification.
private class OpenCountryTaskProcessor implements IntentProcessor {
  @Override
  public boolean isSupported(Intent intent) {
    return intent.hasExtra(EXTRA_COUNTRY);
  }

  @Override
  public boolean process(Intent intent) {
    String countryId = intent.getStringExtra(EXTRA_COUNTRY);

    final boolean autoDownload = intent.getBooleanExtra(EXTRA_AUTODOWNLOAD, false);
    if (autoDownload)
Statistics.INSTANCE.trackEvent(Statistics.EventName.DOWNLOAD_COUNTRY_NOTIFICATION_CLICKED); mMapTaskToForward = new MwmActivity.ShowCountryTask(countryId, autoDownload); org.alohalytics.Statistics.logEvent("OpenCountryTaskProcessor::process", new String[] { "autoDownload", String.valueOf(autoDownload) }, LocationHelper.INSTANCE.getSavedLocation()); return true; } } private class KmzKmlProcessor implements IntentProcessor { private Uri mData; @Override public boolean isSupported(Intent intent) { mData = intent.getData(); return mData != null; } @Override public boolean process(Intent intent) { mIsReadingAttachment = true; ThreadPool.getStorage().execute(new Runnable() { @Override public void run() { final boolean result = readKmzFromIntent(); runOnUiThread(new Runnable() { @Override public void run() { Utils.toastShortcut(DownloadResourcesLegacyActivity.this, result ? R.string.load_kmz_successful : R.string.load_kmz_failed); mIsReadingAttachment = false; showMap(); } }); } }); return true; } private boolean readKmzFromIntent() { String path = null; File tmpFile = null; final String scheme = mData.getScheme(); if (scheme != null && !scheme.equalsIgnoreCase(ContentResolver.SCHEME_FILE)) { // scheme is "content" or "http" - need to download or read file first InputStream input = null; OutputStream output = null; try { final ContentResolver resolver = getContentResolver(); final String ext = getExtensionFromMime(resolver.getType(mData)); if (ext != null) { final String filePath = MwmApplication.get().getTempPath() + "Attachment" + ext; tmpFile = new File(filePath); output = new FileOutputStream(tmpFile); input = resolver.openInputStream(mData); final byte buffer[] = new byte[Constants.MB / 2]; int read; while ((read = input.read(buffer)) != -1) output.write(buffer, 0, read); output.flush(); path = filePath; } } catch (final Exception ex) { LOGGER.w(TAG, "Attachment not found or io error: " + ex, ex); } finally { Utils.closeStream(input); Utils.closeStream(output); } } 
else path = mData.getPath(); boolean result = false; if (path != null) { LOGGER.d(TAG, "Loading bookmarks file from: " + path); result = BookmarkManager.nativeLoadKmzFile(path); } else LOGGER.w(TAG, "Can't get bookmarks file from URI: " + mData); if (tmpFile != null) //noinspection ResultOfMethodCallIgnored tmpFile.delete(); return result; } private String getExtensionFromMime(String mime) { final int i = mime.lastIndexOf('.'); if (i == -1) return null; mime = mime.substring(i + 1); if (mime.equalsIgnoreCase("kmz")) return ".kmz"; else if (mime.equalsIgnoreCase("kml+xml")) return ".kml"; else return null; } } private class ShowOnMapProcessor implements IntentProcessor { private static final String ACTION_SHOW_ON_MAP = "com.mapswithme.maps.pro.action.SHOW_ON_MAP"; private static final String EXTRA_LAT = "lat"; private static final String EXTRA_LON = "lon"; @Override public boolean isSupported(Intent intent) { return ACTION_SHOW_ON_MAP.equals(intent.getAction()); } @Override public boolean process(Intent intent) { if (!intent.hasExtra(EXTRA_LAT) || !intent.hasExtra(EXTRA_LON)) return false; double lat = getCoordinateFromIntent(intent, EXTRA_LAT); double lon = getCoordinateFromIntent(intent, EXTRA_LON); mMapTaskToForward = new MwmActivity.ShowPointTask(lat, lon); return true; } } private class BuildRouteProcessor implements IntentProcessor { private static final String ACTION_BUILD_ROUTE = "com.mapswithme.maps.pro.action.BUILD_ROUTE"; private static final String EXTRA_LAT_TO = "lat_to"; private static final String EXTRA_LON_TO = "lon_to"; private static final String EXTRA_LAT_FROM = "lat_from"; private static final String EXTRA_LON_FROM = "lon_from"; private static final String EXTRA_ROUTER = "router"; @Override public boolean isSupported(Intent intent) { return ACTION_BUILD_ROUTE.equals(intent.getAction()); } @Override public boolean process(Intent intent) { if (!intent.hasExtra(EXTRA_LAT_TO) || !intent.hasExtra(EXTRA_LON_TO)) return false; double latTo = 
getCoordinateFromIntent(intent, EXTRA_LAT_TO); double lonTo = getCoordinateFromIntent(intent, EXTRA_LON_TO); boolean hasFrom = intent.hasExtra(EXTRA_LAT_FROM) && intent.hasExtra(EXTRA_LON_FROM); boolean hasRouter = intent.hasExtra(EXTRA_ROUTER); if (hasFrom && hasRouter) { double latFrom = getCoordinateFromIntent(intent, EXTRA_LAT_FROM); double lonFrom = getCoordinateFromIntent(intent, EXTRA_LON_FROM); mMapTaskToForward = new MwmActivity.BuildRouteTask(latTo, lonTo, latFrom,lonFrom, intent.getStringExtra(EXTRA_ROUTER)); } else if (hasFrom) { double latFrom = getCoordinateFromIntent(intent, EXTRA_LAT_FROM); double lonFrom = getCoordinateFromIntent(intent, EXTRA_LON_FROM); mMapTaskToForward = new MwmActivity.BuildRouteTask(latTo, lonTo, latFrom,lonFrom); } else { mMapTaskToForward = new MwmActivity.BuildRouteTask(latTo, lonTo); } return true; } } private static double getCoordinateFromIntent(@NonNull Intent intent, @NonNull String key) { double value = intent.getDoubleExtra(key, 0.0); if (Double.compare(value, 0.0) == 0) value = intent.getFloatExtra(key, 0.0f); return value; } private static native int nativeGetBytesToDownload(); private static native int nativeStartNextFileDownload(Listener listener); private static native void nativeCancelCurrentFile(); }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.accumulo.core.data;

/**
 * This is the Key used to store and access individual values in Accumulo. A Key is a tuple composed of a row, column family, column qualifier,
 * column visibility, timestamp, and delete marker.
 *
 * Keys are comparable and therefore have a sorted order defined by {@link #compareTo(Key)}.
 *
 */

import static org.apache.accumulo.core.util.ByteBufferUtil.toBytes;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.List;

import org.apache.accumulo.core.Constants;
import org.apache.accumulo.core.data.thrift.TKey;
import org.apache.accumulo.core.data.thrift.TKeyValue;
import org.apache.accumulo.core.security.ColumnVisibility;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.io.WritableUtils;

public class Key implements WritableComparable<Key>, Cloneable {

  // Key components; protected so subclasses can access them directly.
  protected byte[] row;
  protected byte[] colFamily;
  protected byte[] colQualifier;
  protected byte[] colVisibility;
  protected long timestamp;
  protected boolean deleted;

  @Override
  public boolean equals(Object o) {
    // Full equality: every component including timestamp and the delete marker.
    if (o instanceof Key)
      return this.equals((Key) o, PartialKey.ROW_COLFAM_COLQUAL_COLVIS_TIME_DEL);
    return false;
  }

  // Shared zero-length array so empty fields never allocate.
  private static final byte EMPTY_BYTES[] = new byte[0];

  // Returns the requested slice of ba, copying only when asked to or when a proper sub-range is needed.
  private byte[] copyIfNeeded(byte ba[], int off, int len, boolean copyData) {
    if (len == 0)
      return EMPTY_BYTES;

    if (!copyData && ba.length == len && off == 0)
      return ba;

    byte[] copy = new byte[len];
    System.arraycopy(ba, off, copy, 0, len);
    return copy;
  }

  // Common initializer used by the constructors; "copy" controls defensive copying of the inputs.
  private final void init(byte r[], int rOff, int rLen, byte cf[], int cfOff, int cfLen, byte cq[], int cqOff, int cqLen, byte cv[], int cvOff, int cvLen,
      long ts, boolean del, boolean copy) {
    row = copyIfNeeded(r, rOff, rLen, copy);
    colFamily = copyIfNeeded(cf, cfOff, cfLen, copy);
    colQualifier = copyIfNeeded(cq, cqOff, cqLen, copy);
    colVisibility = copyIfNeeded(cv, cvOff, cvLen, copy);
    timestamp = ts;
    deleted = del;
  }

  /**
   * Creates a key with empty row, empty column family, empty column qualifier, empty column visibility, timestamp {@link Long#MAX_VALUE}, and delete marker
   * false.
*/
  public Key() {
    row = EMPTY_BYTES;
    colFamily = EMPTY_BYTES;
    colQualifier = EMPTY_BYTES;
    colVisibility = EMPTY_BYTES;
    timestamp = Long.MAX_VALUE;
    deleted = false;
  }

  /**
   * Creates a key with the specified row, empty column family, empty column qualifier, empty column visibility, timestamp {@link Long#MAX_VALUE}, and delete
   * marker false.
   */
  public Key(Text row) {
    init(row.getBytes(), 0, row.getLength(), EMPTY_BYTES, 0, 0, EMPTY_BYTES, 0, 0, EMPTY_BYTES, 0, 0, Long.MAX_VALUE, false, true);
  }

  /**
   * Creates a key with the specified row, empty column family, empty column qualifier, empty column visibility, the specified timestamp, and delete marker
   * false.
   */
  public Key(Text row, long ts) {
    this(row);
    timestamp = ts;
  }

  // Creates a key from sub-ranges of the given arrays; the data is always defensively copied.
  public Key(byte row[], int rOff, int rLen, byte cf[], int cfOff, int cfLen, byte cq[], int cqOff, int cqLen, byte cv[], int cvOff, int cvLen, long ts) {
    init(row, rOff, rLen, cf, cfOff, cfLen, cq, cqOff, cqLen, cv, cvOff, cvLen, ts, false, true);
  }

  public Key(byte[] row, byte[] colFamily, byte[] colQualifier, byte[] colVisibility, long timestamp) {
    this(row, colFamily, colQualifier, colVisibility, timestamp, false, true);
  }

  public Key(byte[] row, byte[] cf, byte[] cq, byte[] cv, long ts, boolean deleted) {
    this(row, cf, cq, cv, ts, deleted, true);
  }

  // copy == false lets the caller share the arrays with this key (no defensive copy).
  public Key(byte[] row, byte[] cf, byte[] cq, byte[] cv, long ts, boolean deleted, boolean copy) {
    init(row, 0, row.length, cf, 0, cf.length, cq, 0, cq.length, cv, 0, cv.length, ts, deleted, copy);
  }

  /**
   * Creates a key with the specified row, the specified column family, empty column qualifier, empty column visibility, timestamp {@link Long#MAX_VALUE}, and
   * delete marker false.
   */
  public Key(Text row, Text cf) {
    init(row.getBytes(), 0, row.getLength(), cf.getBytes(), 0, cf.getLength(), EMPTY_BYTES, 0, 0, EMPTY_BYTES, 0, 0, Long.MAX_VALUE, false, true);
  }

  /**
   * Creates a key with the specified row, the specified column family, the specified column qualifier, empty column visibility, timestamp
   * {@link Long#MAX_VALUE}, and delete marker false.
   */
  public Key(Text row, Text cf, Text cq) {
    init(row.getBytes(), 0, row.getLength(), cf.getBytes(), 0, cf.getLength(), cq.getBytes(), 0, cq.getLength(), EMPTY_BYTES, 0, 0, Long.MAX_VALUE, false,
        true);
  }

  /**
   * Creates a key with the specified row, the specified column family, the specified column qualifier, the specified column visibility, timestamp
   * {@link Long#MAX_VALUE}, and delete marker false.
   */
  public Key(Text row, Text cf, Text cq, Text cv) {
    init(row.getBytes(), 0, row.getLength(), cf.getBytes(), 0, cf.getLength(), cq.getBytes(), 0, cq.getLength(), cv.getBytes(), 0, cv.getLength(),
        Long.MAX_VALUE, false, true);
  }

  /**
   * Creates a key with the specified row, the specified column family, the specified column qualifier, empty column visibility, the specified timestamp, and
   * delete marker false.
   */
  public Key(Text row, Text cf, Text cq, long ts) {
    init(row.getBytes(), 0, row.getLength(), cf.getBytes(), 0, cf.getLength(), cq.getBytes(), 0, cq.getLength(), EMPTY_BYTES, 0, 0, ts, false, true);
  }

  /**
   * Creates a key with the specified row, the specified column family, the specified column qualifier, the specified column visibility, the specified
   * timestamp, and delete marker false.
   */
  public Key(Text row, Text cf, Text cq, Text cv, long ts) {
    init(row.getBytes(), 0, row.getLength(), cf.getBytes(), 0, cf.getLength(), cq.getBytes(), 0, cq.getLength(), cv.getBytes(), 0, cv.getLength(), ts, false,
        true);
  }

  /**
   * Creates a key with the specified row, the specified column family, the specified column qualifier, the specified column visibility, the specified
   * timestamp, and delete marker false.
   */
  public Key(Text row, Text cf, Text cq, ColumnVisibility cv, long ts) {
    byte[] expr = cv.getExpression();
    init(row.getBytes(), 0, row.getLength(), cf.getBytes(), 0, cf.getLength(), cq.getBytes(), 0, cq.getLength(), expr, 0, expr.length, ts, false, true);
  }

  /**
   * Converts CharSequence to Text and creates a Key using {@link #Key(Text)}.
   */
  public Key(CharSequence row) {
    this(new Text(row.toString()));
  }

  /**
   * Converts CharSequence to Text and creates a Key using {@link #Key(Text,Text)}.
   */
  public Key(CharSequence row, CharSequence cf) {
    this(new Text(row.toString()), new Text(cf.toString()));
  }

  /**
   * Converts CharSequence to Text and creates a Key using {@link #Key(Text,Text,Text)}.
   */
  public Key(CharSequence row, CharSequence cf, CharSequence cq) {
    this(new Text(row.toString()), new Text(cf.toString()), new Text(cq.toString()));
  }

  /**
   * Converts CharSequence to Text and creates a Key using {@link #Key(Text,Text,Text,Text)}.
   */
  public Key(CharSequence row, CharSequence cf, CharSequence cq, CharSequence cv) {
    this(new Text(row.toString()), new Text(cf.toString()), new Text(cq.toString()), new Text(cv.toString()));
  }

  /**
   * Converts CharSequence to Text and creates a Key using {@link #Key(Text,Text,Text,long)}.
   */
  public Key(CharSequence row, CharSequence cf, CharSequence cq, long ts) {
    this(new Text(row.toString()), new Text(cf.toString()), new Text(cq.toString()), ts);
  }

  /**
   * Converts CharSequence to Text and creates a Key using {@link #Key(Text,Text,Text,Text,long)}.
   */
  public Key(CharSequence row, CharSequence cf, CharSequence cq, CharSequence cv, long ts) {
    this(new Text(row.toString()), new Text(cf.toString()), new Text(cq.toString()), new Text(cv.toString()), ts);
  }

  /**
   * Converts CharSequence to Text and creates a Key using {@link #Key(Text,Text,Text,ColumnVisibility,long)}.
*/ public Key(CharSequence row, CharSequence cf, CharSequence cq, ColumnVisibility cv, long ts) { this(new Text(row.toString()), new Text(cf.toString()), new Text(cq.toString()), new Text(cv.getExpression()), ts); } private byte[] followingArray(byte ba[]) { byte[] fba = new byte[ba.length + 1]; System.arraycopy(ba, 0, fba, 0, ba.length); fba[ba.length] = (byte) 0x00; return fba; } /** * Returns a key that will sort immediately after this key. * * @param part * PartialKey except {@link PartialKey#ROW_COLFAM_COLQUAL_COLVIS_TIME_DEL} */ public Key followingKey(PartialKey part) { Key returnKey = new Key(); switch (part) { case ROW: returnKey.row = followingArray(row); break; case ROW_COLFAM: returnKey.row = row; returnKey.colFamily = followingArray(colFamily); break; case ROW_COLFAM_COLQUAL: returnKey.row = row; returnKey.colFamily = colFamily; returnKey.colQualifier = followingArray(colQualifier); break; case ROW_COLFAM_COLQUAL_COLVIS: // This isn't useful for inserting into accumulo, but may be useful for lookups. returnKey.row = row; returnKey.colFamily = colFamily; returnKey.colQualifier = colQualifier; returnKey.colVisibility = followingArray(colVisibility); break; case ROW_COLFAM_COLQUAL_COLVIS_TIME: returnKey.row = row; returnKey.colFamily = colFamily; returnKey.colQualifier = colQualifier; returnKey.colVisibility = colVisibility; returnKey.setTimestamp(timestamp - 1); returnKey.deleted = false; break; default: throw new IllegalArgumentException("Partial key specification " + part + " disallowed"); } return returnKey; } /** * Creates a key with the same row, column family, column qualifier, column visibility, timestamp, and delete marker as the given key. 
*/
  public Key(Key other) {
    set(other);
  }

  // Builds a key from its Thrift form; every component must be present.
  public Key(TKey tkey) {
    this.row = toBytes(tkey.row);
    this.colFamily = toBytes(tkey.colFamily);
    this.colQualifier = toBytes(tkey.colQualifier);
    this.colVisibility = toBytes(tkey.colVisibility);
    this.timestamp = tkey.timestamp;
    this.deleted = false;

    if (row == null) {
      throw new IllegalArgumentException("null row");
    }
    if (colFamily == null) {
      throw new IllegalArgumentException("null column family");
    }
    if (colQualifier == null) {
      throw new IllegalArgumentException("null column qualifier");
    }
    if (colVisibility == null) {
      throw new IllegalArgumentException("null column visibility");
    }
  }

  /**
   * This method gives users control over allocation of Text objects by copying into the passed in text.
   *
   * @param r
   *          the key's row will be copied into this Text
   * @return the Text that was passed in
   */
  public Text getRow(Text r) {
    r.set(row, 0, row.length);
    return r;
  }

  /**
   * This method returns a pointer to the keys internal data and does not copy it.
   *
   * @return ByteSequence that points to the internal key row data.
   */
  public ByteSequence getRowData() {
    return new ArrayByteSequence(row);
  }

  /**
   * This method allocates a Text object and copies into it.
   *
   * @return Text containing the row field
   */
  public Text getRow() {
    return getRow(new Text());
  }

  /**
   * Efficiently compare the row of a key w/o allocating a text object and copying the row into it.
   *
   * @param r
   *          row to compare to keys row
   * @return same as {@link #getRow()}.compareTo(r)
   */
  public int compareRow(Text r) {
    return WritableComparator.compareBytes(row, 0, row.length, r.getBytes(), 0, r.getLength());
  }

  /**
   * This method returns a pointer to the keys internal data and does not copy it.
   *
   * @return ByteSequence that points to the internal key column family data.
   */
  public ByteSequence getColumnFamilyData() {
    return new ArrayByteSequence(colFamily);
  }

  /**
   * This method gives users control over allocation of Text objects by copying into the passed in text.
   *
   * @param cf
   *          the key's column family will be copied into this Text
   * @return the Text that was passed in
   */
  public Text getColumnFamily(Text cf) {
    cf.set(colFamily, 0, colFamily.length);
    return cf;
  }

  /**
   * This method allocates a Text object and copies into it.
   *
   * @return Text containing the column family field
   */
  public Text getColumnFamily() {
    return getColumnFamily(new Text());
  }

  /**
   * Efficiently compare the column family of a key w/o allocating a text object and copying the column qualifier into it.
   *
   * @param cf
   *          column family to compare to keys column family
   * @return same as {@link #getColumnFamily()}.compareTo(cf)
   */
  public int compareColumnFamily(Text cf) {
    return WritableComparator.compareBytes(colFamily, 0, colFamily.length, cf.getBytes(), 0, cf.getLength());
  }

  /**
   * This method returns a pointer to the keys internal data and does not copy it.
   *
   * @return ByteSequence that points to the internal key column qualifier data.
   */
  public ByteSequence getColumnQualifierData() {
    return new ArrayByteSequence(colQualifier);
  }

  /**
   * This method gives users control over allocation of Text objects by copying into the passed in text.
   *
   * @param cq
   *          the key's column qualifier will be copied into this Text
   * @return the Text that was passed in
   */
  public Text getColumnQualifier(Text cq) {
    cq.set(colQualifier, 0, colQualifier.length);
    return cq;
  }

  /**
   * This method allocates a Text object and copies into it.
   *
   * @return Text containing the column qualifier field
   */
  public Text getColumnQualifier() {
    return getColumnQualifier(new Text());
  }

  /**
   * Efficiently compare the column qualifier of a key w/o allocating a text object and copying the column qualifier into it.
*
   * @param cq
   *          column qualifier to compare to keys column qualifier
   * @return same as {@link #getColumnQualifier()}.compareTo(cq)
   */
  public int compareColumnQualifier(Text cq) {
    return WritableComparator.compareBytes(colQualifier, 0, colQualifier.length, cq.getBytes(), 0, cq.getLength());
  }

  public void setTimestamp(long ts) {
    this.timestamp = ts;
  }

  public long getTimestamp() {
    return timestamp;
  }

  public boolean isDeleted() {
    return deleted;
  }

  public void setDeleted(boolean deleted) {
    this.deleted = deleted;
  }

  /**
   * This method returns a pointer to the keys internal data and does not copy it.
   *
   * @return ByteSequence that points to the internal key column visibility data.
   */
  public ByteSequence getColumnVisibilityData() {
    return new ArrayByteSequence(colVisibility);
  }

  /**
   * This method allocates a Text object and copies into it.
   *
   * @return Text containing the column visibility field
   */
  public final Text getColumnVisibility() {
    return getColumnVisibility(new Text());
  }

  /**
   * This method gives users control over allocation of Text objects by copying into the passed in text.
   *
   * @param cv
   *          the key's column visibility will be copied into this Text
   * @return the Text that was passed in
   */
  public final Text getColumnVisibility(Text cv) {
    cv.set(colVisibility, 0, colVisibility.length);
    return cv;
  }

  /**
   * This method creates a new ColumnVisibility representing the column visibility for this key
   *
   * WARNING: using this method may inhibit performance since a new ColumnVisibility object is created on every call.
   *
   * @return A new object representing the column visibility field
   * @since 1.5.0
   */
  public final ColumnVisibility getColumnVisibilityParsed() {
    return new ColumnVisibility(colVisibility);
  }

  /**
   * Sets this key's row, column family, column qualifier, column visibility, timestamp, and delete marker to be the same as another key's.
   */
  public void set(Key k) {
    // Note: shares the other key's arrays rather than copying them.
    row = k.row;
    colFamily = k.colFamily;
    colQualifier = k.colQualifier;
    colVisibility = k.colVisibility;
    timestamp = k.timestamp;
    deleted = k.deleted;
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    // this method is a little screwy so it will be compatible with older
    // code that serialized data
    // Wire format: three cumulative component offsets + total length, then the
    // concatenated field bytes, then the vlong timestamp and the delete flag.
    int colFamilyOffset = WritableUtils.readVInt(in);
    int colQualifierOffset = WritableUtils.readVInt(in);
    int colVisibilityOffset = WritableUtils.readVInt(in);
    int totalLen = WritableUtils.readVInt(in);

    row = new byte[colFamilyOffset];
    colFamily = new byte[colQualifierOffset - colFamilyOffset];
    colQualifier = new byte[colVisibilityOffset - colQualifierOffset];
    colVisibility = new byte[totalLen - colVisibilityOffset];

    in.readFully(row);
    in.readFully(colFamily);
    in.readFully(colQualifier);
    in.readFully(colVisibility);

    timestamp = WritableUtils.readVLong(in);
    deleted = in.readBoolean();
  }

  @Override
  public void write(DataOutput out) throws IOException {
    // Mirror of readFields(): cumulative offsets first, then the field bytes.
    int colFamilyOffset = row.length;
    int colQualifierOffset = colFamilyOffset + colFamily.length;
    int colVisibilityOffset = colQualifierOffset + colQualifier.length;
    int totalLen = colVisibilityOffset + colVisibility.length;

    WritableUtils.writeVInt(out, colFamilyOffset);
    WritableUtils.writeVInt(out, colQualifierOffset);
    WritableUtils.writeVInt(out, colVisibilityOffset);
    WritableUtils.writeVInt(out, totalLen);

    out.write(row);
    out.write(colFamily);
    out.write(colQualifier);
    out.write(colVisibility);

    WritableUtils.writeVLong(out, timestamp);
    out.writeBoolean(deleted);
  }

  /**
   * Compare part of a key. For example compare just the row and column family, and if those are equal then return true.
*
   */
  public boolean equals(Key other, PartialKey part) {
    switch (part) {
      case ROW:
        return isEqual(row, other.row);
      case ROW_COLFAM:
        return isEqual(row, other.row) && isEqual(colFamily, other.colFamily);
      case ROW_COLFAM_COLQUAL:
        return isEqual(row, other.row) && isEqual(colFamily, other.colFamily) && isEqual(colQualifier, other.colQualifier);
      case ROW_COLFAM_COLQUAL_COLVIS:
        return isEqual(row, other.row) && isEqual(colFamily, other.colFamily) && isEqual(colQualifier, other.colQualifier)
            && isEqual(colVisibility, other.colVisibility);
      case ROW_COLFAM_COLQUAL_COLVIS_TIME:
        return isEqual(row, other.row) && isEqual(colFamily, other.colFamily) && isEqual(colQualifier, other.colQualifier)
            && isEqual(colVisibility, other.colVisibility) && timestamp == other.timestamp;
      case ROW_COLFAM_COLQUAL_COLVIS_TIME_DEL:
        return isEqual(row, other.row) && isEqual(colFamily, other.colFamily) && isEqual(colQualifier, other.colQualifier)
            && isEqual(colVisibility, other.colVisibility) && timestamp == other.timestamp && deleted == other.deleted;
      default:
        throw new IllegalArgumentException("Unrecognized partial key specification " + part);
    }
  }

  /**
   * Compare elements of a key given by a {@link PartialKey}. For example, for {@link PartialKey#ROW_COLFAM}, compare just the row and column family. If the
   * rows are not equal, return the result of the row comparison; otherwise, return the result of the column family comparison.
   *
   * @see #compareTo(Key)
   */
  public int compareTo(Key other, PartialKey part) {
    // check for matching row
    int result = WritableComparator.compareBytes(row, 0, row.length, other.row, 0, other.row.length);
    if (result != 0 || part.equals(PartialKey.ROW))
      return result;

    // check for matching column family
    result = WritableComparator.compareBytes(colFamily, 0, colFamily.length, other.colFamily, 0, other.colFamily.length);
    if (result != 0 || part.equals(PartialKey.ROW_COLFAM))
      return result;

    // check for matching column qualifier
    result = WritableComparator.compareBytes(colQualifier, 0, colQualifier.length, other.colQualifier, 0, other.colQualifier.length);
    if (result != 0 || part.equals(PartialKey.ROW_COLFAM_COLQUAL))
      return result;

    // check for matching column visibility
    result = WritableComparator.compareBytes(colVisibility, 0, colVisibility.length, other.colVisibility, 0, other.colVisibility.length);
    if (result != 0 || part.equals(PartialKey.ROW_COLFAM_COLQUAL_COLVIS))
      return result;

    // check for matching timestamp — deliberately inverted so larger (newer) timestamps sort first
    if (timestamp < other.timestamp)
      result = 1;
    else if (timestamp > other.timestamp)
      result = -1;
    else
      result = 0;

    if (result != 0 || part.equals(PartialKey.ROW_COLFAM_COLQUAL_COLVIS_TIME))
      return result;

    // check for matching deleted flag — a delete marker of true sorts before false
    if (deleted)
      result = other.deleted ? 0 : -1;
    else
      result = other.deleted ? 1 : 0;

    return result;
  }

  /**
   * Compare all elements of a key. The elements (row, column family, column qualifier, column visibility, timestamp, and delete marker) are compared in order
   * until an unequal element is found. If the row is equal, then compare the column family, etc. The row, column family, column qualifier, and column
   * visibility are compared lexicographically and sorted ascending. The timestamps are compared numerically and sorted descending so that the most recent data
   * comes first. Lastly, a delete marker of true sorts before a delete marker of false.
*/
  @Override
  public int compareTo(Key other) {
    return compareTo(other, PartialKey.ROW_COLFAM_COLQUAL_COLVIS_TIME_DEL);
  }

  @Override
  public int hashCode() {
    // Note: the delete marker is intentionally not part of the hash; equal keys still hash equally.
    return WritableComparator.hashBytes(row, row.length) + WritableComparator.hashBytes(colFamily, colFamily.length)
        + WritableComparator.hashBytes(colQualifier, colQualifier.length) + WritableComparator.hashBytes(colVisibility, colVisibility.length)
        + (int) (timestamp ^ (timestamp >>> 32));
  }

  // Renders up to maxLen bytes as printable ASCII, escaping other bytes as %xx; and
  // appending "... TRUNCATED" when the data was clipped.
  public static String toPrintableString(byte ba[], int offset, int len, int maxLen) {
    return appendPrintableString(ba, offset, len, maxLen, new StringBuilder()).toString();
  }

  public static StringBuilder appendPrintableString(byte ba[], int offset, int len, int maxLen, StringBuilder sb) {
    int plen = Math.min(len, maxLen);

    for (int i = 0; i < plen; i++) {
      int c = 0xff & ba[offset + i];
      if (c >= 32 && c <= 126)
        sb.append((char) c);
      else
        sb.append("%" + String.format("%02x;", c));
    }

    if (len > maxLen) {
      sb.append("... TRUNCATED");
    }

    return sb;
  }

  // Shared "row cf:cq [cv]" rendering used by toString() and toStringNoTime().
  private StringBuilder rowColumnStringBuilder() {
    StringBuilder sb = new StringBuilder();
    appendPrintableString(row, 0, row.length, Constants.MAX_DATA_TO_PRINT, sb);
    sb.append(" ");
    appendPrintableString(colFamily, 0, colFamily.length, Constants.MAX_DATA_TO_PRINT, sb);
    sb.append(":");
    appendPrintableString(colQualifier, 0, colQualifier.length, Constants.MAX_DATA_TO_PRINT, sb);
    sb.append(" [");
    appendPrintableString(colVisibility, 0, colVisibility.length, Constants.MAX_DATA_TO_PRINT, sb);
    sb.append("]");
    return sb;
  }

  @Override
  public String toString() {
    StringBuilder sb = rowColumnStringBuilder();
    sb.append(" ");
    sb.append(Long.toString(timestamp));
    sb.append(" ");
    sb.append(deleted);
    return sb.toString();
  }

  public String toStringNoTime() {
    return rowColumnStringBuilder().toString();
  }

  /**
   * Returns the sums of the lengths of the row, column family, column qualifier, and visibility.
   *
   * @return row.length + colFamily.length + colQualifier.length + colVisibility.length;
   */
  public int getLength() {
    return row.length + colFamily.length + colQualifier.length + colVisibility.length;
  }

  /**
   * Same as {@link #getLength()}.
   */
  public int getSize() {
    return getLength();
  }

  private static boolean isEqual(byte a1[], byte a2[]) {
    if (a1 == a2)
      return true;

    int last = a1.length;

    if (last != a2.length)
      return false;

    if (last == 0)
      return true;

    // since sorted data is usually compared in accumulo,
    // the prefixes will normally be the same... so compare
    // the last two charachters first.. the most likely place
    // to have disorder is at end of the strings when the
    // data is sorted... if those are the same compare the rest
    // of the data forward... comparing backwards is slower
    // (compiler and cpu optimized for reading data forward)..
    // do not want slower comparisons when data is equal...
    // sorting brings equals data together

    last--;

    if (a1[last] == a2[last]) {
      for (int i = 0; i < last; i++)
        if (a1[i] != a2[i])
          return false;
    } else {
      return false;
    }

    return true;
  }

  /**
   * Use this to compress a list of keys before sending them via thrift.
   *
   * @param param
   *          a list of key/value pairs
   */
  public static List<TKeyValue> compress(List<?
extends KeyValue> param) { List<TKeyValue> tkvl = Arrays.asList(new TKeyValue[param.size()]); if (param.size() > 0) tkvl.set(0, new TKeyValue(param.get(0).getKey().toThrift(), ByteBuffer.wrap(param.get(0).getValue().get()))); for (int i = param.size() - 1; i > 0; i--) { Key prevKey = param.get(i - 1).getKey(); KeyValue kv = param.get(i); Key key = kv.getKey(); TKey newKey = null; if (isEqual(prevKey.row, key.row)) { newKey = key.toThrift(); newKey.row = null; } if (isEqual(prevKey.colFamily, key.colFamily)) { if (newKey == null) newKey = key.toThrift(); newKey.colFamily = null; } if (isEqual(prevKey.colQualifier, key.colQualifier)) { if (newKey == null) newKey = key.toThrift(); newKey.colQualifier = null; } if (isEqual(prevKey.colVisibility, key.colVisibility)) { if (newKey == null) newKey = key.toThrift(); newKey.colVisibility = null; } if (newKey == null) newKey = key.toThrift(); tkvl.set(i, new TKeyValue(newKey, ByteBuffer.wrap(kv.getValue().get()))); } return tkvl; } /** * Use this to decompress a list of keys received from thrift. 
*/ public static void decompress(List<TKeyValue> param) { for (int i = 1; i < param.size(); i++) { TKey prevKey = param.get(i - 1).key; TKey key = param.get(i).key; if (key.row == null) { key.row = prevKey.row; } if (key.colFamily == null) { key.colFamily = prevKey.colFamily; } if (key.colQualifier == null) { key.colQualifier = prevKey.colQualifier; } if (key.colVisibility == null) { key.colVisibility = prevKey.colVisibility; } } } byte[] getRowBytes() { return row; } byte[] getColFamily() { return colFamily; } byte[] getColQualifier() { return colQualifier; } byte[] getColVisibility() { return colVisibility; } public TKey toThrift() { return new TKey(ByteBuffer.wrap(row), ByteBuffer.wrap(colFamily), ByteBuffer.wrap(colQualifier), ByteBuffer.wrap(colVisibility), timestamp); } @Override public Object clone() throws CloneNotSupportedException { Key r = (Key) super.clone(); r.row = Arrays.copyOf(row, row.length); r.colFamily = Arrays.copyOf(colFamily, colFamily.length); r.colQualifier = Arrays.copyOf(colQualifier, colQualifier.length); r.colVisibility = Arrays.copyOf(colVisibility, colVisibility.length); return r; } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.mapreduce.split;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Evolving;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.tez.common.ReflectionUtils;
import org.apache.tez.dag.api.TezException;
import org.apache.tez.dag.api.TezUncheckedException;
import org.apache.tez.common.Preconditions;

/**
 * An InputFormat that provides a generic grouping around
 * the splits of a real InputFormat
 */
@Public
@Evolving
public class TezGroupedSplitsInputFormat<K, V> extends InputFormat<K, V>
  implements Configurable{

  private static final Logger LOG = LoggerFactory.getLogger(TezGroupedSplitsInputFormat.class);

  // The real InputFormat being wrapped; may also be lazily instantiated from
  // the name recorded in a TezGroupedSplit (see initInputFormatFromSplit).
  InputFormat<K, V> wrappedInputFormat;
  // Target number of grouped splits; 0 means no explicit target was set.
  int desiredNumSplits = 0;
  Configuration conf;
  // Optional pluggable strategies used by the grouper; may be null.
  SplitSizeEstimator estimator;
  SplitLocationProvider locationProvider;

  public TezGroupedSplitsInputFormat() {
  }

  /** Sets the real InputFormat whose splits are to be grouped. */
  public void setInputFormat(InputFormat<K, V> wrappedInputFormat) {
    this.wrappedInputFormat = wrappedInputFormat;
    if (LOG.isDebugEnabled()) {
      LOG.debug("wrappedInputFormat: " + wrappedInputFormat.getClass().getName());
    }
  }

  /**
   * Sets the desired number of grouped splits.
   *
   * @param num must be non-negative
   */
  public void setDesiredNumberOfSplits(int num) {
    Preconditions.checkArgument(num >= 0);
    this.desiredNumSplits = num;
    if (LOG.isDebugEnabled()) {
      LOG.debug("desiredNumSplits: " + desiredNumSplits);
    }
  }

  /** Sets a non-null estimator used to size splits during grouping. */
  public void setSplitSizeEstimator(SplitSizeEstimator estimator) {
    Preconditions.checkArgument(estimator != null);
    this.estimator = estimator;
    if (LOG.isDebugEnabled()) {
      LOG.debug("Split size estimator : " + estimator);
    }
  }

  /** Sets a non-null provider of split locations used during grouping. */
  public void setSplitLocationProvider(SplitLocationProvider locationProvider) {
    Preconditions.checkArgument(locationProvider != null);
    this.locationProvider = locationProvider;
    if (LOG.isDebugEnabled()) {
      LOG.debug("Split location provider : " + locationProvider);
    }
  }

  /**
   * Fetches the wrapped format's splits and groups them via
   * {@link TezMapReduceSplitsGrouper}.
   */
  @Override
  public List<InputSplit> getSplits(JobContext context) throws IOException,
      InterruptedException {
    List<InputSplit> originalSplits = wrappedInputFormat.getSplits(context);
    TezMapReduceSplitsGrouper grouper = new TezMapReduceSplitsGrouper();
    String wrappedInputFormatName = wrappedInputFormat.getClass().getName();
    return grouper
        .getGroupedSplits(conf, originalSplits, desiredNumSplits, wrappedInputFormatName, estimator, locationProvider);
  }

  /**
   * Creates a record reader that iterates over all wrapped splits contained in
   * the given grouped split, instantiating the wrapped InputFormat from the
   * split if it has not been set yet.
   */
  @Override
  public RecordReader<K, V> createRecordReader(InputSplit split, TaskAttemptContext context)
      throws IOException, InterruptedException {
    TezGroupedSplit groupedSplit = (TezGroupedSplit) split;
    try {
      initInputFormatFromSplit(groupedSplit);
    } catch (TezException e) {
      throw new IOException(e);
    }
    return new TezGroupedSplitsRecordReader(groupedSplit, context);
  }

  // Lazily instantiates wrappedInputFormat from the class name carried in the
  // grouped split; no-op when already set.
  @SuppressWarnings({ "rawtypes", "unchecked" })
  void initInputFormatFromSplit(TezGroupedSplit split) throws TezException {
    if (wrappedInputFormat == null) {
      Class<? extends InputFormat> clazz = (Class<? extends InputFormat>)
          getClassFromName(split.wrappedInputFormatName);
      try {
        wrappedInputFormat = org.apache.hadoop.util.ReflectionUtils.newInstance(clazz, conf);
      } catch (Exception e) {
        throw new TezException(e);
      }
    }
  }

  static Class<?> getClassFromName(String name) throws TezException {
    return ReflectionUtils.getClazz(name);
  }

  /**
   * Record reader that chains the readers of the wrapped splits one after
   * another, presenting them as a single stream of key/values.
   */
  public class TezGroupedSplitsRecordReader extends RecordReader<K, V> {

    TezGroupedSplit groupedSplit;
    TaskAttemptContext context;
    // Index of the NEXT wrapped split to open; always one past the split
    // currently being read.
    int idx = 0;
    // Bytes accounted for by fully-consumed wrapped splits.
    long progress;
    RecordReader<K, V> curReader;

    public TezGroupedSplitsRecordReader(TezGroupedSplit split,
        TaskAttemptContext context) throws IOException {
      this.groupedSplit = split;
      this.context = context;
    }

    // Sanity-checks that the framework hands back the same split/context this
    // reader was constructed with, then opens the first wrapped reader.
    public void initialize(InputSplit split,
        TaskAttemptContext context) throws IOException, InterruptedException {
      if (this.groupedSplit != split) {
        throw new TezUncheckedException("Splits dont match");
      }
      if (this.context != context) {
        throw new TezUncheckedException("Contexts dont match");
      }
      initNextRecordReader();
    }

    public boolean nextKeyValue() throws IOException, InterruptedException {
      while ((curReader == null) || !curReader.nextKeyValue()) {
        // false return finishes. true return loops back for nextKeyValue()
        if (!initNextRecordReader()) {
          return false;
        }
      }
      return true;
    }

    public K getCurrentKey() throws IOException, InterruptedException {
      return curReader.getCurrentKey();
    }

    public V getCurrentValue() throws IOException, InterruptedException {
      return curReader.getCurrentValue();
    }

    public void close() throws IOException {
      if (curReader != null) {
        curReader.close();
        curReader = null;
      }
    }

    // Closes the current reader (accumulating its length into progress) and
    // opens the reader for the next wrapped split, if any.
    protected boolean initNextRecordReader() throws IOException {
      if (curReader != null) {
        curReader.close();
        curReader = null;
        if (idx > 0) {
          try {
            progress += groupedSplit.wrappedSplits.get(idx-1).getLength();
          } catch (InterruptedException e) {
            throw new TezUncheckedException(e);
          }
        }
      }

      // if all chunks have been processed, nothing more to do.
      if (idx == groupedSplit.wrappedSplits.size()) {
        return false;
      }

      // get a record reader for the idx-th chunk
      try {
        curReader = wrappedInputFormat.createRecordReader(
            groupedSplit.wrappedSplits.get(idx), context);
        curReader.initialize(groupedSplit.wrappedSplits.get(idx), context);
      } catch (Exception e) {
        throw new RuntimeException (e);
      }
      idx++;
      return true;
    }

    /**
     * return progress based on the amount of data processed so far.
     */
    public float getProgress() throws IOException, InterruptedException {
      long subprogress = 0; // bytes processed in current split
      if (null != curReader) {
        // idx is always one past the current subsplit's true index.
        subprogress = (long) (curReader.getProgress() * groupedSplit.wrappedSplits
            .get(idx - 1).getLength());
      }
      // NOTE(review): if groupedSplit.getLength() is 0 this divides by zero
      // and yields NaN rather than a clamped value -- confirm whether empty
      // grouped splits can occur here.
      return Math.min(1.0f,  (progress + subprogress)/(float)(groupedSplit.getLength()));
    }
  }

  @Override
  public void setConf(Configuration conf) {
    this.conf = conf;
  }

  @Override
  public Configuration getConf() {
    return conf;
  }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.plugins;

import org.elasticsearch.Version;
import org.elasticsearch.bootstrap.JarHell;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Properties;

/**
 * Metadata describing an installed plugin, loaded from its
 * {@code plugin-descriptor.properties} file and streamed between nodes.
 */
public class PluginInfo implements Streamable, ToXContent {

    public static final String ES_PLUGIN_PROPERTIES = "plugin-descriptor.properties";

    // XContent field names used by toXContent().
    static final class Fields {
        static final XContentBuilderString NAME = new XContentBuilderString("name");
        static final XContentBuilderString DESCRIPTION = new XContentBuilderString("description");
        static final XContentBuilderString URL = new XContentBuilderString("url");
        static final XContentBuilderString SITE = new XContentBuilderString("site");
        static final XContentBuilderString VERSION = new XContentBuilderString("version");
        static final XContentBuilderString JVM = new XContentBuilderString("jvm");
        static final XContentBuilderString CLASSNAME = new XContentBuilderString("classname");
        static final XContentBuilderString ISOLATED = new XContentBuilderString("isolated");
    }

    private String name;
    private String description;
    private boolean site;
    private String version;
    private boolean jvm;
    private String classname;
    private boolean isolated;

    public PluginInfo() {
    }

    /**
     * Information about plugins
     *
     * @param name        Its name
     * @param description Its description
     * @param site        true if it's a site plugin
     * @param jvm         true if it's a jvm plugin
     * @param version     Version number
     */
    PluginInfo(String name, String description, boolean site, String version, boolean jvm, String classname, boolean isolated) {
        this.name = name;
        this.description = description;
        this.site = site;
        this.jvm = jvm;
        this.version = version;
        this.classname = classname;
        this.isolated = isolated;
    }

    /** reads (and validates) plugin metadata descriptor file */
    public static PluginInfo readFromProperties(Path dir) throws IOException {
        Path descriptor = dir.resolve(ES_PLUGIN_PROPERTIES);
        Properties props = new Properties();
        try (InputStream stream = Files.newInputStream(descriptor)) {
            props.load(stream);
        }
        // name, description and version are mandatory for every plugin
        String name = props.getProperty("name");
        if (name == null || name.isEmpty()) {
            throw new IllegalArgumentException("Property [name] is missing in [" + descriptor + "]");
        }
        PluginManager.checkForForbiddenName(name);
        String description = props.getProperty("description");
        if (description == null) {
            throw new IllegalArgumentException("Property [description] is missing for plugin [" + name + "]");
        }
        String version = props.getProperty("version");
        if (version == null) {
            throw new IllegalArgumentException("Property [version] is missing for plugin [" + name + "]");
        }

        // a plugin must be at least one of jvm/site
        boolean jvm = Boolean.parseBoolean(props.getProperty("jvm"));
        boolean site = Boolean.parseBoolean(props.getProperty("site"));
        if (jvm == false && site == false) {
            throw new IllegalArgumentException("Plugin [" + name + "] must be at least a jvm or site plugin");
        }
        boolean isolated = true;
        String classname = "NA";
        if (jvm) {
            // jvm plugins additionally declare the exact ES version they were
            // built against, a compatible java version, and an entry class
            String esVersionString = props.getProperty("elasticsearch.version");
            if (esVersionString == null) {
                throw new IllegalArgumentException("Property [elasticsearch.version] is missing for jvm plugin [" + name + "]");
            }
            Version esVersion = Version.fromString(esVersionString);
            if (esVersion.equals(Version.CURRENT) == false) {
                throw new IllegalArgumentException("Plugin [" + name + "] is incompatible with Elasticsearch [" + Version.CURRENT.toString() + "]. Was designed for version [" + esVersionString + "]");
            }
            String javaVersionString = props.getProperty("java.version");
            if (javaVersionString == null) {
                throw new IllegalArgumentException("Property [java.version] is missing for jvm plugin [" + name + "]");
            }
            JarHell.checkVersionFormat(javaVersionString);
            JarHell.checkJavaVersion(name, javaVersionString);
            isolated = Boolean.parseBoolean(props.getProperty("isolated", "true"));
            classname = props.getProperty("classname");
            if (classname == null) {
                throw new IllegalArgumentException("Property [classname] is missing for jvm plugin [" + name + "]");
            }
        }
        if (site) {
            if (!Files.exists(dir.resolve("_site"))) {
                throw new IllegalArgumentException("Plugin [" + name + "] is a site plugin but has no '_site/' directory");
            }
        }
        return new PluginInfo(name, description, site, version, jvm, classname, isolated);
    }

    /**
     * @return Plugin's name
     */
    public String getName() {
        return name;
    }

    /**
     * @return Plugin's description if any
     */
    public String getDescription() {
        return description;
    }

    /**
     * @return true if it's a site plugin
     */
    public boolean isSite() {
        return site;
    }

    /**
     * @return true if it's a plugin running in the jvm
     */
    public boolean isJvm() {
        return jvm;
    }

    /**
     * @return true if jvm plugin has isolated classloader
     */
    public boolean isIsolated() {
        return isolated;
    }

    /**
     * @return jvm plugin's classname
     */
    public String getClassname() {
        return classname;
    }

    /**
     * We compute the URL for sites: "/_plugin/" + name + "/"
     *
     * @return relative URL for site plugin
     */
    public String getUrl() {
        if (site) {
            return ("/_plugin/" + name + "/");
        } else {
            return null;
        }
    }

    /**
     * @return Version number for the plugin
     */
    public String getVersion() {
        return version;
    }

    public static PluginInfo readFromStream(StreamInput in) throws IOException {
        PluginInfo info = new PluginInfo();
        info.readFrom(in);
        return info;
    }

    // Wire format: field order here must stay in sync with writeTo().
    @Override
    public void readFrom(StreamInput in) throws IOException {
        this.name = in.readString();
        this.description = in.readString();
        this.site = in.readBoolean();
        this.jvm = in.readBoolean();
        this.version = in.readString();
        this.classname = in.readString();
        this.isolated = in.readBoolean();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(name);
        out.writeString(description);
        out.writeBoolean(site);
        out.writeBoolean(jvm);
        out.writeString(version);
        out.writeString(classname);
        out.writeBoolean(isolated);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(Fields.NAME, name);
        builder.field(Fields.VERSION, version);
        builder.field(Fields.DESCRIPTION, description);
        // url is only meaningful for site plugins; classname/isolated only for
        // jvm plugins
        if (site) {
            builder.field(Fields.URL, getUrl());
        }
        builder.field(Fields.JVM, jvm);
        if (jvm) {
            builder.field(Fields.CLASSNAME, classname);
            builder.field(Fields.ISOLATED, isolated);
        }
        builder.field(Fields.SITE, site);
        builder.endObject();

        return builder;
    }

    // Equality is based on name and version only; hashCode uses name alone,
    // which still satisfies the equals/hashCode contract.
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        PluginInfo that = (PluginInfo) o;

        if (!name.equals(that.name)) return false;
        if (version != null ? !version.equals(that.version) : that.version != null) return false;

        return true;
    }

    @Override
    public int hashCode() {
        return name.hashCode();
    }

    @Override
    public String toString() {
        final StringBuilder information = new StringBuilder()
                .append("- Plugin information:\n")
                .append("Name: ").append(name).append("\n")
                .append("Description: ").append(description).append("\n")
                .append("Site: ").append(site).append("\n")
                .append("Version: ").append(version).append("\n")
                .append("JVM: ").append(jvm).append("\n");

        if (jvm) {
            information.append(" * Classname: ").append(classname).append("\n");
            information.append(" * Isolated: ").append(isolated);
        }

        return information.toString();
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.hyracks.algebricks.core.algebra.operators.logical;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang3.mutable.Mutable;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.core.algebra.base.IHyracksJobBuilder;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext;
import org.apache.hyracks.algebricks.core.algebra.base.IPhysicalOperator;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
import org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
import org.apache.hyracks.algebricks.core.algebra.properties.IPhysicalPropertiesVector;
import org.apache.hyracks.algebricks.core.algebra.properties.PhysicalRequirements;
import org.apache.hyracks.algebricks.core.algebra.properties.TypePropagationPolicy;
import org.apache.hyracks.algebricks.core.algebra.typing.ITypeEnvPointer;
import org.apache.hyracks.algebricks.core.algebra.typing.ITypingContext;
import org.apache.hyracks.algebricks.core.algebra.typing.OpRefTypeEnvPointer;
import org.apache.hyracks.algebricks.core.algebra.typing.PropagatingTypeEnvironment;
import org.apache.hyracks.algebricks.core.jobgen.impl.JobGenContext;

/**
 * Base class for all logical operators in an Algebricks plan. Holds the common
 * state: execution mode, the assigned physical operator, input operator
 * references, the output schema, and free-form annotations.
 */
public abstract class AbstractLogicalOperator implements ILogicalOperator {

    /*********************************************************************
     * UNPARTITIONED, the input data is not partitioned
     * PARTITIONED, the input data is partitioned, the operator is executed on
     * each partition and may receive input from other partitions (e.g. if it is
     * a join or an aggregate)
     * LOCAL, the input data is partitioned, the operator is executed on each
     * partition and only processes data from that partition
     */
    public static enum ExecutionMode {
        UNPARTITIONED,
        PARTITIONED,
        LOCAL
    }

    private AbstractLogicalOperator.ExecutionMode mode = AbstractLogicalOperator.ExecutionMode.UNPARTITIONED;
    // The physical operator chosen for this logical operator; must be set
    // before any of the physical-property or jobgen methods are called.
    protected IPhysicalOperator physicalOperator;

    private final Map<String, Object> annotations = new HashMap<String, Object>();
    // When false, contributeRuntimeOperator() is a no-op (see disableJobGen()).
    private boolean bJobGenEnabled = true;

    final protected List<Mutable<ILogicalOperator>> inputs;
    // protected List<LogicalOperatorReference> outputs;
    protected List<LogicalVariable> schema;

    public AbstractLogicalOperator() {
        inputs = new ArrayList<Mutable<ILogicalOperator>>();
    }

    @Override
    public abstract LogicalOperatorTag getOperatorTag();

    @Override
    public ExecutionMode getExecutionMode() {
        return mode;
    }

    public void setExecutionMode(ExecutionMode mode) {
        this.mode = mode;
    }

    @Override
    public List<LogicalVariable> getSchema() {
        return schema;
    }

    // Stores a defensive copy of the given schema; a null argument leaves the
    // current schema unchanged.
    public void setSchema(List<LogicalVariable> schema) {
        if (schema == null) {
            return;
        }
        this.schema = new ArrayList<>();
        this.schema.addAll(schema);
    }

    public void setPhysicalOperator(IPhysicalOperator physicalOp) {
        this.physicalOperator = physicalOp;
    }

    public IPhysicalOperator getPhysicalOperator() {
        return physicalOperator;
    }

    /**
     * @return for each child, one vector of required physical properties
     */
    @Override
    public final PhysicalRequirements getRequiredPhysicalPropertiesForChildren(
            IPhysicalPropertiesVector requiredProperties, IOptimizationContext context) {
        return physicalOperator.getRequiredPropertiesForChildren(this, requiredProperties, context);
    }

    /**
     * @return the physical properties that this operator delivers, based on
     *         what its children deliver
     */
    @Override
    public final IPhysicalPropertiesVector getDeliveredPhysicalProperties() {
        return physicalOperator.getDeliveredProperties();
    }

    @Override
    public final void computeDeliveredPhysicalProperties(IOptimizationContext context) throws AlgebricksException {
        physicalOperator.computeDeliveredProperties(this, context);
    }

    @Override
    public final List<Mutable<ILogicalOperator>> getInputs() {
        return inputs;
    }

    // @Override
    // public final List<LogicalOperatorReference> getOutputs() {
    // return outputs;
    // }

    @Override
    public final boolean hasInputs() {
        return !inputs.isEmpty();
    }

    // Subclasses with nested plans (e.g. subplan operators) override this.
    public boolean hasNestedPlans() {
        return false;
    }

    @Override
    public Map<String, Object> getAnnotations() {
        return annotations;
    }

    @Override
    public void removeAnnotation(String annotationName) {
        annotations.remove(annotationName);
    }

    /**
     * Delegates runtime-operator generation to the physical operator, unless
     * job generation has been disabled for this operator.
     *
     * @throws AlgebricksException if no physical operator has been set
     */
    @Override
    public final void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context,
            IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
            throws AlgebricksException {
        if (bJobGenEnabled) {
            if (physicalOperator == null) {
                throw new AlgebricksException("Physical operator not set for operator: " + this);
            }
            physicalOperator.contributeRuntimeOperator(builder, context, this, propagatedSchema, inputSchemas,
                    outerPlanSchema);
        }
    }

    public void disableJobGen() {
        bJobGenEnabled = false;
    }

    public boolean isJobGenEnabled() {
        return bJobGenEnabled;
    }

    @Override
    public IVariableTypeEnvironment computeInputTypeEnvironment(ITypingContext ctx) throws AlgebricksException {
        return createPropagatingAllInputsTypeEnvironment(ctx);
    }

    // Builds a type environment that propagates the types of ALL inputs,
    // pointing each env slot at the corresponding input operator reference.
    protected PropagatingTypeEnvironment createPropagatingAllInputsTypeEnvironment(ITypingContext ctx) {
        int n = inputs.size();
        ITypeEnvPointer[] envPointers = new ITypeEnvPointer[n];
        for (int i = 0; i < n; i++) {
            envPointers[i] = new OpRefTypeEnvPointer(inputs.get(i), ctx);
        }
        return new PropagatingTypeEnvironment(ctx.getExpressionTypeComputer(), ctx.getNullableTypeComputer(),
                ctx.getMetadataProvider(), TypePropagationPolicy.ALL, envPointers);
    }

    @Override
    public boolean requiresVariableReferenceExpressions() {
        return true;
    }
}
package cz.metacentrum.perun.webgui.json.servicesManager;

import com.google.gwt.cell.client.FieldUpdater;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.user.cellview.client.CellTable;
import com.google.gwt.user.cellview.client.ColumnSortEvent.ListHandler;
import com.google.gwt.view.client.DefaultSelectionEventManager;
import com.google.gwt.view.client.ListDataProvider;
import com.google.gwt.view.client.MultiSelectionModel;
import cz.metacentrum.perun.webgui.client.PerunWebSession;
import cz.metacentrum.perun.webgui.client.resources.TableSorter;
import cz.metacentrum.perun.webgui.json.*;
import cz.metacentrum.perun.webgui.json.keyproviders.GeneralKeyProvider;
import cz.metacentrum.perun.webgui.model.PerunError;
import cz.metacentrum.perun.webgui.model.Service;
import cz.metacentrum.perun.webgui.widgets.AjaxLoaderImage;
import cz.metacentrum.perun.webgui.widgets.PerunTable;

import java.util.ArrayList;

/**
 * Ajax query to get all services assigned to a facility.
 *
 * Implements both {@link JsonCallback} (RPC lifecycle: onLoadingStart /
 * onFinished / onError) and {@link JsonCallbackTable} (table management for
 * {@link Service} rows).
 *
 * @author Pavel Zlamal <256627@mail.muni.cz>
 */
public class GetFacilityAssignedServices implements JsonCallback, JsonCallbackTable<Service> {

	// Session
	private PerunWebSession session = PerunWebSession.getInstance();
	// JSON URL (RPC endpoint queried by retrieveData())
	static private final String JSON_URL = "servicesManager/getAssignedServices";
	// External events fired alongside the internal callback handling
	private JsonCallbackEvents events = new JsonCallbackEvents();
	// Table field updater (per-cell onClick handler)
	private FieldUpdater<Service, String> tableFieldUpdater;
	// data providers
	private ListDataProvider<Service> dataProvider = new ListDataProvider<Service>();
	private ArrayList<Service> list = new ArrayList<Service>();
	private PerunTable<Service> table;
	// Selection model (checkbox multi-select keyed by Service id)
	final MultiSelectionModel<Service> selectionModel = new MultiSelectionModel<Service>(new GeneralKeyProvider<Service>());
	private int facilityId = 0;
	// Shown while loading / on empty table / on error
	private AjaxLoaderImage loaderImage = new AjaxLoaderImage();

	/**
	 * New instance of callback
	 *
	 * @param facilityId ID of facility to get services for
	 */
	public GetFacilityAssignedServices(int facilityId) {
		this.facilityId = facilityId;
	}

	/**
	 * New instance of callback with custom events
	 *
	 * @param facilityId ID of facility to get services for
	 * @param events custom events
	 */
	public GetFacilityAssignedServices(int facilityId,JsonCallbackEvents events) {
		this.events = events;
		this.facilityId = facilityId;
	}

	/**
	 * Returns table of assigned services on facility with custom onClick
	 *
	 * @param fu custom onClick (field updater)
	 */
	public CellTable<Service> getTable(FieldUpdater<Service, String> fu){
		this.tableFieldUpdater = fu;
		return this.getTable();
	}

	/**
	 * Return table with assigned services on facility.
	 *
	 * Triggers the RPC call first; rows arrive asynchronously via
	 * onFinished(), which repopulates {@code list}.
	 *
	 * @return table widget
	 */
	public CellTable<Service> getTable() {

		retrieveData();

		// Table data provider — wraps `list` as its backing collection.
		dataProvider = new ListDataProvider<Service>(list);

		// Cell table
		table = new PerunTable<Service>(list);

		// Connect the table to the data provider.
		dataProvider.addDataDisplay(table);

		// Sorting
		ListHandler<Service> columnSortHandler = new ListHandler<Service>(dataProvider.getList());
		table.addColumnSortHandler(columnSortHandler);

		// table selection
		table.setSelectionModel(selectionModel, DefaultSelectionEventManager.<Service> createCheckboxManager());

		// set empty content & loader
		table.setEmptyTableWidget(loaderImage);

		// checkbox column
		table.addCheckBoxColumn();
		table.addIdColumn("Service Id", tableFieldUpdater, 110);
		table.addNameColumn(tableFieldUpdater);

		return table;

	}

	/**
	 * Retrieve data from RPC
	 */
	public void retrieveData() {
		JsonClient js = new JsonClient();
		js.retrieveData(JSON_URL, "facility="+facilityId, this);
	}

	/**
	 * Sorts table by objects Name.
	 *
	 * NOTE(review): this reassigns the `list` field to a new sorted list,
	 * while dataProvider/table were constructed around the previous list
	 * instance — confirm the widgets still observe the sorted data after
	 * this call (pattern copied across Perun callbacks).
	 */
	public void sortTable() {
		list = new TableSorter<Service>().sortByName(getList());
		dataProvider.flush();
		dataProvider.refresh();
	}

	/**
	 * Add object as new row to table
	 *
	 * @param object Service to be added as new row
	 */
	public void addToTable(Service object) {
		list.add(object);
		dataProvider.flush();
		dataProvider.refresh();
	}

	/**
	 * Removes object as row from table
	 *
	 * @param object Service to be removed as row
	 */
	public void removeFromTable(Service object) {
		list.remove(object);
		selectionModel.getSelectedSet().remove(object);
		dataProvider.flush();
		dataProvider.refresh();
	}

	/**
	 * Clear all table content (also restarts the loader animation).
	 */
	public void clearTable(){
		loaderImage.loadingStart();
		list.clear();
		selectionModel.clear();
		dataProvider.flush();
		dataProvider.refresh();
	}

	/**
	 * Clears list of selected items
	 */
	public void clearTableSelectedSet(){
		selectionModel.clear();
	}

	/**
	 * Return selected items from list
	 *
	 * @return return list of checked items
	 */
	public ArrayList<Service> getTableSelectedList(){
		return JsonUtils.setToList(selectionModel.getSelectedSet());
	}

	/**
	 * Called, when an error occurs
	 */
	public void onError(PerunError error) {
		session.getUiElements().setLogErrorText("Error while loading facility services.");
		loaderImage.loadingError(error);
		events.onError(error);
	}

	/**
	 * Called, when loading starts
	 */
	public void onLoadingStart() {
		session.getUiElements().setLogText("Loading facility services started.");
		events.onLoadingStart();
	}

	/**
	 * Called, when operation finishes successfully.
	 * Replaces the table content with the received services and sorts them.
	 */
	public void onFinished(JavaScriptObject jso) {
		setList(JsonUtils.<Service>jsoAsList(jso));
		sortTable();
		session.getUiElements().setLogText("Facility services loaded: " + list.size());
		events.onFinished(jso);
		loaderImage.loadingFinished();
	}

	// Inserts a row at a specific index (JsonCallbackTable contract).
	public void insertToTable(int index, Service object) {
		list.add(index, object);
		dataProvider.flush();
		dataProvider.refresh();
	}

	// Not supported by this table (JsonCallbackTable contract).
	public void setEditable(boolean editable) {
		// TODO Auto-generated method stub
	}

	// Not supported by this table (JsonCallbackTable contract).
	public void setCheckable(boolean checkable) {
		// TODO Auto-generated method stub
	}

	// Replaces table content; clearTable() restarts the loader first.
	public void setList(ArrayList<Service> list) {
		clearTable();
		this.list.addAll(list);
		dataProvider.flush();
		dataProvider.refresh();
	}

	// Returns the live backing list (not a copy).
	public ArrayList<Service> getList() {
		return this.list;
	}

	/**
	 * Sets different facility ID for callback after creation
	 *
	 * @param facilityId new facility ID
	 */
	public void setFacility(int facilityId) {
		this.facilityId = facilityId;
	}

	/**
	 * Sets events after callback creation
	 *
	 * @param events external events
	 */
	public void setEvents(JsonCallbackEvents events) {
		this.events = events;
	}

}
/********************************************************************************** * $URL: https://source.sakaiproject.org/svn/sam/tags/sakai-10.1/samigo-app/src/java/org/sakaiproject/tool/assessment/jsf/tag/PagerButtonTag.java $ * $Id: PagerButtonTag.java 106463 2012-04-02 12:20:09Z david.horwitz@uct.ac.za $ *********************************************************************************** * * Copyright (c) 2004, 2005, 2006, 2008 The Sakai Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * **********************************************************************************/ package org.sakaiproject.tool.assessment.jsf.tag; import javax.faces.component.UIComponent; import javax.faces.webapp.UIComponentTag; /** * * <p>Description:<br /> * This class is the tag handler for a next/previous control for a paging a dataTable. * This displays a set of labels but does not control the dataTable directly. * It posts the form indicated by form id.</p> * <p> * Usage: Designed to get its parameters via value references in a backing bean from the back end, using Hibernate partial result sets. required: formId this is the form id you are in, and it will post to it dataTableId this is a unique value for each data table you control to make the controls unique firstItem the first item number (e.g. 1) lastItem the last item number (e.g. 10) prevText e.g "Previous" from a resource bundle nextText e.g. 
"Next" from a bundle (these are the only parts that can be localized) numItems number of items to show at one time totalItems total number of items optional: prevDisabled if set to "true" will disable button nextDisabled if set to "true" will disable button Note that if you are on "1" it will automatically disable the previous button, and if you are on totalItems it will automatically disable the next. Disabled buttons are greyed out. </p> * <p>Copyright: Copyright (c) 2004</p> * <p>Organization: Sakai Project</p> * @author Ed Smiley * @version $Id: PagerButtonTag.java 106463 2012-04-02 12:20:09Z david.horwitz@uct.ac.za $ */ public class PagerButtonTag extends UIComponentTag { private String formId; private String firstItem; private String lastItem; private String dataTableId; private String prevText; private String nextText; private String numItems; private String prevDisabled; private String nextDisabled; private String totalItems; public String getComponentType() { return ("javax.faces.Output"); } public String getRendererType() { return "PagerButton"; } protected void setProperties(UIComponent component) { super.setProperties(component); if (!"true".equals(prevDisabled)) prevDisabled = "false"; if (!"true".equals(nextDisabled)) nextDisabled = "false"; component.getAttributes().put("dataTableId", dataTableId); component.getAttributes().put("formId", formId); TagUtil.setString(component, "name", "value"); TagUtil.setString(component, "firstItem", firstItem); TagUtil.setString(component, "lastItem", lastItem); TagUtil.setString(component, "prevText", prevText); TagUtil.setString(component, "nextText", nextText); TagUtil.setString(component, "numItems", numItems); TagUtil.setString(component, "totalItems", totalItems); // we explicitly disable prev/next at end ranges if ("1".equals(firstItem)) prevDisabled = "true"; if (("" + totalItems).equals(lastItem)) nextDisabled = "true"; TagUtil.setString(component, "prevDisabled", prevDisabled); TagUtil.setString(component, 
"nextDisabled", nextDisabled); } /** * id of form to post to, placed in the name of the prev/next controls * @return the id */ public String getFormId() { return formId; } /** * id of form to post to, placed in the name of the controls * @param formId id of form to post to */ public void setFormId(String formId) { this.formId = formId; } /** * number of first item displayed * @return typically, a numeric string */ public String getFirstItem() { return firstItem; } /** * number of first item displayed * @param firstItem number of first item displayed */ public void setFirstItem(String firstItem) { this.firstItem = firstItem; } /** * number of last item displayed * @return number of first item displayed, typically numeric string */ public String getLastItem() { return lastItem; } /** * number of first item displayed * @param lastItem number of first item displayed */ public void setLastItem(String lastItem) { this.lastItem = lastItem; } /** * data table id, placed in the name of the controls for uniqueness * technically could be anything, but using the id of the dataTable is * best practice * @return data table id */ public String getDataTableId() { return dataTableId; } /** * data table id, placed in the name of the controls for uniqueness * @param dataTableId data table id, placed in the name of the controls */ public void setDataTableId(String dataTableId) { this.dataTableId = dataTableId; } /** * text for "Previous" * @return text for "Previous" */ public String getPrevText() { return prevText; } /** * text for "Previous" * @param prevText text for "Previous" */ public void setPrevText(String prevText) { this.prevText = prevText; } /** * text for "Next" * @return text for "Next" */ public String getNextText() { return nextText; } /** * text for "Next" * @param nextText text for "Next" */ public void setNextText(String nextText) { this.nextText = nextText; } /** * number of items string, how many iems in the datatable, selected option * @return number of items 
string */ public String getNumItems() { return numItems; } /** * number of items string, how many iems in the datatable, selected option * @param numItems number of items string */ public void setNumItems(String numItems) { this.numItems = numItems; } /** * "true" if previous control disabled * @return "true" if previous control disabled */ public String getPrevDisabled() { return prevDisabled; } /** * "true" if previous control to be disabled, otehrwise ignored * @param prevDisabled "true" if previous control disabled */ public void setPrevDisabled(String prevDisabled) { this.prevDisabled = prevDisabled; } /** * "true" if next control disabled * @return "true" if next control disabled */ public String getNextDisabled() { return nextDisabled; } /** * "true" if next control disabled * @param nextDisabled "true" if next control disabled */ public void setNextDisabled(String nextDisabled) { this.nextDisabled = nextDisabled; } /** * display total items * @return total items, typically a numeric string */ public String getTotalItems() { return totalItems; } /** * total items to display as "out of n total items" * @param totalItems total items, typically a numeric string */ public void setTotalItems(String totalItems) { this.totalItems = totalItems; } }
package com.klogicapps.tv.services;

import android.content.Context;
import android.content.Intent;
import android.database.Cursor;

import com.klogicapps.tv.activities.AuthActivity;
import com.klogicapps.tv.database.DatabaseAccess;
import com.klogicapps.tv.database.DatabaseTables;
import com.klogicapps.tv.database.YouTubeData;
import com.klogicapps.tv.misc.AppUtils;
import com.klogicapps.tv.misc.BusEvents;
import com.klogicapps.tv.misc.DUtils;
import com.klogicapps.tv.youtube.YouTubeAPI;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

import de.greenrobot.event.EventBus;

/**
 * Fetches a list of YouTube items for a request, stores them into the local
 * database (preserving previously hidden items), and posts a bus event when
 * the work is handled. The whole task runs in the constructor.
 */
public class ListServiceTask {
    private Context context;

    /**
     * Runs the list task.
     *
     * @param context        Android context used for database and API access
     * @param request        describes what to fetch (type, channel, playlist, ...)
     * @param hasFetchedData true if data was already fetched this session
     * @param refresh        force a refresh from the network
     */
    public ListServiceTask(Context context, final ListServiceRequest request, final boolean hasFetchedData, boolean refresh) {
        this.context = context;

        if (!refresh && !hasFetchedData) {
            // Nothing fetched this session yet: if the database has no rows
            // for this request, we must refresh from the network.
            DatabaseAccess access = new DatabaseAccess(context, request.databaseTable());
            Cursor cursor = access.getCursor(DatabaseTables.ALL_ITEMS, request.requestIdentifier());
            if (!cursor.moveToFirst())
                refresh = true;
            cursor.close();
        }

        if (refresh) {
            YouTubeAPI helper = new YouTubeAPI(context, false, true, new YouTubeAPI.YouTubeAPIListener() {
                @Override
                public void handleAuthIntent(final Intent authIntent) {
                    // API needs user authorization: launch the auth flow and
                    // pass the request along so it can be retried afterwards.
                    AuthActivity.show(ListServiceTask.this.context, authIntent, request.toBundle());
                }
            });

            updateDataFromInternet(request, helper);
        }

        // notify that we handled an intent so pull to refresh can stop it's animation and other stuff
        EventBus.getDefault().post(new BusEvents.YouTubeFragmentDataReady());
    }

    /**
     * Tags each fetched item with the request id and re-applies the "hidden"
     * flag for items the user had hidden before the refresh.
     *
     * @param inList               freshly fetched items (mutated in place)
     * @param currentListSavedData ids of previously hidden videos/playlists; may be null
     * @param requestID            request identifier to stamp onto each item
     * @return the same list instance, updated
     */
    private List<YouTubeData> prepareDataFromNet(List<YouTubeData> inList, Set<String> currentListSavedData, String requestID) {
        for (YouTubeData data : inList) {
            // set the request id
            data.mRequest = requestID;

            if (currentListSavedData != null && currentListSavedData.size() > 0) {
                String videoOrPl = data.mVideo == null ? data.mPlaylist : data.mVideo;
                if (videoOrPl != null) {
                    if (currentListSavedData.contains(videoOrPl)) {
                        currentListSavedData.remove(videoOrPl); // faster?
                        data.setHidden(true);
                    }
                }
            }
        }

        return inList;
    }

    /**
     * Collects the video/playlist ids of items currently hidden in the
     * database, so the hidden state survives a refresh.
     *
     * @return set of hidden ids, or null if the database returned nothing
     */
    private Set<String> saveExistingListState(DatabaseAccess database, String requestIdentifier) {
        Set<String> result = null;

        // ask the database for the hidden items
        List<YouTubeData> hiddenItems = database.getItems(DatabaseTables.HIDDEN_ITEMS, requestIdentifier, 0);

        if (hiddenItems != null) {
            result = new HashSet<String>();

            for (YouTubeData data : hiddenItems) {
                String videoOrPl = data.mVideo == null ? data.mPlaylist : data.mVideo;
                if (videoOrPl != null) {
                    result.add(videoOrPl);
                }
            }
        }

        return result;
    }

    /**
     * Performs the network fetch for the request type and writes the result
     * into the database. No-op without network connectivity.
     */
    private void updateDataFromInternet(ListServiceRequest request, YouTubeAPI helper) {
        String playlistID;
        boolean removeAllFromDB = true;
        List<YouTubeData> resultList = null;

        // do we have internet access?
        if (!AppUtils.instance(context).hasNetworkConnection()) {
            DUtils.log("No internet connection.  Method: " + DUtils.currentMethod());
            return;
        }

        YouTubeAPI.BaseListResults listResults = null;

        switch (request.type()) {
            case RELATED:
                YouTubeAPI.RelatedPlaylistType type = request.relatedType();
                String channelID = request.channel();

                playlistID = helper.relatedPlaylistID(type, channelID);
                if (playlistID != null) // probably needed authorization and failed
                    resultList = retrieveVideoList(request, helper, playlistID, null, request.maxResults());
                removeAllFromDB = false;
                break;
            case VIDEOS:
                playlistID = request.playlist();
                // can't use request.maxResults() since we have to get everything and sort it
                resultList = retrieveVideoList(request, helper, playlistID, null, 0);
                removeAllFromDB = false;
                break;
            case SEARCH:
                String query = request.query();
                listResults = helper.searchListResults(query, false);
                break;
            case LIKED:
                listResults = helper.likedVideosListResults();
                break;
            case PLAYLISTS:
                String channel = request.channel();
                resultList = retrieveVideoList(request, helper, null, channel, request.maxResults());

                // remove any playlists with 0 videos
                Iterator<YouTubeData> iterator = resultList.iterator();
                while (iterator.hasNext()) {
                    YouTubeData data = iterator.next();
                    if (data.mItemCount == 0) {
                        iterator.remove();
                    }
                }
                removeAllFromDB = false;
                break;
            case SUBSCRIPTIONS:
                listResults = helper.subscriptionListResults(false);
                break;
            case CATEGORIES:
                listResults = helper.categoriesListResults("US");
                break;
        }

        if (resultList == null) {
            if (listResults != null) {
                resultList = listResults.getAllItems(request.maxResults());
            }
        }

        if (resultList != null) {
            DatabaseAccess database = new DatabaseAccess(context, request.databaseTable());

            // FIX: was a raw Set — keep it typed for compile-time safety.
            Set<String> currentListSavedData = saveExistingListState(database, request.requestIdentifier());

            resultList = prepareDataFromNet(resultList, currentListSavedData, request.requestIdentifier());

            if (removeAllFromDB)
                database.deleteAllRows(request.requestIdentifier());

            database.insertItems(resultList);
        }
    }

    /**
     * Fetches full video/playlist info for a playlist or channel, skipping
     * ids we already have locally and batching detail requests to the API's
     * max-results limit.
     *
     * @param playlistID fetch videos from this playlist; if null, fetch the
     *                   channel's playlists instead
     * @param maxResults 0 means "everything"
     */
    private List<YouTubeData> retrieveVideoList(ListServiceRequest request, YouTubeAPI helper, String playlistID, String channelID, int maxResults) {
        List<YouTubeData> result = new ArrayList<YouTubeData>();
        YouTubeAPI.BaseListResults videoResults;

        if (playlistID != null)
            videoResults = helper.videosFromPlaylistResults(playlistID);
        else
            videoResults = helper.channelPlaylistsResults(channelID, false);

        if (videoResults != null) {
            List<YouTubeData> videoData = videoResults.getAllItems(maxResults);

            // extract just the video ids from list
            List<String> videoIds = YouTubeData.contentIdsList(videoData);

            // remove videos that we already have...
            videoIds = removeVideosWeAlreadyHave(request, videoIds);

            // request details in chunks of the API's per-call limit
            final int limit = YouTubeAPI.youTubeMaxResultsLimit();
            for (int n = 0; n < videoIds.size(); n += limit) {
                int chunkSize = Math.min(videoIds.size(), n + limit);
                List<String> chunk = videoIds.subList(n, chunkSize);

                if (playlistID != null)
                    videoResults = helper.videoInfoListResults(chunk);
                else
                    videoResults = helper.playlistInfoListResults(chunk);

                result.addAll(videoResults.getItems(0));
            }
        }

        return result;
    }

    /**
     * Filters out ids that already exist in the database for this request.
     *
     * @return a new filtered list, or the original list if nothing was removed
     */
    private List<String> removeVideosWeAlreadyHave(ListServiceRequest request, List<String> newVideoIds) {
        List<String> result = newVideoIds; // return same list if not modified

        DatabaseAccess database = new DatabaseAccess(context, request.databaseTable());
        List<YouTubeData> existingItems = database.getItems(DatabaseTables.CONTENT_ONLY, request.requestIdentifier(), 0);

        if (existingItems != null) {
            // FIX: was a raw Set — keep it typed for compile-time safety.
            Set<String> existingIds = new HashSet<String>(existingItems.size());

            for (YouTubeData data : existingItems) {
                String videoOrPl = data.mVideo == null ? data.mPlaylist : data.mVideo;
                if (videoOrPl != null) {
                    existingIds.add(videoOrPl);
                }
            }

            if (existingIds.size() > 0) {
                result = new ArrayList<String>(newVideoIds.size());

                for (String videoId : newVideoIds) {
                    if (!existingIds.contains(videoId)) {
                        result.add(videoId);
                    }
                }
            }
        }

        boolean debugging = false;
        if (debugging) {
            DUtils.log("removed: " + (newVideoIds.size() - result.size()));
            DUtils.log("returning: " + result.size());
        }

        return result;
    }
}
package topicmodels.correspondenceModels;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.util.*;

import structures.MyPriorityQueue;
import structures._ChildDoc;
import structures._Corpus;
import structures._Doc;
import structures._ParentDoc;
import structures._ParentDoc4DCM;
import structures._RankItem;
import structures._Stn;
import structures._Word;
import utils.Utils;

/**
 * Debug/evaluation subclass of {@link PriorCorrLDA} that dumps model state
 * (top words per topic, per-document topic assignments, parent/child
 * parameters, ranked sentences) to text files.
 *
 * Created by jetcai1900 on 1/13/17.
 */
public class PriorCorrLDA_test extends PriorCorrLDA {

	/**
	 * NOTE(review): the {@code ksi} and {@code tau} parameters are accepted
	 * but not forwarded to the superclass — confirm they are intentionally
	 * unused in this test variant.
	 */
	public PriorCorrLDA_test(int number_of_iteration, double converge, double beta,
			_Corpus c, double lambda, int number_of_topics, double alpha,
			double alpha_c, double burnIn, int lag, double ksi, double tau) {
		super(number_of_iteration, converge, beta, c, lambda, number_of_topics,
				alpha, alpha_c, burnIn, lag);
	}

	/**
	 * Writes the top {@code k} words of every topic to {@code betaFile}
	 * (and echoes them to stdout), weighting topics by their aggregate
	 * proportion over the training set. Also triggers the full debug dump.
	 */
	public void printTopWords(int k, String betaFile) {
		double loglikelihood = calculate_log_likelihood();
		System.out.format("Final log likelihood %.3f\t", loglikelihood);

		String filePrefix = betaFile.replace("topWords.txt", "");
		debugOutput(k, filePrefix);

		// Aggregate topic proportions over all training documents.
		Arrays.fill(m_sstat, 0);
		System.out.println("print top words");
		for (_Doc d : m_trainSet) {
			for (int i = 0; i < number_of_topics; i++)
				m_sstat[i] += m_logSpace ? Math.exp(d.m_topics[i]) : d.m_topics[i];
		}
		Utils.L1Normalization(m_sstat);

		try {
			System.out.println("beta file");
			PrintWriter betaOut = new PrintWriter(new File(betaFile));
			for (int i = 0; i < topic_term_probabilty.length; i++) {
				// Keep only the k highest-probability terms of topic i.
				MyPriorityQueue<_RankItem> fVector = new MyPriorityQueue<_RankItem>(k);
				for (int j = 0; j < vocabulary_size; j++)
					fVector.add(new _RankItem(m_corpus.getFeature(j),
							topic_term_probabilty[i][j]));

				betaOut.format("Topic %d(%.3f):\t", i, m_sstat[i]);
				for (_RankItem it : fVector) {
					betaOut.format("%s(%.3f)\t", it.m_name,
							m_logSpace ? Math.exp(it.m_value) : it.m_value);
					System.out.format("%s(%.3f)\t", it.m_name,
							m_logSpace ? Math.exp(it.m_value) : it.m_value);
				}
				betaOut.println();
				System.out.println();
			}
			betaOut.flush();
			betaOut.close();
		} catch (Exception ex) {
			// BUGFIX: the original printed a hard-coded "File Not Found" for
			// ANY exception type; report the actual failure instead.
			ex.printStackTrace();
		}
	}

	/**
	 * Creates the output folders and writes topic assignments, parameters
	 * and the various sentence/child rankings under {@code filePrefix}.
	 */
	protected void debugOutput(int topK, String filePrefix) {
		File parentTopicFolder = new File(filePrefix + "parentTopicAssignment");
		File childTopicFolder = new File(filePrefix + "childTopicAssignment");
		if (!parentTopicFolder.exists()) {
			System.out.println("creating directory" + parentTopicFolder);
			parentTopicFolder.mkdir();
		}
		if (!childTopicFolder.exists()) {
			System.out.println("creating directory" + childTopicFolder);
			childTopicFolder.mkdir();
		}

		File parentWordTopicDistributionFolder = new File(filePrefix + "wordTopicDistribution");
		if (!parentWordTopicDistributionFolder.exists()) {
			System.out.println("creating word topic distribution folder\t"
					+ parentWordTopicDistributionFolder);
			parentWordTopicDistributionFolder.mkdir();
		}

		for (_Doc d : m_trainSet) {
			if (d instanceof _ParentDoc) {
				printParentTopicAssignment(d, parentTopicFolder);
			} else {
				printChildTopicAssignment(d, childTopicFolder);
			}
		}

		String parentParameterFile = filePrefix + "parentParameter.txt";
		String childParameterFile = filePrefix + "childParameter.txt";

		printParameter(parentParameterFile, childParameterFile, m_trainSet);

		printTopKChild4Stn(filePrefix, topK);
		printTopKChild4Parent(filePrefix, topK);

		int randomNum = 5;
		selectStn(filePrefix, topK, randomNum);
		printTopStn4ParentByNormLikelihood(filePrefix, topK);
		printTopStn4ParentByMajorTopic(filePrefix, topK);
	}

	/**
	 * Writes, per parent document, the merged set of sentences selected by
	 * three rankers (normalized likelihood, major topic, parent likelihood)
	 * plus a per-ranker index file.
	 *
	 * NOTE(review): topK is overwritten to 5 and randomNum/selectedArray are
	 * computed but never used — preserved as-is; confirm intent.
	 */
	protected void selectStn(String filePrefix, int topK, int randomNum) {
		topK = 5;
		int docSize = m_trainSet.size();
		int[] selectedArray = new int[randomNum];

		for (int i = 0; i < randomNum; i++)
			selectedArray[i] = m_rand.nextInt(500);

		System.out.println("printing sentence");
		String sentenceFile = filePrefix + "selectedStn.txt";
		String sentenceIndexFile = filePrefix + "stnIndex.txt";
		try {
			PrintWriter stnOut = new PrintWriter(new File(sentenceFile));
			PrintWriter stnIndexOut = new PrintWriter(new File(sentenceIndexFile));

			for (_Doc d : m_trainSet) {
				if (d instanceof _ParentDoc) {
					_ParentDoc4DCM pDoc = (_ParentDoc4DCM) d;

					// Skip documents with fewer sentences than we select.
					if (pDoc.getSenetenceSize() < topK) {
						continue;
					}

					ArrayList<Integer> mergedStnIndexList = new ArrayList<Integer>();
					ArrayList<Integer> normLikelihoodStnList = new ArrayList<Integer>();
					ArrayList<Integer> majorTopicStnList = new ArrayList<Integer>();
					ArrayList<Integer> parentLikelihoodStnList = new ArrayList<Integer>();

					estimateTopicProb4Words(pDoc);
					topStn4ParentByNormLikelihood(pDoc, topK, normLikelihoodStnList, mergedStnIndexList);
					topStn4ParentByMajorTopic(pDoc, topK, majorTopicStnList, mergedStnIndexList);
					topStn4ParentByParentLikelihood(pDoc, topK, parentLikelihoodStnList, mergedStnIndexList);

					stnOut.print(pDoc.getName());
					for (int stnIndex : mergedStnIndexList) {
						stnOut.print("\t" + stnIndex);
					}
					stnOut.println();

					stnIndexOut.print(pDoc.getName());
					stnIndexOut.print("\tnormLikelihood");
					for (int stnIndex : normLikelihoodStnList) {
						stnIndexOut.print("\t" + stnIndex);
					}
					stnIndexOut.print("\tmajorTopic");
					for (int stnIndex : majorTopicStnList) {
						stnIndexOut.print("\t" + stnIndex);
					}
					stnIndexOut.print("\tparentLikelihood");
					for (int stnIndex : parentLikelihoodStnList) {
						stnIndexOut.print("\t" + stnIndex);
					}
					stnIndexOut.println();
				}
			}

			stnIndexOut.flush();
			stnIndexOut.close();

			stnOut.flush();
			stnOut.close();
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	/**
	 * Picks the topK sentences with the highest length-normalized
	 * likelihood under the parent's topic proportions.
	 */
	protected void topStn4ParentByNormLikelihood(_ParentDoc4DCM pDoc, int topK,
			ArrayList<Integer> stnList, ArrayList<Integer> mergedStnList) {
		HashMap<Integer, Double> stnNormLikelihoodMap = new HashMap<Integer, Double>();

		for (_Stn stnObj : pDoc.getSentences()) {
			double likelihood = rankStn4ParentByNormLikelihood(stnObj, pDoc);
			likelihood = likelihood / (stnObj.getLength() * 1.0);
			stnNormLikelihoodMap.put(stnObj.getIndex(), likelihood);
		}

		List<Map.Entry<Integer, Double>> stnNormLikelihoodList =
				new ArrayList<Map.Entry<Integer, Double>>(stnNormLikelihoodMap.entrySet());

		// Sort descending by likelihood.
		// BUGFIX: the original compared the Double values via toString(),
		// i.e. lexicographically ("-9.0" vs "-10.0" etc.), producing a wrong
		// ordering. Compare numerically.
		Collections.sort(stnNormLikelihoodList, new Comparator<Map.Entry<Integer, Double>>() {
			public int compare(Map.Entry<Integer, Double> o1, Map.Entry<Integer, Double> o2) {
				return o2.getValue().compareTo(o1.getValue());
			}
		});

		for (int i = 0; i < topK; i++) {
			int selectedKey = stnNormLikelihoodList.get(i).getKey();
			stnList.add(selectedKey);
			if (!mergedStnList.contains(selectedKey)) {
				mergedStnList.add(selectedKey);
			}
		}
	}

	/**
	 * Picks up to topK sentences (in document order) whose own major topic
	 * equals the parent document's most probable topic.
	 */
	protected void topStn4ParentByMajorTopic(_ParentDoc4DCM pDoc, int topK,
			ArrayList<Integer> stnList, ArrayList<Integer> mergedStnList) {
		int maxTopicIndex = 0;
		double maxTopicProportion = 0;
		for (int k = 0; k < number_of_topics; k++) {
			if (pDoc.m_topics[k] > maxTopicProportion) {
				maxTopicIndex = k;
				maxTopicProportion = pDoc.m_topics[k];
			}
		}

		int stnNum = 1;
		for (_Stn stnObj : pDoc.getSentences()) {
			int stnMajorTopic = rankStn4ParentByMajorTopic(stnObj, pDoc);
			if (stnMajorTopic == maxTopicIndex) {
				if (stnNum > topK)
					break;
				stnList.add(stnObj.getIndex());
				if (!mergedStnList.contains(stnObj.getIndex())) {
					mergedStnList.add(stnObj.getIndex());
				}
				stnNum += 1;
			}
		}
	}

	/**
	 * Picks the topK sentences whose topic proportions give the highest
	 * likelihood to the whole parent document.
	 */
	protected void topStn4ParentByParentLikelihood(_ParentDoc4DCM pDoc, int topK,
			ArrayList<Integer> stnList, ArrayList<Integer> mergedStnList) {
		HashMap<Integer, Double> stnParentLikelihoodMap = new HashMap<Integer, Double>();

		for (_Stn stnObj : pDoc.getSentences()) {
			double likelihood = rankStn4ParentByParentLikelihood(stnObj, pDoc);
			stnParentLikelihoodMap.put(stnObj.getIndex(), likelihood);
		}

		List<Map.Entry<Integer, Double>> stnParentLikelihoodList =
				new ArrayList<Map.Entry<Integer, Double>>(stnParentLikelihoodMap.entrySet());

		// Sort descending by likelihood (numeric — see BUGFIX note in
		// topStn4ParentByNormLikelihood; the same toString() bug was here).
		Collections.sort(stnParentLikelihoodList, new Comparator<Map.Entry<Integer, Double>>() {
			public int compare(Map.Entry<Integer, Double> o1, Map.Entry<Integer, Double> o2) {
				return o2.getValue().compareTo(o1.getValue());
			}
		});

		for (int i = 0; i < topK; i++) {
			int selectedKey = stnParentLikelihoodList.get(i).getKey();
			stnList.add(selectedKey);
			if (!mergedStnList.contains(selectedKey)) {
				mergedStnList.add(selectedKey);
			}
		}
	}

	/** Writes "feature:topic" pairs for every word of a parent document. */
	protected void printParentTopicAssignment(_Doc d, File topicFolder) {
		_ParentDoc pDoc = (_ParentDoc) d;
		String topicAssignmentFile = pDoc.getName() + ".txt";
		try {
			PrintWriter pw = new PrintWriter(new File(topicFolder, topicAssignmentFile));

			for (_Word w : pDoc.getWords()) {
				int index = w.getIndex();
				int topic = w.getTopic();
				String featureName = m_corpus.getFeature(index);
				pw.print(featureName + ":" + topic + "\t");
			}

			pw.flush();
			pw.close();
		} catch (FileNotFoundException e) {
			e.printStackTrace();
		}
	}

	/** Writes "feature:topic" pairs for every word of a child document. */
	protected void printChildTopicAssignment(_Doc d, File topicFolder) {
		String topicAssignmentFile = d.getName() + ".txt";
		try {
			PrintWriter pw = new PrintWriter(new File(topicFolder, topicAssignmentFile));

			for (_Word w : d.getWords()) {
				int index = w.getIndex();
				int topic = w.getTopic();
				String featureName = m_corpus.getFeature(index);
				pw.print(featureName + ":" + topic + "\t");
			}

			pw.flush();
			pw.close();
		} catch (FileNotFoundException e) {
			e.printStackTrace();
		}
	}

	/**
	 * Dumps the topic proportions of every parent document and of each of
	 * its children to two separate parameter files.
	 */
	protected void printParameter(String parentParameterFile,
			String childParameterFile, ArrayList<_Doc> docList) {
		System.out.println("printing parameter");
		try {
			System.out.println(parentParameterFile);
			System.out.println(childParameterFile);

			PrintWriter parentParaOut = new PrintWriter(new File(parentParameterFile));
			PrintWriter childParaOut = new PrintWriter(new File(childParameterFile));

			for (_Doc d : docList) {
				if (d instanceof _ParentDoc) {
					parentParaOut.print(d.getName() + "\t");
					parentParaOut.print("topicProportion\t");
					for (int k = 0; k < number_of_topics; k++) {
						parentParaOut.print(d.m_topics[k] + "\t");
					}
					parentParaOut.println();

					for (_ChildDoc cDoc : ((_ParentDoc) d).m_childDocs) {
						childParaOut.print(cDoc.getName() + "\t");
						childParaOut.print("topicProportion\t");
						for (int k = 0; k < number_of_topics; k++) {
							childParaOut.print(cDoc.m_topics[k] + "\t");
						}
						childParaOut.println();
					}
				}
			}

			parentParaOut.flush();
			parentParaOut.close();

			childParaOut.flush();
			childParaOut.close();
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	/** Writes each parent's children scored by topic-vector cosine similarity. */
	protected void printTopKChild4Parent(String filePrefix, int topK) {
		String topKChild4StnFile = filePrefix + "topChild4Parent.txt";
		try {
			PrintWriter pw = new PrintWriter(new File(topKChild4StnFile));

			for (_Doc d : m_trainSet) {
				if (d instanceof _ParentDoc) {
					_ParentDoc pDoc = (_ParentDoc) d;

					pw.print(pDoc.getName() + "\t");

					for (_ChildDoc cDoc : pDoc.m_childDocs) {
						double docScore = rankChild4ParentBySim(cDoc, pDoc);
						pw.print(cDoc.getName() + ":" + docScore + "\t");
					}

					pw.println();
				}
			}

			pw.flush();
			pw.close();
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	/** Cosine similarity between child and parent topic proportions. */
	protected double rankChild4ParentBySim(_ChildDoc cDoc, _ParentDoc pDoc) {
		double childSim = Utils.cosine(cDoc.m_topics, pDoc.m_topics);
		return childSim;
	}

	/** Writes, per sentence of each parent, every child's log-likelihood. */
	protected void printTopKChild4Stn(String filePrefix, int topK) {
		String topKChild4StnFile = filePrefix + "topChild4Stn.txt";
		try {
			PrintWriter pw = new PrintWriter(new File(topKChild4StnFile));

			for (_Doc d : m_trainSet) {
				if (d instanceof _ParentDoc4DCM) {
					_ParentDoc4DCM pDoc = (_ParentDoc4DCM) d;

					pw.println(pDoc.getName() + "\t" + pDoc.getSenetenceSize());

					for (_Stn stnObj : pDoc.getSentences()) {
						HashMap<String, Double> likelihoodMap =
								rankChild4StnByLikelihood(stnObj, pDoc);

						// sentence indices are reported 1-based
						pw.print((stnObj.getIndex() + 1) + "\t");

						for (String e : likelihoodMap.keySet()) {
							pw.print(e);
							pw.print(":" + likelihoodMap.get(e));
							pw.print("\t");
						}
						pw.println();
					}
				}
			}

			pw.flush();
			pw.close();
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	/**
	 * Log-likelihood of a sentence under each child's topic proportions.
	 *
	 * @return map of child name to sentence log-likelihood
	 */
	protected HashMap<String, Double> rankChild4StnByLikelihood(_Stn stnObj, _ParentDoc4DCM pDoc) {
		HashMap<String, Double> likelihoodMap = new HashMap<String, Double>();

		for (_ChildDoc cDoc : pDoc.m_childDocs) {
			double stnLogLikelihood = 0;
			for (_Word w : stnObj.getWords()) {
				double wordLikelihood = 0;
				int wid = w.getIndex();

				for (int k = 0; k < number_of_topics; k++) {
					wordLikelihood += cDoc.m_topics[k] * topic_term_probabilty[k][wid];
				}

				stnLogLikelihood += Math.log(wordLikelihood);
			}
			likelihoodMap.put(cDoc.getName(), stnLogLikelihood);
		}

		return likelihoodMap;
	}

	// the ranking is based on likelihood
	protected void printTopStn4ParentByNormLikelihood(String filePrefix, int topK) {
		String topKChild4StnFile = filePrefix + "topStn4Parent_normStnlikelihood.txt";
		try {
			PrintWriter pw = new PrintWriter(new File(topKChild4StnFile));

			for (_Doc d : m_trainSet) {
				if (d instanceof _ParentDoc4DCM) {
					_ParentDoc4DCM pDoc = (_ParentDoc4DCM) d;

					pw.println(pDoc.getName() + "\t" + pDoc.getSenetenceSize() + "\t");

					for (_Stn stnObj : pDoc.getSentences()) {
						double likelihood = rankStn4ParentByNormLikelihood(stnObj, pDoc);
						likelihood = likelihood / (stnObj.getLength() * 1.0);
						pw.print((stnObj.getIndex() + 1));
						pw.print(":" + likelihood);
						pw.print("\t");
					}
					pw.println();
				}
			}

			pw.flush();
			pw.close();
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	/** Sentence log-likelihood under the parent's topic proportions. */
	protected double rankStn4ParentByNormLikelihood(_Stn stnObj, _ParentDoc4DCM pDoc) {
		double stnLogLikelihood = 0;
		for (_Word w : stnObj.getWords()) {
			double wordLikelihood = 0;
			int wid = w.getIndex();

			for (int k = 0; k < number_of_topics; k++) {
				wordLikelihood += pDoc.m_topics[k] * topic_term_probabilty[k][wid];
			}

			stnLogLikelihood += Math.log(wordLikelihood);
		}

		return stnLogLikelihood;
	}

	// the ranking is based on major topic
	protected void printTopStn4ParentByMajorTopic(String filePrefix, int topK) {
		String topKChild4StnByMajorTopicFile = filePrefix + "topStn4Parent_majorTopic.txt";
		String topKChild4StnFile = filePrefix + "topStn4Parent_parentLikelihood.txt";
		try {
			PrintWriter pwByMajor = new PrintWriter(new File(topKChild4StnByMajorTopicFile));
			PrintWriter pw = new PrintWriter(new File(topKChild4StnFile));

			for (_Doc d : m_trainSet) {
				if (d instanceof _ParentDoc4DCM) {
					_ParentDoc4DCM pDoc = (_ParentDoc4DCM) d;

					pwByMajor.println(pDoc.getName() + ":" + pDoc.getSenetenceSize() + "\t");
					pw.println(pDoc.getName() + ":" + pDoc.getSenetenceSize() + "\t");

					for (int k = 0; k < number_of_topics; k++) {
						pwByMajor.print(pDoc.m_topics[k] + "\t");
						pw.print(pDoc.m_topics[k] + "\t");
					}

					// phi must be (re)estimated before ranking by topic
					estimateTopicProb4Words(pDoc);
					for (_Stn stnObj : pDoc.getSentences()) {
						int majorTopicIndex = rankStn4ParentByMajorTopic(stnObj, pDoc);

						pwByMajor.print((stnObj.getIndex() + 1));
						pwByMajor.print(":" + majorTopicIndex);
						pwByMajor.print("\t");

						double likelihood = rankStn4ParentByParentLikelihood(stnObj, pDoc);
						pw.print((stnObj.getIndex() + 1));
						pw.print(":" + likelihood);
						pw.print("\t");
					}
					pwByMajor.println();
					pw.println();
				}
			}

			pwByMajor.flush();
			pwByMajor.close();

			pw.flush();
			pw.close();
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	/**
	 * Major topic of a sentence: the topic with the largest accumulated
	 * per-word phi mass over the sentence's words.
	 */
	protected int rankStn4ParentByMajorTopic(_Stn stnObj, _ParentDoc4DCM pDoc) {
		// BUGFIX: the original accumulated the normalized (fractional) phi
		// values into an int[]; the compound assignment silently truncated
		// every partial sum toward zero, so the "major topic" was almost
		// always reported as topic 0. Accumulate in doubles instead.
		double[] topicWeight = new double[number_of_topics];

		int maxTopicIndex = 0;
		double maxTopicRatio = 0;

		for (_Word w : stnObj.getWords()) {
			int wid = w.getIndex();
			int sparseWid = pDoc.m_word2Index.get(wid);

			for (int k = 0; k < number_of_topics; k++) {
				topicWeight[k] += pDoc.m_phi[sparseWid][k];
				if (maxTopicRatio < topicWeight[k]) {
					maxTopicIndex = k;
					maxTopicRatio = topicWeight[k];
				}
			}
		}

		return maxTopicIndex;
	}

	/**
	 * Estimates per-word topic distributions (phi) from the current topic
	 * assignments of the parent document, L1-normalized per unique word.
	 */
	protected void estimateTopicProb4Words(_ParentDoc4DCM pDoc) {
		int uniqueWordsNum = pDoc.getSparse().length;
		for (int i = 0; i < uniqueWordsNum; i++) {
			Arrays.fill(pDoc.m_phi[i], 0);
		}

		// count topic assignments per unique word
		for (_Word w : pDoc.getWords()) {
			int tid = w.getTopic();
			int wid = w.getIndex();
			int sparseWid = pDoc.m_word2Index.get(wid);

			pDoc.m_phi[sparseWid][tid]++;
		}

		// normalize each word's counts into a distribution over topics
		for (int i = 0; i < uniqueWordsNum; i++) {
			double phiSum = 0;
			phiSum = Utils.sumOfArray(pDoc.m_phi[i]);
			for (int k = 0; k < number_of_topics; k++) {
				pDoc.m_phi[i][k] /= phiSum;
			}
		}
	}

	/**
	 * Likelihood of the whole parent document under topic proportions
	 * estimated from a single sentence's words (via phi).
	 */
	protected double rankStn4ParentByParentLikelihood(_Stn stnObj, _ParentDoc4DCM pDoc) {
		double parentLikelihood = 0;

		double[] topicProportion = new double[number_of_topics];
		Arrays.fill(topicProportion, 0);

		for (_Word w : stnObj.getWords()) {
			int wid = w.getIndex();
			int sparseWid = pDoc.m_word2Index.get(wid);

			for (int k = 0; k < number_of_topics; k++) {
				topicProportion[k] += pDoc.m_phi[sparseWid][k];
			}
		}

		Utils.L1Normalization(topicProportion);

		for (_Word w : pDoc.getWords()) {
			int wid = w.getIndex();
			double wordLikelihood = 0;
			for (int k = 0; k < number_of_topics; k++) {
				wordLikelihood += topicProportion[k] * topic_term_probabilty[k][wid];
			}
			parentLikelihood += Math.log(wordLikelihood);
		}

		return parentLikelihood;
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.jackrabbit.oak.plugins.document; import java.util.List; import java.util.concurrent.TimeUnit; import javax.management.openmbean.CompositeData; import org.apache.jackrabbit.api.stats.TimeSeries; import org.apache.jackrabbit.oak.plugins.document.util.Utils; import org.apache.jackrabbit.oak.stats.MeterStats; import org.apache.jackrabbit.oak.stats.StatisticsProvider; import org.apache.jackrabbit.oak.stats.StatsOptions; import org.apache.jackrabbit.oak.stats.TimerStats; import org.apache.jackrabbit.stats.TimeSeriesStatsUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static com.google.common.base.Preconditions.checkNotNull; /** * Document Store statistics helper class. 
*/ public class DocumentStoreStats implements DocumentStoreStatsCollector, DocumentStoreStatsMBean { private final Logger perfLog = LoggerFactory.getLogger(DocumentStoreStats.class.getName() + ".perf"); public static final int PERF_LOG_THRESHOLD = 1; static final String NODES_FIND_CACHED = "DOCUMENT_NODES_FIND_CACHED"; static final String NODES_FIND_SPLIT = "DOCUMENT_NODES_FIND_SPLIT"; static final String NODES_FIND_SLAVE = "DOCUMENT_NODES_FIND_SLAVE"; static final String NODES_FIND_PRIMARY = "DOCUMENT_NODES_FIND_PRIMARY"; static final String NODES_FIND_MISSING_TIMER = "DOCUMENT_NODES_FIND_MISSING"; static final String NODES_FIND_TIMER = "DOCUMENT_NODES_FIND"; static final String NODES_QUERY_FIND_READ_COUNT = "DOCUMENT_NODES_QUERY_FIND"; static final String NODES_QUERY_FILTER = "DOCUMENT_NODES_QUERY_FILTER"; static final String NODES_QUERY_TIMER = "DOCUMENT_NODES_QUERY"; static final String NODES_QUERY_SLAVE = "DOCUMENT_NODES_QUERY_SLAVE"; static final String NODES_QUERY_PRIMARY = "DOCUMENT_NODES_QUERY_PRIMARY"; static final String NODES_QUERY_LOCK = "DOCUMENT_NODES_QUERY_LOCK"; static final String NODES_QUERY_LOCK_TIMER = "DOCUMENT_NODES_QUERY_LOCK_TIMER"; static final String NODES_CREATE = "DOCUMENT_NODES_CREATE"; static final String NODES_CREATE_UPSERT = "DOCUMENT_NODES_CREATE_UPSERT"; static final String NODES_CREATE_SPLIT = "DOCUMENT_NODES_CREATE_SPLIT"; static final String NODES_CREATE_UPSERT_TIMER = "DOCUMENT_NODES_CREATE_UPSERT_TIMER"; static final String NODES_CREATE_TIMER = "DOCUMENT_NODES_CREATE_TIMER"; static final String NODES_UPDATE = "DOCUMENT_NODES_UPDATE"; static final String NODES_UPDATE_FAILURE = "DOCUMENT_NODES_UPDATE_FAILURE"; static final String NODES_UPDATE_RETRY_COUNT = "DOCUMENT_NODES_UPDATE_RETRY"; static final String NODES_UPDATE_TIMER = "DOCUMENT_NODES_UPDATE_TIMER"; static final String JOURNAL_QUERY = "DOCUMENT_JOURNAL_QUERY"; static final String JOURNAL_CREATE = "DOCUMENT_JOURNAL_CREATE"; static final String JOURNAL_QUERY_TIMER = 
"DOCUMENT_JOURNAL_QUERY_TIMER"; static final String JOURNAL_CREATE_TIMER = "DOCUMENT_JOURNAL_CREATE_TIMER"; private final MeterStats findNodesCachedMeter; private final TimerStats findNodesMissingTimer; private final MeterStats findNodesSlave; private final TimerStats findNodesTimer; private final MeterStats findNodesPrimary; private final MeterStats queryNodesSlave; private final MeterStats queryNodesPrimary; private final MeterStats queryNodesResult; private final TimerStats queryNodesWithFilterTimer; private final TimerStats queryNodesTimer; private final MeterStats queryJournal; private final TimerStats queryJournalTimer; private final TimerStats createNodeUpsertTimer; private final TimerStats createNodeTimer; private final TimerStats updateNodeTimer; private final MeterStats createNodeUpsertMeter; private final MeterStats createNodeMeter; private final MeterStats updateNodeMeter; private final MeterStats createJournal; private final TimerStats createJournalTimer; private final MeterStats findSplitNodes; private final StatisticsProvider statisticsProvider; private final MeterStats queryNodesLock; private final TimerStats queryNodesLockTimer; private final MeterStats createSplitNodeMeter; private final MeterStats updateNodeFailureMeter; private final MeterStats updateNodeRetryCountMeter; public DocumentStoreStats(StatisticsProvider provider) { statisticsProvider = checkNotNull(provider); findNodesCachedMeter = provider.getMeter(NODES_FIND_CACHED, StatsOptions.DEFAULT); findNodesMissingTimer = provider.getTimer(NODES_FIND_MISSING_TIMER, StatsOptions.METRICS_ONLY); findNodesTimer = provider.getTimer(NODES_FIND_TIMER, StatsOptions.METRICS_ONLY); findSplitNodes = provider.getMeter(NODES_FIND_SPLIT, StatsOptions.DEFAULT); findNodesSlave = provider.getMeter(NODES_FIND_SLAVE, StatsOptions.DEFAULT); findNodesPrimary = provider.getMeter(NODES_FIND_PRIMARY, StatsOptions.DEFAULT); queryNodesSlave = provider.getMeter(NODES_QUERY_SLAVE, StatsOptions.DEFAULT); 
queryNodesPrimary = provider.getMeter(NODES_QUERY_PRIMARY, StatsOptions.DEFAULT); queryNodesResult = provider.getMeter(NODES_QUERY_FIND_READ_COUNT, StatsOptions.DEFAULT); queryNodesWithFilterTimer = provider.getTimer(NODES_QUERY_FILTER, StatsOptions.METRICS_ONLY); queryNodesTimer = provider.getTimer(NODES_QUERY_TIMER, StatsOptions.METRICS_ONLY); queryJournal = provider.getMeter(JOURNAL_QUERY, StatsOptions.DEFAULT); queryJournalTimer = provider.getTimer(JOURNAL_QUERY_TIMER, StatsOptions.METRICS_ONLY); createJournal = provider.getMeter(JOURNAL_CREATE, StatsOptions.DEFAULT); createJournalTimer = provider.getTimer(JOURNAL_CREATE_TIMER, StatsOptions.METRICS_ONLY); createNodeUpsertTimer = provider.getTimer(NODES_CREATE_UPSERT_TIMER, StatsOptions.METRICS_ONLY); createNodeTimer = provider.getTimer(NODES_CREATE_TIMER, StatsOptions.METRICS_ONLY); updateNodeTimer = provider.getTimer(NODES_UPDATE_TIMER, StatsOptions.METRICS_ONLY); createNodeMeter = provider.getMeter(NODES_CREATE, StatsOptions.DEFAULT); createNodeUpsertMeter = provider.getMeter(NODES_CREATE_UPSERT, StatsOptions.DEFAULT); createSplitNodeMeter = provider.getMeter(NODES_CREATE_SPLIT, StatsOptions.DEFAULT); updateNodeMeter = provider.getMeter(NODES_UPDATE, StatsOptions.DEFAULT); updateNodeFailureMeter = provider.getMeter(NODES_UPDATE_FAILURE, StatsOptions.DEFAULT); updateNodeRetryCountMeter = provider.getMeter(NODES_UPDATE_RETRY_COUNT, StatsOptions.DEFAULT); queryNodesLock = provider.getMeter(NODES_QUERY_LOCK, StatsOptions.DEFAULT); queryNodesLockTimer = provider.getTimer(NODES_QUERY_LOCK_TIMER, StatsOptions.METRICS_ONLY); } //~------------------------------------------< DocumentStoreStatsCollector > @Override public void doneFindCached(Collection<? extends Document> collection, String key) { //findCached call is almost done for NODES collection only if (collection == Collection.NODES){ findNodesCachedMeter.mark(); } } @Override public void doneFindUncached(long timeTakenNanos, Collection<? 
extends Document> collection, String key, boolean docFound, boolean isSlaveOk) { if (collection == Collection.NODES){ //For now collect time for reads from primary/secondary in same timer TimerStats timer = docFound ? findNodesTimer : findNodesMissingTimer; timer.update(timeTakenNanos, TimeUnit.NANOSECONDS); //For now only nodes can be looked up from slave if (isSlaveOk){ findNodesSlave.mark(); } else { findNodesPrimary.mark(); } if (Utils.isPreviousDocId(key)){ findSplitNodes.mark(); } } perfLog(timeTakenNanos, "findUncached on key={}, isSlaveOk={}", key, isSlaveOk); } @Override public void doneQuery(long timeTakenNanos, Collection<? extends Document> collection, String fromKey, String toKey, boolean indexedProperty, int resultSize, long lockTime, boolean isSlaveOk) { if (collection == Collection.NODES){ //Distinguish between query done with filter and without filter TimerStats timer = indexedProperty ? queryNodesWithFilterTimer : queryNodesTimer; timer.update(timeTakenNanos, TimeUnit.NANOSECONDS); //Number of nodes read queryNodesResult.mark(resultSize); //Stats for queries to slaves if (isSlaveOk){ queryNodesSlave.mark(); } else { queryNodesPrimary.mark(); } if (lockTime > 0){ queryNodesLock.mark(); queryNodesLockTimer.update(lockTime, TimeUnit.NANOSECONDS); } //TODO What more to gather // - Histogram of result - How the number of children vary } else if (collection == Collection.JOURNAL){ //Journals are read from primary and without any extra condition on indexedProperty queryJournal.mark(resultSize); queryJournalTimer.update(timeTakenNanos, TimeUnit.NANOSECONDS); } perfLog(timeTakenNanos, "query for children from [{}] to [{}], lock:{}", fromKey, toKey, lockTime); } @Override public void doneCreate(long timeTakenNanos, Collection<? 
extends Document> collection, List<String> ids, boolean insertSuccess) { if (collection == Collection.NODES && insertSuccess){ for (String id : ids){ createNodeMeter.mark(); if (Utils.isPreviousDocId(id)){ createSplitNodeMeter.mark(); } } createNodeTimer.update(timeTakenNanos / ids.size(), TimeUnit.NANOSECONDS); } else if (collection == Collection.JOURNAL){ createJournal.mark(ids.size()); createJournalTimer.update(timeTakenNanos, TimeUnit.NANOSECONDS); } perfLog(timeTakenNanos, "create"); } @Override public void doneCreateOrUpdate(long timeTakenNanos, Collection<? extends Document> collection, List<String> ids) { if (collection == Collection.NODES) { for (String id : ids){ createNodeUpsertMeter.mark(); if (Utils.isPreviousDocId(id)){ createSplitNodeMeter.mark(); } } createNodeUpsertTimer.update(timeTakenNanos / ids.size(), TimeUnit.NANOSECONDS); } perfLog(timeTakenNanos, "createOrUpdate {}", ids); } @Override public void doneUpdate(long timeTakenNanos, Collection<? extends Document> collection, int updateCount) { //NODES - Update is called for lastRev update perfLog(timeTakenNanos, "update"); } @Override public void doneFindAndModify(long timeTakenNanos, Collection<? extends Document> collection, String key, boolean newEntry, boolean success, int retryCount) { if (collection == Collection.NODES){ if (success) { if (newEntry) { createNodeUpsertMeter.mark(); createNodeUpsertTimer.update(timeTakenNanos, TimeUnit.NANOSECONDS); } else { updateNodeMeter.mark(); updateNodeTimer.update(timeTakenNanos, TimeUnit.NANOSECONDS); } if (retryCount > 0){ updateNodeRetryCountMeter.mark(retryCount); } } else { updateNodeRetryCountMeter.mark(retryCount); updateNodeFailureMeter.mark(); } } perfLog(timeTakenNanos, "findAndModify [{}]", key); } private void perfLog(long timeTakenNanos, String logMessagePrefix, Object... 
arguments){ if (!perfLog.isDebugEnabled()){ return; } final long diff = TimeUnit.NANOSECONDS.toMillis(timeTakenNanos); if (perfLog.isTraceEnabled()) { // if log level is TRACE, then always log - and do that on TRACE // then: perfLog.trace(logMessagePrefix + " [took " + diff + "ms]", (Object[]) arguments); } else if (diff > PERF_LOG_THRESHOLD) { perfLog.debug(logMessagePrefix + " [took " + diff + "ms]", (Object[]) arguments); } } //~--------------------------------------------< DocumentStoreStatsMBean > @Override public long getNodesFindCount() { return findNodesSlave.getCount() + queryNodesPrimary.getCount(); } @Override public long getNodesFindQueryCount() { return queryNodesSlave.getCount() + queryNodesPrimary.getCount(); } @Override public long getNodesReadByQueryCount() { return queryNodesResult.getCount(); } @Override public long getNodesCreateCount() { return createNodeMeter.getCount() + createNodeUpsertMeter.getCount(); } @Override public long getNodesUpdateCount() { return updateNodeMeter.getCount(); } @Override public long getJournalCreateCount() { return createJournal.getCount(); } @Override public long getJournalReadCount() { return queryJournal.getCount(); } @Override public CompositeData getFindCachedNodesHistory() { return getTimeSeriesData(NODES_FIND_CACHED, NODES_FIND_CACHED); } @Override public CompositeData getFindSplitNodesHistory() { return getTimeSeriesData(NODES_FIND_SPLIT, NODES_FIND_SPLIT); } @Override public CompositeData getFindNodesFromPrimaryHistory() { return getTimeSeriesData(NODES_FIND_PRIMARY, NODES_FIND_PRIMARY); } @Override public CompositeData getFindNodesFromSlaveHistory() { return getTimeSeriesData(NODES_FIND_SLAVE, NODES_FIND_SLAVE); } @Override public CompositeData getQueryNodesFromSlaveHistory() { return getTimeSeriesData(NODES_QUERY_SLAVE, NODES_QUERY_SLAVE); } @Override public CompositeData getQueryNodesFromPrimaryHistory() { return getTimeSeriesData(NODES_QUERY_PRIMARY, NODES_QUERY_PRIMARY); } @Override public 
CompositeData getQueryNodesLockHistory() { return getTimeSeriesData(NODES_QUERY_LOCK, NODES_QUERY_LOCK); } @Override public CompositeData getQueryJournalHistory() { return getTimeSeriesData(JOURNAL_QUERY, JOURNAL_QUERY); } @Override public CompositeData getCreateJournalHistory() { return getTimeSeriesData(JOURNAL_CREATE, JOURNAL_CREATE); } @Override public CompositeData getCreateNodesHistory() { return getTimeSeriesData(NODES_CREATE, NODES_CREATE); } @Override public CompositeData getUpdateNodesHistory() { return getTimeSeriesData(NODES_UPDATE, NODES_UPDATE); } @Override public CompositeData getUpdateNodesRetryHistory() { return getTimeSeriesData(NODES_UPDATE_RETRY_COUNT, NODES_UPDATE_RETRY_COUNT); } @Override public CompositeData getUpdateNodesFailureHistory() { return getTimeSeriesData(NODES_UPDATE_FAILURE, NODES_UPDATE_FAILURE); } private CompositeData getTimeSeriesData(String name, String desc){ return TimeSeriesStatsUtil.asCompositeData(getTimeSeries(name), desc); } private TimeSeries getTimeSeries(String name) { return statisticsProvider.getStats().getTimeSeries(name, true); } }
package com.maimemo.text.stemmer; import com.maimemo.text.SubCharSequence; import java.util.HashMap; import java.util.Map; /** * Irregular verb word table * Created by TJT on 9/3/16. */ @SuppressWarnings("SpellCheckingInspection") public class IrregularWordTable { private static Map<CharSequence, CharSequence> mapping = null; private static void buildMap() { Map<CharSequence, CharSequence> map = new HashMap<>(510); map.put(new SubCharSequence("unbuilt"), "unbuild"); map.put(new SubCharSequence("bode"), "bide"); map.put(new SubCharSequence("went"), "go"); map.put(new SubCharSequence("leaped"), "leap"); map.put(new SubCharSequence("underran"), "underrun"); map.put(new SubCharSequence("knelt"), "kneel"); map.put(new SubCharSequence("retold"), "retell"); map.put(new SubCharSequence("smelt"), "smell"); map.put(new SubCharSequence("forecasted"), "forecast"); map.put(new SubCharSequence("clove"), "cleave"); map.put(new SubCharSequence("overflown"), "overfly"); map.put(new SubCharSequence("throve"), "thrive"); map.put(new SubCharSequence("rent"), "rend"); map.put(new SubCharSequence("fordid"), "fordo"); map.put(new SubCharSequence("outdid"), "outdo"); map.put(new SubCharSequence("sawed"), "saw"); map.put(new SubCharSequence("unwove"), "unweave"); map.put(new SubCharSequence("bade"), "bid"); map.put(new SubCharSequence("woken"), "wake"); map.put(new SubCharSequence("mislaid"), "mislay"); map.put(new SubCharSequence("rang"), "ring"); map.put(new SubCharSequence("abided"), "abide"); map.put(new SubCharSequence("overblown"), "overblow"); map.put(new SubCharSequence("shoed"), "shoe"); map.put(new SubCharSequence("waylaid"), "waylay"); map.put(new SubCharSequence("blest"), "bless"); map.put(new SubCharSequence("trodden"), "tread"); map.put(new SubCharSequence("lent"), "lend"); map.put(new SubCharSequence("sunk"), "sink"); map.put(new SubCharSequence("forbore"), "forbear"); map.put(new SubCharSequence("hidden"), "hide"); map.put(new SubCharSequence("outgone"), "outgo"); 
map.put(new SubCharSequence("mistaken"), "mistake"); map.put(new SubCharSequence("withdrawn"), "withdraw"); map.put(new SubCharSequence("laid"), "lay"); map.put(new SubCharSequence("overpaid"), "overpay"); map.put(new SubCharSequence("saw"), "see"); map.put(new SubCharSequence("overbought"), "overbuy"); map.put(new SubCharSequence("girded"), "gird"); map.put(new SubCharSequence("willed"), "will"); map.put(new SubCharSequence("dug"), "dig"); map.put(new SubCharSequence("lain"), "lie"); map.put(new SubCharSequence("wedded"), "wed"); map.put(new SubCharSequence("leapt"), "leap"); map.put(new SubCharSequence("blessed"), "bless"); map.put(new SubCharSequence("overdid"), "overdo"); map.put(new SubCharSequence("awaked"), "awake"); map.put(new SubCharSequence("lay"), "lie"); map.put(new SubCharSequence("ate"), "eat"); map.put(new SubCharSequence("felt"), "feel"); map.put(new SubCharSequence("arose"), "arise"); map.put(new SubCharSequence("sung"), "sing"); map.put(new SubCharSequence("overheard"), "overhear"); map.put(new SubCharSequence("overdrew"), "overdraw"); map.put(new SubCharSequence("should"), "shall"); map.put(new SubCharSequence("held"), "hold"); map.put(new SubCharSequence("overgrew"), "overgrow"); map.put(new SubCharSequence("bethought"), "bethink"); map.put(new SubCharSequence("graved"), "grave"); map.put(new SubCharSequence("unslung"), "unsling"); map.put(new SubCharSequence("ungirded"), "ungird"); map.put(new SubCharSequence("dared"), "dare"); map.put(new SubCharSequence("strid"), "stride"); map.put(new SubCharSequence("stunk"), "stink"); map.put(new SubCharSequence("overeaten"), "overeat"); map.put(new SubCharSequence("outate"), "outeat"); map.put(new SubCharSequence("unswore"), "unswear"); map.put(new SubCharSequence("weighed"), "weigh"); map.put(new SubCharSequence("spun"), "spin"); map.put(new SubCharSequence("gone"), "go"); map.put(new SubCharSequence("grown"), "grow"); map.put(new SubCharSequence("unlearned"), "unlearn"); map.put(new 
SubCharSequence("was"), "is"); map.put(new SubCharSequence("undid"), "undo"); map.put(new SubCharSequence("outbade"), "outbid"); map.put(new SubCharSequence("mowed"), "mow"); map.put(new SubCharSequence("unstrung"), "unstring"); map.put(new SubCharSequence("overgrown"), "overgrow"); map.put(new SubCharSequence("thrown"), "throw"); map.put(new SubCharSequence("ground"), "grind"); map.put(new SubCharSequence("stuck"), "stick"); map.put(new SubCharSequence("alit"), "alight"); map.put(new SubCharSequence("smote"), "smite"); map.put(new SubCharSequence("misgiven"), "misgive"); map.put(new SubCharSequence("outworn"), "outwear"); map.put(new SubCharSequence("underdid"), "underdo"); map.put(new SubCharSequence("underwritten"), "underwrite"); map.put(new SubCharSequence("bit"), "bite"); map.put(new SubCharSequence("partaken"), "partake"); map.put(new SubCharSequence("prechosen"), "prechoose"); map.put(new SubCharSequence("relaid"), "relay"); map.put(new SubCharSequence("stood"), "stand"); map.put(new SubCharSequence("gilded"), "gild"); map.put(new SubCharSequence("stove"), "stave"); map.put(new SubCharSequence("slunk"), "slink"); map.put(new SubCharSequence("stricken"), "strike"); map.put(new SubCharSequence("stole"), "steal"); map.put(new SubCharSequence("quitted"), "quit"); map.put(new SubCharSequence("hid"), "hide"); map.put(new SubCharSequence("forgotten"), "forget"); map.put(new SubCharSequence("beaten"), "beat"); map.put(new SubCharSequence("underwent"), "undergo"); map.put(new SubCharSequence("wove"), "weave"); map.put(new SubCharSequence("found"), "find"); map.put(new SubCharSequence("seen"), "see"); map.put(new SubCharSequence("smitten"), "smite"); map.put(new SubCharSequence("overborne"), "overbear"); map.put(new SubCharSequence("unstuck"), "unstick"); map.put(new SubCharSequence("taught"), "teach"); map.put(new SubCharSequence("drunk"), "drink"); map.put(new SubCharSequence("forsworn"), "forswear"); map.put(new SubCharSequence("underbidden"), "underbid"); 
map.put(new SubCharSequence("dwelt"), "dwell"); map.put(new SubCharSequence("shrived"), "shrive"); map.put(new SubCharSequence("would/willed"), "will"); map.put(new SubCharSequence("flew"), "fly"); map.put(new SubCharSequence("misdealt"), "misdeal"); map.put(new SubCharSequence("underpaid"), "underpay"); map.put(new SubCharSequence("sang"), "sing"); map.put(new SubCharSequence("overladen"), "overlade"); map.put(new SubCharSequence("overblew"), "overblow"); map.put(new SubCharSequence("overate"), "overeat"); map.put(new SubCharSequence("overdrawn"), "overdraw"); map.put(new SubCharSequence("forsaken"), "forsake"); map.put(new SubCharSequence("learnt"), "learn"); map.put(new SubCharSequence("clave"), "cleave"); map.put(new SubCharSequence("repaid"), "repay"); map.put(new SubCharSequence("wept"), "weep"); map.put(new SubCharSequence("shaken"), "shake"); map.put(new SubCharSequence("knew"), "know"); map.put(new SubCharSequence("wrought"), "work"); map.put(new SubCharSequence("swelled"), "swell"); map.put(new SubCharSequence("prechose"), "prechoose"); map.put(new SubCharSequence("overthrown"), "overthrow"); map.put(new SubCharSequence("gilt"), "gild"); map.put(new SubCharSequence("cleaved"), "cleave"); map.put(new SubCharSequence("written"), "write"); map.put(new SubCharSequence("spat"), "spit"); map.put(new SubCharSequence("learned"), "learn"); map.put(new SubCharSequence("woke"), "wake"); map.put(new SubCharSequence("slain"), "slay"); map.put(new SubCharSequence("fell"), "fall"); map.put(new SubCharSequence("alighted"), "alight"); map.put(new SubCharSequence("undertook"), "undertake"); map.put(new SubCharSequence("sewn"), "sew"); map.put(new SubCharSequence("gotten"), "get"); map.put(new SubCharSequence("burned"), "burn"); map.put(new SubCharSequence("unlearnt"), "unlearn"); map.put(new SubCharSequence("foreknew"), "foreknow"); map.put(new SubCharSequence("partook"), "partake"); map.put(new SubCharSequence("bided"), "bide"); map.put(new SubCharSequence("swung"), 
"swing"); map.put(new SubCharSequence("overshot"), "overshoot"); map.put(new SubCharSequence("overbuilt"), "overbuild"); map.put(new SubCharSequence("grew"), "grow"); map.put(new SubCharSequence("withstood"), "withstand"); map.put(new SubCharSequence("striven"), "strive"); map.put(new SubCharSequence("foreran"), "forerun"); map.put(new SubCharSequence("built"), "build"); map.put(new SubCharSequence("oversaw"), "oversee"); map.put(new SubCharSequence("unsaid"), "unsay"); map.put(new SubCharSequence("strung"), "string"); map.put(new SubCharSequence("bled"), "bleed"); map.put(new SubCharSequence("oversold"), "oversell"); map.put(new SubCharSequence("overtaken"), "overtake"); map.put(new SubCharSequence("rived"), "rive"); map.put(new SubCharSequence("forswore"), "forswear"); map.put(new SubCharSequence("lighted"), "light"); map.put(new SubCharSequence("foregone"), "forego"); map.put(new SubCharSequence("outsat"), "outsit"); map.put(new SubCharSequence("spelled"), "spell"); map.put(new SubCharSequence("wrote"), "write"); map.put(new SubCharSequence("leaned"), "lean"); map.put(new SubCharSequence("slidden"), "slide"); map.put(new SubCharSequence("staved"), "stave"); map.put(new SubCharSequence("broadcasted"), "broadcast"); map.put(new SubCharSequence("browbeaten"), "browbeat"); map.put(new SubCharSequence("shot"), "shoot"); map.put(new SubCharSequence("sawn"), "saw"); map.put(new SubCharSequence("unbound"), "unbind"); map.put(new SubCharSequence("trod"), "tread"); map.put(new SubCharSequence("unmade"), "unmake"); map.put(new SubCharSequence("slept"), "sleep"); map.put(new SubCharSequence("clad"), "clothe"); map.put(new SubCharSequence("sold"), "sell"); map.put(new SubCharSequence("kept"), "keep"); map.put(new SubCharSequence("overflew"), "overfly"); map.put(new SubCharSequence("unladen"), "unlade"); map.put(new SubCharSequence("overladed"), "overlade"); map.put(new SubCharSequence("hamstrung"), "hamstring"); map.put(new SubCharSequence("thought"), "think"); map.put(new 
SubCharSequence("chosen"), "choose"); map.put(new SubCharSequence("worked"), "work"); map.put(new SubCharSequence("swore"), "swear"); map.put(new SubCharSequence("forewent"), "forego"); map.put(new SubCharSequence("chid"), "chide"); map.put(new SubCharSequence("swam"), "swim"); map.put(new SubCharSequence("bestrewed"), "bestrew"); map.put(new SubCharSequence("bestridden"), "bestride"); map.put(new SubCharSequence("untrodden"), "untread"); map.put(new SubCharSequence("tore"), "tear"); map.put(new SubCharSequence("rung"), "ring"); map.put(new SubCharSequence("bereaved"), "bereave"); map.put(new SubCharSequence("cloven"), "cleave"); map.put(new SubCharSequence("wist"), "wit"); map.put(new SubCharSequence("slid"), "slide"); map.put(new SubCharSequence("taken"), "take"); map.put(new SubCharSequence("hewed"), "hew"); map.put(new SubCharSequence("undershot"), "undershoot"); map.put(new SubCharSequence("undergone"), "undergo"); map.put(new SubCharSequence("outspent"), "outspend"); map.put(new SubCharSequence("betook"), "betake"); map.put(new SubCharSequence("awoke"), "awake"); map.put(new SubCharSequence("strived"), "strive"); map.put(new SubCharSequence("sent"), "send"); map.put(new SubCharSequence("forgiven"), "forgive"); map.put(new SubCharSequence("waked"), "wake"); map.put(new SubCharSequence("unthought"), "unthink"); map.put(new SubCharSequence("unknitted"), "unknit"); map.put(new SubCharSequence("mistook"), "mistake"); map.put(new SubCharSequence("gainsaid"), "gainsay"); map.put(new SubCharSequence("rose"), "rise"); map.put(new SubCharSequence("forbidden"), "forbid"); map.put(new SubCharSequence("underwrote"), "underwrite"); map.put(new SubCharSequence("hewn"), "hew"); map.put(new SubCharSequence("winded"), "wind"); map.put(new SubCharSequence("spoilt"), "spoil"); map.put(new SubCharSequence("ridden"), "ride"); map.put(new SubCharSequence("overdrove"), "overdrive"); map.put(new SubCharSequence("unrove"), "unreeve"); map.put(new SubCharSequence("done"), "do"); 
map.put(new SubCharSequence("drawn"), "draw"); map.put(new SubCharSequence("left"), "leave"); map.put(new SubCharSequence("undrawn"), "undraw"); map.put(new SubCharSequence("foretold"), "foretell"); map.put(new SubCharSequence("bound"), "bind"); map.put(new SubCharSequence("spilled"), "spill"); map.put(new SubCharSequence("strewed"), "strew"); map.put(new SubCharSequence("outeaten"), "outeat"); map.put(new SubCharSequence("speeded"), "speed"); map.put(new SubCharSequence("overtook"), "overtake"); map.put(new SubCharSequence("proven"), "prove"); map.put(new SubCharSequence("blown"), "blow"); map.put(new SubCharSequence("shined"), "shine"); map.put(new SubCharSequence("thrived"), "thrive"); map.put(new SubCharSequence("wound"), "wind"); map.put(new SubCharSequence("overrode"), "override"); map.put(new SubCharSequence("did"), "do"); map.put(new SubCharSequence("came"), "come"); map.put(new SubCharSequence("hove"), "heave"); map.put(new SubCharSequence("outfought"), "outfight"); map.put(new SubCharSequence("stridden"), "stride"); map.put(new SubCharSequence("slung"), "sling"); map.put(new SubCharSequence("underlaid"), "underlay"); map.put(new SubCharSequence("struck"), "strike"); map.put(new SubCharSequence("sat"), "sit"); map.put(new SubCharSequence("begot"), "beget"); map.put(new SubCharSequence("eaten"), "eat"); map.put(new SubCharSequence("overwound"), "overwind"); map.put(new SubCharSequence("chided"), "chide"); map.put(new SubCharSequence("forgave"), "forgive"); map.put(new SubCharSequence("overlay"), "overlie"); map.put(new SubCharSequence("bespoke"), "bespeak"); map.put(new SubCharSequence("outthrew"), "outthrow"); map.put(new SubCharSequence("could"), "can"); map.put(new SubCharSequence("frozen"), "freeze"); map.put(new SubCharSequence("besought"), "beseech"); map.put(new SubCharSequence("fought"), "fight"); map.put(new SubCharSequence("underlay"), "underlie"); map.put(new SubCharSequence("misled"), "mislead"); map.put(new SubCharSequence("wetted"), "wet"); 
map.put(new SubCharSequence("undertaken"), "undertake"); map.put(new SubCharSequence("bestrid"), "bestride"); map.put(new SubCharSequence("withheld"), "withhold"); map.put(new SubCharSequence("undersold"), "undersell"); map.put(new SubCharSequence("meant"), "mean"); map.put(new SubCharSequence("bestrewn"), "bestrew"); map.put(new SubCharSequence("lost"), "lose"); map.put(new SubCharSequence("forgot"), "forget"); map.put(new SubCharSequence("paid"), "pay"); map.put(new SubCharSequence("shriven"), "shrive"); map.put(new SubCharSequence("blent"), "blend"); map.put(new SubCharSequence("foreshowed"), "foreshow"); map.put(new SubCharSequence("spoke"), "speak"); map.put(new SubCharSequence("strove"), "strive"); map.put(new SubCharSequence("drank"), "drink"); map.put(new SubCharSequence("unspoke"), "unspeak"); map.put(new SubCharSequence("outran"), "outrun"); map.put(new SubCharSequence("sprang"), "spring"); map.put(new SubCharSequence("oversewed"), "oversew"); map.put(new SubCharSequence("overseen"), "oversee"); map.put(new SubCharSequence("foresaw"), "foresee"); map.put(new SubCharSequence("forbad"), "forbid"); map.put(new SubCharSequence("upswept"), "upsweep"); map.put(new SubCharSequence("broken"), "break"); map.put(new SubCharSequence("fordone"), "fordo"); map.put(new SubCharSequence("got"), "get"); map.put(new SubCharSequence("overslept"), "oversleep"); map.put(new SubCharSequence("won"), "win"); map.put(new SubCharSequence("betted"), "bet"); map.put(new SubCharSequence("sunken"), "sink"); map.put(new SubCharSequence("forborne"), "forbear"); map.put(new SubCharSequence("told"), "tell"); map.put(new SubCharSequence("heard"), "hear"); map.put(new SubCharSequence("overspent"), "overspend"); map.put(new SubCharSequence("bidden"), "bid"); map.put(new SubCharSequence("outthrown"), "outthrow"); map.put(new SubCharSequence("had"), "have"); map.put(new SubCharSequence("betaken"), "betake"); map.put(new SubCharSequence("drew"), "draw"); map.put(new SubCharSequence("beheld"), 
"behold"); map.put(new SubCharSequence("became"), "become"); map.put(new SubCharSequence("graven"), "grave"); map.put(new SubCharSequence("riven"), "rive"); map.put(new SubCharSequence("might"), "may"); map.put(new SubCharSequence("met"), "meet"); map.put(new SubCharSequence("showed"), "show"); map.put(new SubCharSequence("dwelled"), "dwell"); map.put(new SubCharSequence("shrank"), "shrink"); map.put(new SubCharSequence("untaught"), "unteach"); map.put(new SubCharSequence("upheld"), "uphold"); map.put(new SubCharSequence("flown"), "fly"); map.put(new SubCharSequence("bereft"), "bereave"); map.put(new SubCharSequence("clung"), "cling"); map.put(new SubCharSequence("leant"), "lean"); map.put(new SubCharSequence("outgrew"), "outgrow"); map.put(new SubCharSequence("underlain"), "underlie"); map.put(new SubCharSequence("wrung"), "wring"); map.put(new SubCharSequence("sank"), "sink"); map.put(new SubCharSequence("forbade"), "forbid"); map.put(new SubCharSequence("bespoken"), "bespeak"); map.put(new SubCharSequence("overridden"), "override"); map.put(new SubCharSequence("spent"), "spend"); map.put(new SubCharSequence("unladed"), "unlade"); map.put(new SubCharSequence("flung"), "fling"); map.put(new SubCharSequence("sworn"), "swear"); map.put(new SubCharSequence("sweated"), "sweat"); map.put(new SubCharSequence("befell"), "befall"); map.put(new SubCharSequence("thriven"), "thrive"); map.put(new SubCharSequence("fed"), "feed"); map.put(new SubCharSequence("broke"), "break"); map.put(new SubCharSequence("underfed"), "underfeed"); map.put(new SubCharSequence("unspoken"), "unspeak"); map.put(new SubCharSequence("unfroze"), "unfreeze"); map.put(new SubCharSequence("sewed"), "sew"); map.put(new SubCharSequence("overleaped"), "overleap"); map.put(new SubCharSequence("risen"), "rise"); map.put(new SubCharSequence("rebuilt"), "rebuild"); map.put(new SubCharSequence("bred"), "breed"); map.put(new SubCharSequence("slew"), "slay"); map.put(new SubCharSequence("laded"), "lade"); 
map.put(new SubCharSequence("overleapt"), "overleap"); map.put(new SubCharSequence("dreamt"), "dream"); map.put(new SubCharSequence("drove"), "drive"); map.put(new SubCharSequence("crept"), "creep"); map.put(new SubCharSequence("worn"), "wear"); map.put(new SubCharSequence("laden"), "lade"); map.put(new SubCharSequence("overdriven"), "overdrive"); map.put(new SubCharSequence("shorn"), "shear"); map.put(new SubCharSequence("outgrown"), "outgrow"); map.put(new SubCharSequence("underbought"), "underbuy"); map.put(new SubCharSequence("unbent"), "unbend"); map.put(new SubCharSequence("overhung"), "overhang"); map.put(new SubCharSequence("bought"), "buy"); map.put(new SubCharSequence("lied"), "lie"); map.put(new SubCharSequence("fled"), "fly"); map.put(new SubCharSequence("stung"), "sting"); map.put(new SubCharSequence("shod"), "shoe"); map.put(new SubCharSequence("bent"), "bend"); map.put(new SubCharSequence("outshone"), "outshine"); map.put(new SubCharSequence("hung"), "hang"); map.put(new SubCharSequence("underdone"), "underdo"); map.put(new SubCharSequence("outwent"), "outgo"); map.put(new SubCharSequence("swum"), "swim"); map.put(new SubCharSequence("spelt"), "spell"); map.put(new SubCharSequence("born"), "bear"); map.put(new SubCharSequence("swollen"), "swell"); map.put(new SubCharSequence("shrunk"), "shrink"); map.put(new SubCharSequence("been"), "be"); map.put(new SubCharSequence("crowed"), "crow"); map.put(new SubCharSequence("durst"), "dare"); map.put(new SubCharSequence("shrunken"), "shrink"); map.put(new SubCharSequence("stolen"), "steal"); map.put(new SubCharSequence("bestrode"), "bestride"); map.put(new SubCharSequence("undone"), "undo"); map.put(new SubCharSequence("understood"), "understand"); map.put(new SubCharSequence("reeved"), "reeve"); map.put(new SubCharSequence("proved"), "prove"); map.put(new SubCharSequence("were"), "be"); map.put(new SubCharSequence("spilt"), "spill"); map.put(new SubCharSequence("unreeved"), "unreeve"); map.put(new 
SubCharSequence("undrew"), "undraw"); map.put(new SubCharSequence("dreamed"), "dream"); map.put(new SubCharSequence("girt"), "gird"); map.put(new SubCharSequence("awoken"), "awake"); map.put(new SubCharSequence("flied"), "fly"); map.put(new SubCharSequence("said"), "say"); map.put(new SubCharSequence("knitted"), "knit"); map.put(new SubCharSequence("unwoven"), "unweave"); map.put(new SubCharSequence("led"), "lead"); map.put(new SubCharSequence("unfrozen"), "unfreeze"); map.put(new SubCharSequence("outshot"), "outshoot"); map.put(new SubCharSequence("gave"), "give"); map.put(new SubCharSequence("given"), "give"); map.put(new SubCharSequence("uprisen"), "uprise"); map.put(new SubCharSequence("arisen"), "arise"); map.put(new SubCharSequence("forsook"), "forsake"); map.put(new SubCharSequence("froze"), "freeze"); map.put(new SubCharSequence("caught"), "catch"); map.put(new SubCharSequence("foreseen"), "foresee"); map.put(new SubCharSequence("overran"), "overrun"); map.put(new SubCharSequence("abode"), "abide"); map.put(new SubCharSequence("unlaid"), "unlay"); map.put(new SubCharSequence("swept"), "sweep"); map.put(new SubCharSequence("ran"), "run"); map.put(new SubCharSequence("blew"), "blow"); map.put(new SubCharSequence("burnt"), "burn"); map.put(new SubCharSequence("ungirt"), "ungird"); map.put(new SubCharSequence("begun"), "begin"); map.put(new SubCharSequence("smelled"), "smell"); map.put(new SubCharSequence("shook"), "shake"); map.put(new SubCharSequence("upswung"), "upswing"); map.put(new SubCharSequence("blended"), "blend"); map.put(new SubCharSequence("unsworn"), "unswear"); map.put(new SubCharSequence("threw"), "throw"); map.put(new SubCharSequence("hamstringed"), "hamstring"); map.put(new SubCharSequence("overcame"), "overcome"); map.put(new SubCharSequence("died"), "die"); map.put(new SubCharSequence("rode"), "ride"); map.put(new SubCharSequence("sown"), "sow"); map.put(new SubCharSequence("outlaid"), "outlay"); map.put(new SubCharSequence("inlaid"), 
"inlay"); map.put(new SubCharSequence("torn"), "tear"); map.put(new SubCharSequence("overdone"), "overdo"); map.put(new SubCharSequence("strewn"), "strew"); map.put(new SubCharSequence("untrod"), "untread"); map.put(new SubCharSequence("outwore"), "outwear"); map.put(new SubCharSequence("beseeched"), "beseech"); map.put(new SubCharSequence("unwound"), "unwind"); map.put(new SubCharSequence("upbuilt"), "upbuild"); map.put(new SubCharSequence("bitten"), "bite"); map.put(new SubCharSequence("hanged"), "hang"); map.put(new SubCharSequence("lit"), "light"); map.put(new SubCharSequence("heaved"), "heave"); map.put(new SubCharSequence("woven"), "weave"); map.put(new SubCharSequence("overfed"), "overfeed"); map.put(new SubCharSequence("mown"), "mow"); map.put(new SubCharSequence("foreshown"), "foreshow"); map.put(new SubCharSequence("clothed"), "clothe"); map.put(new SubCharSequence("outdone"), "outdo"); map.put(new SubCharSequence("outbred"), "outbreed"); map.put(new SubCharSequence("outsold"), "outsell"); map.put(new SubCharSequence("overbore"), "overbear"); map.put(new SubCharSequence("outrode"), "outride"); map.put(new SubCharSequence("overwritten"), "overwrite"); map.put(new SubCharSequence("begotten"), "beget"); map.put(new SubCharSequence("borne"), "bear"); map.put(new SubCharSequence("driven"), "drive"); map.put(new SubCharSequence("shaven"), "shave"); map.put(new SubCharSequence("shrove"), "shrive"); map.put(new SubCharSequence("unhung"), "unhang"); map.put(new SubCharSequence("melten"), "melt"); map.put(new SubCharSequence("known"), "know"); map.put(new SubCharSequence("rove"), "reeve"); map.put(new SubCharSequence("overlain"), "overlie"); map.put(new SubCharSequence("overthrew"), "overthrow"); map.put(new SubCharSequence("outbidden"), "outbid"); map.put(new SubCharSequence("took"), "take"); map.put(new SubCharSequence("sought"), "seek"); map.put(new SubCharSequence("foreknown"), "foreknow"); map.put(new SubCharSequence("spoiled"), "spoil"); map.put(new 
SubCharSequence("bore"), "bear"); map.put(new SubCharSequence("crew"), "crow"); map.put(new SubCharSequence("made"), "make"); map.put(new SubCharSequence("brought"), "bring"); map.put(new SubCharSequence("oversewn"), "oversew"); map.put(new SubCharSequence("misunderstood"), "misunderstand"); map.put(new SubCharSequence("began"), "begin"); map.put(new SubCharSequence("strode"), "stride"); map.put(new SubCharSequence("misgave"), "misgive"); map.put(new SubCharSequence("shredded"), "shred"); map.put(new SubCharSequence("sowed"), "sow"); map.put(new SubCharSequence("befallen"), "befall"); map.put(new SubCharSequence("overlaid"), "overlay"); map.put(new SubCharSequence("uprose"), "uprise"); map.put(new SubCharSequence("reaved"), "reave"); map.put(new SubCharSequence("melted"), "melt"); map.put(new SubCharSequence("fallen"), "fall"); map.put(new SubCharSequence("overwrote"), "overwrite"); map.put(new SubCharSequence("chidden"), "chide"); map.put(new SubCharSequence("shaved"), "shave"); map.put(new SubCharSequence("sped"), "speed"); map.put(new SubCharSequence("overbidden"), "overbid"); map.put(new SubCharSequence("spoken"), "speak"); map.put(new SubCharSequence("ridded"), "rid"); map.put(new SubCharSequence("shown"), "show"); map.put(new SubCharSequence("wore"), "wear"); map.put(new SubCharSequence("sheared"), "shear"); map.put(new SubCharSequence("cleft"), "cleave"); map.put(new SubCharSequence("shone"), "shine"); map.put(new SubCharSequence("outridden"), "outride"); map.put(new SubCharSequence("kneeled"), "kneel"); map.put(new SubCharSequence("withdrew"), "withdraw"); map.put(new SubCharSequence("stank"), "stink"); map.put(new SubCharSequence("dealt"), "deal"); map.put(new SubCharSequence("chose"), "choose"); map.put(new SubCharSequence("reft"), "reave"); map.put(new SubCharSequence("sprung"), "spring"); mapping = map; } public static CharSequence get(CharSequence word) { if (mapping == null) { buildMap(); } return mapping.get(word); } }
/* * Copyright (c) 2003, the JUNG Project and the Regents of the University of * California All rights reserved. * * This software is open-source under the BSD license; see either "license.txt" * or http://jung.sourceforge.net/license.txt for a description. */ /* * Created on Jul 2, 2003 * */ package edu.uci.ics.jung.graph.util; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import edu.uci.ics.jung.graph.DirectedGraph; import edu.uci.ics.jung.graph.DirectedSparseMultigraph; import edu.uci.ics.jung.graph.Graph; import edu.uci.ics.jung.graph.SparseMultigraph; import edu.uci.ics.jung.graph.UndirectedGraph; import edu.uci.ics.jung.graph.UndirectedSparseMultigraph; /** * Provides generators for several different test graphs. */ public class TestGraphs { /** * A series of pairs that may be useful for generating graphs. The * miniature graph consists of 8 edges, 10 nodes, and is formed of two * connected components, one of 8 nodes, the other of 2. * */ public static String[][] pairs = { { "a", "b", "3" }, { "a", "c", "4" }, { "a", "d", "5" }, { "d", "c", "6" }, { "d", "e", "7" }, { "e", "f", "8" }, { "f", "g", "9" }, { "h", "i", "1" } }; /** * Creates a small sample graph that can be used for testing purposes. The * graph is as described in the section on {@link #pairs pairs}. If <code>isDirected</code>, * the graph is a {@link DirectedSparseMultigraph DirectedSparseMultigraph}, * otherwise, it is an {@link UndirectedSparseMultigraph UndirectedSparseMultigraph}. * * @return a graph consisting of eight edges and ten nodes. 
*/ public static Graph<String, Number> createTestGraph(boolean directed) { Graph<String, Number> graph = null; if(directed) { graph = new DirectedSparseMultigraph<String,Number>(); } else { graph = new UndirectedSparseMultigraph<String,Number>(); } for (int i = 0; i < pairs.length; i++) { String[] pair = pairs[i]; graph.addEdge(Integer.parseInt(pair[2]), pair[0], pair[1]); } return graph; } /** * Returns a graph consisting of a chain of <code>vertex_count - 1</code> vertices * plus one isolated vertex. */ public static Graph<String,Number> createChainPlusIsolates(int chain_length, int isolate_count) { Graph<String,Number> g = new UndirectedSparseMultigraph<String,Number>(); if (chain_length > 0) { String[] v = new String[chain_length]; v[0] = "v"+0; g.addVertex(v[0]); for (int i = 1; i < chain_length; i++) { v[i] = "v"+i; g.addVertex(v[i]); g.addEdge(new Double(Math.random()), v[i], v[i-1]); } } for (int i = 0; i < isolate_count; i++) { String v = "v"+(chain_length+i); g.addVertex(v); } return g; } /** * Creates a sample directed acyclic graph by generating several "layers", * and connecting nodes (randomly) to nodes in earlier (but never later) * layers. Each layer has some random number of nodes in it 1 less than n * less than maxNodesPerLayer. * * @return the created graph */ public static Graph<String,Number> createDirectedAcyclicGraph( int layers, int maxNodesPerLayer, double linkprob) { DirectedGraph<String,Number> dag = new DirectedSparseMultigraph<String,Number>(); Set<String> previousLayers = new HashSet<String>(); Set<String> inThisLayer = new HashSet<String>(); for (int i = 0; i < layers; i++) { int nodesThisLayer = (int) (Math.random() * maxNodesPerLayer) + 1; for (int j = 0; j < nodesThisLayer; j++) { String v = i+":"+j; dag.addVertex(v); inThisLayer.add(v); // for each previous node... 
for(String v2 : previousLayers) { if (Math.random() < linkprob) { Double de = new Double(Math.random()); dag.addEdge(de, v, v2); } } } previousLayers.addAll(inThisLayer); inThisLayer.clear(); } return dag; } private static void createEdge( Graph<String, Number> g, String v1Label, String v2Label, int weight) { g.addEdge(new Double(Math.random()), v1Label, v2Label); } /** * Returns a bigger, undirected test graph with a just one component. This * graph consists of a clique of ten edges, a partial clique (randomly * generated, with edges of 0.6 probability), and one series of edges * running from the first node to the last. * * @return the testgraph */ public static Graph<String,Number> getOneComponentGraph() { UndirectedGraph<String,Number> g = new UndirectedSparseMultigraph<String,Number>(); // let's throw in a clique, too for (int i = 1; i <= 10; i++) { for (int j = i + 1; j <= 10; j++) { String i1 = "" + i; String i2 = "" + j; g.addEdge(Math.pow(i+2,j), i1, i2); } } // and, last, a partial clique for (int i = 11; i <= 20; i++) { for (int j = i + 1; j <= 20; j++) { if (Math.random() > 0.6) continue; String i1 = "" + i; String i2 = "" + j; g.addEdge(Math.pow(i+2,j), i1, i2); } } List<String> index = new ArrayList<String>(); index.addAll(g.getVertices()); // and one edge to connect them all for (int i = 0; i < index.size() - 1; i++) g.addEdge(new Integer(i), index.get(i), index.get(i+1)); return g; } /** * Returns a bigger test graph with a clique, several components, and other * parts. * * @return a demonstration graph of type <tt>UndirectedSparseMultigraph</tt> * with 28 vertices. 
*/ public static Graph<String, Number> getDemoGraph() { UndirectedGraph<String, Number> g = new UndirectedSparseMultigraph<String, Number>(); for (int i = 0; i < pairs.length; i++) { String[] pair = pairs[i]; createEdge(g, pair[0], pair[1], Integer.parseInt(pair[2])); } // let's throw in a clique, too for (int i = 1; i <= 10; i++) { for (int j = i + 1; j <= 10; j++) { String i1 = "c" + i; String i2 = "c" + j; g.addEdge(Math.pow(i+2,j), i1, i2); } } // and, last, a partial clique for (int i = 11; i <= 20; i++) { for (int j = i + 1; j <= 20; j++) { if (Math.random() > 0.6) continue; String i1 = "p" + i; String i2 = "p" + j; g.addEdge(Math.pow(i+2,j), i1, i2); } } return g; } /** * Returns a small graph with directed and undirected edges, and parallel edges. */ public static Graph<String, Number> getSmallGraph() { Graph<String, Number> graph = new SparseMultigraph<String, Number>(); String[] v = new String[3]; for (int i = 0; i < 3; i++) { v[i] = String.valueOf(i); graph.addVertex(v[i]); } graph.addEdge(new Double(0), v[0], v[1], EdgeType.DIRECTED); graph.addEdge(new Double(.1), v[0], v[1], EdgeType.DIRECTED); graph.addEdge(new Double(.2), v[0], v[1], EdgeType.DIRECTED); graph.addEdge(new Double(.3), v[1], v[0], EdgeType.DIRECTED); graph.addEdge(new Double(.4), v[1], v[0], EdgeType.DIRECTED); graph.addEdge(new Double(.5), v[1], v[2]); graph.addEdge(new Double(.6), v[1], v[2]); return graph; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.sling.testing.mock.jcr;

import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;

import javax.jcr.Credentials;
import javax.jcr.Item;
import javax.jcr.ItemNotFoundException;
import javax.jcr.Node;
import javax.jcr.PathNotFoundException;
import javax.jcr.Property;
import javax.jcr.RangeIterator;
import javax.jcr.Repository;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.ValueFactory;
import javax.jcr.Workspace;
import javax.jcr.retention.RetentionManager;
import javax.jcr.security.AccessControlManager;

import org.apache.commons.lang3.StringUtils;
import org.apache.jackrabbit.commons.iterator.RangeIteratorAdapter;
import org.apache.jackrabbit.value.ValueFactoryImpl;
import org.xml.sax.ContentHandler;

/**
 * Mock {@link Session} implementation. This instance holds the JCR data in a
 * simple ordered map.
 * <p>
 * All item data lives in the {@code items} map, keyed by absolute path.
 * Most mutating operations go through {@link #addItem(ItemData)} and
 * {@link #removeItem(String)}; {@link #save()} clears the new/changed flags.
 */
class MockSession implements Session {

    // repository this session was opened against; returned by getRepository()
    private final MockRepository repository;

    // workspace facade created per session (namespace registry access etc.)
    private final Workspace workspace;

    // all item data keyed by absolute path; shared with the repository
    private final Map<String, ItemData> items;

    // user id reported by getUserID()
    private final String userId;

    // false once logout() has been called; guarded by checkLive()
    private boolean isLive;

    // true after a structural removal until the next save()
    private boolean hasKnownChanges;

    /**
     * Creates the session and immediately calls {@link #save()} so that the
     * pre-populated items start with their new/changed flags cleared.
     *
     * @param repository owning repository
     * @param items item data map keyed by absolute path
     * @param userId user id reported by this session
     * @param workspaceName name of the mock workspace
     * @throws RepositoryException propagated from save()
     */
    public MockSession(MockRepository repository, Map<String, ItemData> items,
            String userId, String workspaceName) throws RepositoryException {
        this.repository = repository;
        this.workspace = new MockWorkspace(repository, this, workspaceName);
        this.items = items;
        this.userId = userId;
        isLive = true;
        hasKnownChanges = false;
        this.save();
    }

    /**
     * Guard invoked at the start of every JCR operation.
     * @throws RepositoryException if logout() was already called
     */
    private void checkLive() throws RepositoryException {
        if (!isLive) {
            throw new RepositoryException("Session is logged out / not live.");
        }
    }

    @Override
    public ValueFactory getValueFactory() throws RepositoryException {
        checkLive();
        return ValueFactoryImpl.getInstance();
    }

    /**
     * Returns the item at the given absolute path, wrapped as a
     * {@link MockNode} or {@link MockProperty} depending on the item type.
     * @throws PathNotFoundException if no item exists at the path
     */
    @Override
    public Item getItem(final String absPath) throws RepositoryException {
        checkLive();
        final ItemData itemData = getItemData(absPath);
        if (itemData != null) {
            if (itemData.isNode()) {
                return new MockNode(itemData, this);
            } else {
                return new MockProperty(itemData, this);
            }
        } else {
            throw new PathNotFoundException(String.format("No item found at: %s.", absPath));
        }
    }

    /**
     * Like {@link #getItem(String)} but fails unless the item is a node.
     * @throws PathNotFoundException if the path is missing or holds a property
     */
    @Override
    public Node getNode(final String absPath) throws RepositoryException {
        checkLive();
        Item item = getItem(absPath);
        if (item instanceof Node) {
            return (Node) item;
        } else {
            throw new PathNotFoundException(String.format("No node found at: %s.", absPath));
        }
    }

    /**
     * Linear scan over all items for a node whose UUID matches the given id.
     * @throws ItemNotFoundException if no node carries that identifier
     */
    @Override
    public Node getNodeByIdentifier(final String id) throws RepositoryException {
        checkLive();
        for (ItemData item : this.items.values()) {
            if (item.isNode() && StringUtils.equals(item.getUuid(), id)) {
                return new MockNode(item, this);
            }
        }
        throw new ItemNotFoundException(String.format("No node found with id: %s.", id));
    }

    /**
     * Like {@link #getItem(String)} but fails unless the item is a property.
     * @throws PathNotFoundException if the path is missing or holds a node
     */
    @Override
    public Property getProperty(final String absPath) throws RepositoryException {
        checkLive();
        Item item = getItem(absPath);
        if (item instanceof Property) {
            return (Property) item;
        } else {
            throw new PathNotFoundException(String.format("No property found at: %s.", absPath));
        }
    }

    @Override
    public boolean nodeExists(final String absPath) throws RepositoryException {
        checkLive();
        // itemExists guards against the NPE from the second lookup
        return itemExists(absPath) && getItemData(absPath).isNode();
    }

    @Override
    public boolean propertyExists(final String absPath) throws RepositoryException {
        checkLive();
        return itemExists(absPath) && getItemData(absPath).isProperty();
    }

    /**
     * Removes the item at the given path together with all of its descendants.
     */
    @Override
    public void removeItem(final String absPath) throws RepositoryException {
        checkLive();
        removeItemWithChildren(absPath);
    }

    @Override
    public Node getRootNode() throws RepositoryException {
        checkLive();
        return getNode("/");
    }

    @Override
    public Node getNodeByUUID(final String uuid) throws RepositoryException {
        checkLive();
        // deprecated JCR 1.0 variant; delegates to the identifier lookup
        return getNodeByIdentifier(uuid);
    }

    /**
     * Add item
     * @param itemData item data registered under its absolute path
     */
    void addItem(final ItemData itemData) {
        this.items.put(itemData.getPath(), itemData);
    }

    /**
     * Looks up item data by normalized absolute path, or null if absent.
     * NOTE(review): ResourceUtil is not among the visible imports here —
     * presumably a same-package helper that normalizes "." / ".." segments;
     * verify against the rest of the package.
     */
    private ItemData getItemData(final String absPath) {
        final String normalizedPath = ResourceUtil.normalize(absPath);
        return this.items.get(normalizedPath);
    }

    /**
     * Remove item incl. children
     * @param absPath Item path
     */
    private void removeItemWithChildren(final String absPath) throws RepositoryException {
        if (!itemExists(absPath)) {
            return;
        }
        final ItemData parent = getItemData(absPath);
        final String descendantPrefix = parent.getPath() + "/";
        // collect first, then remove, to avoid mutating the map while iterating
        final List<String> pathsToRemove = new ArrayList<String>();
        pathsToRemove.add(parent.getPath());
        for (String itemPath : this.items.keySet()) {
            if (itemPath.startsWith(descendantPrefix)) {
                pathsToRemove.add(itemPath);
            }
        }
        for (String pathToRemove : pathsToRemove) {
            this.items.remove(pathToRemove);
        }
        // removals can't be detected from item flags, so record them explicitly
        hasKnownChanges = true;
    }

    /**
     * Lists the direct children of the given parent path, optionally filtered.
     *
     * @param parentPath absolute path of the parent
     * @param filter optional item filter; null accepts everything
     * @return iterator over matching direct children
     */
    RangeIterator listChildren(final String parentPath, final ItemFilter filter) throws RepositoryException {
        List<Item> children = new ArrayList<Item>();

        //remove trailing slash or make root path / empty string
        final String path = parentPath.replaceFirst("/$", "");

        // build regex pattern for all child paths of parent
        Pattern pattern = Pattern.compile("^" + Pattern.quote(path) + "/[^/]+$");

        // collect child resources
        for (ItemData item : this.items.values()) {
            if (pattern.matcher(item.getPath()).matches()
                    && (filter == null || filter.accept(item))) {
                children.add(item.getItem(this));
            }
        }

        return new RangeIteratorAdapter(children.iterator(), children.size());
    }

    /**
     * Reports pending changes: either a recorded removal or any item still
     * flagged as new/changed since the last save().
     */
    @Override
    public boolean hasPendingChanges() throws RepositoryException {
        checkLive();
        if (hasKnownChanges) {
            return true;
        }
        for (final ItemData item : this.items.values()) {
            if (item.isNew() || item.isChanged()) {
                return true;
            }
        }
        return false;
    }

    @Override
    public boolean itemExists(final String absPath) throws RepositoryException {
        checkLive();
        return getItemData(absPath) != null;
    }

    @Override
    public Workspace getWorkspace() {
        return this.workspace;
    }

    @Override
    public String getUserID() {
        return this.userId;
    }

    @Override
    public String getNamespacePrefix(final String uri) throws RepositoryException {
        checkLive();
        return getWorkspace().getNamespaceRegistry().getPrefix(uri);
    }

    @Override
    public String[] getNamespacePrefixes() throws RepositoryException {
        checkLive();
        return getWorkspace().getNamespaceRegistry().getPrefixes();
    }

    @Override
    public String getNamespaceURI(final String prefix) throws RepositoryException {
        checkLive();
        return getWorkspace().getNamespaceRegistry().getURI(prefix);
    }

    @Override
    public void setNamespacePrefix(final String prefix, final String uri) throws RepositoryException {
        checkLive();
        getWorkspace().getNamespaceRegistry().registerNamespace(prefix, uri);
    }

    @Override
    public Repository getRepository() {
        return this.repository;
    }

    /**
     * Clears the new/changed flags on all items and the removal marker,
     * making {@link #hasPendingChanges()} return false again.
     */
    @Override
    public void save() throws RepositoryException {
        checkLive();
        // reset new flags
        for (ItemData itemData : this.items.values()) {
            itemData.setIsNew(false);
            itemData.setIsChanged(false);
        }
        hasKnownChanges = false;
    }

    @Override
    public void refresh(final boolean keepChanges) throws RepositoryException {
        // do nothing
        checkLive();
    }

    @Override
    public void checkPermission(final String absPath, final String actions) throws RepositoryException {
        // always grant permission
        checkLive();
    }

    @Override
    public boolean isLive() {
        return isLive;
    }

    @Override
    public void logout() {
        isLive = false;
    }

    // --- unsupported operations ---
    @Override
    public void addLockToken(final String lt) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void exportDocumentView(final String absPath, final ContentHandler contentHandler,
            final boolean skipBinary, final boolean noRecurse) throws RepositoryException {
        throw new UnsupportedOperationException();
    }

    @Override
    public void exportDocumentView(final String absPath, final OutputStream out,
            final boolean skipBinary, final boolean noRecurse) throws RepositoryException {
        throw new UnsupportedOperationException();
    }

    @Override
    public void exportSystemView(final String absPath, final ContentHandler contentHandler,
            final boolean skipBinary, final boolean noRecurse) throws RepositoryException {
        throw new UnsupportedOperationException();
    }

    @Override
    public void exportSystemView(final String absPath, final OutputStream out,
            final boolean skipBinary, final boolean noRecurse) throws RepositoryException {
        throw new UnsupportedOperationException();
    }

    @Override
    public Object getAttribute(final String name) {
        throw new UnsupportedOperationException();
    }

    @Override
    public String[] getAttributeNames() {
        throw new UnsupportedOperationException();
    }

    @Override
    public ContentHandler getImportContentHandler(final String parentAbsPath,
            final int uuidBehavior) throws RepositoryException {
        throw new UnsupportedOperationException();
    }

    @Override
    public String[] getLockTokens() {
        throw new UnsupportedOperationException();
    }

    @Override
    public Session impersonate(final Credentials credentials) throws RepositoryException {
        throw new UnsupportedOperationException();
    }

    @Override
    public void importXML(final String parentAbsPath, final InputStream in,
            final int uuidBehavior) throws RepositoryException {
        throw new UnsupportedOperationException();
    }

    @Override
    public void move(final String srcAbsPath, final String destAbsPath) throws RepositoryException {
        throw new UnsupportedOperationException();
    }

    @Override
    public void removeLockToken(final String lt) {
        throw new UnsupportedOperationException();
    }

    @Override
    public AccessControlManager getAccessControlManager() throws RepositoryException {
        throw new UnsupportedOperationException();
    }

    @Override
    public RetentionManager getRetentionManager() throws RepositoryException {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean hasCapability(final String methodName, final Object target,
            final Object[] arguments) throws RepositoryException {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean hasPermission(final String absPath, final String actions) throws RepositoryException {
        throw new UnsupportedOperationException();
    }

}
package com.b2msolutions.reyna.messageProvider; import android.app.AlarmManager; import android.content.Context; import com.b2msolutions.reyna.*; import com.b2msolutions.reyna.system.Header; import com.b2msolutions.reyna.system.Message; import com.b2msolutions.reyna.system.PeriodicBackoutCheck; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; import static junit.framework.Assert.assertFalse; import static junit.framework.Assert.assertNull; import static junit.framework.Assert.assertTrue; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.mockito.Mockito.*; import com.google.gson.Gson; import org.robolectric.RobolectricTestRunner; import org.robolectric.RuntimeEnvironment; @RunWith(RobolectricTestRunner.class) public class BatchProviderTest { private BatchProvider messageProvider; @Mock Repository repository; @Mock BatchConfiguration batchConfiguration; @Mock PeriodicBackoutCheck periodicBackoutCheck; private Context context; @Before public void setup() { MockitoAnnotations.initMocks(this); this.context = RuntimeEnvironment.application.getApplicationContext(); this.messageProvider = new BatchProvider(this.context, this.repository); this.messageProvider.batchConfiguration = this.batchConfiguration; this.messageProvider.periodicBackoutCheck = this.periodicBackoutCheck; doReturn(3).when(this.batchConfiguration).getBatchMessageCount(); doReturn(1000L).when(this.batchConfiguration).getBatchMessagesSize(); doReturn(URI.create("www.post.com/api/batch")).when(this.batchConfiguration).getBatchUrl(); doReturn(AlarmManager.INTERVAL_DAY).when(this.batchConfiguration).getSubmitInterval(); } @Test public void testConstruction() { this.messageProvider = new BatchProvider(this.context, this.repository); assertNotNull(this.messageProvider); 
assertNotNull(this.messageProvider.repository); assertNotNull(this.messageProvider.batchConfiguration); assertNotNull(this.messageProvider.periodicBackoutCheck); } @Test public void whenCallingCloseShouldCloseRepository() throws URISyntaxException { this.messageProvider.close(); verify(this.repository).close(); } @Test public void whenCallingCloseAndNeverSendSuccessfulBatchShouldNotRecord() throws URISyntaxException { this.messageProvider.close(); verify(this.repository).close(); verify(this.periodicBackoutCheck,never()).record("BatchProvider"); } @Test public void whenCallingCloseAndSuccessfullySentBatchShouldRecord() throws URISyntaxException { Message message = mock(Message.class); when(message.getId()).thenReturn(42L); this.messageProvider.delete(message); this.messageProvider.close(); verify(this.repository).close(); verify(this.periodicBackoutCheck).record("BatchProvider"); } @Test public void whenCallingDeleteShouldDeleteFromRepository() throws URISyntaxException { Message message = mock(Message.class); when(message.getId()).thenReturn(42L); this.messageProvider.delete(message); verify(this.repository).deleteMessagesFrom(message.getId()); } @Test public void whenCallingGetNextAndNoMessageShouldReturnNull() throws URISyntaxException { Message actual = this.messageProvider.getNext(); assertNull(actual); } @Test public void whenCallingGetNextShouldReturnCorrectFormat() throws URISyntaxException { ArrayList<Message> messages = this.getTestMessages(); when(this.repository.getNext()).thenReturn(messages.get(0)); when(this.repository.getNextMessageAfter(1L)).thenReturn(messages.get(1)); when(this.repository.getNextMessageAfter(2L)).thenReturn(messages.get(2)); when(this.repository.getNextMessageAfter(3L)).thenReturn(null); Message actual = this.messageProvider.getNext(); assertNotNull(actual); assertEquals("www.post.com/api/batch", actual.getUrl()); assertEquals("{\"events\":[" + 
// NOTE(review): the statements below are the tail of a test method whose start
// lies before this chunk; reproduced unchanged.
                "{\"url\":\"http://google.com\",\"reynaId\":1,\"payload\":{\"key01\":\"value01\",\"key02\":11}}," +
                "{\"url\":\"http://google2.com\",\"reynaId\":2,\"payload\":{\"key11\":\"value11\",\"key12\":12}}," +
                "{\"url\":\"http://google3.com\",\"reynaId\":3,\"payload\":{\"key21\":\"value21\",\"key22\":22}}" +
                "]}", actual.getBody());
        this.assertHeaders(actual);
        // Round-trip the generated body through Gson to verify it parses back
        // into the expected Batch structure.
        Batch batch = new Gson().fromJson(actual.getBody(), Batch.class);
        assertEquals(1L, batch.getEvents().get(0).getReynaId());
        assertEquals("http://google.com", batch.getEvents().get(0).getUrl());
        assertEquals("{\"key01\":\"value01\",\"key02\":11}", batch.getEvents().get(0).getPayload().toString());
        assertEquals(2L, batch.getEvents().get(1).getReynaId());
        assertEquals("http://google2.com", batch.getEvents().get(1).getUrl());
        assertEquals("{\"key11\":\"value11\",\"key12\":12}", batch.getEvents().get(1).getPayload().toString());
        assertEquals(3L, batch.getEvents().get(2).getReynaId());
        assertEquals("http://google3.com", batch.getEvents().get(2).getUrl());
        assertEquals("{\"key21\":\"value21\",\"key22\":22}", batch.getEvents().get(2).getPayload().toString());
    }

    // A message whose body is not valid JSON is wrapped as {"body": "<raw>"}
    // in the batch payload rather than being dropped.
    @Test
    public void whenCallingGetNextAndThereIsCorruptedMessageShouldPostIt() throws URISyntaxException {
        Message message = new Message(2L, URI.create("http://google2.com"), "{\"key11\":", getTestMessageHeaders());
        ArrayList<Message> messages = this.getTestMessages();
        messages.remove(1);
        messages.add(1,message);
        when(this.repository.getNext()).thenReturn(messages.get(0));
        when(this.repository.getNextMessageAfter(1L)).thenReturn(messages.get(1));
        when(this.repository.getNextMessageAfter(2L)).thenReturn(messages.get(2));
        when(this.repository.getNextMessageAfter(3L)).thenReturn(null);
        Message actual = this.messageProvider.getNext();
        assertNotNull(actual);
        assertEquals("www.post.com/api/batch", actual.getUrl());
        assertEquals("{\"events\":[" +
                "{\"url\":\"http://google.com\",\"reynaId\":1,\"payload\":{\"key01\":\"value01\",\"key02\":11}}," +
                "{\"url\":\"http://google2.com\",\"reynaId\":2,\"payload\":{\"body\":\"{\\\"key11\\\":\"}}," +
                "{\"url\":\"http://google3.com\",\"reynaId\":3,\"payload\":{\"key21\":\"value21\",\"key22\":22}}" +
                "]}", actual.getBody());
        assertEquals(3L, actual.getId().longValue());
        this.assertHeaders(actual);
        Batch batch = new Gson().fromJson(actual.getBody(), Batch.class);
        assertEquals(1L, batch.getEvents().get(0).getReynaId());
        assertEquals("http://google.com", batch.getEvents().get(0).getUrl());
        assertEquals("{\"key01\":\"value01\",\"key02\":11}", batch.getEvents().get(0).getPayload().toString());
        assertEquals(3L, batch.getEvents().get(2).getReynaId());
        assertEquals("http://google3.com", batch.getEvents().get(2).getUrl());
        assertEquals("{\"key21\":\"value21\",\"key22\":22}", batch.getEvents().get(2).getPayload().toString());
    }

    // Batch is capped at the configured maximum message count (2 here), so the
    // third stored message is excluded from this batch.
    @Test
    public void whenCallingGetNextShouldReturnMessagesRelatedToMaximumLimit() throws URISyntaxException {
        doReturn(2).when(this.batchConfiguration).getBatchMessageCount();
        ArrayList<Message> messages = this.getTestMessages();
        when(this.repository.getNext()).thenReturn(messages.get(0));
        when(this.repository.getNextMessageAfter(1L)).thenReturn(messages.get(1));
        when(this.repository.getNextMessageAfter(2L)).thenReturn(messages.get(2));
        when(this.repository.getNextMessageAfter(3L)).thenReturn(null);
        Message actual = this.messageProvider.getNext();
        assertNotNull(actual);
        assertEquals("www.post.com/api/batch", actual.getUrl());
        assertEquals("{\"events\":[" +
                "{\"url\":\"http://google.com\",\"reynaId\":1,\"payload\":{\"key01\":\"value01\",\"key02\":11}}," +
                "{\"url\":\"http://google2.com\",\"reynaId\":2,\"payload\":{\"key11\":\"value11\",\"key12\":12}}" +
                "]}", actual.getBody());
        this.assertHeaders(actual);
        Batch batch = new Gson().fromJson(actual.getBody(), Batch.class);
        assertEquals(1L, batch.getEvents().get(0).getReynaId());
        assertEquals("http://google.com", batch.getEvents().get(0).getUrl());
        assertEquals("{\"key01\":\"value01\",\"key02\":11}", batch.getEvents().get(0).getPayload().toString());
        assertEquals(2L, batch.getEvents().get(1).getReynaId());
        assertEquals("http://google2.com", batch.getEvents().get(1).getUrl());
        assertEquals("{\"key11\":\"value11\",\"key12\":12}", batch.getEvents().get(1).getPayload().toString());
    }

    // A plain-string (non-JSON) message body is wrapped as {"body": "<raw>"}.
    @Test
    public void whenCallingGetNextAndThereIsStringMessageShouldPostIt() throws URISyntaxException {
        Message message = new Message(2L, URI.create("http://google2.com"), "Message body", getTestMessageHeaders());
        ArrayList<Message> messages = this.getTestMessages();
        messages.remove(1);
        messages.add(1,message);
        when(this.repository.getNext()).thenReturn(messages.get(0));
        when(this.repository.getNextMessageAfter(1L)).thenReturn(messages.get(1));
        when(this.repository.getNextMessageAfter(2L)).thenReturn(messages.get(2));
        when(this.repository.getNextMessageAfter(3L)).thenReturn(null);
        Message actual = this.messageProvider.getNext();
        assertNotNull(actual);
        assertEquals("www.post.com/api/batch", actual.getUrl());
        assertEquals("{\"events\":[" +
                "{\"url\":\"http://google.com\",\"reynaId\":1,\"payload\":{\"key01\":\"value01\",\"key02\":11}}," +
                "{\"url\":\"http://google2.com\",\"reynaId\":2,\"payload\":{\"body\":\"Message body\"}}," +
                "{\"url\":\"http://google3.com\",\"reynaId\":3,\"payload\":{\"key21\":\"value21\",\"key22\":22}}" +
                "]}", actual.getBody());
        this.assertHeaders(actual);
        Batch batch = new Gson().fromJson(actual.getBody(), Batch.class);
        assertEquals(1L, batch.getEvents().get(0).getReynaId());
        assertEquals("http://google.com", batch.getEvents().get(0).getUrl());
        assertEquals("{\"key01\":\"value01\",\"key02\":11}", batch.getEvents().get(0).getPayload().toString());
        assertEquals(2L, batch.getEvents().get(1).getReynaId());
        assertEquals("http://google2.com", batch.getEvents().get(1).getUrl());
        assertEquals("{\"body\":\"Message body\"}", batch.getEvents().get(1).getPayload().toString());
        assertEquals(3L, batch.getEvents().get(2).getReynaId());
        assertEquals("http://google3.com", batch.getEvents().get(2).getUrl());
        assertEquals("{\"key21\":\"value21\",\"key22\":22}", batch.getEvents().get(2).getPayload().toString());
    }

    // Batch is capped by the configured byte-size limit (95 bytes fits only
    // the first event).
    @Test
    public void whenCallingGetNextShouldReturnMessagesRelatedToMaximumSize() throws URISyntaxException {
        doReturn(95L).when(this.batchConfiguration).getBatchMessagesSize();
        ArrayList<Message> messages = this.getTestMessages();
        when(this.repository.getNext()).thenReturn(messages.get(0));
        when(this.repository.getNextMessageAfter(1L)).thenReturn(messages.get(1));
        when(this.repository.getNextMessageAfter(2L)).thenReturn(messages.get(2));
        when(this.repository.getNextMessageAfter(3L)).thenReturn(null);
        Message actual = this.messageProvider.getNext();
        assertNotNull(actual);
        assertEquals("www.post.com/api/batch", actual.getUrl());
        assertEquals("{\"events\":[" +
                "{\"url\":\"http://google.com\",\"reynaId\":1,\"payload\":{\"key01\":\"value01\",\"key02\":11}}" +
                "]}", actual.getBody());
        this.assertHeaders(actual);
        Batch batch = new Gson().fromJson(actual.getBody(), Batch.class);
        assertEquals(1L, batch.getEvents().get(0).getReynaId());
        assertEquals("http://google.com", batch.getEvents().get(0).getUrl());
        assertEquals("{\"key01\":\"value01\",\"key02\":11}", batch.getEvents().get(0).getPayload().toString());
    }

    // With no explicit batch URL configured, the provider derives one from the
    // first message's URL with "/api/1/batch" appended.
    @Test
    public void whenCallingGetNextAndNoUrlConfiguredShouldReturnUrlWithBatchAppended() throws URISyntaxException {
        doReturn(95L).when(this.batchConfiguration).getBatchMessagesSize();
        doReturn(null).when(this.batchConfiguration).getBatchUrl();
        Message message1 = new Message(1L, URI.create("http://www.post.com"), "{\"key01\":\"value01\", \"key02\": 11}", getTestMessageHeaders());
        Message message2 = new Message(2L, URI.create("http://www.post.com"), "{\"key11\":\"value11\", \"key12\": 12}", getTestMessageHeaders());
        Message message3 = new Message(3L, URI.create("http://www.post.com"), "{\"key21\":\"value21\", \"key22\": 22}", getTestMessageHeaders());
        when(this.repository.getNext()).thenReturn(message1);
        when(this.repository.getNextMessageAfter(1L)).thenReturn(message2);
        when(this.repository.getNextMessageAfter(2L)).thenReturn(message3);
        when(this.repository.getNextMessageAfter(3L)).thenReturn(null);
        Message actual = this.messageProvider.getNext();
        assertNotNull(actual);
        assertEquals("http://www.post.com/api/1/batch", actual.getUrl());
        assertEquals("{\"events\":[" +
                "{\"url\":\"http://www.post.com\",\"reynaId\":1,\"payload\":{\"key01\":\"value01\",\"key02\":11}}" +
                "]}", actual.getBody());
    }

    // Same derivation as above, but the scheme (https) and host are preserved
    // while the original path is replaced.
    @Test
    public void whenCallingGetNextAndNoUrlConfiguredAndHTTPSShouldReturnUrlWithBatchAppended() throws URISyntaxException {
        doReturn(95L).when(this.batchConfiguration).getBatchMessagesSize();
        doReturn(null).when(this.batchConfiguration).getBatchUrl();
        Message message1 = new Message(1L, URI.create("https://www.post.com/1/2/req"), "{\"key01\":\"value01\", \"key02\": 11}", getTestMessageHeaders());
        Message message2 = new Message(2L, URI.create("https://www.post.com/1/2/req"), "{\"key11\":\"value11\", \"key12\": 12}", getTestMessageHeaders());
        Message message3 = new Message(3L, URI.create("https://www.post.com/1/2/req"), "{\"key21\":\"value21\", \"key22\": 22}", getTestMessageHeaders());
        when(this.repository.getNext()).thenReturn(message1).thenReturn(message2).thenReturn(message3).thenReturn(null);
        Message actual = this.messageProvider.getNext();
        assertNotNull(actual);
        assertEquals("https://www.post.com/api/1/batch", actual.getUrl());
    }

    @Test
    public void whenCallingCanSendAndTimeNotElapsedShouldReturnFalse() throws URISyntaxException {
        // 90% of a day matches the provider's internal send interval.
        long interval = (long)(AlarmManager.INTERVAL_DAY * 0.9);
        doReturn(false).when(this.periodicBackoutCheck).timeElapsed("BatchProvider", interval);
        boolean actual = this.messageProvider.canSend();
        assertFalse(actual);
    }

    @Test
    public void whenCallingCanSendAndTimeElapsedShouldReturnTrue() throws URISyntaxException {
        long interval = (long)(AlarmManager.INTERVAL_DAY * 0.9);
        doReturn(true).when(this.periodicBackoutCheck).timeElapsed("BatchProvider", interval);
        boolean actual = this.messageProvider.canSend();
        assertTrue(actual);
    }

    // Even before the interval elapses, a full batch (available >= max count)
    // may be sent.
    @Test
    public void whenCallingCanSendThereAreMoreMessagesThanMaxMessagesCountShouldSend() throws URISyntaxException {
        doReturn(false).when(this.periodicBackoutCheck).timeElapsed("BatchProvider", AlarmManager.INTERVAL_DAY);
        doReturn(100).when(this.batchConfiguration).getBatchMessageCount();
        doReturn(100L).when(this.repository).getAvailableMessagesCount();
        boolean actual = this.messageProvider.canSend();
        assertTrue(actual);
    }

    @Test
    public void whenCallingCanSendThereAreLessMessagesThanMaxMessagesCountShouldSend() throws URISyntaxException {
        doReturn(false).when(this.periodicBackoutCheck).timeElapsed("BatchProvider", AlarmManager.INTERVAL_DAY);
        doReturn(100).when(this.batchConfiguration).getBatchMessageCount();
        doReturn(99L).when(this.repository).getAvailableMessagesCount();
        boolean actual = this.messageProvider.canSend();
        assertFalse(actual);
    }

    // After a partial (below-limit) batch has been produced, a subsequent
    // getNext() returns null rather than re-batching the same messages.
    @Test
    public void whenCallingGetNextAndPreviousBatchWasLessThanMaximumessagesCountShouldReturnNull() throws URISyntaxException {
        doReturn(2).when(this.batchConfiguration).getBatchMessageCount();
        ArrayList<Message> messages = this.getTestMessages();
        when(this.repository.getNext()).thenReturn(messages.get(0));
        when(this.repository.getNextMessageAfter(1L)).thenReturn(messages.get(1));
        when(this.repository.getNextMessageAfter(2L)).thenReturn(messages.get(2));
        when(this.repository.getNextMessageAfter(3L)).thenReturn(null);
        Message actual = this.messageProvider.getNext();
        assertNotNull(actual);
        assertEquals("www.post.com/api/batch", actual.getUrl());
        assertEquals("{\"events\":[" +
                "{\"url\":\"http://google.com\",\"reynaId\":1,\"payload\":{\"key01\":\"value01\",\"key02\":11}}," +
                "{\"url\":\"http://google2.com\",\"reynaId\":2,\"payload\":{\"key11\":\"value11\",\"key12\":12}}" +
                "]}", actual.getBody());
        when(this.repository.getNext()).thenReturn(messages.get(2));
        when(this.repository.getNextMessageAfter(3L)).thenReturn(null);
        actual = this.messageProvider.getNext();
        assertNotNull(actual);
        assertEquals("www.post.com/api/batch", actual.getUrl());
        assertEquals("{\"events\":[" +
                "{\"url\":\"http://google3.com\",\"reynaId\":3,\"payload\":{\"key21\":\"value21\",\"key22\":22}}" +
                "]}", actual.getBody());
        when(this.repository.getNext()).thenReturn(messages.get(0));
        when(this.repository.getNextMessageAfter(1L)).thenReturn(messages.get(1));
        when(this.repository.getNextMessageAfter(2L)).thenReturn(messages.get(2));
        when(this.repository.getNextMessageAfter(3L)).thenReturn(null);
        actual = this.messageProvider.getNext();
        assertNull(actual);
    }

    // Size-limited and count-limited batching interleave; a trailing
    // below-limit batch still yields null on the next call.
    @Test
    public void whenCallingGetNextAndPreviousBatchWasLessThanMaximumMessagesButSizeIsReachedShouldReturnBatch() throws URISyntaxException {
        doReturn(95L).when(this.batchConfiguration).getBatchMessagesSize();
        doReturn(3).when(this.batchConfiguration).getBatchMessageCount();
        ArrayList<Message> messages = this.getTestMessages();
        when(this.repository.getNext()).thenReturn(messages.get(0));
        when(this.repository.getNextMessageAfter(1L)).thenReturn(messages.get(1));
        when(this.repository.getNextMessageAfter(2L)).thenReturn(messages.get(2));
        when(this.repository.getNextMessageAfter(3L)).thenReturn(null);
        /*batch due to size limit*/
        Message actual = this.messageProvider.getNext();
        assertNotNull(actual);
        assertEquals("www.post.com/api/batch", actual.getUrl());
        assertEquals("{\"events\":[" +
                "{\"url\":\"http://google.com\",\"reynaId\":1,\"payload\":{\"key01\":\"value01\",\"key02\":11}}" +
                "]}", actual.getBody());
        /*batch due to number of max messages limit*/
        doReturn(10000L).when(this.batchConfiguration).getBatchMessagesSize();
        doReturn(2).when(this.batchConfiguration).getBatchMessageCount();
        when(this.repository.getNext()).thenReturn(messages.get(0));
        when(this.repository.getNextMessageAfter(1L)).thenReturn(messages.get(1));
        when(this.repository.getNextMessageAfter(2L)).thenReturn(messages.get(2));
        when(this.repository.getNextMessageAfter(3L)).thenReturn(null);
        actual = this.messageProvider.getNext();
        assertNotNull(actual);
        assertEquals("{\"events\":[" +
                "{\"url\":\"http://google.com\",\"reynaId\":1,\"payload\":{\"key01\":\"value01\",\"key02\":11}}," +
                "{\"url\":\"http://google2.com\",\"reynaId\":2,\"payload\":{\"key11\":\"value11\",\"key12\":12}}" +
                "]}", actual.getBody());
        /*batch include rest of the messages*/
        when(this.repository.getNext()).thenReturn(messages.get(2));
        when(this.repository.getNextMessageAfter(3L)).thenReturn(null);
        actual = this.messageProvider.getNext();
        assertNotNull(actual);
        assertEquals("www.post.com/api/batch", actual.getUrl());
        assertEquals("{\"events\":[" +
                "{\"url\":\"http://google3.com\",\"reynaId\":3,\"payload\":{\"key21\":\"value21\",\"key22\":22}}" +
                "]}", actual.getBody());
        /*should return null as last batch has only 1 message*/
        when(this.repository.getNext()).thenReturn(messages.get(0));
        when(this.repository.getNextMessageAfter(1L)).thenReturn(messages.get(1));
        when(this.repository.getNextMessageAfter(2L)).thenReturn(messages.get(2));
        when(this.repository.getNextMessageAfter(3L)).thenReturn(null);
        actual = this.messageProvider.getNext();
        assertNull(actual);
    }

    // Builds the three canonical fixture messages (ids 1..3) used by most tests.
    private ArrayList<Message> getTestMessages() {
        Message message1 = new Message(1L, URI.create("http://google.com"), "{\"key01\":\"value01\", \"key02\": 11}", getTestMessageHeaders());
        Message message2 = new Message(2L, URI.create("http://google2.com"), "{\"key11\":\"value11\", \"key12\": 12}", getTestMessageHeaders());
        Message message3 = new Message(3L, URI.create("http://google3.com"), "{\"key21\":\"value21\", \"key22\": 22}", getTestMessageHeaders());
        ArrayList<Message> messages = new ArrayList<Message>(3);
        messages.add(message1);
        messages.add(message2);
        messages.add(message3);
        return messages;
    }

    // Asserts the batch message carries exactly the three fixture headers.
    private void assertHeaders(Message actual) {
        assertEquals(3, actual.getHeaders().length);
        assertEquals("key1", actual.getHeaders()[0].getKey());
        assertEquals("value1", actual.getHeaders()[0].getValue());
        assertEquals("key2", actual.getHeaders()[1].getKey());
        assertEquals("value2", actual.getHeaders()[1].getValue());
        assertEquals("key4", actual.getHeaders()[2].getKey());
        assertEquals("value4", actual.getHeaders()[2].getValue());
    }

    // Fixture headers attached to every test message.
    private Header[] getTestMessageHeaders() {
        ArrayList<Header> headers = new ArrayList<Header>();
        headers.add(new Header("key1", "value1"));
        headers.add(new Header("key2", "value2"));
        headers.add(new Header("key4", "value4"));
        Header[] headersForMessage = new Header[headers.size()];
        return headers.toArray(headersForMessage);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.runtime.state;

import org.apache.flink.annotation.Internal;
import org.apache.flink.configuration.CheckpointingOptions;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.IllegalConfigurationException;
import org.apache.flink.configuration.ReadableConfig;
import org.apache.flink.core.fs.Path;
import org.apache.flink.runtime.state.delegate.DelegatingStateBackend;
import org.apache.flink.runtime.state.storage.FileSystemCheckpointStorage;
import org.apache.flink.runtime.state.storage.JobManagerCheckpointStorage;
import org.apache.flink.util.DynamicCodeLoadingException;
import org.apache.flink.util.Preconditions;

import org.slf4j.Logger;

import javax.annotation.Nullable;

import java.lang.reflect.InvocationTargetException;
import java.util.Locale;
import java.util.Optional;

/** This class contains utility methods to load checkpoint storage from configurations. */
@Internal
public class CheckpointStorageLoader {

    private static final String JOB_MANAGER_STORAGE_NAME = "jobmanager";

    private static final String FILE_SYSTEM_STORAGE_NAME = "filesystem";

    /**
     * Loads the checkpoint storage from the configuration, from the parameter
     * 'state.checkpoint-storage', as defined in {@link CheckpointingOptions#CHECKPOINT_STORAGE}.
     *
     * <p>The implementation can be specified either via their shortcut name, or via the class name
     * of a {@link CheckpointStorageFactory}. If a CheckpointStorageFactory class name is specified,
     * the factory is instantiated (via its zero-argument constructor) and its {@link
     * CheckpointStorageFactory#createFromConfig(ReadableConfig, ClassLoader)} method is called.
     *
     * <p>Recognized shortcut names are '{@value #JOB_MANAGER_STORAGE_NAME}', and '{@value
     * #FILE_SYSTEM_STORAGE_NAME}'.
     *
     * @param config The configuration to load the checkpoint storage from
     * @param classLoader The class loader that should be used to load the checkpoint storage
     * @param logger Optionally, a logger to log actions to (may be null)
     * @return The instantiated checkpoint storage, or {@link Optional#empty()} if none configured.
     * @throws DynamicCodeLoadingException Thrown if a checkpoint storage factory is configured and
     *     the factory class was not found or the factory could not be instantiated
     * @throws IllegalConfigurationException May be thrown by the CheckpointStorageFactory when
     *     creating / configuring the checkpoint storage in the factory
     */
    public static Optional<CheckpointStorage> fromConfig(
            ReadableConfig config, ClassLoader classLoader, @Nullable Logger logger)
            throws IllegalConfigurationException, DynamicCodeLoadingException {
        // The 'throws' clause now matches the documented contract; the previously
        // declared IllegalStateException was never part of the documented behavior
        // (both exception types are unchecked, so callers are unaffected).

        Preconditions.checkNotNull(config, "config");
        Preconditions.checkNotNull(classLoader, "classLoader");

        final String storageName = config.get(CheckpointingOptions.CHECKPOINT_STORAGE);

        if (storageName == null) {
            if (logger != null) {
                logger.debug(
                        "The configuration {} has not been set in the current"
                                + " session's flink-conf.yaml. Falling back to a default CheckpointStorage"
                                + " type. Users are strongly encouraged to explicitly set this configuration"
                                + " so they understand how their applications are checkpointing"
                                + " snapshots for fault-tolerance.",
                        CheckpointingOptions.CHECKPOINT_STORAGE.key());
            }
            return Optional.empty();
        }

        // Use a fixed locale: the shortcut names are ASCII and config parsing must
        // not depend on the default locale (e.g. Turkish dotless-i).
        switch (storageName.toLowerCase(Locale.ROOT)) {
            case JOB_MANAGER_STORAGE_NAME:
                return Optional.of(createJobManagerCheckpointStorage(config, classLoader, logger));

            case FILE_SYSTEM_STORAGE_NAME:
                return Optional.of(createFileSystemCheckpointStorage(config, classLoader, logger));

            default:
                if (logger != null) {
                    logger.info("Loading checkpoint storage via factory '{}'", storageName);
                }

                CheckpointStorageFactory<?> factory;
                try {
                    @SuppressWarnings("rawtypes")
                    Class<? extends CheckpointStorageFactory> clazz =
                            Class.forName(storageName, false, classLoader)
                                    .asSubclass(CheckpointStorageFactory.class);

                    // Class#newInstance() is deprecated (it propagates checked
                    // constructor exceptions unchecked); instantiate through the
                    // declared zero-argument constructor instead.
                    factory = clazz.getDeclaredConstructor().newInstance();
                } catch (ClassNotFoundException e) {
                    throw new DynamicCodeLoadingException(
                            "Cannot find configured checkpoint storage factory class: "
                                    + storageName,
                            e);
                } catch (ClassCastException
                        | InstantiationException
                        | IllegalAccessException
                        | NoSuchMethodException
                        | InvocationTargetException e) {
                    throw new DynamicCodeLoadingException(
                            "The class configured under '"
                                    + CheckpointingOptions.CHECKPOINT_STORAGE.key()
                                    + "' is not a valid checkpoint storage factory ("
                                    + storageName
                                    + ')',
                            e);
                }

                return Optional.of(factory.createFromConfig(config, classLoader));
        }
    }

    /**
     * Loads the configured {@link CheckpointStorage} for the job based on the following precedent
     * rules:
     *
     * <p>1) If the jobs configured {@link StateBackend} implements {@code CheckpointStorage} it
     * will always be used. This is to maintain backwards compatibility with older versions of Flink
     * that intermixed these responsibilities.
     *
     * <p>2) Use the {@link CheckpointStorage} instance configured via the {@code
     * StreamExecutionEnvironment}.
     *
     * <p>3) Use the {@link CheckpointStorage} instance configured via the clusters
     * <b>flink-conf.yaml</b>.
     *
     * <p>4) Load a default {@link CheckpointStorage} instance.
     *
     * @param fromApplication The checkpoint storage instance passed to the jobs
     *     StreamExecutionEnvironment. Or null if not was set.
     * @param defaultSavepointDirectory The default savepoint directory set in code, or null.
     * @param configuredStateBackend The jobs configured state backend.
     * @param config The configuration to load the checkpoint storage from.
     * @param classLoader The class loader that should be used to load the checkpoint storage.
     * @param logger Optionally, a logger to log actions to (may be null).
     * @return The configured checkpoint storage instance.
     * @throws DynamicCodeLoadingException Thrown if a checkpoint storage factory is configured and
     *     the factory class was not found or the factory could not be instantiated
     * @throws IllegalConfigurationException May be thrown by the CheckpointStorageFactory when
     *     creating / configuring the checkpoint storage in the factory
     */
    public static CheckpointStorage load(
            @Nullable CheckpointStorage fromApplication,
            @Nullable Path defaultSavepointDirectory,
            StateBackend configuredStateBackend,
            Configuration config,
            ClassLoader classLoader,
            @Nullable Logger logger)
            throws IllegalConfigurationException, DynamicCodeLoadingException {

        Preconditions.checkNotNull(config, "config");
        Preconditions.checkNotNull(classLoader, "classLoader");
        Preconditions.checkNotNull(configuredStateBackend, "statebackend");

        if (defaultSavepointDirectory != null) {
            // If a savepoint directory was manually specified in code
            // we override any value set in the flink-conf. This allows
            // us to pass this value to the CheckpointStorage instance
            // where it is needed at runtime while keeping its API logically
            // separated for users.
            config.set(
                    CheckpointingOptions.SAVEPOINT_DIRECTORY, defaultSavepointDirectory.toString());
        }

        // Legacy state backends always take precedence for backwards compatibility.
        StateBackend rootStateBackend =
                (configuredStateBackend instanceof DelegatingStateBackend)
                        ? ((DelegatingStateBackend) configuredStateBackend)
                                .getDelegatedStateBackend()
                        : configuredStateBackend;

        if (rootStateBackend instanceof CheckpointStorage) {
            if (logger != null) {
                logger.info(
                        "Using legacy state backend {} as Job checkpoint storage",
                        rootStateBackend);
            }
            return (CheckpointStorage) rootStateBackend;
        } else if (fromApplication instanceof ConfigurableCheckpointStorage) {
            if (logger != null) {
                logger.info(
                        "Using job/cluster config to configure application-defined checkpoint storage: {}",
                        fromApplication);
            }
            return ((ConfigurableCheckpointStorage) fromApplication).configure(config, classLoader);
        } else if (fromApplication != null) {
            if (logger != null) {
                logger.info("Using application defined checkpoint storage: {}", fromApplication);
            }
            return fromApplication;
        } else {
            return fromConfig(config, classLoader, logger)
                    .orElseGet(() -> createDefaultCheckpointStorage(config, classLoader, logger));
        }
    }

    /**
     * Creates a default checkpoint storage instance if none was explicitly configured. For
     * backwards compatibility, the default storage will be {@link FileSystemCheckpointStorage} if a
     * checkpoint directory was configured, {@link
     * org.apache.flink.runtime.state.storage.JobManagerCheckpointStorage} otherwise.
     *
     * @param config The configuration to load the checkpoint storage from
     * @param classLoader The class loader that should be used to load the checkpoint storage
     * @param logger Optionally, a logger to log actions to (may be null)
     * @return The instantiated checkpoint storage.
     * @throws IllegalConfigurationException May be thrown by the CheckpointStorageFactory when
     *     creating / configuring the checkpoint storage in the factory.
     */
    private static CheckpointStorage createDefaultCheckpointStorage(
            ReadableConfig config, ClassLoader classLoader, @Nullable Logger logger) {
        if (config.getOptional(CheckpointingOptions.CHECKPOINTS_DIRECTORY).isPresent()) {
            return createFileSystemCheckpointStorage(config, classLoader, logger);
        }
        return createJobManagerCheckpointStorage(config, classLoader, logger);
    }

    /** Creates a {@link FileSystemCheckpointStorage} from the given configuration. */
    private static CheckpointStorage createFileSystemCheckpointStorage(
            ReadableConfig config, ClassLoader classLoader, @Nullable Logger logger) {
        FileSystemCheckpointStorage storage =
                FileSystemCheckpointStorage.createFromConfig(config, classLoader);
        if (logger != null) {
            logger.info(
                    "Checkpoint storage is set to '{}': (checkpoints \"{}\")",
                    FILE_SYSTEM_STORAGE_NAME,
                    storage.getCheckpointPath());
        }
        return storage;
    }

    /** Creates a {@link JobManagerCheckpointStorage} from the given configuration. */
    private static CheckpointStorage createJobManagerCheckpointStorage(
            ReadableConfig config, ClassLoader classLoader, @Nullable Logger logger) {
        if (logger != null) {
            logger.info("Checkpoint storage is set to '{}'", JOB_MANAGER_STORAGE_NAME);
        }
        return JobManagerCheckpointStorage.createFromConfig(config, classLoader);
    }
}
package ma.glasnost.orika.test.community;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;

import java.util.ArrayList;
import java.util.List;

import org.apache.commons.lang.StringUtils;
import org.junit.Test;

import ma.glasnost.orika.CustomMapper;
import ma.glasnost.orika.MapperFacade;
import ma.glasnost.orika.MapperFactory;
import ma.glasnost.orika.MappingContext;
import ma.glasnost.orika.ObjectFactory;
import ma.glasnost.orika.impl.DefaultMapperFactory;
import ma.glasnost.orika.metadata.TypeFactory;

/**
 * Support for mapping one type to many.
 * <p>
 *
 * @see <a href="https://github.com/orika-mapper/orika/issues/176">https://github.com/orika-mapper/orika/issues</a>
 */
public class Issue176WithSuperClassesTestCase {

    // Chooses the concrete B subtype from the source A's discriminator field:
    // "1" -> B1, "2" -> B2, empty/null -> plain B; anything else is rejected.
    private static final ObjectFactory<B> OBJECT_FACTORY = new ObjectFactory<B>() {
        public B create(Object source, MappingContext mappingContext) {
            A a = (A) source;
            final B b;
            if ("1".equals(a.type)) {
                b = new B1();
            } else if ("2".equals(a.type)) {
                b = new B2();
            } else if (StringUtils.isEmpty(a.type)) {
                b = new B();
            } else {
                throw new IllegalArgumentException("type not supported: " + a.type);
            }
            return b;
        }
    };

    // One source type (A) mapped to multiple target subtypes via the object
    // factory, with custom reverse mappers restoring the discriminator.
    @Test
    public void testIssue176_withSuperClasses() throws Exception {
        final MapperFactory factory = new DefaultMapperFactory.Builder().build();
        factory.registerObjectFactory(OBJECT_FACTORY, TypeFactory.valueOf(B.class), TypeFactory.valueOf(A.class));
        factory.classMap(A.class, B1.class)
                .field("x", "x1")
                .customize(new CustomMapper<A, B1>() { // custom reverse mapping
                    @Override
                    public void mapBtoA(B1 b, A a, MappingContext context) {
                        a.type = "1";
                    }
                })
                .byDefault()
                .register();
        factory.classMap(A.class, B2.class)
                .field("x", "x2")
                .customize(new CustomMapper<A, B2>() { // custom reverse mapping
                    @Override
                    public void mapBtoA(B2 b, A a, MappingContext context) {
                        a.type = "2";
                    }
                })
                .byDefault()
                .register();
        // testing auto generation of Mapper (A.class, B.class)
        // run the Test
        MapperFacade mapper = factory.getMapperFacade();
        A a0 = new A();
        A a1 = new A();
        a1.type = "1";
        a1.x = 11;
        A a2 = new A();
        a2.type = "2";
        a2.x = 22;

        // run test
        B b0 = mapper.map(a0, B.class);
        B b1 = mapper.map(a1, B.class);
        B b2 = mapper.map(a2, B.class);

        // validate result
        assertThat(b0, is(instanceOf(B.class)));
        assertThat(b1, is(instanceOf(B1.class)));
        assertThat(((B1) b1).x1, is(11));
        assertThat(b2, is(instanceOf(B2.class)));
        assertThat(((B2) b2).x2, is(22));

        // run test reverse mapping
        A aMapped0 = mapper.map(b0, A.class);
        A aMapped1 = mapper.map(b1, A.class);
        A aMapped2 = mapper.map(b2, A.class);

        // validate result
        assertThat(aMapped0, is(instanceOf(A.class)));
        assertThat(aMapped1, is(instanceOf(A.class)));
        assertThat(aMapped1.x, is(11));
        assertThat(aMapped1.type, is("1"));
        assertThat(aMapped2, is(instanceOf(A.class)));
        assertThat(aMapped2.x, is(22));
        assertThat(aMapped2.type, is("2"));
    }

    // Combines the object factory with a two-level class hierarchy mapped via
    // container types (ASuperContainer <-> BSuperContainer).
    @Test
    public void testIssue176_compinationOfSuperClassHirarchyAndObjectFactory() throws Exception {
        final MapperFactory factory = new DefaultMapperFactory.Builder().build();
        factory.registerObjectFactory(OBJECT_FACTORY, TypeFactory.valueOf(B.class), TypeFactory.valueOf(A.class));
        factory.classMap(A.class, B1.class)
                .field("x", "x1")
                .customize(new CustomMapper<A, B1>() { // custom reverse mapping
                    @Override
                    public void mapBtoA(B1 b, A a, MappingContext context) {
                        a.type = "1";
                    }
                })
                .byDefault()
                .register();
        factory.classMap(A.class, B2.class)
                .field("x", "x2")
                .customize(new CustomMapper<A, B2>() { // custom reverse mapping
                    @Override
                    public void mapBtoA(B2 b, A a, MappingContext context) {
                        a.type = "2";
                    }
                })
                .byDefault()
                .register();
        factory.classMap(AX.class, BX.class)
                .field("y", "x3")
                .byDefault()
                .register();
        factory.classMap(A.class, B.class)
                .byDefault()
                .register();
        factory.classMap(ASuper.class, BSuper.class)
                .byDefault()
                .register();

        // run the Test
        A a0 = new A();
        a0.type = StringUtils.EMPTY;
        A a1 = new A();
        a1.type = "1";
        a1.x = 11;
        A a2 = new A();
        a2.type = "2";
        a2.x = 22;
        AX aX = new AX();
        aX.y = 33;
        ASuperContainer container = new ASuperContainer();
        container.elelemts.add(a0);
        container.elelemts.add(a1);
        container.elelemts.add(a2);
        container.elelemts.add(aX);

        // run test
        MapperFacade mapper = factory.getMapperFacade();
        BSuperContainer bContainer = mapper.map(container, BSuperContainer.class);
        BSuper b0 = bContainer.elelemts.get(0);
        BSuper b1 = bContainer.elelemts.get(1);
        BSuper b2 = bContainer.elelemts.get(2);
        BSuper b3 = bContainer.elelemts.get(3);

        // validate result
        assertThat(b0, is(instanceOf(B.class)));
        assertThat(b1, is(instanceOf(B1.class)));
        assertThat(((B1) b1).x1, is(11));
        assertThat(b2, is(instanceOf(B2.class)));
        assertThat(((B2) b2).x2, is(22));
        assertThat(b3, is(instanceOf(BX.class)));
        assertThat(((BX) b3).x3, is(33));

        // run test reverse mapping
        ASuperContainer mappedAContainer = mapper.map(bContainer, ASuperContainer.class);
        ASuper aMapped0 = mappedAContainer.elelemts.get(0);
        ASuper aMapped1 = mappedAContainer.elelemts.get(1);
        ASuper aMapped2 = mappedAContainer.elelemts.get(2);
        ASuper aMapped3 = mappedAContainer.elelemts.get(3);

        // validate result
        assertThat(aMapped0, is(instanceOf(A.class)));
        assertThat(aMapped1, is(instanceOf(A.class)));
        assertThat(((A) aMapped1).x, is(11));
        assertThat(((A) aMapped1).type, is("1"));
        assertThat(aMapped2, is(instanceOf(A.class)));
        assertThat(((A) aMapped2).x, is(22));
        assertThat(((A) aMapped2).type, is("2"));
        assertThat(aMapped3, is(instanceOf(AX.class)));
        assertThat(((AX) aMapped3).y, is(33));
    }

    // Hierarchy-only variant: no object factory registered; subtype selection
    // relies solely on the registered class maps.
    @Test
    public void testIssue176_withSuperClassHirarchyOnly() throws Exception {
        final MapperFactory factory = new DefaultMapperFactory.Builder().build();
        factory.classMap(A.class, B1.class)
                .field("x", "x1")
                .byDefault()
                .register();
        factory.classMap(AX.class, BX.class)
                .field("y", "x3")
                .byDefault()
                .register();
        factory.classMap(ASuper.class, BSuper.class)
                .byDefault()
                .register();

        // run the Test
        A a1 = new A();
        a1.x = 11;
        AX aX = new AX();
        aX.y = 33;

        // run test
        MapperFacade mapper = factory.getMapperFacade();
        BSuper b1 = mapper.map(a1, BSuper.class);
        BSuper b3 = mapper.map(aX, BSuper.class);

        // validate result
        assertThat(b1, is(instanceOf(B1.class)));
        assertThat(((B1) b1).x1, is(11));
        assertThat(b3, is(instanceOf(BX.class)));
        assertThat(((BX) b3).x3, is(33));
    }

    // A-Hierarchy:
    public static class ASuperContainer {
        public List<ASuper> elelemts = new ArrayList<ASuper>();
    }

    public static class ASuper {
        // marker interface
    }

    public static class A extends ASuper {
        public String type;
        public int x;
    }

    public static class AX extends ASuper {
        public int y;
    }

    // B-Hierarchy:
    public static class BSuperContainer {
        public List<BSuper> elelemts = new ArrayList<BSuper>();
    }

    public static class BSuper {
        // marker interface
    }

    public static class B extends BSuper {
        // marker interface
    }

    public static class B1 extends B {
        public int x1;
    }

    public static class B2 extends B {
        public int x2;
    }

    public static class BX extends BSuper {
        public int x3;
    }
}
/*******************************************************************************
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 ******************************************************************************/
package org.apache.olingo.odata2.jpa.processor.core.access.data;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.fail;

import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Set;

import javax.persistence.FlushModeType;
import javax.persistence.LockModeType;
import javax.persistence.Parameter;
import javax.persistence.Query;
import javax.persistence.TemporalType;

import org.apache.olingo.odata2.jpa.processor.core.access.data.JPAPage.JPAPageBuilder;
import org.junit.Test;

// Tests for JPAPageBuilder's interaction of pageSize, skipToken, $top and
// $skip, verifying both the resulting JPAPage and the firstResult/maxResults
// applied to the JPA Query.
// NOTE(review): the mockQuery(boolean) helper is defined past the end of this
// chunk; its exact stubbing behavior cannot be confirmed from here.
public class JPAPageBuilderTest {

  private static final int PAGE_SIZE = 10;

  @Test
  public void testBuildDefault() {
    JPAPageBuilder pageBuilder = new JPAPageBuilder();
    Query query = mockQuery(false);

    JPAPage page = pageBuilder.query(query)
        .pageSize(PAGE_SIZE)
        .skipToken("10")
        .build();

    assertEquals(0, page.getNextPage());
    assertEquals(10, page.getStartPage());
    assertEquals(PAGE_SIZE, page.getPageSize());
    assertNotNull(page.getPagedEntities());
    assertEquals(1, page.getPagedEntities().size());
    assertEquals(10, query.getFirstResult());
    assertEquals(10, query.getMaxResults());
  }

  // pageSize 0 disables paging entirely.
  @Test
  public void testBuildDefaultZeroPage() {
    JPAPageBuilder pageBuilder = new JPAPageBuilder();
    Query query = mockQuery(false);

    JPAPage page = pageBuilder.query(query)
        .pageSize(0)
        .skipToken("10")
        .build();

    assertEquals(0, page.getNextPage());
    assertEquals(0, page.getStartPage());
    assertEquals(0, page.getPageSize());
    assertNotNull(page.getPagedEntities());
    assertEquals(0, page.getPagedEntities().size());
    assertEquals(0, query.getFirstResult());
    assertEquals(0, query.getMaxResults());
  }

  @Test
  public void testBuildWithNoSkipToken() {
    JPAPageBuilder pageBuilder = new JPAPageBuilder();
    Query query = mockQuery(false);

    JPAPage page = pageBuilder.query(query)
        .pageSize(PAGE_SIZE)
        .skipToken("0")
        .build();

    assertEquals(0, page.getNextPage());
    assertEquals(0, page.getStartPage());
    assertEquals(PAGE_SIZE, page.getPageSize());
    assertNotNull(page.getPagedEntities());
    assertEquals(1, page.getPagedEntities().size());
    assertEquals(0, query.getFirstResult());
    assertEquals(10, query.getMaxResults());
  }

  @Test
  public void testBuildWithNullSkipToken() {
    JPAPageBuilder pageBuilder = new JPAPageBuilder();
    Query query = mockQuery(false);

    JPAPage page = pageBuilder.query(query)
        .pageSize(PAGE_SIZE)
        .skipToken(null)
        .build();

    assertEquals(0, page.getNextPage());
    assertEquals(0, page.getStartPage());
    assertEquals(PAGE_SIZE, page.getPageSize());
    assertNotNull(page.getPagedEntities());
    assertEquals(0, query.getFirstResult());
    assertEquals(10, query.getMaxResults());
  }

  // A non-numeric skip token must raise NumberFormatException.
  @Test
  public void testBuildWithInvalidSkipToken() {
    JPAPageBuilder pageBuilder = new JPAPageBuilder();
    Query query = mockQuery(false);
    try {
      pageBuilder.query(query)
          .skipToken("AB");
    } catch (NumberFormatException e) {
      return;
    }
    fail("Exception Expected");
  }

  @Test
  public void testBuildWithTop() {
    JPAPageBuilder pageBuilder = new JPAPageBuilder();
    Query query = mockQuery(false);

    JPAPage page = pageBuilder.query(query)
        .pageSize(PAGE_SIZE)
        .skipToken("10")
        .top(5)
        .build();

    assertEquals(0, page.getNextPage());
    assertEquals(10, page.getStartPage());
    assertEquals(PAGE_SIZE, page.getPageSize());
    assertNotNull(page.getPagedEntities());
    assertEquals(10, query.getFirstResult());
    assertEquals(5, query.getMaxResults());
  }

  @Test
  public void testBuildWithTopZeroPage() {
    JPAPageBuilder pageBuilder = new JPAPageBuilder();
    Query query = mockQuery(false);

    JPAPage page = pageBuilder.query(query)
        .pageSize(0)
        .skipToken("10")
        .top(5)
        .build();

    assertEquals(0, page.getNextPage());
    assertEquals(0, page.getStartPage());
    assertEquals(0, page.getPageSize());
    assertNotNull(page.getPagedEntities());
    assertEquals(0, query.getFirstResult());
    assertEquals(5, query.getMaxResults());
  }

  @Test
  public void testBuildWithSkipZeroPage() {
    JPAPageBuilder pageBuilder = new JPAPageBuilder();
    Query query = mockQuery(false);

    JPAPage page = pageBuilder.query(query)
        .pageSize(0)
        .skipToken("10")
        .skip(5)
        .build();

    assertEquals(0, page.getNextPage());
    assertEquals(0, page.getStartPage());
    assertEquals(0, page.getPageSize());
    assertNotNull(page.getPagedEntities());
    assertEquals(5, query.getFirstResult());
    assertEquals(0, query.getMaxResults());
  }

  @Test
  public void testBuildWithTopSkipZeroPage() {
    JPAPageBuilder pageBuilder = new JPAPageBuilder();
    Query query = mockQuery(false);

    JPAPage page = pageBuilder.query(query)
        .pageSize(0)
        .skipToken("10")
        .skip(5)
        .top(5)
        .build();

    assertEquals(0, page.getNextPage());
    assertEquals(0, page.getStartPage());
    assertEquals(0, page.getPageSize());
    assertNotNull(page.getPagedEntities());
    assertEquals(5, query.getFirstResult());
    assertEquals(5, query.getMaxResults());
  }

  // $top larger than the page size is clamped to the page size.
  @Test
  public void testBuildWithTopExceeds() {
    JPAPageBuilder pageBuilder = new JPAPageBuilder();
    Query query = mockQuery(false);

    JPAPage page = pageBuilder.query(query)
        .pageSize(PAGE_SIZE)
        .skipToken("10")
        .top(15)
        .build();

    assertEquals(0, page.getNextPage());
    assertEquals(10, page.getStartPage());
    assertEquals(PAGE_SIZE, page.getPageSize());
    assertNotNull(page.getPagedEntities());
    assertEquals(10, query.getFirstResult());
    assertEquals(10, query.getMaxResults());
  }

  // $skip consuming the whole page yields an empty result window.
  @Test
  public void testBuildWithTopSkipExceeds() {
    JPAPageBuilder pageBuilder = new JPAPageBuilder();
    Query query = mockQuery(false);

    JPAPage page = pageBuilder.query(query)
        .pageSize(PAGE_SIZE)
        .skipToken("10")
        .top(5)
        .skip(10)
        .build();

    assertEquals(0, page.getNextPage());
    assertEquals(10, page.getStartPage());
    assertEquals(PAGE_SIZE, page.getPageSize());
    assertEquals(0, page.getPagedEntities().size());
    assertEquals(0, query.getFirstResult());
    assertEquals(0, query.getMaxResults());
  }

  @Test
  public void testBuildWithTopSkipMore() {
    JPAPageBuilder pageBuilder = new JPAPageBuilder();
    Query query = mockQuery(false);

    JPAPage page = pageBuilder.query(query)
        .pageSize(PAGE_SIZE)
        .skipToken("10")
        .top(5)
        .skip(9)
        .build();

    assertEquals(0, page.getNextPage());
    assertEquals(10, page.getStartPage());
    assertEquals(PAGE_SIZE, page.getPageSize());
    assertNotNull(page.getPagedEntities());
    assertEquals(19, query.getFirstResult());
    assertEquals(1, query.getMaxResults());
  }

  @Test
  public void testBuildWithTopMoreSkip() {
    JPAPageBuilder pageBuilder = new JPAPageBuilder();
    Query query = mockQuery(false);

    JPAPage page = pageBuilder.query(query)
        .pageSize(PAGE_SIZE)
        .skipToken("10")
        .top(15)
        .skip(9)
        .build();

    assertEquals(0, page.getNextPage());
    assertEquals(10, page.getStartPage());
    assertEquals(PAGE_SIZE, page.getPageSize());
    assertNotNull(page.getPagedEntities());
    assertEquals(19, query.getFirstResult());
    assertEquals(1, query.getMaxResults());
  }

  // NOTE(review): this method continues past the end of this chunk; the
  // remainder is left untouched.
  @Test
  public void testBuildWithTopXSkipX() {
    JPAPageBuilder pageBuilder = new JPAPageBuilder();
    Query query = mockQuery(false);

    JPAPage page = pageBuilder.query(query)
        .pageSize(PAGE_SIZE)
        .skipToken("10")
        .top(15)
        .skip(15)
        .build();

    assertEquals(0, page.getNextPage());
    assertEquals(10, page.getStartPage());
assertEquals(PAGE_SIZE, page.getPageSize()); assertNotNull(page.getPagedEntities()); assertEquals(0, query.getFirstResult()); assertEquals(0, query.getMaxResults()); } @Test public void testBuildWithNegativeTop() { JPAPageBuilder pageBuilder = new JPAPageBuilder(); Query query = mockQuery(false); JPAPage page = pageBuilder.query(query) .pageSize(PAGE_SIZE) .skipToken("10") .top(-5) .build(); assertEquals(0, page.getNextPage()); assertEquals(10, page.getStartPage()); assertEquals(PAGE_SIZE, page.getPageSize()); assertNotNull(page.getPagedEntities()); assertEquals(10, query.getFirstResult()); assertEquals(10, query.getMaxResults()); } @Test public void testBuildWithNegativeTopSkipToken() { JPAPageBuilder pageBuilder = new JPAPageBuilder(); Query query = mockQuery(false); JPAPage page = pageBuilder.query(query) .pageSize(PAGE_SIZE) .skipToken("-10") .top(-5) .skip(-1) .build(); assertEquals(0, page.getNextPage()); assertEquals(0, page.getStartPage()); assertEquals(PAGE_SIZE, page.getPageSize()); assertNotNull(page.getPagedEntities()); assertEquals(0, query.getFirstResult()); assertEquals(10, query.getMaxResults()); } @Test public void testBuildWithNoRecords() { JPAPageBuilder pageBuilder = new JPAPageBuilder(); Query query = mockQuery(true); JPAPage page = pageBuilder.query(query) .pageSize(PAGE_SIZE) .skipToken("10") .top(1) .skip(1) .build(); assertEquals(0, page.getNextPage()); assertEquals(10, page.getStartPage()); assertEquals(PAGE_SIZE, page.getPageSize()); assertNotNull(page.getPagedEntities()); assertEquals(11, query.getFirstResult()); assertEquals(1, query.getMaxResults()); } private Query mockQuery(final boolean setNoRecords) { return new Query() { private int maxResults; private int firstResult; @Override public Query setFirstResult(final int arg0) { firstResult = arg0; return this; } @Override public Query setMaxResults(final int arg0) { maxResults = arg0; return this; } @Override public int getMaxResults() { return maxResults; } @Override public int 
getFirstResult() { return firstResult; } @Override public List<Object> getResultList() { List<Object> list = new ArrayList<Object>(); if (maxResults > 0 && setNoRecords == false) { list.add(new Integer(1)); } return list; } @Override public <T> T unwrap(final Class<T> arg0) { return null; } @Override public Query setParameter(final int arg0, final Date arg1, final TemporalType arg2) { return null; } @Override public Query setParameter(final int arg0, final Calendar arg1, final TemporalType arg2) { return null; } @Override public Query setParameter(final String arg0, final Date arg1, final TemporalType arg2) { return null; } @Override public Query setParameter(final String arg0, final Calendar arg1, final TemporalType arg2) { return null; } @Override public Query setParameter(final Parameter<Date> arg0, final Date arg1, final TemporalType arg2) { return null; } @Override public Query setParameter(final Parameter<Calendar> arg0, final Calendar arg1, final TemporalType arg2) { return null; } @Override public Query setParameter(final int arg0, final Object arg1) { return null; } @Override public Query setParameter(final String arg0, final Object arg1) { return null; } @Override public <T> Query setParameter(final Parameter<T> arg0, final T arg1) { return null; } @Override public Query setLockMode(final LockModeType arg0) { return null; } @Override public Query setHint(final String arg0, final Object arg1) { return null; } @Override public Query setFlushMode(final FlushModeType arg0) { return null; } @Override public boolean isBound(final Parameter<?> arg0) { return false; } @Override public Object getSingleResult() { return null; } @Override public Set<Parameter<?>> getParameters() { return null; } @Override public Object getParameterValue(final int arg0) { return null; } @Override public Object getParameterValue(final String arg0) { return null; } @Override public <T> T getParameterValue(final Parameter<T> arg0) { return null; } @Override public <T> Parameter<T> 
getParameter(final int arg0, final Class<T> arg1) { return null; } @Override public <T> Parameter<T> getParameter(final String arg0, final Class<T> arg1) { return null; } @Override public Parameter<?> getParameter(final int arg0) { return null; } @Override public Parameter<?> getParameter(final String arg0) { return null; } @Override public LockModeType getLockMode() { return null; } @Override public Map<String, Object> getHints() { return null; } @Override public FlushModeType getFlushMode() { return null; } @Override public int executeUpdate() { return 0; } }; } }
/* * Copyright (c) 2003, PostgreSQL Global Development Group * See the LICENSE file in the project root for more information. */ package org.postgresql; import static org.postgresql.util.internal.Nullness.castNonNull; import org.postgresql.jdbc.PgConnection; import org.postgresql.util.DriverInfo; import org.postgresql.util.GT; import org.postgresql.util.HostSpec; import org.postgresql.util.PGPropertyPasswordParser; import org.postgresql.util.PGPropertyServiceParser; import org.postgresql.util.PGPropertyUtil; import org.postgresql.util.PSQLException; import org.postgresql.util.PSQLState; import org.postgresql.util.SharedTimer; import org.postgresql.util.URLCoder; import org.checkerframework.checker.nullness.qual.Nullable; import java.io.IOException; import java.io.InputStream; import java.net.URL; import java.security.AccessController; import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; import java.sql.Connection; import java.sql.DriverManager; import java.sql.DriverPropertyInfo; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; import java.util.ArrayList; import java.util.Enumeration; import java.util.Properties; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.logging.Level; import java.util.logging.Logger; /** * <p>The Java SQL framework allows for multiple database drivers. Each driver should supply a class * that implements the Driver interface</p> * * <p>The DriverManager will try to load as many drivers as it can find and then for any given * connection request, it will ask each driver in turn to try to connect to the target URL.</p> * * <p>It is strongly recommended that each Driver class should be small and standalone so that the * Driver class can be loaded and queried without bringing in vast quantities of supporting code.</p> * * <p>When a Driver class is loaded, it should create an instance of itself and register it with the * DriverManager. 
This means that a user can load and register a driver by doing * Class.forName("foo.bah.Driver")</p> * * @see org.postgresql.PGConnection * @see java.sql.Driver */ public class Driver implements java.sql.Driver { private static @Nullable Driver registeredDriver; private static final Logger PARENT_LOGGER = Logger.getLogger("org.postgresql"); private static final Logger LOGGER = Logger.getLogger("org.postgresql.Driver"); private static final SharedTimer SHARED_TIMER = new SharedTimer(); static { try { // moved the registerDriver from the constructor to here // because some clients call the driver themselves (I know, as // my early jdbc work did - and that was based on other examples). // Placing it here, means that the driver is registered once only. register(); } catch (SQLException e) { throw new ExceptionInInitializerError(e); } } // Helper to retrieve default properties from classloader resource // properties files. private @Nullable Properties defaultProperties; private synchronized Properties getDefaultProperties() throws IOException { if (defaultProperties != null) { return defaultProperties; } // Make sure we load properties with the maximum possible privileges. try { defaultProperties = AccessController.doPrivileged(new PrivilegedExceptionAction<Properties>() { public Properties run() throws IOException { return loadDefaultProperties(); } }); } catch (PrivilegedActionException e) { throw (IOException) e.getException(); } return defaultProperties; } private Properties loadDefaultProperties() throws IOException { Properties merged = new Properties(); try { PGProperty.USER.set(merged, System.getProperty("user.name")); } catch (SecurityException se) { // We're just trying to set a default, so if we can't // it's not a big deal. } // If we are loaded by the bootstrap classloader, getClassLoader() // may return null. In that case, try to fall back to the system // classloader. 
// // We should not need to catch SecurityException here as we are // accessing either our own classloader, or the system classloader // when our classloader is null. The ClassLoader javadoc claims // neither case can throw SecurityException. ClassLoader cl = getClass().getClassLoader(); if (cl == null) { LOGGER.log(Level.FINE, "Can't find our classloader for the Driver; " + "attempt to use the system class loader"); cl = ClassLoader.getSystemClassLoader(); } if (cl == null) { LOGGER.log(Level.WARNING, "Can't find a classloader for the Driver; not loading driver " + "configuration from org/postgresql/driverconfig.properties"); return merged; // Give up on finding defaults. } LOGGER.log(Level.FINE, "Loading driver configuration via classloader {0}", cl); // When loading the driver config files we don't want settings found // in later files in the classpath to override settings specified in // earlier files. To do this we've got to read the returned // Enumeration into temporary storage. ArrayList<URL> urls = new ArrayList<URL>(); Enumeration<URL> urlEnum = cl.getResources("org/postgresql/driverconfig.properties"); while (urlEnum.hasMoreElements()) { urls.add(urlEnum.nextElement()); } for (int i = urls.size() - 1; i >= 0; i--) { URL url = urls.get(i); LOGGER.log(Level.FINE, "Loading driver configuration from: {0}", url); InputStream is = url.openStream(); merged.load(is); is.close(); } return merged; } /** * <p>Try to make a database connection to the given URL. The driver should return "null" if it * realizes it is the wrong kind of driver to connect to the given URL. 
This will be common, as * when the JDBC driverManager is asked to connect to a given URL, it passes the URL to each * loaded driver in turn.</p> * * <p>The driver should raise an SQLException if it is the right driver to connect to the given URL, * but has trouble connecting to the database.</p> * * <p>The java.util.Properties argument can be used to pass arbitrary string tag/value pairs as * connection arguments.</p> * * <ul> * <li>user - (required) The user to connect as</li> * <li>password - (optional) The password for the user</li> * <li>ssl -(optional) Use SSL when connecting to the server</li> * <li>readOnly - (optional) Set connection to read-only by default</li> * <li>charSet - (optional) The character set to be used for converting to/from * the database to unicode. If multibyte is enabled on the server then the character set of the * database is used as the default, otherwise the jvm character encoding is used as the default. * This value is only used when connecting to a 7.2 or older server.</li> * <li>loglevel - (optional) Enable logging of messages from the driver. The value is an integer * from 0 to 2 where: OFF = 0, INFO =1, DEBUG = 2 The output is sent to * DriverManager.getPrintWriter() if set, otherwise it is sent to System.out.</li> * <li>compatible - (optional) This is used to toggle between different functionality * as it changes across different releases of the jdbc driver code. The values here are versions * of the jdbc client and not server versions. For example in 7.1 get/setBytes worked on * LargeObject values, in 7.2 these methods were changed to work on bytea values. This change in * functionality could be disabled by setting the compatible level to be "7.1", in which case the * driver will revert to the 7.1 functionality.</li> * </ul> * * <p>Normally, at least "user" and "password" properties should be included in the properties. 
For a * list of supported character encoding , see * http://java.sun.com/products/jdk/1.2/docs/guide/internat/encoding.doc.html Note that you will * probably want to have set up the Postgres database itself to use the same encoding, with the * {@code -E <encoding>} argument to createdb.</p> * * <p>Our protocol takes the forms:</p> * * <pre> * jdbc:postgresql://host:port/database?param1=val1&amp;... * </pre> * * @param url the URL of the database to connect to * @param info a list of arbitrary tag/value pairs as connection arguments * @return a connection to the URL or null if it isnt us * @exception SQLException if a database access error occurs or the url is * {@code null} * @see java.sql.Driver#connect */ @Override public @Nullable Connection connect(String url, @Nullable Properties info) throws SQLException { if (url == null) { throw new SQLException("url is null"); } // get defaults Properties defaults; if (!url.startsWith("jdbc:postgresql:")) { return null; } try { defaults = getDefaultProperties(); } catch (IOException ioe) { throw new PSQLException(GT.tr("Error loading default settings from driverconfig.properties"), PSQLState.UNEXPECTED_ERROR, ioe); } // override defaults with provided properties Properties props = new Properties(defaults); if (info != null) { Set<String> e = info.stringPropertyNames(); for (String propName : e) { String propValue = info.getProperty(propName); if (propValue == null) { throw new PSQLException( GT.tr("Properties for the driver contains a non-string value for the key ") + propName, PSQLState.UNEXPECTED_ERROR); } props.setProperty(propName, propValue); } } // parse URL and add more properties if ((props = parseURL(url, props)) == null) { throw new PSQLException( GT.tr("Unable to parse URL "), PSQLState.UNEXPECTED_ERROR); } try { LOGGER.log(Level.FINE, "Connecting with URL: {0}", url); // Enforce login timeout, if specified, by running the connection // attempt in a separate thread. 
If we hit the timeout without the // connection completing, we abandon the connection attempt in // the calling thread, but the separate thread will keep trying. // Eventually, the separate thread will either fail or complete // the connection; at that point we clean up the connection if // we managed to establish one after all. See ConnectThread for // more details. long timeout = timeout(props); if (timeout <= 0) { return makeConnection(url, props); } ConnectThread ct = new ConnectThread(url, props); Thread thread = new Thread(ct, "PostgreSQL JDBC driver connection thread"); thread.setDaemon(true); // Don't prevent the VM from shutting down thread.start(); return ct.getResult(timeout); } catch (PSQLException ex1) { LOGGER.log(Level.FINE, "Connection error: ", ex1); // re-throw the exception, otherwise it will be caught next, and a // org.postgresql.unusual error will be returned instead. throw ex1; } catch (java.security.AccessControlException ace) { throw new PSQLException( GT.tr( "Your security policy has prevented the connection from being attempted. You probably need to grant the connect java.net.SocketPermission to the database server host and port that you wish to connect to."), PSQLState.UNEXPECTED_ERROR, ace); } catch (Exception ex2) { LOGGER.log(Level.FINE, "Unexpected connection error: ", ex2); throw new PSQLException( GT.tr( "Something unusual has occurred to cause the driver to fail. Please report this exception."), PSQLState.UNEXPECTED_ERROR, ex2); } } /** * this is an empty method left here for graalvm * we removed the ability to setup the logger from properties * due to a security issue * @param props Connection Properties */ private void setupLoggerFromProperties(final Properties props) { } /** * Perform a connect in a separate thread; supports getting the results from the original thread * while enforcing a login timeout. 
*/ private static class ConnectThread implements Runnable { ConnectThread(String url, Properties props) { this.url = url; this.props = props; } public void run() { Connection conn; Throwable error; try { conn = makeConnection(url, props); error = null; } catch (Throwable t) { conn = null; error = t; } synchronized (this) { if (abandoned) { if (conn != null) { try { conn.close(); } catch (SQLException e) { } } } else { result = conn; resultException = error; notify(); } } } /** * Get the connection result from this (assumed running) thread. If the timeout is reached * without a result being available, a SQLException is thrown. * * @param timeout timeout in milliseconds * @return the new connection, if successful * @throws SQLException if a connection error occurs or the timeout is reached */ public Connection getResult(long timeout) throws SQLException { long expiry = TimeUnit.NANOSECONDS.toMillis(System.nanoTime()) + timeout; synchronized (this) { while (true) { if (result != null) { return result; } if (resultException != null) { if (resultException instanceof SQLException) { resultException.fillInStackTrace(); throw (SQLException) resultException; } else { throw new PSQLException( GT.tr( "Something unusual has occurred to cause the driver to fail. 
Please report this exception."), PSQLState.UNEXPECTED_ERROR, resultException); } } long delay = expiry - TimeUnit.NANOSECONDS.toMillis(System.nanoTime()); if (delay <= 0) { abandoned = true; throw new PSQLException(GT.tr("Connection attempt timed out."), PSQLState.CONNECTION_UNABLE_TO_CONNECT); } try { wait(delay); } catch (InterruptedException ie) { // reset the interrupt flag Thread.currentThread().interrupt(); abandoned = true; // throw an unchecked exception which will hopefully not be ignored by the calling code throw new RuntimeException(GT.tr("Interrupted while attempting to connect.")); } } } } private final String url; private final Properties props; private @Nullable Connection result; private @Nullable Throwable resultException; private boolean abandoned; } /** * Create a connection from URL and properties. Always does the connection work in the current * thread without enforcing a timeout, regardless of any timeout specified in the properties. * * @param url the original URL * @param props the parsed/defaulted connection properties * @return a new connection * @throws SQLException if the connection could not be made */ private static Connection makeConnection(String url, Properties props) throws SQLException { return new PgConnection(hostSpecs(props), props, url); } /** * Returns true if the driver thinks it can open a connection to the given URL. Typically, drivers * will return true if they understand the subprotocol specified in the URL and false if they * don't. 
Our protocols start with jdbc:postgresql: * * @param url the URL of the driver * @return true if this driver accepts the given URL * @see java.sql.Driver#acceptsURL */ @Override public boolean acceptsURL(String url) { return parseURL(url, null) != null; } /** * <p>The getPropertyInfo method is intended to allow a generic GUI tool to discover what properties * it should prompt a human for in order to get enough information to connect to a database.</p> * * <p>Note that depending on the values the human has supplied so far, additional values may become * necessary, so it may be necessary to iterate through several calls to getPropertyInfo</p> * * @param url the Url of the database to connect to * @param info a proposed list of tag/value pairs that will be sent on connect open. * @return An array of DriverPropertyInfo objects describing possible properties. This array may * be an empty array if no properties are required * @see java.sql.Driver#getPropertyInfo */ @Override public DriverPropertyInfo[] getPropertyInfo(String url, Properties info) { Properties copy = new Properties(info); Properties parse = parseURL(url, copy); if (parse != null) { copy = parse; } PGProperty[] knownProperties = PGProperty.values(); DriverPropertyInfo[] props = new DriverPropertyInfo[knownProperties.length]; for (int i = 0; i < props.length; ++i) { props[i] = knownProperties[i].toDriverPropertyInfo(copy); } return props; } @Override public int getMajorVersion() { return org.postgresql.util.DriverInfo.MAJOR_VERSION; } @Override public int getMinorVersion() { return org.postgresql.util.DriverInfo.MINOR_VERSION; } /** * Returns the server version series of this driver and the specific build number. * * @return JDBC driver version * @deprecated use {@link #getMajorVersion()} and {@link #getMinorVersion()} instead */ @Deprecated public static String getVersion() { return DriverInfo.DRIVER_FULL_NAME; } /** * <p>Report whether the driver is a genuine JDBC compliant driver. 
A driver may only report "true" * here if it passes the JDBC compliance tests, otherwise it is required to return false. JDBC * compliance requires full support for the JDBC API and full support for SQL 92 Entry Level.</p> * * <p>For PostgreSQL, this is not yet possible, as we are not SQL92 compliant (yet).</p> */ @Override public boolean jdbcCompliant() { return false; } /** * Constructs a new DriverURL, splitting the specified URL into its component parts. * * @param url JDBC URL to parse * @param defaults Default properties * @return Properties with elements added from the url */ public static @Nullable Properties parseURL(String url, @Nullable Properties defaults) { // priority 1 - URL values Properties priority1Url = new Properties(); // priority 2 - Properties given as argument to DriverManager.getConnection() // argument "defaults" EXCLUDING defaults // priority 3 - Values retrieved by "service" Properties priority3Service = new Properties(); // priority 4 - Properties loaded by Driver.loadDefaultProperties() (user, org/postgresql/driverconfig.properties) // argument "defaults" INCLUDING defaults // priority 5 - PGProperty defaults for PGHOST, PGPORT, PGDBNAME String urlServer = url; String urlArgs = ""; int qPos = url.indexOf('?'); if (qPos != -1) { urlServer = url.substring(0, qPos); urlArgs = url.substring(qPos + 1); } if (!urlServer.startsWith("jdbc:postgresql:")) { LOGGER.log(Level.FINE, "JDBC URL must start with \"jdbc:postgresql:\" but was: {0}", url); return null; } urlServer = urlServer.substring("jdbc:postgresql:".length()); if (urlServer.equals("//") || urlServer.equals("///")) { urlServer = ""; } else if (urlServer.startsWith("//")) { urlServer = urlServer.substring(2); long slashCount = urlServer.chars().filter(ch -> ch == '/').count(); if (slashCount > 1) { LOGGER.log(Level.WARNING, "JDBC URL contains too many / characters: {0}", url); return null; } int slash = urlServer.indexOf('/'); if (slash == -1) { LOGGER.log(Level.WARNING, "JDBC URL must 
contain a / at the end of the host or port: {0}", url); return null; } if (!urlServer.endsWith("/")) { String value = urlDecode(urlServer.substring(slash + 1)); if (value == null) { return null; } PGProperty.PG_DBNAME.set(priority1Url, value); } urlServer = urlServer.substring(0, slash); String[] addresses = urlServer.split(","); StringBuilder hosts = new StringBuilder(); StringBuilder ports = new StringBuilder(); for (String address : addresses) { int portIdx = address.lastIndexOf(':'); if (portIdx != -1 && address.lastIndexOf(']') < portIdx) { String portStr = address.substring(portIdx + 1); ports.append(portStr); CharSequence hostStr = address.subSequence(0, portIdx); if (hostStr.length() == 0) { hosts.append(PGProperty.PG_HOST.getDefaultValue()); } else { hosts.append(hostStr); } } else { ports.append(PGProperty.PG_PORT.getDefaultValue()); hosts.append(address); } ports.append(','); hosts.append(','); } ports.setLength(ports.length() - 1); hosts.setLength(hosts.length() - 1); PGProperty.PG_HOST.set(priority1Url, hosts.toString()); PGProperty.PG_PORT.set(priority1Url, ports.toString()); } else if (urlServer.startsWith("/")) { return null; } else { String value = urlDecode(urlServer); if (value == null) { return null; } priority1Url.setProperty(PGProperty.PG_DBNAME.getName(), value); } // parse the args part of the url String[] args = urlArgs.split("&"); String serviceName = null; for (String token : args) { if (token.isEmpty()) { continue; } int pos = token.indexOf('='); if (pos == -1) { priority1Url.setProperty(token, ""); } else { String pName = PGPropertyUtil.translatePGServiceToPGProperty(token.substring(0, pos)); String pValue = urlDecode(token.substring(pos + 1)); if (pValue == null) { return null; } if (PGProperty.SERVICE.getName().equals(pName)) { serviceName = pValue; } else { priority1Url.setProperty(pName, pValue); } } } // load pg_service.conf if (serviceName != null) { LOGGER.log(Level.FINE, "Processing option [?service={0}]", serviceName); 
Properties result = PGPropertyServiceParser.getServiceProperties(serviceName); if (result == null) { LOGGER.log(Level.WARNING, "Definition of service [{0}] not found", serviceName); return null; } priority3Service.putAll(result); } // combine result based on order of priority Properties result = new Properties(); result.putAll(priority1Url); if (defaults != null) { // priority 2 - forEach() returns all entries EXCEPT defaults defaults.forEach(result::putIfAbsent); } priority3Service.forEach(result::putIfAbsent); if (defaults != null) { // priority 4 - stringPropertyNames() returns all entries INCLUDING defaults defaults.stringPropertyNames().forEach(s -> result.putIfAbsent(s, castNonNull(defaults.getProperty(s)))); } // priority 5 - PGProperty defaults for PGHOST, PGPORT, PGDBNAME result.putIfAbsent(PGProperty.PG_PORT.getName(), castNonNull(PGProperty.PG_PORT.getDefaultValue())); result.putIfAbsent(PGProperty.PG_HOST.getName(), castNonNull(PGProperty.PG_HOST.getDefaultValue())); if (PGProperty.USER.get(result) != null) { result.putIfAbsent(PGProperty.PG_DBNAME.getName(), castNonNull(PGProperty.USER.get(result))); } // consistency check if (!PGPropertyUtil.propertiesConsistencyCheck(result)) { return null; } // try to load .pgpass if password is missing if (PGProperty.PASSWORD.get(result) == null) { String password = PGPropertyPasswordParser.getPassword( PGProperty.PG_HOST.get(result), PGProperty.PG_PORT.get(result), PGProperty.PG_DBNAME.get(result), PGProperty.USER.get(result) ); if (password != null && !password.isEmpty()) { PGProperty.PASSWORD.set(result, password); } } // return result; } // decode url, on failure log and return null private static @Nullable String urlDecode(String url) { try { return URLCoder.decode(url); } catch (IllegalArgumentException e) { LOGGER.log(Level.FINE, "Url [{0}] parsing failed with error [{1}]", new Object[]{url, e.getMessage()}); } return null; } /** * @return the address portion of the URL */ private static HostSpec[] 
hostSpecs(Properties props) { String[] hosts = castNonNull(PGProperty.PG_HOST.get(props)).split(","); String[] ports = castNonNull(PGProperty.PG_PORT.get(props)).split(","); String localSocketAddress = PGProperty.LOCAL_SOCKET_ADDRESS.get(props); HostSpec[] hostSpecs = new HostSpec[hosts.length]; for (int i = 0; i < hostSpecs.length; ++i) { hostSpecs[i] = new HostSpec(hosts[i], Integer.parseInt(ports[i]), localSocketAddress); } return hostSpecs; } /** * @return the timeout from the URL, in milliseconds */ private static long timeout(Properties props) { String timeout = PGProperty.LOGIN_TIMEOUT.get(props); if (timeout != null) { try { return (long) (Float.parseFloat(timeout) * 1000); } catch (NumberFormatException e) { LOGGER.log(Level.WARNING, "Couldn't parse loginTimeout value: {0}", timeout); } } return (long) DriverManager.getLoginTimeout() * 1000; } /** * This method was added in v6.5, and simply throws an SQLException for an unimplemented method. I * decided to do it this way while implementing the JDBC2 extensions to JDBC, as it should help * keep the overall driver size down. It now requires the call Class and the function name to help * when the driver is used with closed software that don't report the stack strace * * @param callClass the call Class * @param functionName the name of the unimplemented function with the type of its arguments * @return PSQLException with a localized message giving the complete description of the * unimplemeted function */ public static SQLFeatureNotSupportedException notImplemented(Class<?> callClass, String functionName) { return new SQLFeatureNotSupportedException( GT.tr("Method {0} is not yet implemented.", callClass.getName() + "." + functionName), PSQLState.NOT_IMPLEMENTED.getState()); } @Override public java.util.logging.Logger getParentLogger() { return PARENT_LOGGER; } public static SharedTimer getSharedTimer() { return SHARED_TIMER; } /** * Register the driver against {@link DriverManager}. 
 * This is done automatically when the class is
 * loaded. Dropping the driver from DriverManager's list is possible using {@link #deregister()}
 * method.
 *
 * @throws IllegalStateException if the driver is already registered
 * @throws SQLException if registering the driver fails
 */
public static void register() throws SQLException {
  if (isRegistered()) {
    throw new IllegalStateException(
        "Driver is already registered. It can only be registered once.");
  }
  Driver registeredDriver = new Driver();
  DriverManager.registerDriver(registeredDriver);
  // Publish to the static field only after DriverManager accepted the instance.
  // NOTE(review): register()/deregister() are not synchronized; concurrent callers could
  // race on the static registeredDriver field — presumably only invoked from the class
  // initializer / single-threaded shutdown, confirm.
  Driver.registeredDriver = registeredDriver;
}

/**
 * According to JDBC specification, this driver is registered against {@link DriverManager} when
 * the class is loaded. To avoid leaks, this method allow unregistering the driver so that the
 * class can be gc'ed if necessary.
 *
 * @throws IllegalStateException if the driver is not registered
 * @throws SQLException if deregistering the driver fails
 */
public static void deregister() throws SQLException {
  if (registeredDriver == null) {
    throw new IllegalStateException(
        "Driver is not registered (or it has not been registered using Driver.register() method)");
  }
  DriverManager.deregisterDriver(registeredDriver);
  registeredDriver = null;
}

/**
 * @return {@code true} if the driver is registered against {@link DriverManager}
 */
public static boolean isRegistered() {
  return registeredDriver != null;
}
}
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.wm.impl.welcomeScreen; import com.intellij.icons.AllIcons; import com.intellij.ide.DataManager; import com.intellij.ide.RecentProjectsManager; import com.intellij.internal.statistic.UsageTrigger; import com.intellij.openapi.Disposable; import com.intellij.openapi.MnemonicHelper; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ApplicationNamesInfo; import com.intellij.openapi.application.JBProtocolCommand; import com.intellij.openapi.application.ex.ApplicationInfoEx; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.project.DumbAwareAction; import com.intellij.openapi.project.Project; import com.intellij.openapi.project.ProjectManager; import com.intellij.openapi.project.ProjectManagerAdapter; import com.intellij.openapi.ui.DialogWrapper; import com.intellij.openapi.ui.popup.JBPopupFactory; import com.intellij.openapi.util.*; import com.intellij.openapi.util.registry.Registry; import com.intellij.openapi.wm.IdeFrame; import com.intellij.openapi.wm.IdeRootPaneNorthExtension; import com.intellij.openapi.wm.StatusBar; import com.intellij.openapi.wm.WelcomeScreen; import com.intellij.openapi.wm.impl.IdeGlassPaneImpl; import com.intellij.ui.*; import com.intellij.ui.border.CustomLineBorder; import com.intellij.ui.components.JBList; import 
com.intellij.ui.components.JBSlidingPanel; import com.intellij.ui.components.labels.ActionLink; import com.intellij.ui.components.panels.NonOpaquePanel; import com.intellij.ui.popup.PopupFactoryImpl; import com.intellij.util.NotNullFunction; import com.intellij.util.ui.JBUI; import com.intellij.util.ui.MouseEventAdapter; import com.intellij.util.ui.UIUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.event.ListDataEvent; import javax.swing.event.ListDataListener; import javax.swing.event.ListSelectionEvent; import javax.swing.event.ListSelectionListener; import java.awt.*; import java.awt.event.*; import java.io.File; import java.io.InputStream; import java.net.URL; /** * @author Konstantin Bulenkov */ public class FlatWelcomeFrame extends JFrame implements IdeFrame { private final BalloonLayout myBalloonLayout; private final FlatWelcomeScreen myScreen; public FlatWelcomeFrame() { final JRootPane rootPane = getRootPane(); myScreen = new FlatWelcomeScreen(); final IdeGlassPaneImpl glassPane = new IdeGlassPaneImpl(rootPane) { @Override public void addNotify() { super.addNotify(); rootPane.remove(getProxyComponent()); //noinspection SSBasedInspection SwingUtilities.invokeLater(new Runnable() { public void run() { JBProtocolCommand.handleCurrentCommand(); } }); } }; setGlassPane(glassPane); glassPane.setVisible(false); //setUndecorated(true); setContentPane(myScreen.getWelcomePanel()); setTitle("Welcome to " + ApplicationNamesInfo.getInstance().getFullProductName()); AppUIUtil.updateWindowIcon(this); final int width = RecentProjectsManager.getInstance().getRecentProjectsActions(false).length == 0 ? 
666 : 777; setSize(JBUI.size(width, 460)); setResizable(false); //int x = bounds.x + (bounds.width - getWidth()) / 2; //int y = bounds.y + (bounds.height - getHeight()) / 2; Point location = DimensionService.getInstance().getLocation(WelcomeFrame.DIMENSION_KEY, null); Rectangle screenBounds = ScreenUtil.getScreenRectangle(location != null ? location : new Point(0, 0)); setLocation(new Point( screenBounds.x + (screenBounds.width - getWidth()) / 2, screenBounds.y + (screenBounds.height - getHeight()) / 3 )); //setLocation(x, y); ProjectManager.getInstance().addProjectManagerListener(new ProjectManagerAdapter() { @Override public void projectOpened(Project project) { dispose(); } }); myBalloonLayout = new BalloonLayoutImpl(rootPane, JBUI.insets(8)); WelcomeFrame.setupCloseAction(this); MnemonicHelper.init(this); Disposer.register(ApplicationManager.getApplication(), new Disposable() { @Override public void dispose() { FlatWelcomeFrame.this.dispose(); } }); } @Override public void dispose() { saveLocation(getBounds()); super.dispose(); Disposer.dispose(myScreen); WelcomeFrame.resetInstance(); } private static void saveLocation(Rectangle location) { Point middle = new Point(location.x + location.width / 2, location.y = location.height / 2); DimensionService.getInstance().setLocation(WelcomeFrame.DIMENSION_KEY, middle, null); } @Override public StatusBar getStatusBar() { return null; } public static Color getMainBackground() { return new JBColor(0xf7f7f7, 0x45474a); } public static Color getProjectsBackground() { return new JBColor(Gray.xFF, Gray.x39); } public static Color getLinkNormalColor() { return new JBColor(Gray._0, Gray.xBB); } public static Color getListSelectionColor(boolean hasFocus) { return hasFocus ? 
new JBColor(0x3875d6, 0x4b6eaf) : new JBColor(Gray.xDD, Gray.x45); } public static Color getActionLinkSelectionColor() { return new JBColor(0xdbe5f5, 0x485875); } public static JBColor getSeparatorColor() { return new JBColor(Gray.xEC, new Color(72, 75, 78)); } private class FlatWelcomeScreen extends JPanel implements WelcomeScreen { private JBSlidingPanel mySlidingPanel = new JBSlidingPanel(); public FlatWelcomeScreen() { super(new BorderLayout()); mySlidingPanel.add("root", this); setBackground(getMainBackground()); if (RecentProjectsManager.getInstance().getRecentProjectsActions(false, isUseProjectGroups()).length > 0) { final JComponent recentProjects = createRecentProjects(); add(recentProjects, BorderLayout.WEST); final JList projectsList = UIUtil.findComponentOfType(recentProjects, JList.class); if (projectsList != null) { projectsList.getModel().addListDataListener(new ListDataListener() { @Override public void intervalAdded(ListDataEvent e) { } @Override public void intervalRemoved(ListDataEvent e) { removeIfNeeded(); } private void removeIfNeeded() { if (RecentProjectsManager.getInstance().getRecentProjectsActions(false, isUseProjectGroups()).length == 0) { FlatWelcomeScreen.this.remove(recentProjects); FlatWelcomeScreen.this.revalidate(); FlatWelcomeScreen.this.repaint(); } } @Override public void contentsChanged(ListDataEvent e) { removeIfNeeded(); } }); projectsList.addFocusListener(new FocusListener() { @Override public void focusGained(FocusEvent e) { projectsList.repaint(); } @Override public void focusLost(FocusEvent e) { projectsList.repaint(); } }); } } add(createBody(), BorderLayout.CENTER); } @Override public JComponent getWelcomePanel() { return mySlidingPanel; } private JComponent createBody() { NonOpaquePanel panel = new NonOpaquePanel(new BorderLayout()); panel.add(createLogo(), BorderLayout.NORTH); panel.add(createActionPanel(), BorderLayout.CENTER); panel.add(createSettingsAndDocs(), BorderLayout.SOUTH); return panel; } private JComponent 
createSettingsAndDocs() { JPanel panel = new NonOpaquePanel(new BorderLayout()); NonOpaquePanel toolbar = new NonOpaquePanel(); AnAction register = ActionManager.getInstance().getAction("Register"); boolean registeredVisible = false; if (register != null) { AnActionEvent e = AnActionEvent.createFromAnAction(register, null, ActionPlaces.WELCOME_SCREEN, DataManager.getInstance().getDataContext(this)); register.update(e); Presentation presentation = e.getPresentation(); if (presentation.isEnabled()) { ActionLink registerLink = new ActionLink("Register", register); registerLink.setNormalColor(getLinkNormalColor()); NonOpaquePanel button = new NonOpaquePanel(new BorderLayout()); button.setBorder(JBUI.Borders.empty(4, 10)); button.add(registerLink); installFocusable(button, register, KeyEvent.VK_UP, KeyEvent.VK_RIGHT, true); NonOpaquePanel wrap = new NonOpaquePanel(); wrap.setBorder(JBUI.Borders.emptyLeft(10)); wrap.add(button); panel.add(wrap, BorderLayout.WEST); registeredVisible = true; } } toolbar.setLayout(new BoxLayout(toolbar, BoxLayout.X_AXIS)); toolbar.add(createActionLink("Configure", IdeActions.GROUP_WELCOME_SCREEN_CONFIGURE, AllIcons.General.GearPlain, !registeredVisible)); toolbar.add(createActionLink("Get Help", IdeActions.GROUP_WELCOME_SCREEN_DOC, null, false)); panel.add(toolbar, BorderLayout.EAST); panel.setBorder(JBUI.Borders.empty(0, 0, 8, 11)); return panel; } private JComponent createActionLink(final String text, final String groupId, Icon icon, boolean focusListOnLeft) { final Ref<ActionLink> ref = new Ref<ActionLink>(null); AnAction action = new AnAction() { @Override public void actionPerformed(@NotNull AnActionEvent e) { ActionGroup configureGroup = (ActionGroup)ActionManager.getInstance().getAction(groupId); final PopupFactoryImpl.ActionGroupPopup popup = (PopupFactoryImpl.ActionGroupPopup)JBPopupFactory.getInstance() .createActionGroupPopup(null, new IconsFreeActionGroup(configureGroup), e.getDataContext(), 
JBPopupFactory.ActionSelectionAid.SPEEDSEARCH, false, ActionPlaces.WELCOME_SCREEN); popup.showUnderneathOfLabel(ref.get()); UsageTrigger.trigger("welcome.screen." + groupId); } }; ref.set(new ActionLink(text, icon, action)); ref.get().setPaintUnderline(false); ref.get().setNormalColor(getLinkNormalColor()); NonOpaquePanel panel = new NonOpaquePanel(new BorderLayout()); panel.setBorder(JBUI.Borders.empty(4, 6, 4, 6)); panel.add(ref.get()); panel.add(createArrow(ref.get()), BorderLayout.EAST); installFocusable(panel, action, KeyEvent.VK_UP, KeyEvent.VK_DOWN, focusListOnLeft); return panel; } private JComponent createActionPanel() { JPanel actions = new NonOpaquePanel(); actions.setBorder(JBUI.Borders.emptyLeft(10)); actions.setLayout(new BoxLayout(actions, BoxLayout.Y_AXIS)); ActionManager actionManager = ActionManager.getInstance(); ActionGroup quickStart = (ActionGroup)actionManager.getAction(IdeActions.GROUP_WELCOME_SCREEN_QUICKSTART); DefaultActionGroup group = new DefaultActionGroup(); collectAllActions(group, quickStart); for (AnAction action : group.getChildren(null)) { JPanel button = new JPanel(new BorderLayout()); button.setOpaque(false); button.setBorder(JBUI.Borders.empty(8, 20)); AnActionEvent e = AnActionEvent.createFromAnAction(action, null, ActionPlaces.WELCOME_SCREEN, DataManager.getInstance().getDataContext(this)); action.update(e); Presentation presentation = e.getPresentation(); if (presentation.isVisible()) { String text = presentation.getText(); if (text != null && text.endsWith("...")) { text = text.substring(0, text.length() - 3); } Icon icon = presentation.getIcon(); if (icon.getIconHeight() != JBUI.scale(16) || icon.getIconWidth() != JBUI.scale(16)) { icon = JBUI.emptyIcon(16); } action = wrapGroups(action); ActionLink link = new ActionLink(text, icon, action, createUsageTracker(action)); link.setPaintUnderline(false); link.setNormalColor(getLinkNormalColor()); button.add(link); if (action instanceof WelcomePopupAction) { 
button.add(createArrow(link), BorderLayout.EAST); } installFocusable(button, action, KeyEvent.VK_UP, KeyEvent.VK_DOWN, true); actions.add(button); } } WelcomeScreenActionsPanel panel = new WelcomeScreenActionsPanel(); panel.actions.add(actions); return panel.root; } private AnAction wrapGroups(AnAction action) { if (action instanceof ActionGroup && ((ActionGroup)action).isPopup()) { final Pair<JPanel, JBList> panel = createActionGroupPanel((ActionGroup)action, mySlidingPanel, new Runnable() { @Override public void run() { goBack(); } }); final Runnable onDone = new Runnable() { @Override public void run() { final JBList list = panel.second; ListScrollingUtil.ensureSelectionExists(list); final ListSelectionListener[] listeners = ((DefaultListSelectionModel)list.getSelectionModel()).getListeners(ListSelectionListener.class); //avoid component cashing. This helps in case of LaF change for (ListSelectionListener listener : listeners) { listener.valueChanged(new ListSelectionEvent(list, list.getSelectedIndex(), list.getSelectedIndex(), true)); } list.requestFocus(); } }; final String name = action.getClass().getName(); mySlidingPanel.add(name, panel.first); final Presentation p = action.getTemplatePresentation(); return new DumbAwareAction(p.getText(), p.getDescription(), p.getIcon()) { @Override public void actionPerformed(@NotNull AnActionEvent e) { mySlidingPanel.getLayout().swipe(mySlidingPanel, name, JBCardLayout.SwipeDirection.FORWARD, onDone); } }; } return action; } protected void goBack() { mySlidingPanel.swipe("root", JBCardLayout.SwipeDirection.BACKWARD).doWhenDone(new Runnable() { @Override public void run() { mySlidingPanel.getRootPane().setDefaultButton(null); } }); } private void collectAllActions(DefaultActionGroup group, ActionGroup actionGroup) { for (AnAction action : actionGroup.getChildren(null)) { if (action instanceof ActionGroup && !((ActionGroup)action).isPopup()) { collectAllActions(group, (ActionGroup)action); } else { group.add(action); } } } 
private JComponent createLogo() { NonOpaquePanel panel = new NonOpaquePanel(new BorderLayout()); ApplicationInfoEx app = ApplicationInfoEx.getInstanceEx(); JLabel logo = new JLabel(IconLoader.getIcon(app.getWelcomeScreenLogoUrl())); logo.setBorder(JBUI.Borders.empty(30,0,10,0)); logo.setHorizontalAlignment(SwingConstants.CENTER); panel.add(logo, BorderLayout.NORTH); JLabel appName = new JLabel(ApplicationNamesInfo.getInstance().getFullProductName()); Font font = getProductFont(); appName.setForeground(JBColor.foreground()); appName.setFont(font.deriveFont(JBUI.scale(36f)).deriveFont(Font.PLAIN)); appName.setHorizontalAlignment(SwingConstants.CENTER); String appVersion = "Version " + app.getFullVersion(); if (app.isEAP() && app.getBuild().getBuildNumber() < Integer.MAX_VALUE) { appVersion += " (" + app.getBuild().asString() + ")"; } JLabel version = new JLabel(appVersion); version.setFont(getProductFont().deriveFont(JBUI.scale(16f))); version.setHorizontalAlignment(SwingConstants.CENTER); version.setForeground(Gray._128); panel.add(appName); panel.add(version, BorderLayout.SOUTH); panel.setBorder(JBUI.Borders.emptyBottom(20)); return panel; } private Font getProductFont() { String name = "/fonts/Roboto-Light.ttf"; URL url = AppUIUtil.class.getResource(name); if (url == null) { Logger.getInstance(AppUIUtil.class).warn("Resource missing: " + name); } else { try { InputStream is = url.openStream(); try { return Font.createFont(Font.TRUETYPE_FONT, is); } finally { is.close(); } } catch (Throwable t) { Logger.getInstance(AppUIUtil.class).warn("Cannot load font: " + url, t); } } return UIUtil.getLabelFont(); } private JComponent createRecentProjects() { JPanel panel = new JPanel(new BorderLayout()); panel.add(new NewRecentProjectPanel(this), BorderLayout.CENTER); panel.setBackground(getProjectsBackground()); panel.setBorder(new CustomLineBorder(getSeparatorColor(), JBUI.insetsRight(1))); return panel; } private void installFocusable(final JComponent comp, final AnAction 
action, final int prevKeyCode, final int nextKeyCode, final boolean focusListOnLeft) { comp.setFocusable(true); comp.setFocusTraversalKeysEnabled(true); comp.addKeyListener(new KeyAdapter() { @Override public void keyPressed(KeyEvent e) { final JList list = UIUtil.findComponentOfType(FlatWelcomeFrame.this.getComponent(), JList.class); if (e.getKeyCode() == KeyEvent.VK_ENTER) { InputEvent event = e; if (e.getComponent() instanceof JComponent) { ActionLink link = UIUtil.findComponentOfType((JComponent)e.getComponent(), ActionLink.class); if (link != null) { event = new MouseEvent(link, MouseEvent.MOUSE_CLICKED, e.getWhen(), e.getModifiers(), 0, 0, 1, false, MouseEvent.BUTTON1); } } action.actionPerformed(AnActionEvent.createFromAnAction(action, event, ActionPlaces.WELCOME_SCREEN, DataManager.getInstance().getDataContext())); } else if (e.getKeyCode() == prevKeyCode) { focusPrev(comp); } else if (e.getKeyCode() == nextKeyCode) { focusNext(comp); } else if (e.getKeyCode() == KeyEvent.VK_LEFT) { if (focusListOnLeft) { if (list != null) { list.requestFocus(); } } else { focusPrev(comp); } } else if (e.getKeyCode() == KeyEvent.VK_RIGHT) { focusNext(comp); } } }); comp.addFocusListener(new FocusListener() { @Override public void focusGained(FocusEvent e) { comp.setOpaque(true); comp.setBackground(getActionLinkSelectionColor()); } @Override public void focusLost(FocusEvent e) { comp.setOpaque(false); comp.setBackground(getMainBackground()); } }); } protected void focusPrev(JComponent comp) { FocusTraversalPolicy policy = FlatWelcomeFrame.this.getFocusTraversalPolicy(); if (policy != null) { Component prev = policy.getComponentBefore(FlatWelcomeFrame.this, comp); if (prev != null) { prev.requestFocus(); } } } protected void focusNext(JComponent comp) { FocusTraversalPolicy policy = FlatWelcomeFrame.this.getFocusTraversalPolicy(); if (policy != null) { Component next = policy.getComponentAfter(FlatWelcomeFrame.this, comp); if (next != null) { next.requestFocus(); } } } 
@Override public void setupFrame(JFrame frame) { } @Override public void dispose() { } private class IconsFreeActionGroup extends ActionGroup { private final ActionGroup myGroup; public IconsFreeActionGroup(ActionGroup group) { super(group.getTemplatePresentation().getText(), group.getTemplatePresentation().getDescription(), null); myGroup = group; } @Override public boolean isPopup() { return myGroup.isPopup(); } @NotNull @Override public AnAction[] getChildren(@Nullable AnActionEvent e) { AnAction[] children = myGroup.getChildren(e); AnAction[] patched = new AnAction[children.length]; for (int i = 0; i < children.length; i++) { patched[i] = patch(children[i]); } return patched; } private AnAction patch(final AnAction child) { if (child instanceof ActionGroup) { return new IconsFreeActionGroup((ActionGroup)child); } Presentation presentation = child.getTemplatePresentation(); return new AnAction(presentation.getText(), presentation.getDescription(), null) { @Override public void actionPerformed(@NotNull AnActionEvent e) { child.actionPerformed(e); UsageTrigger.trigger("welcome.screen." + e.getActionManager().getId(child)); } @Override public void update(@NotNull AnActionEvent e) { child.update(e); e.getPresentation().setIcon(null); } @Override public boolean isDumbAware() { return child.isDumbAware(); } }; } } } public static boolean isUseProjectGroups() { return Registry.is("welcome.screen.project.grouping.enabled"); } private static Runnable createUsageTracker(final AnAction action) { return new Runnable() { @Override public void run() { UsageTrigger.trigger("welcome.screen." 
+ ActionManager.getInstance().getId(action)); } }; } private static JLabel createArrow(final ActionLink link) { JLabel arrow = new JLabel(AllIcons.General.Combo3); arrow.setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR)); arrow.setVerticalAlignment(SwingConstants.BOTTOM); new ClickListener() { @Override public boolean onClick(@NotNull MouseEvent e, int clickCount) { final MouseEvent newEvent = MouseEventAdapter.convert(e, link, e.getX(), e.getY()); link.doClick(newEvent); return true; } }.installOn(arrow); return arrow; } @Override public BalloonLayout getBalloonLayout() { return myBalloonLayout; } @Override public Rectangle suggestChildFrameBounds() { return getBounds(); } @Nullable @Override public Project getProject() { return ProjectManager.getInstance().getDefaultProject(); } @Override public void setFrameTitle(String title) { setTitle(title); } @Override public void setFileTitle(String fileTitle, File ioFile) { setTitle(fileTitle); } @Override public IdeRootPaneNorthExtension getNorthExtension(String key) { return null; } @Override public JComponent getComponent() { return getRootPane(); } public static void notifyFrameClosed(JFrame frame) { saveLocation(frame.getBounds()); } public static class WelcomeScreenActionsPanel { private JPanel root; private JPanel actions; } public static Pair<JPanel, JBList> createActionGroupPanel(ActionGroup action, final JComponent parent, final Runnable backAction) { JPanel actionsListPanel = new JPanel(new BorderLayout()); actionsListPanel.setBackground(getProjectsBackground()); final JBList list = new JBList(action.getChildren(null)); list.setBackground(getProjectsBackground()); list.installCellRenderer(new NotNullFunction<AnAction, JComponent>() { final JLabel label = new JLabel(); { label.setBorder(JBUI.Borders.empty(3, 7)); } @NotNull @Override public JComponent fun(AnAction action) { label.setText(action.getTemplatePresentation().getText()); Icon icon = action.getTemplatePresentation().getIcon(); 
label.setIcon(icon); return label; } }); JScrollPane pane = ScrollPaneFactory.createScrollPane(list, true); pane.setBackground(getProjectsBackground()); actionsListPanel.add(pane, BorderLayout.CENTER); if (backAction != null) { final JLabel back = new JLabel(AllIcons.Actions.Back); back.setBorder(JBUI.Borders.empty(3, 7, 10, 7)); back.setHorizontalAlignment(SwingConstants.LEFT); new ClickListener() { @Override public boolean onClick(@NotNull MouseEvent event, int clickCount) { backAction.run(); return true; } }.installOn(back); actionsListPanel.add(back, BorderLayout.SOUTH); } final Ref<Component> selected = Ref.create(); final JPanel main = new JPanel(new BorderLayout()); main.add(actionsListPanel, BorderLayout.WEST); ListSelectionListener selectionListener = new ListSelectionListener() { @Override public void valueChanged(ListSelectionEvent e) { if (e.getValueIsAdjusting()) { // Update when a change has been finalized. // For instance, selecting an element with mouse fires two consecutive ListSelectionEvent events. return; } if (!selected.isNull()) { main.remove(selected.get()); } Object value = list.getSelectedValue(); if (value instanceof AbstractActionWithPanel) { JPanel panel = ((AbstractActionWithPanel)value).createPanel(); panel.setBorder(JBUI.Borders.empty(7, 10)); selected.set(panel); main.add(selected.get()); for (JButton button : UIUtil.findComponentsOfType(main, JButton.class)) { if (button.getClientProperty(DialogWrapper.DEFAULT_ACTION) == Boolean.TRUE) { parent.getRootPane().setDefaultButton(button); break; } } main.revalidate(); main.repaint(); } } }; list.addListSelectionListener(selectionListener); if (backAction != null) { new AnAction() { @Override public void actionPerformed(@NotNull AnActionEvent e) { backAction.run(); } }.registerCustomShortcutSet(KeyEvent.VK_ESCAPE, 0, main); } return Pair.create(main, list); } }
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium; import static java.util.Arrays.asList; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.openqa.selenium.testing.drivers.Browser.CHROME; import static org.openqa.selenium.testing.drivers.Browser.EDGIUM; import static org.openqa.selenium.testing.drivers.Browser.EDGE_HTML; import static org.openqa.selenium.testing.drivers.Browser.LEGACY_FIREFOX_XPI; import static org.openqa.selenium.testing.drivers.Browser.HTMLUNIT; import static org.openqa.selenium.testing.drivers.Browser.IE; import static org.openqa.selenium.testing.drivers.Browser.FIREFOX; import static org.openqa.selenium.testing.drivers.Browser.SAFARI; import org.junit.Test; import org.openqa.selenium.environment.webserver.Page; import org.openqa.selenium.testing.Ignore; import org.openqa.selenium.testing.JUnit4TestBase; import org.openqa.selenium.testing.NotYetImplemented; import java.util.List; public class ElementDomAttributeTest extends JUnit4TestBase { @Test public void testShouldReturnNullWhenGettingTheValueOfAnAttributeThatIsNotListed() { driver.get(pages.simpleTestPage); WebElement head = 
driver.findElement(By.xpath("/html")); String attribute = head.getDomAttribute("cheese"); assertThat(attribute).isNull(); } @Test public void testShouldReturnNullWhenGettingSrcAttributeOfInvalidImgTag() { driver.get(pages.simpleTestPage); WebElement img = driver.findElement(By.id("invalidImgTag")); String attribute = img.getDomAttribute("src"); assertThat(attribute).isNull(); } @Test @NotYetImplemented(value = CHROME, reason = "It returns a property") @NotYetImplemented(EDGIUM) public void testShouldReturnTheActualValueWhenGettingSrcAttributeOfAValidImgTag() { driver.get(pages.simpleTestPage); WebElement img = driver.findElement(By.id("validImgTag")); String attribute = img.getDomAttribute("src"); assertThat(attribute).isEqualTo("icon.gif"); } @Test @NotYetImplemented(value = CHROME, reason = "It returns a property") @NotYetImplemented(EDGIUM) public void testShouldReturnTheActualValueWhenGettingHrefAttributeOfAValidAnchorTag() { driver.get(pages.simpleTestPage); WebElement img = driver.findElement(By.id("validAnchorTag")); String attribute = img.getDomAttribute("href"); assertThat(attribute).isEqualTo("icon.gif"); } @Test public void testShouldReturnEmptyAttributeValuesWhenPresentAndTheValueIsActuallyEmpty() { driver.get(pages.simpleTestPage); WebElement body = driver.findElement(By.xpath("//body")); assertThat(body.getDomAttribute("style")).isEqualTo(""); } @Test public void testShouldReturnTheValueOfTheDisabledAttributeAsNullIfNotSet() { driver.get(pages.formPage); WebElement inputElement = driver.findElement(By.xpath("//input[@id='working']")); assertThat(inputElement.getDomAttribute("disabled")).isNull(); assertThat(inputElement.isEnabled()).isTrue(); WebElement pElement = driver.findElement(By.id("peas")); assertThat(pElement.getDomAttribute("disabled")).isNull(); assertThat(pElement.isEnabled()).isTrue(); } @Test @NotYetImplemented(value = CHROME, reason = "It returns a property") @NotYetImplemented(EDGIUM) public void 
testShouldNotReturnTheValueOfTheIndexAttributeIfItIsMissing() { driver.get(pages.formPage); WebElement multiSelect = driver.findElement(By.id("multi")); List<WebElement> options = multiSelect.findElements(By.tagName("option")); assertThat(options.get(1).getDomAttribute("index")).isNull(); } @Test public void testShouldIndicateTheElementsThatAreDisabledAreNotEnabled() { driver.get(pages.formPage); WebElement inputElement = driver.findElement(By.xpath("//input[@id='notWorking']")); assertThat(inputElement.isEnabled()).isFalse(); inputElement = driver.findElement(By.xpath("//input[@id='working']")); assertThat(inputElement.isEnabled()).isTrue(); } @Test public void testElementsShouldBeDisabledIfTheyAreDisabledUsingRandomDisabledStrings() { driver.get(pages.formPage); WebElement disabledTextElement1 = driver.findElement(By.id("disabledTextElement1")); assertThat(disabledTextElement1.isEnabled()).isFalse(); WebElement disabledTextElement2 = driver.findElement(By.id("disabledTextElement2")); assertThat(disabledTextElement2.isEnabled()).isFalse(); WebElement disabledSubmitElement = driver.findElement(By.id("disabledSubmitElement")); assertThat(disabledSubmitElement.isEnabled()).isFalse(); } @Test @NotYetImplemented(SAFARI) public void testShouldThrowExceptionIfSendingKeysToElementDisabledUsingRandomDisabledStrings() { driver.get(pages.formPage); WebElement disabledTextElement1 = driver.findElement(By.id("disabledTextElement1")); assertThatExceptionOfType(InvalidElementStateException.class) .isThrownBy(() -> disabledTextElement1.sendKeys("foo")); assertThat(disabledTextElement1.getText()).isEqualTo(""); WebElement disabledTextElement2 = driver.findElement(By.id("disabledTextElement2")); assertThatExceptionOfType(InvalidElementStateException.class) .isThrownBy(() -> disabledTextElement2.sendKeys("bar")); assertThat(disabledTextElement2.getText()).isEqualTo(""); } @Test public void testShouldIndicateWhenATextAreaIsDisabled() { driver.get(pages.formPage); WebElement textArea 
= driver.findElement(By.xpath("//textarea[@id='notWorkingArea']")); assertThat(textArea.isEnabled()).isFalse(); } @Test public void testShouldIndicateWhenASelectIsDisabled() { driver.get(pages.formPage); WebElement enabled = driver.findElement(By.name("selectomatic")); WebElement disabled = driver.findElement(By.name("no-select")); assertThat(enabled.isEnabled()).isTrue(); assertThat(disabled.isEnabled()).isFalse(); } @Test public void testShouldReturnTheValueOfSelectedForOptionsOnlyIfTheyAreSelected() { driver.get(pages.formPage); WebElement selectBox = driver.findElement(By.xpath("//select[@name='selectomatic']")); List<WebElement> options = selectBox.findElements(By.tagName("option")); WebElement one = options.get(0); WebElement two = options.get(1); assertThat(one.isSelected()).isTrue(); assertThat(two.isSelected()).isFalse(); assertThat(one.getDomAttribute("selected")).isEqualTo("true"); assertThat(two.getDomAttribute("selected")).isNull(); } @Test public void testShouldReturnValueOfClassAttributeOfAnElement() { driver.get(pages.xhtmlTestPage); WebElement heading = driver.findElement(By.xpath("//h1")); String className = heading.getDomAttribute("class"); assertThat(className).isEqualTo("header"); } @Test @NotYetImplemented(value = CHROME, reason = "It returns a property") @NotYetImplemented(EDGIUM) public void testShouldNotReturnTheContentsOfATextAreaAsItsValue() { driver.get(pages.formPage); String value = driver.findElement(By.id("withText")).getDomAttribute("value"); assertThat(value).isNull(); } @Test @NotYetImplemented(value = CHROME, reason = "It returns a property") @NotYetImplemented(EDGIUM) public void testShouldNotReturnInnerHtmlProperty() { driver.get(pages.simpleTestPage); String html = driver.findElement(By.id("wrappingtext")).getDomAttribute("innerHTML"); assertThat(html).isNull(); } @Test public void testShouldTreatReadonlyAsAValue() { driver.get(pages.formPage); WebElement element = driver.findElement(By.name("readonly")); String readonly = 
element.getDomAttribute("readonly"); assertThat(readonly).isNotNull(); WebElement textInput = driver.findElement(By.name("x")); String notReadonly = textInput.getDomAttribute("readonly"); assertThat(readonly).isNotEqualTo(notReadonly); } @Test @NotYetImplemented(value = CHROME, reason = "It returns a property") @NotYetImplemented(EDGIUM) public void testShouldNotReturnTextContentProperty() { driver.get(pages.simpleTestPage); WebElement element = driver.findElement(By.id("hiddenline")); assertThat(element.getDomAttribute("textContent")).isNull(); } @Test public void testShouldGetNumericAtribute() { driver.get(pages.formPage); WebElement element = driver.findElement(By.id("withText")); assertThat(element.getDomAttribute("rows")).isEqualTo("5"); } @Test public void testCanReturnATextApproximationOfTheStyleAttribute() { driver.get(pages.javascriptPage); String style = driver.findElement(By.id("red-item")).getDomAttribute("style"); assertThat(style.toLowerCase().contains("background-color")).isTrue(); } @Test public void testShouldCorrectlyReportValueOfColspan() { driver.get(pages.tables); WebElement th1 = driver.findElement(By.id("th1")); WebElement td2 = driver.findElement(By.id("td2")); assertThat(th1.getDomAttribute("id")).isEqualTo("th1"); assertThat(th1.getDomAttribute("colspan")).isEqualTo("3"); assertThat(td2.getDomAttribute("id")).isEqualTo("td2"); assertThat(td2.getDomAttribute("colspan")).isEqualTo("2"); } // This is a test-case re-creating issue 900. 
@Test
public void testShouldReturnValueOfOnClickAttribute() {
  // Browsers historically serialised inline handlers differently, so the
  // assertion accepts every known textual form of the same handler body.
  driver.get(pages.javascriptPage);

  WebElement clickTarget = driver.findElement(By.id("mouseclick"));
  String actualHandler = clickTarget.getDomAttribute("onclick");
  String handlerBody = "displayMessage('mouse click');";
  assertThat(actualHandler).as("Javascript code").isIn(
      "javascript:" + handlerBody, // Non-IE
      "function anonymous()\n{\n" + handlerBody + "\n}", // IE
      "function onclick()\n{\n" + handlerBody + "\n}"); // IE

  // An element with no onclick attribute must report null, not an empty handler.
  WebElement downTarget = driver.findElement(By.id("mousedown"));
  assertThat(downTarget.getDomAttribute("onclick")).isNull();
}

@Test
public void testgetDomAttributeDoesNotReturnAnObjectForSvgProperties() {
  // SVG presentation attributes come back as their literal string value.
  driver.get(pages.svgPage);
  WebElement rotated = driver.findElement(By.id("rotate"));
  assertThat(rotated.getDomAttribute("transform")).isEqualTo("rotate(30)");
}

@Test
@NotYetImplemented(value = CHROME, reason = "It returns a property")
@NotYetImplemented(EDGIUM)
public void testCanRetrieveTheCurrentValueOfATextFormFieldWithPresetText() {
  // The "value" attribute reflects the original markup, not what was typed.
  driver.get(pages.formPage);
  WebElement field = driver.findElement(By.id("inputWithText"));
  assertThat(field.getDomAttribute("value")).isEqualTo("Example text");

  field.sendKeys("hello@example.com");
  assertThat(field.getDomAttribute("value")).isEqualTo("Example text");
}

@Test
@NotYetImplemented(value = CHROME, reason = "It returns a property")
@NotYetImplemented(EDGIUM)
public void testShouldNotReturnTextOfATextArea() {
  // A textarea's text content is not exposed through the "value" attribute.
  driver.get(pages.formPage);
  WebElement area = driver.findElement(By.id("withText"));
  assertThat(area.getDomAttribute("value")).isNull();
}

@Test
public void testShouldReturnNullForNonPresentBooleanAttributes() {
  driver.get(pages.booleanAttributes);

  WebElement noRequired = driver.findElement(By.id("working"));
  assertThat(noRequired.getDomAttribute("required")).isNull();

  WebElement noNowrap = driver.findElement(By.id("wallace"));
  assertThat(noNowrap.getDomAttribute("nowrap")).isNull();
}

@Test
@NotYetImplemented(value = CHROME, reason = "It returns a property")
@NotYetImplemented(EDGIUM)
@NotYetImplemented(FIREFOX)
public void testShouldReturnEmptyStringForPresentBooleanAttributes() {
  // Present boolean attributes surface as "" (or their literal markup value).
  driver.get(pages.booleanAttributes);

  assertThat(driver.findElement(By.id("emailRequired")).getDomAttribute("required")).isEqualTo("");
  assertThat(driver.findElement(By.id("emptyTextAreaRequired")).getDomAttribute("required")).isEqualTo("required");
  assertThat(driver.findElement(By.id("inputRequired")).getDomAttribute("required")).isEqualTo("");
  assertThat(driver.findElement(By.id("textAreaRequired")).getDomAttribute("required")).isEqualTo("false");
  assertThat(driver.findElement(By.id("unwrappable")).getDomAttribute("nowrap")).isEqualTo("");
}

@Test
public void testMultipleAttributeShouldBeNullWhenNotSet() {
  driver.get(pages.selectPage);
  WebElement select = driver.findElement(By.id("selectWithoutMultiple"));
  assertThat(select.getDomAttribute("multiple")).isNull();
}

@Test
public void testMultipleAttributeShouldBeTrueWhenSet() {
  driver.get(pages.selectPage);
  WebElement select = driver.findElement(By.id("selectWithMultipleEqualsMultiple"));
  assertThat(select.getDomAttribute("multiple")).isEqualTo("true");
}

@Test
public void testMultipleAttributeShouldBeTrueWhenSelectHasMultipleWithValueAsBlank() {
  driver.get(pages.selectPage);
  WebElement select = driver.findElement(By.id("selectWithEmptyStringMultiple"));
  assertThat(select.getDomAttribute("multiple")).isEqualTo("true");
}

@Test
public void testMultipleAttributeShouldBeTrueWhenSelectHasMultipleWithoutAValue() {
  driver.get(pages.selectPage);
  WebElement select = driver.findElement(By.id("selectWithMultipleWithoutValue"));
  assertThat(select.getDomAttribute("multiple")).isEqualTo("true");
}

@Test
public void testMultipleAttributeShouldBeTrueWhenSelectHasMultipleWithValueAsSomethingElse() {
  driver.get(pages.selectPage);
  WebElement select = driver.findElement(By.id("selectWithRandomMultipleValue"));
  assertThat(select.getDomAttribute("multiple")).isEqualTo("true");
}

@Test
public void shouldTreatContenteditableAsEnumeratedButNotBoolean() {
  checkEnumeratedAttribute("contenteditable", "true", "false", "yes", "no", "", "blabla");
}

@Test
@NotYetImplemented(IE)
@NotYetImplemented(CHROME)
@NotYetImplemented(EDGIUM)
@Ignore(LEGACY_FIREFOX_XPI)
@NotYetImplemented(HTMLUNIT)
@NotYetImplemented(SAFARI)
@NotYetImplemented(EDGE_HTML)
public void shouldTreatDraggableAsEnumeratedButNotBoolean() {
  checkEnumeratedAttribute("draggable", "true", "false", "yes", "no", "", "blabla");
}

// Verifies that an enumerated attribute round-trips each explicit value,
// yields "" when present without a value, and null when absent entirely.
private void checkEnumeratedAttribute(String name, String... values) {
  for (String value : asList(values)) {
    driver.get(appServer.create(new Page().withBody(
        String.format("<div id=\"attr\" %s=\"%s\">", name, value))));
    assertThat(driver.findElement(By.id("attr")).getDomAttribute(name)).isEqualTo(value);
  }

  driver.get(appServer.create(new Page().withBody(String.format("<div id=\"attr\" %s>", name))));
  assertThat(driver.findElement(By.id("attr")).getDomAttribute(name)).isEqualTo("");

  driver.get(appServer.create(new Page().withBody("<div id=\"attr\">")));
  assertThat(driver.findElement(By.id("attr")).getDomAttribute(name)).isNull();
}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.aries.tx.control.itests;

import static java.lang.Boolean.getBoolean;
import static org.ops4j.pax.exam.CoreOptions.junitBundles;
import static org.ops4j.pax.exam.CoreOptions.mavenBundle;
import static org.ops4j.pax.exam.CoreOptions.options;
import static org.ops4j.pax.exam.CoreOptions.systemProperty;
import static org.ops4j.pax.exam.CoreOptions.when;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Dictionary;
import java.util.Hashtable;
import java.util.List;
import java.util.NoSuchElementException;

import javax.inject.Inject;
import javax.persistence.EntityManager;

import org.h2.tools.Server;
import org.junit.After;
import org.junit.Before;
import org.junit.runner.RunWith;
import org.ops4j.pax.exam.Configuration;
import org.ops4j.pax.exam.CoreOptions;
import org.ops4j.pax.exam.Option;
import org.ops4j.pax.exam.ProbeBuilder;
import org.ops4j.pax.exam.TestProbeBuilder;
import org.ops4j.pax.exam.junit.PaxExam;
import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy;
import org.ops4j.pax.exam.spi.reactors.PerClass;
import org.osgi.framework.BundleContext;
import org.osgi.framework.Filter;
import org.osgi.framework.FrameworkUtil;
import org.osgi.framework.InvalidSyntaxException;
import org.osgi.framework.ServiceReference;
import org.osgi.service.cm.ConfigurationAdmin;
import org.osgi.service.jdbc.DataSourceFactory;
import org.osgi.service.jpa.EntityManagerFactoryBuilder;
import org.osgi.service.transaction.control.TransactionControl;
import org.osgi.service.transaction.control.jpa.JPAEntityManagerProvider;
import org.osgi.util.tracker.ServiceTracker;

/**
 * Base class for Transaction Control / JPA integration tests.
 *
 * <p>Each test run starts an in-process H2 TCP server, configures a
 * {@link JPAEntityManagerProvider} through Configuration Admin, and exposes the
 * resulting {@link EntityManager} and {@link TransactionControl} to subclasses.
 * Subclasses select the JPA provider bundles via {@link #jpaProvider()} and may
 * run against either the local or the XA resource provider (see the two
 * {@code @Configuration} methods).
 */
@RunWith(PaxExam.class)
@ExamReactorStrategy(PerClass.class)
public abstract class AbstractJPATransactionTest {

	/** System property holding the OSGi filter used to select the TransactionControl service. */
	protected static final String TX_CONTROL_FILTER = "tx.control.filter";
	/** Property used to target a specific EntityManagerFactoryBuilder, if set. */
	protected static final String ARIES_EMF_BUILDER_TARGET_FILTER = "aries.emf.builder.target.filter";
	/** Boolean system property: true selects the XA resource provider configuration. */
	protected static final String IS_XA = "aries.test.is.xa";

	@Inject
	BundleContext context;

	protected TransactionControl txControl;

	protected EntityManager em;

	private Server server;

	/** Trackers opened during the test; closed in {@link #tearDown()}. */
	private final List<ServiceTracker<?,?>> trackers = new ArrayList<>();

	/**
	 * Starts an H2 TCP server on an ephemeral port and builds an EntityManager
	 * connected to it through the configured resource provider.
	 */
	@Before
	public void setUp() throws Exception {
		txControl = getService(TransactionControl.class, System.getProperty(TX_CONTROL_FILTER), 5000);

		// "-tcpPort 0" lets H2 pick a free port, avoiding clashes between runs
		server = Server.createTcpServer("-tcpPort", "0");
		server.start();

		String jdbcUrl = "jdbc:h2:tcp://127.0.0.1:" + server.getPort() + "/" + getRemoteDBPath();

		em = configuredEntityManager(jdbcUrl);
	}

	/**
	 * Convenience overload of {@link #getService(Class, String, long)} with no filter.
	 */
	protected <T> T getService(Class<T> clazz, long timeout) {
		try {
			return getService(clazz, null, timeout);
		} catch (InvalidSyntaxException e) {
			// Unreachable: a null filter is replaced with a constant valid filter
			throw new IllegalArgumentException(e);
		}
	}

	/**
	 * Waits up to {@code timeout} ms for a service of the given type matching
	 * {@code filter}, throwing {@link NoSuchElementException} if none appears.
	 *
	 * @param clazz   service type to look up
	 * @param filter  optional OSGi filter; {@code null} matches any service of the type
	 * @param timeout maximum wait in milliseconds
	 * @throws InvalidSyntaxException if {@code filter} is not valid filter syntax
	 */
	protected <T> T getService(Class<T> clazz, String filter, long timeout) throws InvalidSyntaxException {
		// A null filter means "match anything" - the tautology below matches every service
		Filter f = FrameworkUtil.createFilter(filter == null ? "(|(foo=bar)(!(foo=bar)))" : filter);

		ServiceTracker<T, T> tracker = new ServiceTracker<T, T>(context, clazz, null) {
			@Override
			public T addingService(ServiceReference<T> reference) {
				return f.match(reference) ? super.addingService(reference) : null;
			}
		};

		tracker.open();
		try {
			T t = tracker.waitForService(timeout);
			if (t == null) {
				throw new NoSuchElementException(clazz.getName());
			}
			return t;
		} catch (InterruptedException e) {
			// Restore the interrupt status so callers further up can observe it
			Thread.currentThread().interrupt();
			throw new RuntimeException("Error waiting for service " + clazz.getName(), e);
		} finally {
			// Trackers stay open for the test's lifetime and are closed in tearDown()
			trackers.add(tracker);
		}
	}

	/**
	 * Derives a database file path under target/ next to the compiled test classes.
	 * NOTE(review): URL.getPath() is not URL-decoded, so this may misbehave if the
	 * build path contains characters such as spaces - confirm on affected platforms.
	 */
	private String getRemoteDBPath() {
		String fullResourceName = getClass().getName().replace('.', '/') + ".class";

		String resourcePath = getClass().getClassLoader().getResource(fullResourceName).getPath();

		File testClassesDir = new File(resourcePath.substring(0, resourcePath.length() - fullResourceName.length()));

		return new File(testClassesDir.getParentFile(), "testdb/db1").getAbsolutePath();
	}

	/**
	 * Publishes a factory configuration for the (local or XA) JPA resource
	 * provider and returns the EntityManager it produces.
	 */
	private EntityManager configuredEntityManager(String jdbcUrl) throws IOException {

		Dictionary<String, Object> props = getBaseProperties();

		props.put(DataSourceFactory.OSGI_JDBC_DRIVER_CLASS, "org.h2.Driver");
		props.put(DataSourceFactory.JDBC_URL, jdbcUrl);
		props.put(EntityManagerFactoryBuilder.JPA_UNIT_NAME, "test-unit");

		String filter = System.getProperty(ARIES_EMF_BUILDER_TARGET_FILTER);

		if (filter != null) {
			props.put(ARIES_EMF_BUILDER_TARGET_FILTER, filter);
		}

		ConfigurationAdmin cm = getService(ConfigurationAdmin.class, 5000);

		// The factory PID selects which provider implementation gets configured
		String pid = getBoolean(IS_XA) ? "org.apache.aries.tx.control.jpa.xa"
				: "org.apache.aries.tx.control.jpa.local";

		System.out.println("Configuring connection provider with pid " + pid);

		org.osgi.service.cm.Configuration config = cm.createFactoryConfiguration(pid, null);
		config.update(props);

		return getService(JPAEntityManagerProvider.class, 5000).getResource(txControl);
	}

	/**
	 * Hook for subclasses to seed extra configuration properties.
	 */
	protected Dictionary<String, Object> getBaseProperties() {
		return new Hashtable<>();
	}

	/**
	 * Deletes all configurations, stops the H2 server and closes all trackers.
	 */
	@After
	public void tearDown() {

		clearConfiguration();

		if (server != null) {
			server.stop();
		}

		trackers.forEach(ServiceTracker::close);

		em = null;
	}

	/**
	 * Best-effort removal of every Configuration Admin configuration created by a test.
	 */
	private void clearConfiguration() {
		ConfigurationAdmin cm = getService(ConfigurationAdmin.class, 5000);
		org.osgi.service.cm.Configuration[] cfgs = null;
		try {
			cfgs = cm.listConfigurations(null);
		} catch (Exception e) {
			// Best effort: report and continue with whatever could be listed
			e.printStackTrace();
		}

		if (cfgs != null) {
			for (org.osgi.service.cm.Configuration cfg : cfgs) {
				try {
					cfg.delete();
				} catch (Exception ignored) {
					// Deletion is best effort; the configuration may already be gone
				}
			}
			try {
				// Give asynchronous configuration-deleted events time to be delivered
				Thread.sleep(250);
			} catch (InterruptedException e) {
				// Preserve interrupt status; tear-down should still complete
				Thread.currentThread().interrupt();
			}
		}
	}

	@ProbeBuilder
	public TestProbeBuilder probeConfiguration(TestProbeBuilder probe) {
		// makes sure the generated Test-Bundle contains this import!
		probe.setHeader("Meta-Persistence", "META-INF/persistence.xml");
		return probe;
	}

	/** Resolves the local Maven repository from either supported system property. */
	private static String localRepo() {
		String localRepo = System.getProperty("maven.repo.local");
		if (localRepo == null) {
			localRepo = System.getProperty("org.ops4j.pax.url.mvn.localRepository");
		}
		return localRepo;
	}

	/** Pax Exam configuration exercising the local (non-XA) resource provider. */
	@Configuration
	public Option[] localTxConfiguration() {
		String localRepo = localRepo();

		return options(junitBundles(),
				systemProperty("org.ops4j.pax.logging.DefaultServiceLog.level").value("INFO"),
				when(localRepo != null)
					.useOptions(CoreOptions.vmOption("-Dorg.ops4j.pax.url.mvn.localRepository=" + localRepo)),
				localTxControlService(),
				localJpaResourceProviderWithH2(),
				jpaProvider(),
				ariesJPA(),
				mavenBundle("org.apache.felix", "org.apache.felix.configadmin").versionAsInProject(),
				mavenBundle("org.ops4j.pax.logging", "pax-logging-api").versionAsInProject(),
				mavenBundle("org.ops4j.pax.logging", "pax-logging-service").versionAsInProject()

//				,CoreOptions.vmOption("-Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=5005")
			);
	}

	/** Pax Exam configuration exercising the XA resource provider. */
	@Configuration
	public Option[] xaTxConfiguration() {
		String localRepo = localRepo();

		return options(junitBundles(),
				systemProperty("org.ops4j.pax.logging.DefaultServiceLog.level").value("INFO"),
				when(localRepo != null)
					.useOptions(CoreOptions.vmOption("-Dorg.ops4j.pax.url.mvn.localRepository=" + localRepo)),
				systemProperty(IS_XA).value(Boolean.TRUE.toString()),
				xaTxControlService(),
				xaJpaResourceProviderWithH2(),
				jpaProvider(),
				ariesJPA(),
				mavenBundle("org.apache.felix", "org.apache.felix.configadmin").versionAsInProject(),
				mavenBundle("org.ops4j.pax.logging", "pax-logging-api").versionAsInProject(),
				mavenBundle("org.ops4j.pax.logging", "pax-logging-service").versionAsInProject()

//				,CoreOptions.vmOption("-Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=5005")
			);
	}

	public Option localTxControlService() {
		return CoreOptions.composite(
				systemProperty(TX_CONTROL_FILTER).value("(osgi.local.enabled=true)"),
				mavenBundle("org.apache.aries.tx-control", "tx-control-service-local").versionAsInProject());
	}

	public Option xaTxControlService() {
		return CoreOptions.composite(
				systemProperty(TX_CONTROL_FILTER).value("(osgi.xa.enabled=true)"),
				mavenBundle("org.apache.aries.tx-control", "tx-control-service-xa").versionAsInProject());
	}

	public Option localJpaResourceProviderWithH2() {
		return CoreOptions.composite(
				mavenBundle("com.h2database", "h2").versionAsInProject(),
				mavenBundle("org.apache.aries.tx-control", "tx-control-provider-jpa-local").versionAsInProject());
	}

	public Option xaJpaResourceProviderWithH2() {
		return CoreOptions.composite(
				mavenBundle("com.h2database", "h2").versionAsInProject(),
				mavenBundle("org.apache.aries.tx-control", "tx-control-provider-jpa-xa").versionAsInProject());
	}

	public Option ariesJPA() {
		return mavenBundle("org.apache.aries.jpa", "org.apache.aries.jpa.container", ariesJPAVersion());
	}

	protected String ariesJPAVersion() {
		return "2.3.0";
	}

	/** Subclasses supply the bundles for the JPA provider under test. */
	protected abstract Option jpaProvider();
}
package it.unibz.krdb.obda.reformulation.semindex.tests;

/*
 * #%L
 * ontop-quest-owlapi3
 * %%
 * Copyright (C) 2009 - 2014 Free University of Bozen-Bolzano
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import it.unibz.krdb.obda.model.OBDADataFactory;
import it.unibz.krdb.obda.model.Predicate;
import it.unibz.krdb.obda.model.impl.OBDADataFactoryImpl;
import it.unibz.krdb.obda.ontology.Description;
import it.unibz.krdb.obda.ontology.Ontology;
import it.unibz.krdb.obda.ontology.OntologyFactory;
import it.unibz.krdb.obda.ontology.PropertyExpression;
import it.unibz.krdb.obda.ontology.impl.OntologyFactoryImpl;
import it.unibz.krdb.obda.owlapi3.OWLAPI3Translator;
import it.unibz.krdb.obda.owlrefplatform.core.dagjgrapht.TBoxReasoner;
import it.unibz.krdb.obda.owlrefplatform.core.dagjgrapht.TBoxReasonerImpl;
import it.unibz.krdb.obda.owlrefplatform.core.dagjgrapht.SemanticIndexRange;

import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;

import org.h2.jdbcx.JdbcDataSource;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;

/**
 * Helper class to load ontologies and compare computed values to expected results
 *
 * @author Sergejs Pugac
 */
public class SemanticIndexHelper {
	public final static Logger log = LoggerFactory.getLogger(SemanticIndexHelper.class);

	public OWLOntologyManager manager = OWLManager.createOWLOntologyManager();

	// Directory holding the .owl / .si / .abox test resources
	public String owlloc = "src/test/resources/test/semanticIndex_ontologies/";

	// In-memory H2 connection; may remain null if the connection attempt fails
	public transient Connection conn;

	private OBDADataFactory predicateFactory = OBDADataFactoryImpl.getInstance();
	private OntologyFactory descFactory = OntologyFactoryImpl.getInstance();

	// Markers used in the .si files to encode restriction/inverse URIs
	private String owl_exists = "::__exists__::";
	private String owl_inverse_exists = "::__inverse__exists__::";
	private String owl_inverse = "::__inverse__::";

	/**
	 * Opens a connection to an in-memory H2 database. On failure the error is
	 * logged and {@link #conn} stays null (callers will fail on first use).
	 */
	public SemanticIndexHelper() {
		JdbcDataSource ds = new JdbcDataSource();
		ds.setURL("jdbc:h2:mem:db1");
		try {
			conn = ds.getConnection();
		} catch (SQLException e) {
			log.error("Error creating test database");
			e.printStackTrace();
		}
	}

	/**
	 * Loads {@code <owlloc>/<ontoname>.owl} and translates it to the internal
	 * Ontology representation.
	 */
	public Ontology load_onto(String ontoname) throws Exception {
		String owlfile = owlloc + ontoname + ".owl";
		OWLOntology owlOntology = manager.loadOntologyFromOntologyDocument(new File(owlfile));
		OWLAPI3Translator translator = new OWLAPI3Translator();
		Ontology ontology = translator.translate(owlOntology);
		return ontology;
	}

	/**
	 * Builds a TBox reasoner (DAG) over the named ontology.
	 */
	public TBoxReasoner load_dag(String ontoname) throws Exception {
		return new TBoxReasonerImpl(load_onto(ontoname));
	}

	/**
	 * Parses {@code <owlloc>/<resname>.si} and returns [classes, roles].
	 *
	 * @throws RuntimeException if the expected-results file cannot be parsed
	 *         (previously the exception was only printed and a
	 *         NullPointerException followed on the null document).
	 */
	public List<List<Description>> get_results(String resname) {
		String resfile = owlloc + resname + ".si";
		File results = new File(resfile);

		Document doc;
		try {
			DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
			DocumentBuilder db = dbf.newDocumentBuilder();
			doc = db.parse(results);
		} catch (ParserConfigurationException e) {
			throw new RuntimeException("Unable to parse expected results file " + resfile, e);
		} catch (SAXException e) {
			throw new RuntimeException("Unable to parse expected results file " + resfile, e);
		} catch (IOException e) {
			throw new RuntimeException("Unable to parse expected results file " + resfile, e);
		}
		doc.getDocumentElement().normalize();

		List<Description> cls = get_dag_type(doc, "classes");
		// NOTE(review): "rolles" looks like a typo for "roles", but it must match
		// the tag name used inside the .si resource files - confirm before renaming.
		List<Description> roles = get_dag_type(doc, "rolles");
		List<List<Description>> rv = new ArrayList<List<Description>>(2);
		rv.add(cls);
		rv.add(roles);
		return rv;
	}

	/**
	 * Extract particular type of DAG nodes from XML document
	 *
	 * @param doc  XML document containing encoded DAG nodes
	 * @param type type of DAGNodes to extract ("classes" or role elements)
	 * @return a list of DAGNodes
	 */
	private List<Description> get_dag_type(Document doc, String type) {
		List<Description> rv = new LinkedList<Description>();
		Node root = doc.getElementsByTagName(type).item(0);
		NodeList childNodes = root.getChildNodes();
		for (int i = 0; i < childNodes.getLength(); i++) {
			if (childNodes.item(i).getNodeType() == Node.ELEMENT_NODE) {
				Element node = (Element) childNodes.item(i);
				String uri = node.getAttribute("uri");
				// Parsed only to validate the attribute; the index itself is unused here
				int idx = Integer.parseInt(node.getAttribute("index"));

				boolean inverse = false;
				boolean exists = false;
				Description description;

				// Strip the encoding prefix and remember what it stood for
				if (uri.startsWith(owl_exists)) {
					uri = uri.substring(owl_exists.length());
					exists = true;
				} else if (uri.startsWith(owl_inverse_exists)) {
					uri = uri.substring(owl_inverse_exists.length());
					inverse = true;
					exists = true;
				} else if (uri.startsWith(owl_inverse)) {
					uri = uri.substring(owl_inverse.length());
					inverse = true;
				}

				if (type.equals("classes")) {
					if (exists) {
						PropertyExpression prop = descFactory.createProperty(uri);
						if (inverse)
							prop = prop.getInverse();
						description = descFactory.createPropertySomeRestriction(prop);
					} else
						description = descFactory.createClass(uri);
				} else {
					PropertyExpression prop = descFactory.createProperty(uri);
					if (inverse)
						description = prop.getInverse();
					else
						description = prop;
				}
				rv.add(description);
			}
		}
		return rv;
	}

	/**
	 * Reads {@code <owlloc>/<resname>.abox}, one space-separated assertion per line.
	 * The reader is now closed in all cases (the original leaked the stream).
	 */
	public List<String[]> get_abox(String resname) throws Exception {
		String resfile = owlloc + resname + ".abox";
		List<String[]> rv = new LinkedList<String[]>();

		BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(resfile)));
		try {
			String strLine;
			while ((strLine = br.readLine()) != null) {
				rv.add(strLine.split(" "));
			}
		} finally {
			br.close();
		}
		return rv;
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.hyracks.storage.am.lsm.btree.util;

import java.io.File;
import java.io.FilenameFilter;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Random;
import java.util.logging.Logger;

import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.exceptions.HyracksException;
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.api.io.IODeviceHandle;
import org.apache.hyracks.control.nc.io.IOManager;
import org.apache.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMOperationTracker;
import org.apache.hyracks.storage.am.lsm.common.api.IVirtualBufferCache;
import org.apache.hyracks.storage.am.lsm.common.impls.NoMergePolicy;
import org.apache.hyracks.storage.am.lsm.common.impls.NoOpIOOperationCallback;
import org.apache.hyracks.storage.am.lsm.common.impls.SynchronousScheduler;
import org.apache.hyracks.storage.am.lsm.common.impls.ThreadCountingTracker;
import org.apache.hyracks.storage.am.lsm.common.impls.VirtualBufferCache;
import org.apache.hyracks.storage.common.buffercache.HeapBufferAllocator;
import org.apache.hyracks.storage.common.buffercache.IBufferCache;
import org.apache.hyracks.storage.common.file.IFileMapProvider;
import org.apache.hyracks.test.support.TestStorageManagerComponentHolder;
import org.apache.hyracks.test.support.TestUtils;

/**
 * Shared test harness for LSM B-tree tests: wires up disk/memory buffer caches,
 * I/O scheduler, merge policy and an on-disk working directory, and cleans the
 * directory up again in {@link #tearDown()}.
 */
public class LSMBTreeTestHarness {
    protected static final Logger LOGGER = Logger.getLogger(LSMBTreeTestHarness.class.getName());

    public static final BTreeLeafFrameType[] LEAF_FRAMES_TO_TEST = new BTreeLeafFrameType[] {
            BTreeLeafFrameType.REGULAR_NSM };

    // Fixed seed so that test data is reproducible across runs
    private static final long RANDOM_SEED = 50;

    protected final int diskPageSize;
    protected final int diskNumPages;
    protected final int diskMaxOpenFiles;
    protected final int memPageSize;
    protected final int memNumPages;
    protected final int hyracksFrameSize;
    protected final double bloomFilterFalsePositiveRate;
    protected final int numMutableComponents;

    protected IOManager ioManager;
    protected int ioDeviceId;
    protected IBufferCache diskBufferCache;
    protected IFileMapProvider diskFileMapProvider;
    protected List<IVirtualBufferCache> virtualBufferCaches;
    protected IHyracksTaskContext ctx;
    protected ILSMIOOperationScheduler ioScheduler;
    protected ILSMMergePolicy mergePolicy;
    protected ILSMOperationTracker opTracker;
    protected ILSMIOOperationCallback ioOpCallback;

    protected final Random rnd = new Random();
    // NOTE(review): SimpleDateFormat is not thread-safe; this shared instance is
    // only used from setUp(), which appears to run single-threaded - confirm.
    protected final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
    protected final static String sep = System.getProperty("file.separator");
    protected String onDiskDir;
    protected FileReference file;

    /**
     * Builds a harness using the sizes and policies from {@link AccessMethodTestsConfig}.
     */
    public LSMBTreeTestHarness() {
        this.diskPageSize = AccessMethodTestsConfig.LSM_BTREE_DISK_PAGE_SIZE;
        this.diskNumPages = AccessMethodTestsConfig.LSM_BTREE_DISK_NUM_PAGES;
        this.diskMaxOpenFiles = AccessMethodTestsConfig.LSM_BTREE_DISK_MAX_OPEN_FILES;
        this.memPageSize = AccessMethodTestsConfig.LSM_BTREE_MEM_PAGE_SIZE;
        this.memNumPages = AccessMethodTestsConfig.LSM_BTREE_MEM_NUM_PAGES;
        this.hyracksFrameSize = AccessMethodTestsConfig.LSM_BTREE_HYRACKS_FRAME_SIZE;
        this.bloomFilterFalsePositiveRate = AccessMethodTestsConfig.LSM_BTREE_BLOOMFILTER_FALSE_POSITIVE_RATE;
        this.ioScheduler = SynchronousScheduler.INSTANCE;
        this.mergePolicy = new NoMergePolicy();
        this.opTracker = new ThreadCountingTracker();
        this.ioOpCallback = NoOpIOOperationCallback.INSTANCE;
        this.numMutableComponents = AccessMethodTestsConfig.LSM_BTREE_NUM_MUTABLE_COMPONENTS;
    }

    /**
     * Creates the on-disk working directory (timestamped so runs do not collide),
     * the task context, the disk buffer cache and the in-memory buffer caches.
     */
    public void setUp() throws HyracksException {
        ioManager = TestStorageManagerComponentHolder.getIOManager();
        ioDeviceId = 0;
        onDiskDir = ioManager.getIODevices().get(ioDeviceId).getPath() + sep + "lsm_btree_"
                + simpleDateFormat.format(new Date()) + sep;
        file = new FileReference(new File(onDiskDir));
        ctx = TestUtils.create(getHyracksFrameSize());
        TestStorageManagerComponentHolder.init(diskPageSize, diskNumPages, diskMaxOpenFiles);
        diskBufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
        diskFileMapProvider = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
        virtualBufferCaches = new ArrayList<IVirtualBufferCache>();
        for (int i = 0; i < numMutableComponents; i++) {
            // The memory budget is split evenly across the mutable components
            IVirtualBufferCache virtualBufferCache = new VirtualBufferCache(new HeapBufferAllocator(), memPageSize,
                    memNumPages / numMutableComponents);
            virtualBufferCaches.add(virtualBufferCache);
        }
        rnd.setSeed(RANDOM_SEED);
    }

    /**
     * Closes the disk buffer cache and removes the working directory, skipping
     * hidden files (e.g. ".nfs*" artifacts).
     */
    public void tearDown() throws HyracksDataException {
        diskBufferCache.close();
        IODeviceHandle dev = ioManager.getIODevices().get(ioDeviceId);
        File dir = new File(dev.getPath(), onDiskDir);
        FilenameFilter filter = new FilenameFilter() {
            public boolean accept(File dir, String name) {
                return !name.startsWith(".");
            }
        };
        String[] files = dir.list(filter);
        if (files != null) {
            for (String fileName : files) {
                File file = new File(dir.getPath() + File.separator + fileName);
                file.delete();
            }
        }
        dir.delete();
    }

    public int getDiskPageSize() {
        return diskPageSize;
    }

    public int getDiskNumPages() {
        return diskNumPages;
    }

    public int getDiskMaxOpenFiles() {
        return diskMaxOpenFiles;
    }

    public int getMemPageSize() {
        return memPageSize;
    }

    public int getMemNumPages() {
        return memNumPages;
    }

    public int getHyracksFrameSize() {
        return hyracksFrameSize;
    }

    public IOManager getIOManager() {
        return ioManager;
    }

    public int getIODeviceId() {
        return ioDeviceId;
    }

    public IBufferCache getDiskBufferCache() {
        return diskBufferCache;
    }

    public IFileMapProvider getDiskFileMapProvider() {
        return diskFileMapProvider;
    }

    public List<IVirtualBufferCache> getVirtualBufferCaches() {
        return virtualBufferCaches;
    }

    /**
     * @deprecated misspelled; use {@link #getBloomFilterFalsePositiveRate()}.
     *             Kept so existing callers keep compiling.
     */
    @Deprecated
    public double getBoomFilterFalsePositiveRate() {
        return bloomFilterFalsePositiveRate;
    }

    /** Correctly-spelled accessor for the bloom filter false positive rate. */
    public double getBloomFilterFalsePositiveRate() {
        return bloomFilterFalsePositiveRate;
    }

    /**
     * @deprecated misspelled; use {@link #getHyracksTaskContext()}.
     *             Kept so existing callers keep compiling.
     */
    @Deprecated
    public IHyracksTaskContext getHyracksTastContext() {
        return ctx;
    }

    /** Correctly-spelled accessor for the Hyracks task context. */
    public IHyracksTaskContext getHyracksTaskContext() {
        return ctx;
    }

    public FileReference getFileReference() {
        return file;
    }

    public Random getRandom() {
        return rnd;
    }

    public ILSMIOOperationScheduler getIOScheduler() {
        return ioScheduler;
    }

    public ILSMOperationTracker getOperationTracker() {
        return opTracker;
    }

    public ILSMMergePolicy getMergePolicy() {
        return mergePolicy;
    }

    public ILSMIOOperationCallback getIOOperationCallback() {
        return ioOpCallback;
    }
}
package org.deeplearning4j.spark.models.sequencevectors; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import org.apache.spark.Accumulator; import org.apache.spark.api.java.JavaRDD; import org.apache.spark.api.java.JavaSparkContext; import org.apache.spark.broadcast.Broadcast; import org.apache.spark.storage.StorageLevel; import org.deeplearning4j.exception.DL4JInvalidConfigException; import org.deeplearning4j.models.embeddings.loader.VectorsConfiguration; import org.deeplearning4j.models.sequencevectors.SequenceVectors; import org.deeplearning4j.models.sequencevectors.sequence.Sequence; import org.deeplearning4j.models.sequencevectors.sequence.SequenceElement; import org.deeplearning4j.models.sequencevectors.sequence.ShallowSequenceElement; import org.deeplearning4j.models.word2vec.Huffman; import org.deeplearning4j.models.word2vec.wordstore.VocabCache; import org.deeplearning4j.models.word2vec.wordstore.inmemory.AbstractCache; import org.deeplearning4j.spark.models.sequencevectors.export.ExportContainer; import org.deeplearning4j.spark.models.sequencevectors.export.SparkModelExporter; import org.deeplearning4j.spark.models.sequencevectors.functions.*; import org.deeplearning4j.spark.models.sequencevectors.learning.SparkElementsLearningAlgorithm; import org.deeplearning4j.spark.models.sequencevectors.learning.SparkSequenceLearningAlgorithm; import org.deeplearning4j.spark.models.sequencevectors.primitives.ExtraCounter; import org.nd4j.linalg.primitives.Counter; import org.nd4j.linalg.primitives.Pair; import org.nd4j.parameterserver.distributed.VoidParameterServer; import org.nd4j.parameterserver.distributed.conf.VoidConfiguration; import org.nd4j.parameterserver.distributed.enums.FaultToleranceStrategy; import org.nd4j.parameterserver.distributed.transport.RoutedTransport; import org.nd4j.parameterserver.distributed.util.NetworkInformation; import org.nd4j.parameterserver.distributed.util.NetworkOrganizer; import java.util.Arrays; import java.util.List; 
import java.util.Set; /** * Generic SequenceVectors implementation for dl4j-spark-nlp * * @author raver119@gmail.com */ @Slf4j public class SparkSequenceVectors<T extends SequenceElement> extends SequenceVectors<T> { protected Accumulator<Counter<Long>> elementsFreqAccum; protected Accumulator<ExtraCounter<Long>> elementsFreqAccumExtra; protected StorageLevel storageLevel = StorageLevel.MEMORY_ONLY(); // FIXME: we probably do not need this at all protected Broadcast<VocabCache<T>> vocabCacheBroadcast; protected Broadcast<VocabCache<ShallowSequenceElement>> shallowVocabCacheBroadcast; protected Broadcast<VectorsConfiguration> configurationBroadcast; protected transient boolean isEnvironmentReady = false; protected transient VocabCache<ShallowSequenceElement> shallowVocabCache; protected boolean isAutoDiscoveryMode = true; protected SparkModelExporter<T> exporter; protected SparkElementsLearningAlgorithm ela; protected SparkSequenceLearningAlgorithm sla; protected VoidConfiguration paramServerConfiguration; protected SparkSequenceVectors() { this(new VectorsConfiguration()); } protected SparkSequenceVectors(@NonNull VectorsConfiguration configuration) { this.configuration = configuration; } protected VocabCache<ShallowSequenceElement> getShallowVocabCache() { return shallowVocabCache; } /** * PLEASE NOTE: This method isn't supported for Spark implementation. Consider using fitLists() or fitSequences() instead. */ @Override @Deprecated public void fit() { throw new UnsupportedOperationException("To use fit() method, please consider using standalone implementation"); } protected void validateConfiguration() { if (!configuration.isUseHierarchicSoftmax() && configuration.getNegative() == 0) throw new DL4JInvalidConfigException( "Both HierarchicSoftmax and NegativeSampling are disabled. 
Nothing to learn here."); if (configuration.getElementsLearningAlgorithm() == null && configuration.getSequenceLearningAlgorithm() == null) throw new DL4JInvalidConfigException("No LearningAlgorithm was set. Nothing to learn here."); if (exporter == null) throw new DL4JInvalidConfigException( "SparkModelExporter is undefined. No sense for training, if model won't be exported."); } protected void broadcastEnvironment(JavaSparkContext context) { if (!isEnvironmentReady) { configurationBroadcast = context.broadcast(configuration); isEnvironmentReady = true; } } /** * Utility method. fitSequences() used within. * * PLEASE NOTE: This method can't be used to train for labels, since List<T> can't hold labels. If you need labels - consider manual Sequence creation instead. * * @param corpus */ public void fitLists(JavaRDD<List<T>> corpus) { // we just convert List to sequences JavaRDD<Sequence<T>> rdd = corpus.map(new ListSequenceConvertFunction<T>()); // and use fitSequences() fitSequences(rdd); } /** * Base training entry point * * @param corpus */ public void fitSequences(JavaRDD<Sequence<T>> corpus) { /** * Basically all we want for base implementation here is 3 things: * a) build vocabulary * b) build huffman tree * c) do training * * in this case all classes extending SeqVec, like deepwalk or word2vec will be just building their RDD<Sequence<T>>, * and calling this method for training, instead implementing own routines */ validateConfiguration(); if (ela == null) { try { ela = (SparkElementsLearningAlgorithm) Class.forName(configuration.getElementsLearningAlgorithm()) .newInstance(); } catch (Exception e) { throw new RuntimeException(e); } } if (workers > 1) { log.info("Repartitioning corpus to {} parts...", workers); corpus.repartition(workers); } if (storageLevel != null) corpus.persist(storageLevel); final JavaSparkContext sc = new JavaSparkContext(corpus.context()); // this will have any effect only if wasn't called before, in extension classes 
broadcastEnvironment(sc); Counter<Long> finalCounter; long numberOfSequences = 0; /** * Here we s */ if (paramServerConfiguration == null) paramServerConfiguration = VoidConfiguration.builder().faultToleranceStrategy(FaultToleranceStrategy.NONE) .numberOfShards(2).unicastPort(40123).multicastPort(40124).build(); isAutoDiscoveryMode = paramServerConfiguration.getShardAddresses() != null && !paramServerConfiguration.getShardAddresses().isEmpty() ? false : true; Broadcast<VoidConfiguration> paramServerConfigurationBroadcast = null; if (isAutoDiscoveryMode) { log.info("Trying auto discovery mode..."); elementsFreqAccumExtra = corpus.context().accumulator(new ExtraCounter<Long>(), new ExtraElementsFrequenciesAccumulator()); ExtraCountFunction<T> elementsCounter = new ExtraCountFunction<>(elementsFreqAccumExtra, configuration.isTrainSequenceVectors()); JavaRDD<Pair<Sequence<T>, Long>> countedCorpus = corpus.map(elementsCounter); // just to trigger map function, since we need huffman tree before proceeding numberOfSequences = countedCorpus.count(); finalCounter = elementsFreqAccumExtra.value(); ExtraCounter<Long> spareReference = (ExtraCounter<Long>) finalCounter; // getting list of available hosts Set<NetworkInformation> availableHosts = spareReference.getNetworkInformation(); log.info("availableHosts: {}", availableHosts); if (availableHosts.size() > 1) { // now we have to pick N shards and optionally N backup nodes, and pass them within configuration bean NetworkOrganizer organizer = new NetworkOrganizer(availableHosts, paramServerConfiguration.getNetworkMask()); paramServerConfiguration .setShardAddresses(organizer.getSubset(paramServerConfiguration.getNumberOfShards())); // backup shards are optional if (paramServerConfiguration.getFaultToleranceStrategy() != FaultToleranceStrategy.NONE) { paramServerConfiguration.setBackupAddresses( organizer.getSubset(paramServerConfiguration.getNumberOfShards(), paramServerConfiguration.getShardAddresses())); } } else { // for 
single host (aka driver-only, aka spark-local) just run on loopback interface paramServerConfiguration.setShardAddresses( Arrays.asList("127.0.0.1:" + paramServerConfiguration.getUnicastPort())); paramServerConfiguration.setFaultToleranceStrategy(FaultToleranceStrategy.NONE); } log.info("Got Shards so far: {}", paramServerConfiguration.getShardAddresses()); // update ps configuration with real values where required paramServerConfiguration.setNumberOfShards(paramServerConfiguration.getShardAddresses().size()); paramServerConfiguration.setUseHS(configuration.isUseHierarchicSoftmax()); paramServerConfiguration.setUseNS(configuration.getNegative() > 0); paramServerConfigurationBroadcast = sc.broadcast(paramServerConfiguration); } else { // update ps configuration with real values where required paramServerConfiguration.setNumberOfShards(paramServerConfiguration.getShardAddresses().size()); paramServerConfiguration.setUseHS(configuration.isUseHierarchicSoftmax()); paramServerConfiguration.setUseNS(configuration.getNegative() > 0); paramServerConfigurationBroadcast = sc.broadcast(paramServerConfiguration); // set up freqs accumulator elementsFreqAccum = corpus.context().accumulator(new Counter<Long>(), new ElementsFrequenciesAccumulator()); CountFunction<T> elementsCounter = new CountFunction<>(configurationBroadcast, paramServerConfigurationBroadcast, elementsFreqAccum, configuration.isTrainSequenceVectors()); // count all sequence elements and their sum JavaRDD<Pair<Sequence<T>, Long>> countedCorpus = corpus.map(elementsCounter); // just to trigger map function, since we need huffman tree before proceeding numberOfSequences = countedCorpus.count(); // now we grab counter, which contains frequencies for all SequenceElements in corpus finalCounter = elementsFreqAccum.value(); } long numberOfElements = (long) finalCounter.totalCount(); long numberOfUniqueElements = finalCounter.size(); log.info("Total number of sequences: {}; Total number of elements entries: {}; Total 
number of unique elements: {}", numberOfSequences, numberOfElements, numberOfUniqueElements); /* build RDD of reduced SequenceElements, just get rid of labels temporary, stick to some numerical values, like index or hashcode. So we could reduce driver memory footprint */ // build huffman tree, and update original RDD with huffman encoding info shallowVocabCache = buildShallowVocabCache(finalCounter); shallowVocabCacheBroadcast = sc.broadcast(shallowVocabCache); // FIXME: probably we need to reconsider this approach JavaRDD<T> vocabRDD = corpus .flatMap(new VocabRddFunctionFlat<T>(configurationBroadcast, paramServerConfigurationBroadcast)) .distinct(); vocabRDD.count(); /** * now we initialize Shards with values. That call should be started from driver which is either Client or Shard in standalone mode. */ VoidParameterServer.getInstance().init(paramServerConfiguration, new RoutedTransport(), ela.getTrainingDriver()); VoidParameterServer.getInstance().initializeSeqVec(configuration.getLayersSize(), (int) numberOfUniqueElements, 119, configuration.getLayersSize() / paramServerConfiguration.getNumberOfShards(), paramServerConfiguration.isUseHS(), paramServerConfiguration.isUseNS()); // proceed to training // also, training function is the place where we invoke ParameterServer TrainingFunction<T> trainer = new TrainingFunction<>(shallowVocabCacheBroadcast, configurationBroadcast, paramServerConfigurationBroadcast); PartitionTrainingFunction<T> partitionTrainer = new PartitionTrainingFunction<>(shallowVocabCacheBroadcast, configurationBroadcast, paramServerConfigurationBroadcast); if (configuration != null) for (int e = 0; e < configuration.getEpochs(); e++) corpus.foreachPartition(partitionTrainer); //corpus.foreach(trainer); // we're transferring vectors to ExportContainer JavaRDD<ExportContainer<T>> exportRdd = vocabRDD.map(new DistributedFunction<T>(paramServerConfigurationBroadcast, configurationBroadcast, shallowVocabCacheBroadcast)); // at this particular moment 
training should be pretty much done, and we're good to go for export if (exporter != null) exporter.export(exportRdd); // unpersist, if we've persisten corpus after all if (storageLevel != null) corpus.unpersist(); log.info("Training finish, starting cleanup..."); VoidParameterServer.getInstance().shutdown(); } /** * This method builds shadow vocabulary and huffman tree * * @param counter * @return */ protected VocabCache<ShallowSequenceElement> buildShallowVocabCache(Counter<Long> counter) { // TODO: need simplified cache here, that will operate on Long instead of string labels VocabCache<ShallowSequenceElement> vocabCache = new AbstractCache<>(); for (Long id : counter.keySet()) { ShallowSequenceElement shallowElement = new ShallowSequenceElement(counter.getCount(id), id); vocabCache.addToken(shallowElement); } // building huffman tree Huffman huffman = new Huffman(vocabCache.vocabWords()); huffman.build(); huffman.applyIndexes(vocabCache); return vocabCache; } protected Counter<Long> getCounter() { if (isAutoDiscoveryMode) return elementsFreqAccumExtra.value(); else return elementsFreqAccum.value(); } public static class Builder<T extends SequenceElement> { protected VectorsConfiguration configuration; protected SparkModelExporter<T> modelExporter; protected VoidConfiguration peersConfiguration; protected int workers; protected StorageLevel storageLevel; /** * This method should NOT be used in real world environment */ @Deprecated public Builder() { this(new VoidConfiguration(), new VectorsConfiguration()); } public Builder(@NonNull VoidConfiguration psConfiguration) { this(psConfiguration, new VectorsConfiguration()); } public Builder(@NonNull VoidConfiguration psConfiguration, @NonNull VectorsConfiguration w2vConfiguration) { this.configuration = w2vConfiguration; this.peersConfiguration = psConfiguration; } /** * * @param level * @return */ public Builder<T> setStorageLevel(StorageLevel level) { storageLevel = level; return this; } /** * * @param num * 
@return */ public Builder<T> minWordFrequency(int num) { configuration.setMinWordFrequency(num); return this; } /** * * @param num * @return */ public Builder<T> workers(int num) { this.workers = num; return this; } /** * * @param lr * @return */ public Builder<T> setLearningRate(double lr) { configuration.setLearningRate(lr); return this; } /** * * @param configuration * @return */ public Builder<T> setParameterServerConfiguration(@NonNull VoidConfiguration configuration) { peersConfiguration = configuration; return this; } /** * * @param modelExporter * @return */ public Builder<T> setModelExporter(@NonNull SparkModelExporter<T> modelExporter) { this.modelExporter = modelExporter; return this; } /** * * @param num * @return */ public Builder<T> epochs(int num) { configuration.setEpochs(num); return this; } /** * * @param num * @return */ public Builder<T> iterations(int num) { configuration.setIterations(num); return this; } /** * * @param rate * @return */ public Builder<T> subsampling(double rate) { configuration.setSampling(rate); return this; } /** * * @param reallyUse * @return */ public Builder<T> useHierarchicSoftmax(boolean reallyUse) { configuration.setUseHierarchicSoftmax(reallyUse); return this; } /** * * @param samples * @return */ public Builder<T> negativeSampling(long samples) { configuration.setNegative((double) samples); return this; } /** * * @param ela * @return */ public Builder<T> setElementsLearningAlgorithm(@NonNull SparkElementsLearningAlgorithm ela) { configuration.setElementsLearningAlgorithm(ela.getClass().getCanonicalName()); return this; } /** * * @param sla * @return */ public Builder<T> setSequenceLearningAlgorithm(@NonNull SparkSequenceLearningAlgorithm sla) { configuration.setSequenceLearningAlgorithm(sla.getClass().getCanonicalName()); return this; } public Builder<T> layerSize(int layerSize) { if (layerSize < 1) throw new DL4JInvalidConfigException("LayerSize should be positive value"); configuration.setLayersSize(layerSize); 
return this; } public SparkSequenceVectors<T> build() { if (modelExporter == null) throw new IllegalStateException("ModelExporter is undefined!"); SparkSequenceVectors seqVec = new SparkSequenceVectors(configuration); seqVec.exporter = modelExporter; seqVec.paramServerConfiguration = peersConfiguration; seqVec.storageLevel = storageLevel; seqVec.workers = workers; return seqVec; } } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.common.util.concurrent; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.LogManager; import org.apache.lucene.util.CloseableThreadLocal; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.http.HttpTransportSettings; import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_MAX_WARNING_HEADER_COUNT; import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_MAX_WARNING_HEADER_SIZE; import java.io.Closeable; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.concurrent.CancellationException; import java.util.concurrent.ExecutionException; import java.util.concurrent.RunnableFuture; import java.util.concurrent.atomic.AtomicBoolean; 
import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.Stream; import java.nio.charset.StandardCharsets; /** * A ThreadContext is a map of string headers and a transient map of keyed objects that are associated with * a thread. It allows to store and retrieve header information across method calls, network calls as well as threads spawned from a * thread that has a {@link ThreadContext} associated with. Threads spawned from a {@link org.elasticsearch.threadpool.ThreadPool} have out of the box * support for {@link ThreadContext} and all threads spawned will inherit the {@link ThreadContext} from the thread that it is forking from. * Network calls will also preserve the senders headers automatically. * <p> * Consumers of ThreadContext usually don't need to interact with adding or stashing contexts. Every elasticsearch thread is managed by a thread pool or executor * being responsible for stashing and restoring the threads context. For instance if a network request is received, all headers are deserialized from the network * and directly added as the headers of the threads {@link ThreadContext} (see {@link #readHeaders(StreamInput)}. In order to not modify the context that is currently * active on this thread the network code uses a try/with pattern to stash its current context, read headers into a fresh one and once the request is handled or a handler thread * is forked (which in turn inherits the context) it restores the previous context. 
For instance: * </p> * <pre> * // current context is stashed and replaced with a default context * try (StoredContext context = threadContext.stashContext()) { * threadContext.readHeaders(in); // read headers into current context * if (fork) { * threadPool.execute(() -&gt; request.handle()); // inherits context * } else { * request.handle(); * } * } * // previous context is restored on StoredContext#close() * </pre> * */ public final class ThreadContext implements Closeable, Writeable { public static final String PREFIX = "request.headers"; public static final Setting<Settings> DEFAULT_HEADERS_SETTING = Setting.groupSetting(PREFIX + ".", Property.NodeScope); private static final Logger logger = LogManager.getLogger(ThreadContext.class); private static final ThreadContextStruct DEFAULT_CONTEXT = new ThreadContextStruct(); private final Map<String, String> defaultHeader; private final ContextThreadLocal threadLocal; private final int maxWarningHeaderCount; private final long maxWarningHeaderSize; /** * Creates a new ThreadContext instance * @param settings the settings to read the default request headers from */ public ThreadContext(Settings settings) { Settings headers = DEFAULT_HEADERS_SETTING.get(settings); if (headers == null) { this.defaultHeader = Collections.emptyMap(); } else { Map<String, String> defaultHeader = new HashMap<>(); for (String key : headers.names()) { defaultHeader.put(key, headers.get(key)); } this.defaultHeader = Collections.unmodifiableMap(defaultHeader); } threadLocal = new ContextThreadLocal(); this.maxWarningHeaderCount = SETTING_HTTP_MAX_WARNING_HEADER_COUNT.get(settings); this.maxWarningHeaderSize = SETTING_HTTP_MAX_WARNING_HEADER_SIZE.get(settings).getBytes(); } @Override public void close() throws IOException { threadLocal.close(); } /** * Removes the current context and resets a default context. 
The removed context can be * restored when closing the returned {@link StoredContext} */ public StoredContext stashContext() { final ThreadContextStruct context = threadLocal.get(); threadLocal.set(null); return () -> threadLocal.set(context); } /** * Removes the current context and resets a new context that contains a merge of the current headers and the given headers. The removed context can be * restored when closing the returned {@link StoredContext}. The merge strategy is that headers that are already existing are preserved unless they are defaults. */ public StoredContext stashAndMergeHeaders(Map<String, String> headers) { final ThreadContextStruct context = threadLocal.get(); Map<String, String> newHeader = new HashMap<>(headers); newHeader.putAll(context.requestHeaders); threadLocal.set(DEFAULT_CONTEXT.putHeaders(newHeader)); return () -> threadLocal.set(context); } /** * Just like {@link #stashContext()} but no default context is set. * @param preserveResponseHeaders if set to <code>true</code> the response headers of the restore thread will be preserved. */ public StoredContext newStoredContext(boolean preserveResponseHeaders) { final ThreadContextStruct context = threadLocal.get(); return () -> { if (preserveResponseHeaders && threadLocal.get() != context) { threadLocal.set(context.putResponseHeaders(threadLocal.get().responseHeaders)); } else { threadLocal.set(context); } }; } /** * Returns a supplier that gathers a {@link #newStoredContext(boolean)} and restores it once the * returned supplier is invoked. The context returned from the supplier is a stored version of the * suppliers callers context that should be restored once the originally gathered context is not needed anymore. 
* For instance this method should be used like this: * * <pre> * Supplier&lt;ThreadContext.StoredContext&gt; restorable = context.newRestorableContext(true); * new Thread() { * public void run() { * try (ThreadContext.StoredContext ctx = restorable.get()) { * // execute with the parents context and restore the threads context afterwards * } * } * * }.start(); * </pre> * * @param preserveResponseHeaders if set to <code>true</code> the response headers of the restore thread will be preserved. * @return a restorable context supplier */ public Supplier<StoredContext> newRestorableContext(boolean preserveResponseHeaders) { return wrapRestorable(newStoredContext(preserveResponseHeaders)); } /** * Same as {@link #newRestorableContext(boolean)} but wraps an existing context to restore. * @param storedContext the context to restore */ public Supplier<StoredContext> wrapRestorable(StoredContext storedContext) { return () -> { StoredContext context = newStoredContext(false); storedContext.restore(); return context; }; } @Override public void writeTo(StreamOutput out) throws IOException { threadLocal.get().writeTo(out, defaultHeader); } /** * Reads the headers from the stream into the current context */ public void readHeaders(StreamInput in) throws IOException { threadLocal.set(new ThreadContext.ThreadContextStruct(in)); } /** * Returns the header for the given key or <code>null</code> if not present */ public String getHeader(String key) { String value = threadLocal.get().requestHeaders.get(key); if (value == null) { return defaultHeader.get(key); } return value; } /** * Returns all of the request contexts headers */ public Map<String, String> getHeaders() { HashMap<String, String> map = new HashMap<>(defaultHeader); map.putAll(threadLocal.get().requestHeaders); return Collections.unmodifiableMap(map); } /** * Get a copy of all <em>response</em> headers. * * @return Never {@code null}. 
*/ public Map<String, List<String>> getResponseHeaders() { Map<String, List<String>> responseHeaders = threadLocal.get().responseHeaders; HashMap<String, List<String>> map = new HashMap<>(responseHeaders.size()); for (Map.Entry<String, List<String>> entry : responseHeaders.entrySet()) { map.put(entry.getKey(), Collections.unmodifiableList(entry.getValue())); } return Collections.unmodifiableMap(map); } /** * Copies all header key, value pairs into the current context */ public void copyHeaders(Iterable<Map.Entry<String, String>> headers) { threadLocal.set(threadLocal.get().copyHeaders(headers)); } /** * Puts a header into the context */ public void putHeader(String key, String value) { threadLocal.set(threadLocal.get().putRequest(key, value)); } /** * Puts all of the given headers into this context */ public void putHeader(Map<String, String> header) { threadLocal.set(threadLocal.get().putHeaders(header)); } /** * Puts a transient header object into this context */ public void putTransient(String key, Object value) { threadLocal.set(threadLocal.get().putTransient(key, value)); } /** * Returns a transient header object or <code>null</code> if there is no header for the given key */ @SuppressWarnings("unchecked") // (T)object public <T> T getTransient(String key) { return (T) threadLocal.get().transientHeaders.get(key); } /** * Add the {@code value} for the specified {@code key} Any duplicate {@code value} is ignored. * * @param key the header name * @param value the header value */ public void addResponseHeader(final String key, final String value) { addResponseHeader(key, value, v -> v); } /** * Add the {@code value} for the specified {@code key} with the specified {@code uniqueValue} used for de-duplication. Any duplicate * {@code value} after applying {@code uniqueValue} is ignored. 
* * @param key the header name * @param value the header value * @param uniqueValue the function that produces de-duplication values */ public void addResponseHeader(final String key, final String value, final Function<String, String> uniqueValue) { threadLocal.set(threadLocal.get().putResponse(key, value, uniqueValue, maxWarningHeaderCount, maxWarningHeaderSize)); } /** * Saves the current thread context and wraps command in a Runnable that restores that context before running command. If * <code>command</code> has already been passed through this method then it is returned unaltered rather than wrapped twice. */ public Runnable preserveContext(Runnable command) { if (command instanceof ContextPreservingAbstractRunnable) { return command; } if (command instanceof ContextPreservingRunnable) { return command; } if (command instanceof AbstractRunnable) { return new ContextPreservingAbstractRunnable((AbstractRunnable) command); } return new ContextPreservingRunnable(command); } /** * Unwraps a command that was previously wrapped by {@link #preserveContext(Runnable)}. */ public Runnable unwrap(Runnable command) { if (command instanceof ContextPreservingAbstractRunnable) { return ((ContextPreservingAbstractRunnable) command).unwrap(); } if (command instanceof ContextPreservingRunnable) { return ((ContextPreservingRunnable) command).unwrap(); } return command; } /** * Returns true if the current context is the default context. */ boolean isDefaultContext() { return threadLocal.get() == DEFAULT_CONTEXT; } /** * Marks this thread context as an internal system context. This signals that actions in this context are issued * by the system itself rather than by a user action. 
*/ public void markAsSystemContext() { threadLocal.set(threadLocal.get().setSystemContext()); } /** * Returns <code>true</code> iff this context is a system context */ public boolean isSystemContext() { return threadLocal.get().isSystemContext; } /** * Returns <code>true</code> if the context is closed, otherwise <code>false</code> */ boolean isClosed() { return threadLocal.closed.get(); } @FunctionalInterface public interface StoredContext extends AutoCloseable { @Override void close(); default void restore() { close(); } } private static final class ThreadContextStruct { private final Map<String, String> requestHeaders; private final Map<String, Object> transientHeaders; private final Map<String, List<String>> responseHeaders; private final boolean isSystemContext; private long warningHeadersSize; //saving current warning headers' size not to recalculate the size with every new warning header private ThreadContextStruct(StreamInput in) throws IOException { final int numRequest = in.readVInt(); Map<String, String> requestHeaders = numRequest == 0 ? 
Collections.emptyMap() : new HashMap<>(numRequest); for (int i = 0; i < numRequest; i++) { requestHeaders.put(in.readString(), in.readString()); } this.requestHeaders = requestHeaders; this.responseHeaders = in.readMapOfLists(StreamInput::readString, StreamInput::readString); this.transientHeaders = Collections.emptyMap(); isSystemContext = false; // we never serialize this it's a transient flag this.warningHeadersSize = 0L; } private ThreadContextStruct setSystemContext() { if (isSystemContext) { return this; } return new ThreadContextStruct(requestHeaders, responseHeaders, transientHeaders, true); } private ThreadContextStruct(Map<String, String> requestHeaders, Map<String, List<String>> responseHeaders, Map<String, Object> transientHeaders, boolean isSystemContext) { this.requestHeaders = requestHeaders; this.responseHeaders = responseHeaders; this.transientHeaders = transientHeaders; this.isSystemContext = isSystemContext; this.warningHeadersSize = 0L; } private ThreadContextStruct(Map<String, String> requestHeaders, Map<String, List<String>> responseHeaders, Map<String, Object> transientHeaders, boolean isSystemContext, long warningHeadersSize) { this.requestHeaders = requestHeaders; this.responseHeaders = responseHeaders; this.transientHeaders = transientHeaders; this.isSystemContext = isSystemContext; this.warningHeadersSize = warningHeadersSize; } /** * This represents the default context and it should only ever be called by {@link #DEFAULT_CONTEXT}. 
*/ private ThreadContextStruct() { this(Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), false); } private ThreadContextStruct putRequest(String key, String value) { Map<String, String> newRequestHeaders = new HashMap<>(this.requestHeaders); putSingleHeader(key, value, newRequestHeaders); return new ThreadContextStruct(newRequestHeaders, responseHeaders, transientHeaders, isSystemContext); } private void putSingleHeader(String key, String value, Map<String, String> newHeaders) { if (newHeaders.putIfAbsent(key, value) != null) { throw new IllegalArgumentException("value for key [" + key + "] already present"); } } private ThreadContextStruct putHeaders(Map<String, String> headers) { if (headers.isEmpty()) { return this; } else { final Map<String, String> newHeaders = new HashMap<>(this.requestHeaders); for (Map.Entry<String, String> entry : headers.entrySet()) { putSingleHeader(entry.getKey(), entry.getValue(), newHeaders); } return new ThreadContextStruct(newHeaders, responseHeaders, transientHeaders, isSystemContext); } } private ThreadContextStruct putResponseHeaders(Map<String, List<String>> headers) { assert headers != null; if (headers.isEmpty()) { return this; } final Map<String, List<String>> newResponseHeaders = new HashMap<>(this.responseHeaders); for (Map.Entry<String, List<String>> entry : headers.entrySet()) { String key = entry.getKey(); final List<String> existingValues = newResponseHeaders.get(key); if (existingValues != null) { List<String> newValues = Stream.concat(entry.getValue().stream(), existingValues.stream()).distinct().collect(Collectors.toList()); newResponseHeaders.put(key, Collections.unmodifiableList(newValues)); } else { newResponseHeaders.put(key, entry.getValue()); } } return new ThreadContextStruct(requestHeaders, newResponseHeaders, transientHeaders, isSystemContext); } private ThreadContextStruct putResponse(final String key, final String value, final Function<String, String> uniqueValue, final int 
maxWarningHeaderCount, final long maxWarningHeaderSize) { assert value != null; long newWarningHeaderSize = warningHeadersSize; //check if we can add another warning header - if max size within limits if (key.equals("Warning") && (maxWarningHeaderSize != -1)) { //if size is NOT unbounded, check its limits if (warningHeadersSize > maxWarningHeaderSize) { // if max size has already been reached before logger.warn("Dropping a warning header, as their total size reached the maximum allowed of [" + maxWarningHeaderSize + "] bytes set in [" + HttpTransportSettings.SETTING_HTTP_MAX_WARNING_HEADER_SIZE.getKey() + "]!"); return this; } newWarningHeaderSize += "Warning".getBytes(StandardCharsets.UTF_8).length + value.getBytes(StandardCharsets.UTF_8).length; if (newWarningHeaderSize > maxWarningHeaderSize) { logger.warn("Dropping a warning header, as their total size reached the maximum allowed of [" + maxWarningHeaderSize + "] bytes set in [" + HttpTransportSettings.SETTING_HTTP_MAX_WARNING_HEADER_SIZE.getKey() + "]!"); return new ThreadContextStruct(requestHeaders, responseHeaders, transientHeaders, isSystemContext, newWarningHeaderSize); } } final Map<String, List<String>> newResponseHeaders = new HashMap<>(this.responseHeaders); final List<String> existingValues = newResponseHeaders.get(key); if (existingValues != null) { final Set<String> existingUniqueValues = existingValues.stream().map(uniqueValue).collect(Collectors.toSet()); assert existingValues.size() == existingUniqueValues.size() : "existing values: [" + existingValues + "], existing unique values [" + existingUniqueValues + "]"; if (existingUniqueValues.contains(uniqueValue.apply(value))) { return this; } final List<String> newValues = new ArrayList<>(existingValues); newValues.add(value); newResponseHeaders.put(key, Collections.unmodifiableList(newValues)); } else { newResponseHeaders.put(key, Collections.singletonList(value)); } //check if we can add another warning header - if max count within limits if 
((key.equals("Warning")) && (maxWarningHeaderCount != -1)) { //if count is NOT unbounded, check its limits final int warningHeaderCount = newResponseHeaders.containsKey("Warning") ? newResponseHeaders.get("Warning").size() : 0; if (warningHeaderCount > maxWarningHeaderCount) { logger.warn("Dropping a warning header, as their total count reached the maximum allowed of [" + maxWarningHeaderCount + "] set in [" + HttpTransportSettings.SETTING_HTTP_MAX_WARNING_HEADER_COUNT.getKey() + "]!"); return this; } } return new ThreadContextStruct(requestHeaders, newResponseHeaders, transientHeaders, isSystemContext, newWarningHeaderSize); } private ThreadContextStruct putTransient(String key, Object value) { Map<String, Object> newTransient = new HashMap<>(this.transientHeaders); if (newTransient.putIfAbsent(key, value) != null) { throw new IllegalArgumentException("value for key [" + key + "] already present"); } return new ThreadContextStruct(requestHeaders, responseHeaders, newTransient, isSystemContext); } boolean isEmpty() { return requestHeaders.isEmpty() && responseHeaders.isEmpty() && transientHeaders.isEmpty(); } private ThreadContextStruct copyHeaders(Iterable<Map.Entry<String, String>> headers) { Map<String, String> newHeaders = new HashMap<>(); for (Map.Entry<String, String> header : headers) { newHeaders.put(header.getKey(), header.getValue()); } return putHeaders(newHeaders); } private void writeTo(StreamOutput out, Map<String, String> defaultHeaders) throws IOException { final Map<String, String> requestHeaders; if (defaultHeaders.isEmpty()) { requestHeaders = this.requestHeaders; } else { requestHeaders = new HashMap<>(defaultHeaders); requestHeaders.putAll(this.requestHeaders); } out.writeVInt(requestHeaders.size()); for (Map.Entry<String, String> entry : requestHeaders.entrySet()) { out.writeString(entry.getKey()); out.writeString(entry.getValue()); } out.writeMapOfLists(responseHeaders, StreamOutput::writeString, StreamOutput::writeString); } } private static 
class ContextThreadLocal extends CloseableThreadLocal<ThreadContextStruct> {

    // Flipped exactly once by close(); used to turn Lucene's post-close NPE
    // into a meaningful IllegalStateException (see ensureOpen()).
    private final AtomicBoolean closed = new AtomicBoolean(false);

    /**
     * Stores the given context for the current thread. Storing the shared
     * DEFAULT_CONTEXT is normalized to {@code null} so the thread-local slot
     * stays empty for the common "no context" case.
     */
    @Override
    public void set(ThreadContextStruct object) {
        try {
            if (object == DEFAULT_CONTEXT) {
                super.set(null);
            } else {
                super.set(object);
            }
        } catch (NullPointerException ex) {
            /* This is odd but CloseableThreadLocal throws a NPE if it was closed but still accessed.
               To surface a real exception we call ensureOpen() to tell the user we are already closed. */
            ensureOpen();
            throw ex;
        }
    }

    /**
     * Returns the context stored for the current thread, or DEFAULT_CONTEXT
     * when none was set (mirrors the normalization done in {@link #set}).
     */
    @Override
    public ThreadContextStruct get() {
        try {
            ThreadContextStruct threadContextStruct = super.get();
            if (threadContextStruct != null) {
                return threadContextStruct;
            }
            return DEFAULT_CONTEXT;
        } catch (NullPointerException ex) {
            /* This is odd but CloseableThreadLocal throws a NPE if it was closed but still accessed.
               To surface a real exception we call ensureOpen() to tell the user we are already closed. */
            ensureOpen();
            throw ex;
        }
    }

    // Translates "accessed after close" into an explicit exception; a no-op
    // while the thread-local is still open.
    private void ensureOpen() {
        if (closed.get()) {
            throw new IllegalStateException("threadcontext is already closed");
        }
    }

    /**
     * Idempotent close: only the first call releases the underlying
     * CloseableThreadLocal (compareAndSet guards against double-close).
     */
    @Override
    public void close() {
        if (closed.compareAndSet(false, true)) {
            super.close();
        }
    }
}

/**
 * Wraps a Runnable to preserve the thread context.
 *
 * The creator's context is captured at construction time and restored on the
 * executing thread for the duration of {@link #run}, after which the thread's
 * previous context is reinstated via the try-with-resources stash.
 */
private class ContextPreservingRunnable implements Runnable {
    private final Runnable in;
    // Context captured on the submitting thread at wrap time.
    private final ThreadContext.StoredContext ctx;

    private ContextPreservingRunnable(Runnable in) {
        ctx = newStoredContext(false);
        this.in = in;
    }

    @Override
    public void run() {
        // Tracks whether an IllegalStateException came from the wrapped task
        // (rethrow) or from our own context handling during shutdown (swallow).
        boolean whileRunning = false;
        try (ThreadContext.StoredContext ignore = stashContext()){
            ctx.restore();
            whileRunning = true;
            in.run();
            if (in instanceof RunnableFuture) {
                /*
                 * The wrapped runnable arose from asynchronous submission of a task to an executor. If an uncaught exception was
                 * thrown during the execution of this task, we need to inspect this runnable and see if it is an error that should
                 * be propagated to the uncaught exception handler.
                 */
                try {
                    ((RunnableFuture) in).get();
                } catch (final Exception e) {
                    /*
                     * In theory, Future#get can only throw a cancellation exception, an interrupted exception, or an execution
                     * exception. We want to ignore cancellation exceptions, restore the interrupt status on interrupted exceptions,
                     * and inspect the cause of an execution. We are going to be extra paranoid here though and completely unwrap the
                     * exception to ensure that there is not a buried error anywhere. We assume that a general exception has been
                     * handled by the executed task or the task submitter.
                     */
                    assert e instanceof CancellationException
                        || e instanceof InterruptedException
                        || e instanceof ExecutionException : e;
                    final Optional<Error> maybeError = ExceptionsHelper.maybeError(e, logger);
                    if (maybeError.isPresent()) {
                        // throw this error where it will propagate to the uncaught exception handler
                        throw maybeError.get();
                    }
                    if (e instanceof InterruptedException) {
                        // restore the interrupt status
                        Thread.currentThread().interrupt();
                    }
                }
            }
            whileRunning = false;
        } catch (IllegalStateException ex) {
            if (whileRunning || threadLocal.closed.get() == false) {
                throw ex;
            }
            // if we hit an ISE here we have been shutting down
            // this comes from the threadcontext and barfs if
            // our threadpool has been shutting down
        }
    }

    @Override
    public String toString() {
        return in.toString();
    }

    // Exposes the original task, e.g. for executors that need to inspect it.
    public Runnable unwrap() {
        return in;
    }
}

/**
 * Wraps an AbstractRunnable to preserve the thread context.
 *
 * Unlike {@link ContextPreservingRunnable}, the thread's original context is
 * stashed in {@link #doRun} and restored in {@link #onAfter}, so lifecycle
 * callbacks run with the correct context as well.
 */
private class ContextPreservingAbstractRunnable extends AbstractRunnable {
    private final AbstractRunnable in;
    // Context captured on the submitting thread at wrap time.
    private final ThreadContext.StoredContext creatorsContext;

    // Set in doRun(); restored in onAfter() (may stay null if doRun never ran).
    private ThreadContext.StoredContext threadsOriginalContext = null;

    private ContextPreservingAbstractRunnable(AbstractRunnable in) {
        creatorsContext = newStoredContext(false);
        this.in = in;
    }

    @Override
    public boolean isForceExecution() {
        return in.isForceExecution();
    }

    @Override
    public void onAfter() {
        try {
            in.onAfter();
        } finally {
            // always restore the executing thread's original context,
            // even if the delegate's onAfter throws
            if (threadsOriginalContext != null) {
                threadsOriginalContext.restore();
            }
        }
    }

    @Override
    public void onFailure(Exception e) {
        in.onFailure(e);
    }

    @Override
    public void onRejection(Exception e) {
        in.onRejection(e);
    }

    @Override
    protected void doRun() throws Exception {
        // Same ISE shutdown-vs-task disambiguation as ContextPreservingRunnable.
        boolean whileRunning = false;
        threadsOriginalContext = stashContext();
        try {
            creatorsContext.restore();
            whileRunning = true;
            in.doRun();
            whileRunning = false;
        } catch (IllegalStateException ex) {
            if (whileRunning || threadLocal.closed.get() == false) {
                throw ex;
            }
            // if we hit an ISE here we have been shutting down
            // this comes from the threadcontext and barfs if
            // our threadpool has been shutting down
        }
    }

    @Override
    public String toString() {
        return in.toString();
    }

    // Exposes the original task, e.g. for executors that need to inspect it.
    public AbstractRunnable unwrap() {
        return in;
    }
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sysml.hops.rewrite; import java.util.ArrayList; import java.util.List; import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.apache.sysml.conf.CompilerConfig.ConfigType; import org.apache.sysml.conf.ConfigurationManager; import org.apache.sysml.hops.Hop; import org.apache.sysml.hops.HopsException; import org.apache.sysml.hops.OptimizerUtils; import org.apache.sysml.parser.DMLProgram; import org.apache.sysml.parser.ForStatement; import org.apache.sysml.parser.ForStatementBlock; import org.apache.sysml.parser.FunctionStatement; import org.apache.sysml.parser.FunctionStatementBlock; import org.apache.sysml.parser.IfStatement; import org.apache.sysml.parser.IfStatementBlock; import org.apache.sysml.parser.LanguageException; import org.apache.sysml.parser.ParForStatementBlock; import org.apache.sysml.parser.StatementBlock; import org.apache.sysml.parser.WhileStatement; import org.apache.sysml.parser.WhileStatementBlock; /** * This program rewriter applies a variety of rule-based rewrites * on all hop dags of the given program in one pass over the entire * program. 
 *
 */
public class ProgramRewriter
{
    //internal local debug level
    private static final boolean LDEBUG = false;
    // when true, validate every hop DAG after each rewrite rule (debug aid)
    private static final boolean CHECK = false;

    // rewrite rules applied to individual HOP DAGs, in fixed order
    private ArrayList<HopRewriteRule> _dagRuleSet = null;
    // rewrite rules applied to statement-block lists, in fixed order
    private ArrayList<StatementBlockRewriteRule> _sbRuleSet = null;

    static {
        // for internal debugging only
        if( LDEBUG ) {
            Logger.getLogger("org.apache.sysml.hops.rewrite")
                  .setLevel((Level) Level.DEBUG);
        }
    }

    public ProgramRewriter() {
        // by default which is used during initial compile
        // apply all (static and dynamic) rewrites
        this( true, true );
    }

    /**
     * Constructs the default rule sets. Note: registration order matters —
     * the inline "dependency" comments record which earlier rule each rule
     * relies on having run first.
     *
     * @param staticRewrites  include rewrites that do not need size information
     * @param dynamicRewrites include rewrites that require size information
     */
    public ProgramRewriter( boolean staticRewrites, boolean dynamicRewrites )
    {
        //initialize HOP DAG rewrite ruleSet (with fixed rewrite order)
        _dagRuleSet = new ArrayList<>();

        //initialize StatementBlock rewrite ruleSet (with fixed rewrite order)
        _sbRuleSet = new ArrayList<>();

        //STATIC REWRITES (which do not rely on size information)
        if( staticRewrites )
        {
            //add static HOP DAG rewrite rules
            _dagRuleSet.add( new RewriteTransientWriteParentHandling() );
            _dagRuleSet.add( new RewriteRemoveReadAfterWrite() ); //dependency: before blocksize
            _dagRuleSet.add( new RewriteBlockSizeAndReblock() );
            _dagRuleSet.add( new RewriteRemoveUnnecessaryCasts() );
            if( OptimizerUtils.ALLOW_COMMON_SUBEXPRESSION_ELIMINATION )
                _dagRuleSet.add( new RewriteCommonSubexpressionElimination() );
            if( OptimizerUtils.ALLOW_CONSTANT_FOLDING )
                _dagRuleSet.add( new RewriteConstantFolding() ); //dependency: cse
            if( OptimizerUtils.ALLOW_ALGEBRAIC_SIMPLIFICATION )
                _dagRuleSet.add( new RewriteAlgebraicSimplificationStatic() ); //dependencies: cse
            if( OptimizerUtils.ALLOW_COMMON_SUBEXPRESSION_ELIMINATION ) //dependency: simplifications (no need to merge leafs again)
                _dagRuleSet.add( new RewriteCommonSubexpressionElimination() );
            if( OptimizerUtils.ALLOW_AUTO_VECTORIZATION )
                _dagRuleSet.add( new RewriteIndexingVectorization() ); //dependency: cse, simplifications
            _dagRuleSet.add( new RewriteInjectSparkPReadCheckpointing() ); //dependency: reblock

            //add statement block rewrite rules
            if( OptimizerUtils.ALLOW_BRANCH_REMOVAL ) {
                _sbRuleSet.add( new RewriteRemoveUnnecessaryBranches() ); //dependency: constant folding
                _sbRuleSet.add( new RewriteMergeBlockSequence() ); //dependency: remove branches
            }
            _sbRuleSet.add( new RewriteCompressedReblock() );
            if( OptimizerUtils.ALLOW_SPLIT_HOP_DAGS )
                _sbRuleSet.add( new RewriteSplitDagUnknownCSVRead() ); //dependency: reblock, merge blocks
            if( ConfigurationManager.getCompilerConfigFlag(ConfigType.ALLOW_INDIVIDUAL_SB_SPECIFIC_OPS) )
                _sbRuleSet.add( new RewriteSplitDagDataDependentOperators() ); //dependency: merge blocks
            if( OptimizerUtils.ALLOW_AUTO_VECTORIZATION )
                _sbRuleSet.add( new RewriteForLoopVectorization() ); //dependency: reblock (reblockop)
            _sbRuleSet.add( new RewriteInjectSparkLoopCheckpointing(true) ); //dependency: reblock (blocksizes)
            if( OptimizerUtils.ALLOW_LOOP_UPDATE_IN_PLACE )
                _sbRuleSet.add( new RewriteMarkLoopVariablesUpdateInPlace() );
        }

        // DYNAMIC REWRITES (which do require size information)
        if( dynamicRewrites )
        {
            _dagRuleSet.add( new RewriteMatrixMultChainOptimization() ); //dependency: cse
            if ( OptimizerUtils.ALLOW_SUM_PRODUCT_REWRITES)
                _dagRuleSet.add( new RewriteElementwiseMultChainOptimization() ); //dependency: cse
            if( OptimizerUtils.ALLOW_ALGEBRAIC_SIMPLIFICATION ) {
                _dagRuleSet.add( new RewriteAlgebraicSimplificationDynamic() ); //dependencies: cse
                _dagRuleSet.add( new RewriteAlgebraicSimplificationStatic() ); //dependencies: cse
            }
        }

        // cleanup after all rewrites applied
        // (newly introduced operators, introduced redundancy after rewrites w/ multiple parents)
        _dagRuleSet.add( new RewriteRemoveUnnecessaryCasts() );
        if( OptimizerUtils.ALLOW_COMMON_SUBEXPRESSION_ELIMINATION )
            _dagRuleSet.add( new RewriteCommonSubexpressionElimination(true) );
    }

    /**
     * Construct a program rewriter for a given rewrite which is passed from outside.
     * Only the given HOP rewrites are applied; the statement-block rule set is empty.
     *
     * @param rewrites the HOP rewrite rules
     */
    public ProgramRewriter( HopRewriteRule... rewrites )
    {
        //initialize HOP DAG rewrite ruleSet (with fixed rewrite order)
        _dagRuleSet = new ArrayList<>();
        for( HopRewriteRule rewrite : rewrites )
            _dagRuleSet.add( rewrite );
        _sbRuleSet = new ArrayList<>();
    }

    /**
     * Construct a program rewriter for a given rewrite which is passed from outside.
     * Only the given statement-block rewrites are applied; the HOP rule set is empty.
     *
     * @param rewrites the statement block rewrite rules
     */
    public ProgramRewriter( StatementBlockRewriteRule... rewrites )
    {
        //initialize HOP DAG rewrite ruleSet (with fixed rewrite order)
        _dagRuleSet = new ArrayList<>();
        _sbRuleSet = new ArrayList<>();
        for( StatementBlockRewriteRule rewrite : rewrites )
            _sbRuleSet.add( rewrite );
    }

    /**
     * Construct a program rewriter for the given rewrite sets which are passed from outside.
     *
     * @param hRewrites  HOP rewrite rules
     * @param sbRewrites statement block rewrite rules
     */
    public ProgramRewriter(ArrayList<HopRewriteRule> hRewrites, ArrayList<StatementBlockRewriteRule> sbRewrites)
    {
        //initialize HOP DAG rewrite ruleSet (with fixed rewrite order)
        _dagRuleSet = new ArrayList<>();
        _dagRuleSet.addAll( hRewrites );
        _sbRuleSet = new ArrayList<>();
        _sbRuleSet.addAll( sbRewrites );
    }

    // Removes every registered HOP rewrite of the given concrete class.
    public void removeHopRewrite(Class<? extends HopRewriteRule> clazz) {
        _dagRuleSet.removeIf(r -> r.getClass().equals(clazz));
    }

    // Removes every registered statement-block rewrite of the given concrete class.
    public void removeStatementBlockRewrite(Class<? extends StatementBlockRewriteRule> clazz) {
        _sbRuleSet.removeIf(r -> r.getClass().equals(clazz));
    }

    /**
     * Entry point: applies both rule sets to all function bodies and to the
     * main-method statement blocks of the given program.
     *
     * @param dmlp the program to rewrite (modified in place)
     * @return accumulated rewrite status flags
     * @throws LanguageException if a language-level error occurs
     * @throws HopsException if a hop-level error occurs
     */
    public ProgramRewriteStatus rewriteProgramHopDAGs(DMLProgram dmlp)
        throws LanguageException, HopsException
    {
        ProgramRewriteStatus state = new ProgramRewriteStatus();

        // for each namespace, handle function statement blocks
        for (String namespaceKey : dmlp.getNamespaces().keySet())
            for (String fname : dmlp.getFunctionStatementBlocks(namespaceKey).keySet()) {
                FunctionStatementBlock fsblock = dmlp.getFunctionStatementBlock(namespaceKey,fname);
                rRewriteStatementBlockHopDAGs(fsblock, state);
                rRewriteStatementBlock(fsblock, state);
            }

        // handle regular statement blocks in "main" method
        for (int i = 0; i < dmlp.getNumStatementBlocks(); i++) {
            StatementBlock current = dmlp.getStatementBlock(i);
            rRewriteStatementBlockHopDAGs(current, state);
        }
        dmlp.setStatementBlocks( rRewriteStatementBlocks(dmlp.getStatementBlocks(), state) );

        return state;
    }

    /**
     * Recursively applies the HOP DAG rewrites to the given statement block and
     * all nested blocks (function/while/if/for bodies, plus their predicate
     * and from/to/increment hops).
     *
     * @param current the statement block to process
     * @param state   rewrite status; may be null for calls from outside
     * @throws LanguageException if a language-level error occurs
     * @throws HopsException if a hop-level error occurs
     */
    public void rRewriteStatementBlockHopDAGs(StatementBlock current, ProgramRewriteStatus state)
        throws LanguageException, HopsException
    {
        //ensure robustness for calls from outside
        if( state == null )
            state = new ProgramRewriteStatus();

        if (current instanceof FunctionStatementBlock)
        {
            FunctionStatementBlock fsb = (FunctionStatementBlock)current;
            FunctionStatement fstmt = (FunctionStatement)fsb.getStatement(0);
            for (StatementBlock sb : fstmt.getBody())
                rRewriteStatementBlockHopDAGs(sb, state);
        }
        else if (current instanceof WhileStatementBlock)
        {
            WhileStatementBlock wsb = (WhileStatementBlock) current;
            WhileStatement wstmt = (WhileStatement)wsb.getStatement(0);
            wsb.setPredicateHops(rewriteHopDAG(wsb.getPredicateHops(), state));
            for (StatementBlock sb : wstmt.getBody())
                rRewriteStatementBlockHopDAGs(sb, state);
        }
        else if (current instanceof IfStatementBlock)
        {
            IfStatementBlock isb = (IfStatementBlock) current;
            IfStatement istmt = (IfStatement)isb.getStatement(0);
            isb.setPredicateHops(rewriteHopDAG(isb.getPredicateHops(), state));
            for (StatementBlock sb : istmt.getIfBody())
                rRewriteStatementBlockHopDAGs(sb, state);
            for (StatementBlock sb : istmt.getElseBody())
                rRewriteStatementBlockHopDAGs(sb, state);
        }
        else if (current instanceof ForStatementBlock) //incl parfor
        {
            ForStatementBlock fsb = (ForStatementBlock) current;
            ForStatement fstmt = (ForStatement)fsb.getStatement(0);
            fsb.setFromHops(rewriteHopDAG(fsb.getFromHops(), state));
            fsb.setToHops(rewriteHopDAG(fsb.getToHops(), state));
            fsb.setIncrementHops(rewriteHopDAG(fsb.getIncrementHops(), state));
            for (StatementBlock sb : fstmt.getBody())
                rRewriteStatementBlockHopDAGs(sb, state);
        }
        else //generic (last-level)
        {
            current.set_hops( rewriteHopDAG(current.get_hops(), state) );
        }
    }

    /**
     * Applies every HOP rewrite rule (in registration order) to a list of
     * DAG roots; visit status is reset before each rule.
     *
     * @param roots the DAG roots (may be replaced by the rules)
     * @param state rewrite status
     * @return the (possibly new) list of roots
     * @throws HopsException if a hop-level error occurs
     */
    public ArrayList<Hop> rewriteHopDAG(ArrayList<Hop> roots, ProgramRewriteStatus state)
        throws HopsException
    {
        for( HopRewriteRule r : _dagRuleSet ) {
            Hop.resetVisitStatus( roots ); //reset for each rule
            roots = r.rewriteHopDAGs(roots, state);
            if( CHECK )
                HopDagValidator.validateHopDag(roots, r);
        }
        return roots;
    }

    /**
     * Single-root variant of {@link #rewriteHopDAG(ArrayList, ProgramRewriteStatus)};
     * used for predicate and from/to/increment hops. Null-safe.
     *
     * @param root  the DAG root, may be null
     * @param state rewrite status
     * @return the rewritten root, or null if root was null
     * @throws HopsException if a hop-level error occurs
     */
    public Hop rewriteHopDAG(Hop root, ProgramRewriteStatus state)
        throws HopsException
    {
        if( root == null )
            return null;
        for( HopRewriteRule r : _dagRuleSet ) {
            root.resetVisitStatus(); //reset for each rule
            root = r.rewriteHopDAG(root, state);
            if( CHECK )
                HopDagValidator.validateHopDag(root, r);
        }
        return root;
    }

    /**
     * Applies the statement-block rewrites to a list of blocks: first on the
     * list (expansion possible), then recursively per block, then once more on
     * the list (contraction possible). The input list is updated in place.
     *
     * @param sbs    the statement blocks (modified in place and returned)
     * @param status rewrite status; may be null for calls from outside
     * @return the rewritten list (same instance as sbs)
     * @throws HopsException if a hop-level error occurs
     */
    public ArrayList<StatementBlock> rRewriteStatementBlocks( ArrayList<StatementBlock> sbs, ProgramRewriteStatus status )
        throws HopsException
    {
        //ensure robustness for calls from outside
        if( status == null )
            status = new ProgramRewriteStatus();

        //apply rewrite rules to list of statement blocks
        List<StatementBlock> tmp = sbs;
        for( StatementBlockRewriteRule r : _sbRuleSet )
            tmp = r.rewriteStatementBlocks(tmp, status);

        //recursively rewrite statement blocks (with potential expansion)
        List<StatementBlock> tmp2 = new ArrayList<>();
        for( StatementBlock sb : tmp )
            tmp2.addAll( rRewriteStatementBlock(sb, status) );

        //apply rewrite rules to list of statement blocks (with potential contraction)
        for( StatementBlockRewriteRule r : _sbRuleSet )
            tmp2 = r.rewriteStatementBlocks(tmp2, status);

        //prepare output list
        sbs.clear();
        sbs.addAll(tmp2);
        return sbs;
    }

    /**
     * Recursively rewrites a single statement block and its nested bodies,
     * then applies the per-block rewrite rules, which may expand the block
     * into several blocks.
     *
     * @param sb     the statement block to rewrite
     * @param status rewrite status
     * @return the list of resulting statement blocks (size may differ from 1)
     * @throws HopsException if a hop-level error occurs
     */
    public ArrayList<StatementBlock> rRewriteStatementBlock( StatementBlock sb, ProgramRewriteStatus status )
        throws HopsException
    {
        ArrayList<StatementBlock> ret = new ArrayList<>();
        ret.add(sb);

        //recursive invocation
        if (sb instanceof FunctionStatementBlock)
        {
            FunctionStatementBlock fsb = (FunctionStatementBlock)sb;
            FunctionStatement fstmt = (FunctionStatement)fsb.getStatement(0);
            fstmt.setBody( rRewriteStatementBlocks(fstmt.getBody(), status) );
        }
        else if (sb instanceof WhileStatementBlock)
        {
            WhileStatementBlock wsb = (WhileStatementBlock) sb;
            WhileStatement wstmt = (WhileStatement)wsb.getStatement(0);
            wstmt.setBody( rRewriteStatementBlocks( wstmt.getBody(), status ) );
        }
        else if (sb instanceof IfStatementBlock)
        {
            IfStatementBlock isb = (IfStatementBlock) sb;
            IfStatement istmt = (IfStatement)isb.getStatement(0);
            istmt.setIfBody( rRewriteStatementBlocks( istmt.getIfBody(), status ) );
            istmt.setElseBody( rRewriteStatementBlocks( istmt.getElseBody(), status ) );
        }
        else if (sb instanceof ForStatementBlock) //incl parfor
        {
            //maintain parfor context information (e.g., for checkpointing)
            boolean prestatus = status.isInParforContext();
            if( sb instanceof ParForStatementBlock )
                status.setInParforContext(true);

            ForStatementBlock fsb = (ForStatementBlock) sb;
            ForStatement fstmt = (ForStatement)fsb.getStatement(0);
            fstmt.setBody( rRewriteStatementBlocks(fstmt.getBody(), status) );

            status.setInParforContext(prestatus);
        }

        //apply rewrite rules to individual statement blocks
        for( StatementBlockRewriteRule r : _sbRuleSet ) {
            ArrayList<StatementBlock> tmp = new ArrayList<>();
            for( StatementBlock sbc : ret )
                tmp.addAll( r.rewriteStatementBlock(sbc, status) );
            //take over set of rewritten sbs
            ret.clear();
            ret.addAll(tmp);
        }

        return ret;
    }
}
/* * Copyright 2013-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.android; import static com.facebook.buck.testutil.RegexMatcher.containsPattern; import static com.facebook.buck.testutil.RegexMatcher.containsRegex; import static java.nio.charset.StandardCharsets.UTF_8; import static org.hamcrest.CoreMatchers.containsString; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import com.facebook.buck.core.model.BuildTarget; import com.facebook.buck.io.filesystem.ProjectFilesystem; import com.facebook.buck.io.filesystem.TestProjectFilesystems; import com.facebook.buck.jvm.java.testutil.AbiCompilationModeTest; import com.facebook.buck.model.BuildTargetFactory; import com.facebook.buck.model.BuildTargets; import com.facebook.buck.testutil.ProcessResult; import com.facebook.buck.testutil.TemporaryPaths; import com.facebook.buck.testutil.integration.BuckBuildLog; import com.facebook.buck.testutil.integration.DexInspector; import com.facebook.buck.testutil.integration.ProjectWorkspace; import com.facebook.buck.testutil.integration.TestDataHelper; import com.facebook.buck.testutil.integration.ZipInspector; import com.facebook.buck.util.json.ObjectMappers; import com.facebook.buck.util.zip.ZipConstants; import com.google.common.collect.ImmutableMap; import 
com.google.common.collect.ImmutableSet; import com.google.common.hash.Hashing; import java.io.BufferedReader; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.StandardCopyOption; import java.util.Date; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; import java.util.zip.ZipInputStream; import org.apache.commons.compress.archivers.zip.ZipUtil; import org.hamcrest.CoreMatchers; import org.hamcrest.Matchers; import org.hamcrest.collection.IsIn; import org.junit.Before; import org.junit.Rule; import org.junit.Test; public class AndroidBinaryIntegrationTest extends AbiCompilationModeTest { @Rule public TemporaryPaths tmpFolder = new TemporaryPaths(true); private ProjectWorkspace workspace; private ProjectFilesystem filesystem; private static final String SIMPLE_TARGET = "//apps/multidex:app"; private static final String RAW_DEX_TARGET = "//apps/multidex:app-art"; private static final String APP_REDEX_TARGET = "//apps/sample:app_redex"; @Before public void setUp() throws InterruptedException, IOException { AssumeAndroidPlatform.assumeSdkIsAvailable(); AssumeAndroidPlatform.assumeNdkIsAvailable(); workspace = TestDataHelper.createProjectWorkspaceForScenario( new AndroidBinaryIntegrationTest(), "android_project", tmpFolder); workspace.setUp(); setWorkspaceCompilationMode(workspace); filesystem = TestProjectFilesystems.createProjectFilesystem(workspace.getDestPath()); } @Test public void testNonExopackageHasSecondary() throws IOException { workspace.runBuckBuild(SIMPLE_TARGET).assertSuccess(); ZipInspector zipInspector = new ZipInspector( workspace.getPath( BuildTargets.getGenPath( filesystem, BuildTargetFactory.newInstance(SIMPLE_TARGET), 
"%s.apk"))); zipInspector.assertFileExists("assets/secondary-program-dex-jars/metadata.txt"); zipInspector.assertFileExists("assets/secondary-program-dex-jars/secondary-1.dex.jar"); zipInspector.assertFileDoesNotExist("classes2.dex"); zipInspector.assertFileExists("classes.dex"); zipInspector.assertFileExists("lib/armeabi/libnative_cxx_lib.so"); } @Test public void testProguardBuild() throws IOException { String target = "//apps/multidex:app_with_proguard"; workspace.runBuckCommand("build", target).assertSuccess(); ZipInspector zipInspector = new ZipInspector(workspace.buildAndReturnOutput(target)); zipInspector.assertFileExists("assets/secondary-program-dex-jars/metadata.txt"); zipInspector.assertFileExists("assets/secondary-program-dex-jars/secondary-1.dex.jar"); zipInspector.assertFileDoesNotExist("classes2.dex"); zipInspector.assertFileExists("classes.dex"); zipInspector.assertFileExists("lib/armeabi/libnative_cxx_lib.so"); } @Test public void testRawSplitDexHasSecondary() throws IOException { ProcessResult result = workspace.runBuckCommand("build", RAW_DEX_TARGET); result.assertSuccess(); ZipInspector zipInspector = new ZipInspector( workspace.getPath( BuildTargets.getGenPath( filesystem, BuildTargetFactory.newInstance(RAW_DEX_TARGET), "%s.apk"))); zipInspector.assertFileDoesNotExist("assets/secondary-program-dex-jars/metadata.txt"); zipInspector.assertFileDoesNotExist("assets/secondary-program-dex-jars/secondary-1.dex.jar"); zipInspector.assertFileExists("classes2.dex"); zipInspector.assertFileExists("classes.dex"); zipInspector.assertFileExists("lib/armeabi/libnative_cxx_lib.so"); } @Test public void testDisguisedExecutableIsRenamed() throws IOException { Path output = workspace.buildAndReturnOutput("//apps/sample:app_with_disguised_exe"); ZipInspector zipInspector = new ZipInspector(output); zipInspector.assertFileExists("lib/armeabi/libmybinary.so"); } @Test public void testNdkLibraryIsIncluded() throws IOException { Path output = 
workspace.buildAndReturnOutput("//apps/sample:app_with_ndk_library"); ZipInspector zipInspector = new ZipInspector(output); zipInspector.assertFileExists("lib/armeabi/libfakenative.so"); } @Test public void testEditingNdkLibraryForcesRebuild() throws IOException, InterruptedException { String apkWithNdkLibrary = "//apps/sample:app_with_ndk_library"; Path output = workspace.buildAndReturnOutput(apkWithNdkLibrary); ZipInspector zipInspector = new ZipInspector(output); zipInspector.assertFileExists("lib/armeabi/libfakenative.so"); // Sleep 1 second (plus another half to be super duper safe) to make sure that // fakesystem.c gets a later timestamp than the fakesystem.o that was produced // during the build in setUp. If we don't do this, there's a chance that the // ndk-build we run during the upcoming build will not rebuild it (on filesystems // that have 1-second granularity for last modified). // To verify this, create a Makefile with the following rule (don't forget to use a tab): // out: in // cat $< > $@ // Run: echo foo > in ; make ; cat out ; echo bar > in ; make ; cat out // On a filesystem with 1-second mtime granularity, the last "cat" should print "foo" // (with very high probability). 
Thread.sleep(1500); workspace.replaceFileContents( "native/fakenative/jni/fakesystem.c", "exit(status)", "exit(1+status)"); workspace.resetBuildLogFile(); workspace.buildAndReturnOutput(apkWithNdkLibrary); workspace.getBuildLog().assertTargetBuiltLocally(apkWithNdkLibrary); } @Test public void testEditingPrimaryDexClassForcesRebuildForSimplePackage() throws IOException { workspace.runBuckBuild(SIMPLE_TARGET).assertSuccess(); workspace.replaceFileContents( "java/com/sample/app/MyApplication.java", "package com", "package\ncom"); workspace.resetBuildLogFile(); ProcessResult result = workspace.runBuckCommand("build", SIMPLE_TARGET); result.assertSuccess(); BuckBuildLog buildLog = workspace.getBuildLog(); buildLog.assertTargetBuiltLocally(SIMPLE_TARGET); } @Test public void testEditingSecondaryDexClassForcesRebuildForSimplePackage() throws IOException { workspace.runBuckBuild(SIMPLE_TARGET).assertSuccess(); workspace.replaceFileContents("java/com/sample/lib/Sample.java", "package com", "package\ncom"); workspace.resetBuildLogFile(); ProcessResult result = workspace.runBuckCommand("build", SIMPLE_TARGET); result.assertSuccess(); BuckBuildLog buildLog = workspace.getBuildLog(); buildLog.assertTargetBuiltLocally(SIMPLE_TARGET); } @Test public void testNotAllJavaLibrariesFetched() throws IOException { String target = "//apps/multidex:app_with_deeper_deps"; workspace.runBuckCommand("build", target).assertSuccess(); workspace.replaceFileContents( "java/com/sample/app/MyApplication.java", "package com", "package\ncom"); workspace.resetBuildLogFile(); workspace.runBuckCommand("build", target).assertSuccess(); BuckBuildLog buildLog = workspace.getBuildLog(); buildLog.assertTargetBuiltLocally(target); buildLog.assertTargetIsAbsent("//java/com/sample/lib:lib"); } @Test public void testProvidedDependenciesAreExcludedEvenIfSpecifiedInOtherDeps() throws IOException { String target = "//apps/sample:app_with_exported_and_provided_deps"; ProcessResult result = 
workspace.runBuckBuild(target); result.assertSuccess(); DexInspector dexInspector = new DexInspector( workspace.getPath( BuildTargets.getGenPath( filesystem, BuildTargetFactory.newInstance(target), "%s.apk"))); dexInspector.assertTypeExists("Lcom/facebook/sample/Dep;"); dexInspector.assertTypeExists("Lcom/facebook/sample/ExportedDep;"); dexInspector.assertTypeDoesNotExist("Lcom/facebook/sample/ProvidedDep;"); dexInspector.assertTypeDoesNotExist("Lcom/facebook/sample/DepProvidedDep;"); dexInspector.assertTypeDoesNotExist("Lcom/facebook/sample/ExportedProvidedDep;"); } @Test public void testPreprocessorForcesReDex() throws IOException { String target = "//java/com/preprocess:disassemble"; Path outputFile = workspace.buildAndReturnOutput(target); String output = new String(Files.readAllBytes(outputFile), UTF_8); assertThat(output, containsString("content=2")); workspace.replaceFileContents("java/com/preprocess/convert.py", "content=2", "content=3"); outputFile = workspace.buildAndReturnOutput(target); output = new String(Files.readAllBytes(outputFile), UTF_8); assertThat(output, containsString("content=3")); } @Test public void testDxFindsReferencedResources() throws IOException { workspace.runBuckBuild(SIMPLE_TARGET).assertSuccess(); BuildTarget dexTarget = BuildTargetFactory.newInstance("//java/com/sample/lib:lib#dex"); ProjectFilesystem filesystem = TestProjectFilesystems.createProjectFilesystem(tmpFolder.getRoot()); Optional<String> resourcesFromMetadata = DexProducedFromJavaLibrary.readMetadataValue( filesystem, dexTarget, DexProducedFromJavaLibrary.REFERENCED_RESOURCES); assertTrue(resourcesFromMetadata.isPresent()); assertEquals("[\"com.sample.top_layout\",\"com.sample2.title\"]", resourcesFromMetadata.get()); } @Test public void testDexingIsInputBased() throws IOException { workspace.runBuckBuild(SIMPLE_TARGET).assertSuccess(); BuckBuildLog buildLog = workspace.getBuildLog(); buildLog.assertTargetBuiltLocally("//java/com/sample/lib:lib#dex"); 
// Continuation of the caching test started above: a comment-only edit to Sample.java must NOT
// cause the dex rule to rebuild (input rule key matches), while an edit that changes compiled
// output must.
workspace.replaceFileContents(
    "java/com/sample/lib/Sample.java", "import", "import /* no output change */");
workspace.runBuckBuild(SIMPLE_TARGET).assertSuccess();
buildLog = workspace.getBuildLog();
buildLog.assertNotTargetBuiltLocally("//java/com/sample/lib:lib#dex");
buildLog.assertTargetHadMatchingInputRuleKey("//java/com/sample/lib:lib#dex");
// This replacement embeds a literal backslash-n in the source, which changes the class file.
workspace.replaceFileContents(
    "java/com/sample/lib/Sample.java", "import", "import /* \n some output change */");
workspace.runBuckBuild(SIMPLE_TARGET).assertSuccess();
buildLog = workspace.getBuildLog();
buildLog.assertTargetBuiltLocally("//java/com/sample/lib:lib#dex");
}

/** Building with -dontobfuscate should still emit a ProGuard mapping file. */
@Test
public void testProguardDontObfuscateGeneratesMappingFile() throws IOException {
  String target = "//apps/sample:app_proguard_dontobfuscate";
  workspace.runBuckCommand("build", target).assertSuccess();
  Path mapping =
      workspace.getPath(
          BuildTargets.getGenPath(
              filesystem, BuildTargetFactory.newInstance(target), "%s/proguard/mapping.txt"));
  assertTrue(Files.exists(mapping));
}

/** Extracts a single entry {@code name} from {@code zipPath} into {@code tmpDir}. */
private static Path unzip(Path tmpDir, Path zipPath, String name) throws IOException {
  Path outPath = tmpDir.resolve(zipPath.getFileName());
  try (ZipFile zipFile = new ZipFile(zipPath.toFile())) {
    Files.copy(
        zipFile.getInputStream(zipFile.getEntry(name)),
        outPath,
        StandardCopyOption.REPLACE_EXISTING);
    return outPath;
  }
}

/** Every entry in the built APK must carry the fake DOS timestamp, i.e. builds are hermetic. */
@Test
public void testApksHaveDeterministicTimestamps() throws IOException {
  String target = "//apps/sample:app";
  ProcessResult result = workspace.runBuckCommand("build", target);
  result.assertSuccess();

  // Iterate over each of the entries, expecting to see all zeros in the time fields.
  Path apk =
      workspace.getPath(
          BuildTargets.getGenPath(filesystem, BuildTargetFactory.newInstance(target), "%s.apk"));
  Date dosEpoch = new Date(ZipUtil.dosToJavaTime(ZipConstants.DOS_FAKE_TIME));
  try (ZipInputStream is = new ZipInputStream(Files.newInputStream(apk))) {
    for (ZipEntry entry = is.getNextEntry(); entry != null; entry = is.getNextEntry()) {
      assertThat(entry.getName(), new Date(entry.getTime()), Matchers.equalTo(dosEpoch));
    }
  }
}

/** Verifies each native-lib asset matches the size and SHA-256 recorded in metadata.txt. */
@Test
public void testLibraryMetadataChecksum() throws IOException {
  String target = "//apps/sample:app_cxx_lib_asset";
  workspace.runBuckCommand("build", target).assertSuccess();
  Path pathToZip =
      workspace.getPath(
          BuildTargets.getGenPath(filesystem, BuildTargetFactory.newInstance(target), "%s.apk"));
  // NOTE(review): file and contents are closed manually below; an assertion failure mid-loop
  // leaks both. Consider try-with-resources in a follow-up (behavior-preserving change).
  ZipFile file = new ZipFile(pathToZip.toFile());
  ZipEntry metadata = file.getEntry("assets/lib/metadata.txt");
  assertNotNull(metadata);
  BufferedReader contents =
      new BufferedReader(new InputStreamReader(file.getInputStream(metadata)));
  String line = contents.readLine();
  byte[] buffer = new byte[512];
  while (line != null) {
    // Each line is of the form <filename> <filesize> <SHA256 checksum>
    String[] tokens = line.split(" ");
    // NOTE(review): assertSame on an int vs. literal compares boxed Integers; this only passes
    // because small values hit the Integer cache. assertEquals(3, tokens.length) is the intent.
    assertSame(tokens.length, 3);
    String filename = tokens[0];
    int filesize = Integer.parseInt(tokens[1]);
    String checksum = tokens[2];
    ZipEntry lib = file.getEntry("assets/lib/" + filename);
    assertNotNull(lib);
    InputStream is = file.getInputStream(lib);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    // Read exactly filesize bytes; a premature EOF makes read() return -1 and fails the assert.
    while (filesize > 0) {
      int read = is.read(buffer, 0, Math.min(buffer.length, filesize));
      assertTrue(read >= 0);
      out.write(buffer, 0, read);
      filesize -= read;
    }
    String actualChecksum = Hashing.sha256().hashBytes(out.toByteArray()).toString();
    assertEquals(checksum, actualChecksum);
    is.close();
    out.close();
    line = contents.readLine();
  }
  file.close();
  contents.close();
}

/**
 * Two apps differing only in rule name should share strip rules (cache hits), except for the
 * C++ runtime which is stripped per-app.
 */
@Test
public void testStripRulesAreShared() throws IOException {
  workspace.runBuckCommand("build", "//apps/sample:app_cxx_lib_asset").assertSuccess();
  workspace.resetBuildLogFile();
  workspace.runBuckCommand("build", "//apps/sample:app_cxx_different_rule_name").assertSuccess();
  BuckBuildLog buildLog = workspace.getBuildLog();
  for (BuildTarget target : buildLog.getAllTargets()) {
    String rawTarget = target.toString();
    if (rawTarget.contains("libgnustl_shared.so")) {
      // Stripping the C++ runtime is currently not shared.
      continue;
    }
    if (rawTarget.contains("strip")) {
      buildLog.assertNotTargetBuiltLocally(rawTarget);
    }
  }
}

/** Apps with no resources at all (with and without predexing) must still build. */
@Test
public void testApkWithNoResourcesBuildsCorrectly() throws IOException {
  workspace.runBuckBuild("//apps/sample:app_with_no_res").assertSuccess();
  workspace.runBuckBuild("//apps/sample:app_with_no_res_or_predex").assertSuccess();
}

/** Same as above but through the aapt2 pipeline. */
@Test
public void testApkWithNoResourcesBuildsCorrectlyWithAapt2() throws Exception {
  AssumeAndroidPlatform.assumeAapt2WithOutputTextSymbolsIsAvailable();
  workspace.runBuckBuild("//apps/sample:app_aapt2_with_no_res").assertSuccess();
}

/**
 * End-to-end aapt2 check: expected files are present/uncompressed, and the resource IDs in the
 * compiled R classes agree with the IDs recorded in the resource table dump.
 */
@Test
public void testSimpleAapt2App() throws Exception {
  AssumeAndroidPlatform.assumeAapt2WithOutputTextSymbolsIsAvailable();

  ImmutableMap<String, Path> outputs =
      workspace.buildMultipleAndReturnOutputs(
          "//apps/sample:app_with_aapt2",
          "//apps/sample:disassemble_app_with_aapt2",
          "//apps/sample:resource_dump_app_with_aapt2");

  ZipInspector zipInspector = new ZipInspector(outputs.get("//apps/sample:app_with_aapt2"));
  zipInspector.assertFileExists("res/drawable/tiny_black.png");
  zipInspector.assertFileExists("res/layout/top_layout.xml");
  zipInspector.assertFileExists("assets/asset_file.txt");
  zipInspector.assertFileIsNotCompressed("res/drawable/tiny_black.png");

  Map<String, String> rDotJavaContents =
      parseRDotJavaSmali(outputs.get("//apps/sample:disassemble_app_with_aapt2"));
  Map<String, String> resourceBundleContents =
      parseResourceDump(outputs.get("//apps/sample:resource_dump_app_with_aapt2"));
  assertEquals(
      resourceBundleContents.get("string/title"),
      rDotJavaContents.get("com/sample2/R$string:title"));
  assertEquals(
      resourceBundleContents.get("layout/top_layout"),
      rDotJavaContents.get("com/sample/R$layout:top_layout"));
  assertEquals(
      resourceBundleContents.get("drawable/app_icon"),
      rDotJavaContents.get("com/sample/R$drawable:app_icon"));
}

/** Smoke test for the D8 dexer path. */
@Test
public void testSimpleD8App() throws IOException {
  workspace.runBuckBuild("//apps/sample:app_with_d8").assertSuccess();
}

/** The overriding resource value ("Real App Name") must win in the strings dump. */
@Test
public void testResourceOverrides() throws IOException {
  Path path = workspace.buildAndReturnOutput("//apps/sample:strings_dump_overrides");
  assertThat(
      workspace.getFileContents(path),
      containsPattern(Pattern.compile("^String #[0-9]*: Real App Name$", Pattern.MULTILINE)));
}

/** Same override check after switching the target's BUCK entry from aapt1 to aapt2. */
@Test
public void testResourceOverridesAapt2() throws Exception {
  AssumeAndroidPlatform.assumeAapt2WithOutputTextSymbolsIsAvailable();
  workspace.replaceFileContents(
      "apps/sample/BUCK", "'aapt1', # app_with_res_overrides", "'aapt2',");
  testResourceOverrides();
}

/** An AAR dependency contributing only empty res dirs must not break the build. */
@Test
public void testApkEmptyResDirectoriesBuildsCorrectly() throws IOException {
  workspace.runBuckBuild("//apps/sample:app_with_aar_and_no_res").assertSuccess();
}

/**
 * The ProGuard config generated from native libraries must be passed on the proguard command
 * line and match the checked-in expectation.
 */
@Test
public void testNativeLibGeneratedProguardConfigIsUsedByProguard() throws IOException {
  String target = "//apps/sample:app_with_native_lib_proguard";
  workspace.runBuckBuild(target).assertSuccess();

  Path generatedConfig =
      workspace.getPath(
          BuildTargets.getGenPath(
              filesystem,
              BuildTargetFactory.newInstance(target)
                  .withFlavors(AndroidBinaryGraphEnhancer.NATIVE_LIBRARY_PROGUARD_FLAVOR),
              NativeLibraryProguardGenerator.OUTPUT_FORMAT));

  Path proguardDir =
      workspace.getPath(
          BuildTargets.getGenPath(
              filesystem, BuildTargetFactory.newInstance(target), "%s/proguard"));

  Path proguardCommandLine = proguardDir.resolve("command-line.txt");
  // Check that the proguard command line references the native lib proguard config.
  assertTrue(workspace.getFileContents(proguardCommandLine).contains(generatedConfig.toString()));
  assertEquals(
      workspace.getFileContents("native/proguard_gen/expected.pro"),
      workspace.getFileContents(generatedConfig));
}

/**
 * The fake ReDex tool dumps the arguments it was invoked with as JSON into the "APK"; this test
 * asserts every flag Buck is expected to pass.
 */
@Test
public void testReDexIsCalledAppropriatelyFromAndroidBinary() throws IOException {
  Path apk = workspace.buildAndReturnOutput(APP_REDEX_TARGET);
  Path unzippedApk = unzip(apk.getParent(), apk, "app_redex");

  // We use a fake ReDex binary that writes out the arguments it received as JSON so that we can
  // verify that it was called in the right way.
  @SuppressWarnings("unchecked")
  Map<String, Object> userData = ObjectMappers.readValue(unzippedApk, Map.class);

  String androidSdk = (String) userData.get("ANDROID_SDK");
  assertTrue(
      "ANDROID_SDK environment variable must be set so ReDex runs with zipalign",
      androidSdk != null && !androidSdk.isEmpty());
  assertEquals(workspace.getDestPath().toString(), userData.get("PWD"));

  assertTrue(userData.get("config").toString().endsWith("apps/sample/redex-config.json"));
  assertEquals("buck-out/gen/apps/sample/app_redex/proguard/seeds.txt", userData.get("keep"));
  assertEquals("my_alias", userData.get("keyalias"));
  assertEquals("android", userData.get("keypass"));
  assertEquals(workspace.resolve("keystores/debug.keystore").toString(), userData.get("keystore"));
  assertEquals(
      "buck-out/gen/apps/sample/app_redex__redex/app_redex.redex.apk", userData.get("out"));
  assertEquals("buck-out/gen/apps/sample/app_redex/proguard/command-line.txt", userData.get("P"));
  assertEquals(
      "buck-out/gen/apps/sample/app_redex/proguard/mapping.txt", userData.get("proguard-map"));
  assertTrue((Boolean) userData.get("sign"));
  assertEquals("my_param_name={\"foo\": true}", userData.get("J"));
  assertTrue(
      "redex_extra_args: -j $(location ...) is not properly expanded!",
      userData.get("j").toString().endsWith(".jar"));
  assertTrue(
      "redex_extra_args: -S $(location ...) is not properly expanded!",
      userData.get("S").toString().contains("coldstart_classes=")
          && !userData.get("S").toString().contains("location"));
}

/** Any edit to the ReDex tool itself must invalidate the binary rule. */
@Test
public void testEditingRedexToolForcesRebuild() throws IOException {
  workspace.runBuckBuild(APP_REDEX_TARGET).assertSuccess();
  workspace.replaceFileContents("tools/redex/fake_redex.py", "main()\n", "main() \n");
  workspace.resetBuildLogFile();
  workspace.runBuckBuild(APP_REDEX_TARGET).assertSuccess();

  BuckBuildLog buildLog = workspace.getBuildLog();
  buildLog.assertTargetBuiltLocally(APP_REDEX_TARGET);
}

/** Any edit to the secondary-dex head list must invalidate the binary rule. */
@Test
public void testEditingSecondaryDexHeadListForcesRebuild() throws IOException {
  workspace.runBuckBuild(APP_REDEX_TARGET).assertSuccess();
  workspace.replaceFileContents("tools/redex/secondary_dex_head.list", "", " ");
  workspace.resetBuildLogFile();
  workspace.runBuckBuild(APP_REDEX_TARGET).assertSuccess();

  BuckBuildLog buildLog = workspace.getBuildLog();
  buildLog.assertTargetBuiltLocally(APP_REDEX_TARGET);
}

/** An instrumentation APK whose res dep is empty must still build. */
@Test
public void testInstrumentationApkWithEmptyResDepBuildsCorrectly() throws IOException {
  workspace.runBuckBuild("//apps/sample:instrumentation_apk").assertSuccess();
}

/** A key alias missing from the keystore must fail the build with a descriptive message. */
@Test
public void testInvalidKeystoreKeyAlias() throws IOException {
  workspace.runBuckBuild(SIMPLE_TARGET).assertSuccess();

  workspace.replaceFileContents(
      "keystores/debug.keystore.properties", "key.alias=my_alias", "key.alias=invalid_alias");

  workspace.resetBuildLogFile();
  ProcessResult result = workspace.runBuckCommand("build", SIMPLE_TARGET);
  result.assertFailure("Invalid keystore key alias should fail.");

  assertThat(
      "error message for invalid keystore key alias is incorrect.",
      result.getStderr(),
      containsRegex("The keystore \\[.*\\] key\\.alias \\[.*\\].*does not exist"));
}

/**
 * With trim_resource_ids enabled, only resources actually referenced from code survive in the
 * uber R.java; removing a reference rebuilds and re-trims, and exopackage edits can ABI-hit.
 */
@Test
public void testResourcesTrimming() throws IOException {
  workspace.runBuckBuild(SIMPLE_TARGET).assertSuccess();
  // Enable trimming.
  workspace.replaceFileContents(
      "apps/multidex/BUCK", "# ARGS_FOR_APP", "trim_resource_ids = True, # ARGS_FOR_APP");
  workspace.runBuckCommand("build", "//apps/multidex:disassemble_app_r_dot_java").assertSuccess();
  // Make sure we only see what we expect.
  verifyTrimmedRDotJava(ImmutableSet.of("top_layout", "title"));

  // Make a change.
  workspace.replaceFileContents(
      "java/com/sample/lib/Sample.java", "R.layout.top_layout", "0 /* NO RESOURCE HERE */");

  // Make sure everything gets rebuilt, and we only see what we expect.
  workspace.resetBuildLogFile();
  workspace.runBuckCommand("build", "//apps/multidex:disassemble_app_r_dot_java").assertSuccess();
  BuckBuildLog buildLog = workspace.getBuildLog();
  buildLog.assertTargetBuiltLocally("//apps/multidex:app#compile_uber_r_dot_java");
  buildLog.assertTargetBuiltLocally("//apps/multidex:app#dex,dex_uber_r_dot_java");
  verifyTrimmedRDotJava(ImmutableSet.of("title"));

  // Turn off trimming and turn on exopackage, and rebuilt.
  workspace.replaceFileContents(
      "apps/multidex/BUCK",
      "trim_resource_ids = True, # ARGS_FOR_APP",
      "exopackage_modes = ['secondary_dex'], # ARGS_FOR_APP");
  workspace.runBuckCommand("build", SIMPLE_TARGET).assertSuccess();

  // Make a change.
  workspace.replaceFileContents(
      "java/com/sample/lib/Sample.java", "0 /* NO RESOURCE HERE */", "R.layout.top_layout");

  // rebuilt and verify that we get an ABI hit.
  workspace.resetBuildLogFile();
  workspace.runBuckCommand("build", SIMPLE_TARGET).assertSuccess();
  buildLog = workspace.getBuildLog();
  buildLog.assertTargetHadMatchingInputRuleKey(SIMPLE_TARGET);
}

/** Like testResourcesTrimming, but resources matching keep_resource_pattern are retained. */
@Test
public void testResourcesTrimmingWithPattern() throws IOException {
  // Enable trimming.
  workspace.replaceFileContents(
      "apps/multidex/BUCK",
      "# ARGS_FOR_APP",
      "keep_resource_pattern = '^app_.*', trim_resource_ids = True, # ARGS_FOR_APP");
  workspace.runBuckCommand("build", "//apps/multidex:disassemble_app_r_dot_java").assertSuccess();
  // Make sure we only see what we expect.
  verifyTrimmedRDotJava(ImmutableSet.of("app_icon", "app_name", "top_layout", "title"));

  // Make a change.
  workspace.replaceFileContents(
      "java/com/sample/lib/Sample.java", "R.layout.top_layout", "0 /* NO RESOURCE HERE */");

  // Make sure everything gets rebuilt, and we only see what we expect.
  workspace.resetBuildLogFile();
  workspace.runBuckCommand("build", "//apps/multidex:disassemble_app_r_dot_java").assertSuccess();
  BuckBuildLog buildLog = workspace.getBuildLog();
  buildLog.assertTargetBuiltLocally("//apps/multidex:app#compile_uber_r_dot_java");
  buildLog.assertTargetBuiltLocally("//apps/multidex:app#dex,dex_uber_r_dot_java");
  verifyTrimmedRDotJava(ImmutableSet.of("app_icon", "app_name", "title"));
}

// Matches smali class headers like ".class public Lcom/sample/R$string;".
private static final Pattern SMALI_PUBLIC_CLASS_PATTERN =
    Pattern.compile("\\.class public L([\\w/$]+);");
// Matches smali int constants like ".field public static final title:I = 0x7f040001".
// NOTE(review): "[0-9A-fa-f]" covers all hex digits only via the A-f span; "[0-9A-Fa-f]" was
// probably intended — confirm before "fixing", since it is not wrong in effect.
private static final Pattern SMALI_STATIC_FINAL_INT_PATTERN =
    Pattern.compile("\\.field public static final (\\w+):I = (0x[0-9A-fa-f]+)");

/**
 * Asserts that the disassembled uber R.java contains exactly the field names in
 * {@code expected} and nothing else.
 */
private void verifyTrimmedRDotJava(ImmutableSet<String> expected) throws IOException {
  List<String> lines =
      filesystem.readLines(
          Paths.get("buck-out/gen/apps/multidex/disassemble_app_r_dot_java/all_r_fields.smali"));
  ImmutableSet.Builder<String> found = ImmutableSet.builder();
  for (String line : lines) {
    Matcher m = SMALI_STATIC_FINAL_INT_PATTERN.matcher(line);
    assertTrue("Could not match line: " + line, m.matches());
    assertThat(m.group(1), IsIn.in(expected));
    found.add(m.group(1));
  }
  assertEquals(expected, found.build());
}

/**
 * Parses disassembled R classes into a map of "declaring-class:field" -> hex resource id.
 * Relies on the smali emitting the ".class" header before that class's fields (currentClass
 * would otherwise be null when the first field is seen).
 */
private Map<String, String> parseRDotJavaSmali(Path smaliPath) throws IOException {
  List<String> lines = filesystem.readLines(smaliPath);
  ImmutableMap.Builder<String, String> output = ImmutableMap.builder();
  String currentClass = null;
  for (String line : lines) {
    Matcher m;
    m = SMALI_PUBLIC_CLASS_PATTERN.matcher(line);
    if (m.matches()) {
      currentClass = m.group(1);
      continue;
    }
    m = SMALI_STATIC_FINAL_INT_PATTERN.matcher(line);
    if (m.matches()) {
      output.put(currentClass + ":" + m.group(1), m.group(2));
      continue;
    }
  }
  return output.build();
}

// Matches aapt resource-table dump lines; group 1 is the hex id, group 2 is "type/name".
private static final Pattern RESOURCE_DUMP_SPEC_PATTERN =
    Pattern.compile(" *spec resource (0x[0-9A-fa-f]+) [\\w.]+:(\\w+/\\w+):.*");

/** Parses an aapt resource dump into a map of "type/name" -> hex resource id. */
private Map<String, String> parseResourceDump(Path dumpPath) throws IOException {
  List<String> lines = filesystem.readLines(dumpPath);
  ImmutableMap.Builder<String, String> output = ImmutableMap.builder();
  for (String line : lines) {
    Matcher m = RESOURCE_DUMP_SPEC_PATTERN.matcher(line);
    if (m.matches()) {
      output.put(m.group(2), m.group(1));
    }
  }
  return output.build();
}

/** Manifest merging must deduplicate a permission contributed by multiple manifests. */
@Test
public void testManifestMerge() throws IOException {
  Path mergedPath = workspace.buildAndReturnOutput("//manifests:manifest");
  String contents = workspace.getFileContents(mergedPath);
  Pattern readCalendar =
      Pattern.compile(
          "<uses-permission-sdk-23 android:name=\"android\\.permission\\.READ_CALENDAR\" />");
  int matchCount = 0;
  Matcher matcher = readCalendar.matcher(contents);
  while (matcher.find()) {
    matchCount++;
  }
  assertEquals(
      String.format(
          "Expected one uses-permission-sdk-23=READ_CALENDAR tag, but found %d: %s",
          matchCount, contents),
      1,
      matchCount);
}

/** A library manifest's permission must appear in the automatically merged app manifest. */
@Test
public void testAutomaticManifestMerge() throws IOException {
  Path dumpPath = workspace.buildAndReturnOutput("//apps/sample:dump_merged_manifest");
  String contents = workspace.getFileContents(dumpPath);
  assertThat(contents, containsString("READ_CALENDAR"));
}

/**
 * The generated proguard config should contain -printmapping in both variants, but the
 * aapt-generated keep rules ("#generated") only when aapt is involved.
 */
@Test
public void testProguardOutput() throws IOException {
  ImmutableMap<String, Path> outputs =
      workspace.buildMultipleAndReturnOutputs(
          "//apps/sample:proguard_output_dontobfuscate",
          "//apps/sample:proguard_output_dontobfuscate_no_aapt");
  String withAapt =
      workspace.getFileContents(outputs.get("//apps/sample:proguard_output_dontobfuscate"));
  String withoutAapt =
      workspace.getFileContents(
          outputs.get("//apps/sample:proguard_output_dontobfuscate_no_aapt"));
  assertThat(withAapt, containsString("-printmapping"));
  assertThat(withAapt, containsString("#generated"));
  assertThat(withoutAapt, containsString("-printmapping"));
  assertThat(withoutAapt, CoreMatchers.not(containsString("#generated")));
}
}
package jycessing.mode;

import javax.swing.text.Segment;

import processing.app.syntax.Token;
import processing.app.syntax.TokenMarker;

/**
 * Line-oriented Python token marker in the classic jEdit {@link TokenMarker} style: for each
 * line it emits (length, tokenId) runs via addToken and returns the lexer state to carry into
 * the next line, so triple-quoted strings can span lines.
 */
public class PythonTokenMarker extends TokenMarker {

  // Internal (non-rendered) states used to carry """ / ''' strings across lines.
  private static final byte TRIPLEQUOTE1 = Token.INTERNAL_FIRST;
  private static final byte TRIPLEQUOTE2 = Token.INTERNAL_LAST;

  // Shared, lazily-built default keyword table; see getKeywords().
  private static PyKeywordMap pyKeywords;

  // Per-instance table (starts as the shared defaults; addColoring mutates it).
  private final PyKeywordMap keywords;
  // Start of the not-yet-emitted run within the current line.
  private int lastOffset;
  // Start of the current candidate keyword within the current line.
  private int lastKeyword;

  public PythonTokenMarker() {
    this.keywords = getKeywords();
  }

  /**
   * Registers {@code keyword} under a coloring spec such as "KEYWORD2" or "FUNCTION1":
   * the first letter picks the token family, the trailing digit the variant.
   */
  @Override
  public void addColoring(final String keyword, final String coloring) {
    // KEYWORD1 -> 0, KEYWORD2 -> 1, etc
    final int num = coloring.charAt(coloring.length() - 1) - '1';
    int id = 0;
    // NOTE(review): paren is set for FUNCTION colorings but never read — dead local.
    boolean paren = false;
    switch (coloring.charAt(0)) {
      case 'K':
        id = Token.KEYWORD1 + num;
        break;
      case 'L':
        id = Token.LITERAL1 + num;
        break;
      case 'F':
        id = Token.FUNCTION1 + num;
        paren = true;
        break;
    }
    keywords.add(keyword, (byte) id);
  }

  /**
   * Tokenizes one line. {@code token} is the state left over from the previous line; the
   * return value is the state to start the next line with. Emits runs through addToken.
   */
  @Override
  public byte markTokensImpl(byte token, final Segment line, final int lineIndex) {
    final char[] array = line.array;
    final int offset = line.offset;
    lastOffset = offset;
    lastKeyword = offset;
    final int length = line.count + offset;
    boolean backslash = false;

    loop:
    for (int i = offset; i < length; i++) {
      final int i1 = (i + 1);
      final char c = array[i];
      // A backslash toggles escape state and is otherwise transparent to the state machine.
      if (c == '\\') {
        backslash = !backslash;
        continue;
      }
      switch (token) {
        case Token.NULL:
          switch (c) {
            case '#':
              if (backslash) {
                backslash = false;
              } else {
                // Comment runs to end of line; flush the pending run first.
                doKeyword(line, i, c);
                addToken(i - lastOffset, token);
                addToken(length - i, Token.COMMENT1);
                lastOffset = lastKeyword = length;
                break loop;
              }
              break;
            case '"':
              doKeyword(line, i, c);
              if (backslash) {
                backslash = false;
              } else {
                addToken(i - lastOffset, token);
                // Two more quotes ahead means a """ string.
                if (SyntaxUtilities.regionMatches(line, i1, "\"\"")) {
                  token = TRIPLEQUOTE1;
                } else {
                  token = Token.LITERAL1;
                }
                lastOffset = lastKeyword = i;
              }
              break;
            case '\'':
              doKeyword(line, i, c);
              if (backslash) {
                backslash = false;
              } else {
                addToken(i - lastOffset, token);
                // Two more quotes ahead means a ''' string.
                if (SyntaxUtilities.regionMatches(line, i1, "''")) {
                  token = TRIPLEQUOTE2;
                } else {
                  token = Token.LITERAL2;
                }
                lastOffset = lastKeyword = i;
              }
              break;
            default:
              backslash = false;
              // Any non-identifier character terminates a candidate keyword.
              if (!Character.isLetterOrDigit(c) && c != '_') {
                doKeyword(line, i, c);
              }
              break;
          }
          break;
        case Token.LITERAL1:
          if (backslash) {
            backslash = false;
          } else if (c == '"') {
            addToken(i1 - lastOffset, token);
            token = Token.NULL;
            lastOffset = lastKeyword = i1;
          }
          break;
        case Token.LITERAL2:
          if (backslash) {
            backslash = false;
          } else if (c == '\'') {
            // NOTE(review): emits LITERAL1 even though this state is LITERAL2, so terminated
            // single-quoted strings render in LITERAL1's color while an unterminated one
            // (flushed at end of line below) renders as LITERAL2. This quirk is inherited
            // from the original jEdit PythonTokenMarker — confirm before changing.
            addToken(i1 - lastOffset, Token.LITERAL1);
            token = Token.NULL;
            lastOffset = lastKeyword = i1;
          }
          break;
        case TRIPLEQUOTE1:
          if (backslash) {
            backslash = false;
          } else if (SyntaxUtilities.regionMatches(line, i, "\"\"\"")) {
            // Consume the closing """ (i is advanced past it inside the addToken call).
            addToken((i += 3) - lastOffset, Token.LITERAL1);
            token = Token.NULL;
            lastOffset = lastKeyword = i;
          }
          break;
        case TRIPLEQUOTE2:
          if (backslash) {
            backslash = false;
          } else if (SyntaxUtilities.regionMatches(line, i, "'''")) {
            // Both triple-quote flavors are rendered as LITERAL1.
            addToken((i += 3) - lastOffset, Token.LITERAL1);
            token = Token.NULL;
            lastOffset = lastKeyword = i;
          }
          break;
        default:
          throw new InternalError("Invalid state: " + token);
      }
    }

    // Flush whatever run is still pending at end of line.
    switch (token) {
      case TRIPLEQUOTE1:
      case TRIPLEQUOTE2:
        addToken(length - lastOffset, Token.LITERAL1);
        break;
      case Token.NULL:
        doKeyword(line, length, '\0');
        //$FALL-THROUGH$
      default:
        addToken(length - lastOffset, token);
        break;
    }
    return token;
  }

  /** Lazily builds the shared default Python keyword/builtin table. Not thread-safe. */
  public static PyKeywordMap getKeywords() {
    if (pyKeywords == null) {
      pyKeywords = new PyKeywordMap();
      pyKeywords.add("__init__", Token.FUNCTION2);
      pyKeywords.add("and", Token.KEYWORD3);
      pyKeywords.add("as", Token.KEYWORD3);
      pyKeywords.add("assert", Token.KEYWORD1);
      pyKeywords.add("break", Token.KEYWORD1);
      pyKeywords.add("chr", Token.FUNCTION1);
      pyKeywords.add("class", Token.KEYWORD2);
      pyKeywords.add("continue", Token.KEYWORD1);
      pyKeywords.add("def", Token.KEYWORD2);
      pyKeywords.add("del", Token.KEYWORD2);
      pyKeywords.add("elif", Token.KEYWORD1);
      pyKeywords.add("else", Token.KEYWORD1);
      pyKeywords.add("except", Token.KEYWORD1);
      pyKeywords.add("exec", Token.KEYWORD1);
      pyKeywords.add("finally", Token.KEYWORD1);
      pyKeywords.add("for", Token.KEYWORD3);
      pyKeywords.add("from", Token.KEYWORD2);
      pyKeywords.add("global", Token.KEYWORD2);
      pyKeywords.add("if", Token.KEYWORD1);
      pyKeywords.add("import", Token.KEYWORD2);
      pyKeywords.add("in", Token.KEYWORD2);
      pyKeywords.add("is", Token.KEYWORD2);
      pyKeywords.add("lambda", Token.KEYWORD2);
      pyKeywords.add("not", Token.KEYWORD3);
      pyKeywords.add("or", Token.KEYWORD3);
      pyKeywords.add("pass", Token.KEYWORD2);
      pyKeywords.add("print", Token.KEYWORD2);
      pyKeywords.add("raise", Token.KEYWORD1);
      pyKeywords.add("range", Token.KEYWORD3);
      pyKeywords.add("return", Token.KEYWORD1);
      pyKeywords.add("self", Token.KEYWORD2);
      pyKeywords.add("try", Token.KEYWORD1);
      pyKeywords.add("with", Token.KEYWORD3);
      pyKeywords.add("while", Token.KEYWORD3);
    }
    return pyKeywords;
  }

  /**
   * If the text between lastKeyword and {@code i} is a known keyword, emits the preceding
   * plain run plus the keyword run and advances lastOffset. Always restarts the keyword
   * window just past {@code i}. Always returns false (the char argument {@code c} is unused).
   */
  private boolean doKeyword(final Segment line, final int i, final char c) {
    final int i1 = i + 1;
    final int len = i - lastKeyword;
    final byte id = keywords.lookup(line, lastKeyword, len);
    if (id != Token.NULL) {
      if (lastKeyword != lastOffset) {
        addToken(lastKeyword - lastOffset, Token.NULL);
      }
      addToken(len, id);
      lastOffset = i;
    }
    lastKeyword = i1;
    return false;
  }
}
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.chromoting.cardboard;

import android.app.Activity;
import android.graphics.Point;
import android.graphics.PointF;
import android.opengl.GLES20;
import android.opengl.Matrix;

import com.google.vrtoolkit.cardboard.CardboardView;
import com.google.vrtoolkit.cardboard.Eye;
import com.google.vrtoolkit.cardboard.HeadTransform;
import com.google.vrtoolkit.cardboard.Viewport;

import org.chromium.chromoting.jni.JniInterface;

import javax.microedition.khronos.egl.EGLConfig;

/**
 * Renderer for Cardboard view. Draws the remote desktop, an optional menu bar, a photosphere
 * background and a cursor, and tracks where the user's gaze intersects the desktop/menu planes.
 */
public class CardboardRenderer implements CardboardView.StereoRenderer {
    // NOTE(review): TAG, BYTE_PER_FLOAT, POSITION_DATA_SIZE, TEXTURE_COORDINATE_DATA_SIZE and
    // HALF_SKYBOX_SIZE are not referenced anywhere in this class — confirm they are not kept
    // for documentation purposes before removing.
    private static final String TAG = "cr.CardboardRenderer";

    private static final int BYTE_PER_FLOAT = 4;
    private static final int POSITION_DATA_SIZE = 3;
    private static final int TEXTURE_COORDINATE_DATA_SIZE = 2;
    // Near/far clip planes for the perspective projection.
    private static final float Z_NEAR = 0.1f;
    private static final float Z_FAR = 1000.0f;

    // The following object positions are relative to the view point.
    private static final float DESKTOP_POSITION_X = 0.0f;
    private static final float DESKTOP_POSITION_Y = 0.0f;
    private static final float DESKTOP_POSITION_Z = -2.0f;
    private static final float MENU_BAR_POSITION_X = 0.0f;
    private static final float MENU_BAR_POSITION_Y = 0.0f;
    private static final float MENU_BAR_POSITION_Z = -0.9f;

    private static final float HALF_SKYBOX_SIZE = 100.0f;

    // Clamp range for the camera's Z position (see moveTowardsDesktop/moveAwayFromDesktop).
    private static final float VIEW_POSITION_MIN = -1.0f;
    private static final float VIEW_POSITION_MAX = 3.0f;

    // Allows user to click even when looking outside the desktop
    // but within edge margin.
    private static final float EDGE_MARGIN = 0.1f;

    // Distance to move camera each time.
    private static final float CAMERA_MOTION_STEP = 0.5f;

    private final Activity mActivity;

    private float mCameraPosition;

    // Lock to allow multithreaded access to mCameraPosition.
    private final Object mCameraPositionLock = new Object();

    private float[] mCameraMatrix;
    private float[] mViewMatrix;
    private float[] mProjectionMatrix;

    // Make matrix member variable to avoid unnecessary initialization.
    private float[] mDesktopModelMatrix;
    private float[] mDesktopCombinedMatrix;
    private float[] mEyePointModelMatrix;
    private float[] mEyePointCombinedMatrix;
    private float[] mPhotosphereCombinedMatrix;

    // Direction that user is looking towards.
    private float[] mForwardVector;

    // Eye position at the desktop distance.
    private PointF mEyeDesktopPosition;

    // Eye position at the menu bar distance;
    private PointF mEyeMenuBarPosition;

    private Desktop mDesktop;
    private MenuBar mMenuBar;
    private Photosphere mPhotosphere;
    private Cursor mCursor;

    // Lock for eye position related operations.
    // This protects access to mEyeDesktopPosition.
    // NOTE(review): onNewFrame() and drawCursor() access mEyeDesktopPosition WITHOUT taking
    // this lock, while getMouseCoordinates()/isLookingAtDesktop() do take it — the write in
    // onNewFrame in particular looks like it should be synchronized. Confirm threading model.
    private final Object mEyeDesktopPositionLock = new Object();

    // Flag to indicate whether to show menu bar.
    private boolean mMenuBarVisible;

    public CardboardRenderer(Activity activity) {
        mActivity = activity;
        mCameraPosition = 0.0f;

        mCameraMatrix = new float[16];
        mViewMatrix = new float[16];
        mProjectionMatrix = new float[16];
        mDesktopModelMatrix = new float[16];
        mDesktopCombinedMatrix = new float[16];
        mEyePointModelMatrix = new float[16];
        mEyePointCombinedMatrix = new float[16];
        mPhotosphereCombinedMatrix = new float[16];

        mForwardVector = new float[3];
    }

    /**
     * Registers (on the UI thread) a JNI redraw callback that reloads the desktop and cursor
     * textures, then requests an initial redraw.
     */
    private void initializeRedrawCallback() {
        mActivity.runOnUiThread(new Runnable() {
            // NOTE(review): missing @Override on run().
            public void run() {
                JniInterface.provideRedrawCallback(new Runnable() {
                    @Override
                    public void run() {
                        mDesktop.reloadTexture();
                        mCursor.reloadTexture();
                    }
                });
                JniInterface.redrawGraphics();
            }
        });
    }

    @Override
    public void onSurfaceCreated(EGLConfig config) {
        // Set the background clear color to black.
        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);

        // Use culling to remove back faces.
        GLES20.glEnable(GLES20.GL_CULL_FACE);

        // Enable depth testing.
        GLES20.glEnable(GLES20.GL_DEPTH_TEST);

        // Scene objects can only be created once a GL context exists.
        mDesktop = new Desktop();
        mMenuBar = new MenuBar(mActivity);
        mPhotosphere = new Photosphere(mActivity);
        mCursor = new Cursor();

        initializeRedrawCallback();
    }

    @Override
    public void onSurfaceChanged(int width, int height) {
        // Intentionally empty: projection is derived per-eye in onDrawEye().
    }

    /**
     * Per-frame update: rebuilds the camera matrix from the (possibly moved) camera Z position,
     * recomputes where the gaze hits the desktop and menu-bar planes, lazily loads textures and
     * moves the cursor to the current gaze point.
     */
    @Override
    public void onNewFrame(HeadTransform headTransform) {
        // Position the eye at the origin.
        float eyeX = 0.0f;
        float eyeY = 0.0f;
        float eyeZ;
        synchronized (mCameraPositionLock) {
            eyeZ = mCameraPosition;
        }

        // We are looking toward the negative Z direction.
        float lookX = DESKTOP_POSITION_X;
        float lookY = DESKTOP_POSITION_Y;
        float lookZ = DESKTOP_POSITION_Z;

        // Set our up vector. This is where our head would be pointing were we holding the camera.
        float upX = 0.0f;
        float upY = 1.0f;
        float upZ = 0.0f;

        Matrix.setLookAtM(mCameraMatrix, 0, eyeX, eyeY, eyeZ, lookX, lookY, lookZ, upX, upY, upZ);

        headTransform.getForwardVector(mForwardVector, 0);
        // NOTE(review): unsynchronized write; readers take mEyeDesktopPositionLock.
        mEyeDesktopPosition = getLookingPosition(Math.abs(DESKTOP_POSITION_Z - eyeZ));
        mEyeMenuBarPosition = getLookingPosition(Math.abs(MENU_BAR_POSITION_Z));
        mDesktop.maybeLoadDesktopTexture();
        mPhotosphere.maybeLoadTextureAndCleanImage();
        mCursor.maybeLoadTexture(mDesktop);
        mCursor.moveTo(getMouseCoordinates());
    }

    @Override
    public void onDrawEye(Eye eye) {
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

        // Apply the eye transformation to the camera.
        Matrix.multiplyMM(mViewMatrix, 0, eye.getEyeView(), 0, mCameraMatrix, 0);

        mProjectionMatrix = eye.getPerspective(Z_NEAR, Z_FAR);

        drawDesktop();
        drawPhotosphere(eye.getType());
        drawMenuBar();
        drawCursor();
    }

    @Override
    public void onRendererShutdown() {
        mDesktop.cleanup();
        mMenuBar.cleanup();
        mPhotosphere.cleanup();
        mCursor.cleanup();
    }

    @Override
    public void onFinishFrame(Viewport viewport) {
        // Intentionally empty.
    }

    /**
     * Draws the cursor at the gaze point on the desktop plane. Skipped when the gaze is off the
     * desktop, when it is over a visible menu bar, or when no cursor image is available yet.
     */
    private void drawCursor() {
        if (!isLookingAtDesktop() || (isMenuBarVisible() && isLookingAtMenuBar())
                || !mCursor.hasImageFrame()) {
            return;
        }
        // NOTE(review): reads mEyeDesktopPosition without mEyeDesktopPositionLock.
        float eyePointX = clamp(mEyeDesktopPosition.x, -mDesktop.getHalfWidth(),
                mDesktop.getHalfWidth());
        float eyePointY = clamp(mEyeDesktopPosition.y, -mDesktop.getHalfHeight(),
                mDesktop.getHalfHeight());
        Matrix.setIdentityM(mEyePointModelMatrix, 0);
        Matrix.translateM(mEyePointModelMatrix, 0, eyePointX , eyePointY,
                DESKTOP_POSITION_Z);
        Matrix.multiplyMM(mEyePointCombinedMatrix, 0, mViewMatrix, 0, mEyePointModelMatrix, 0);
        Matrix.multiplyMM(mEyePointCombinedMatrix, 0, mProjectionMatrix,
                0, mEyePointCombinedMatrix, 0);
        mCursor.draw(mEyePointCombinedMatrix);
    }

    private void drawPhotosphere(int eyeType) {
        // Since we will always put the photosphere center in the origin, the
        // model matrix will always be identity matrix which we can ignore.
        Matrix.multiplyMM(mPhotosphereCombinedMatrix, 0, mProjectionMatrix, 0, mViewMatrix, 0);

        mPhotosphere.draw(mPhotosphereCombinedMatrix, eyeType);
    }

    private void drawDesktop() {
        if (!mDesktop.hasVideoFrame()) {
            // This can happen if the client is connected, but a complete
            // video frame has not yet been decoded.
            return;
        }
        Matrix.setIdentityM(mDesktopModelMatrix, 0);
        Matrix.translateM(mDesktopModelMatrix, 0, DESKTOP_POSITION_X,
                DESKTOP_POSITION_Y, DESKTOP_POSITION_Z);

        // Pass in Model View Matrix and Model View Project Matrix.
        Matrix.multiplyMM(mDesktopCombinedMatrix, 0, mViewMatrix, 0, mDesktopModelMatrix, 0);
        Matrix.multiplyMM(mDesktopCombinedMatrix, 0, mProjectionMatrix,
                0, mDesktopCombinedMatrix, 0);
        mDesktop.draw(mDesktopCombinedMatrix, mMenuBarVisible);
    }

    /** Draws the menu bar (if visible) at a fixed offset in front of the camera. */
    private void drawMenuBar() {
        if (!mMenuBarVisible) {
            return;
        }
        float menuBarZ;
        synchronized (mCameraPositionLock) {
            menuBarZ = mCameraPosition + MENU_BAR_POSITION_Z;
        }
        mMenuBar.draw(mViewMatrix, mProjectionMatrix, mEyeMenuBarPosition,
                MENU_BAR_POSITION_X, MENU_BAR_POSITION_Y, menuBarZ);
    }

    /**
     * Return menu item that is currently looking at or null if not looking at menu bar.
     */
    public MenuItem getMenuItem() {
        // Transform world view to model view.
        return mMenuBar.getLookingItem(new PointF(mEyeMenuBarPosition.x - MENU_BAR_POSITION_X,
                mEyeMenuBarPosition.y - MENU_BAR_POSITION_Y));
    }

    /**
     * Returns coordinates in units of pixels in the desktop bitmap.
     * This can be called on any thread.
     */
    public PointF getMouseCoordinates() {
        PointF result = new PointF();
        Point shapePixels = mDesktop.getFrameSizePixels();
        int widthPixels = shapePixels.x;
        int heightPixels = shapePixels.y;

        synchronized (mEyeDesktopPositionLock) {
            // Due to the coordinate direction, we only have to inverse x.
            result.x = (mEyeDesktopPosition.x + mDesktop.getHalfWidth())
                    / (2 * mDesktop.getHalfWidth()) * widthPixels;
            result.y = (-mEyeDesktopPosition.y + mDesktop.getHalfHeight())
                    / (2 * mDesktop.getHalfHeight()) * heightPixels;

            result.x = clamp(result.x, 0, widthPixels);
            result.y = clamp(result.y, 0, heightPixels);
        }

        return result;
    }

    /**
     * Returns the passed in value if it resides within the specified range (inclusive). If not,
     * it will return the closest boundary from the range. The ordering of the boundary values
     * does not matter.
     *
     * @param value The value to be compared against the range.
     * @param a First boundary range value.
     * @param b Second boundary range value.
     * @return The passed in value if it is within the range, otherwise the closest boundary value.
     */
    private static float clamp(float value, float a, float b) {
        float min = (a > b) ? b : a;
        float max = (a > b) ? a : b;
        if (value < min) {
            value = min;
        } else if (value > max) {
            value = max;
        }
        return value;
    }

    /**
     * Move the camera towards desktop.
     * This method can be called on any thread.
     */
    public void moveTowardsDesktop() {
        synchronized (mCameraPositionLock) {
            float newPosition = mCameraPosition - CAMERA_MOTION_STEP;
            if (newPosition >= VIEW_POSITION_MIN) {
                mCameraPosition = newPosition;
            }
        }
    }

    /**
     * Move the camera away from desktop.
     * This method can be called on any thread.
     */
    public void moveAwayFromDesktop() {
        synchronized (mCameraPositionLock) {
            float newPosition = mCameraPosition + CAMERA_MOTION_STEP;
            if (newPosition <= VIEW_POSITION_MAX) {
                mCameraPosition = newPosition;
            }
        }
    }

    /**
     * Return true if user is looking at the desktop.
     * This method can be called on any thread.
     */
    public boolean isLookingAtDesktop() {
        synchronized (mEyeDesktopPositionLock) {
            // TODO(shichengfeng): Move logic to CardboardActivityDesktop.
            return Math.abs(mEyeDesktopPosition.x) <= (mDesktop.getHalfWidth() + EDGE_MARGIN)
                    && Math.abs(mEyeDesktopPosition.y) <= (mDesktop.getHalfHeight() + EDGE_MARGIN);
        }
    }

    /**
     * Return true if user is looking at the menu bar.
     */
    public boolean isLookingAtMenuBar() {
        return mMenuBar.contains(new PointF(mEyeMenuBarPosition.x - MENU_BAR_POSITION_X,
                mEyeMenuBarPosition.y - MENU_BAR_POSITION_Y));
    }

    /**
     * Get eye position at the given distance.
     */
    private PointF getLookingPosition(float distance) {
        // Guard against a forward vector (nearly) parallel to the plane: project to "infinity"
        // in the direction of the X/Y components instead of dividing by ~0.
        if (Math.abs(mForwardVector[2]) < 0.00001f) {
            return new PointF(Math.copySign(Float.MAX_VALUE, mForwardVector[0]),
                    Math.copySign(Float.MAX_VALUE, mForwardVector[1]));
        } else {
            return new PointF(mForwardVector[0] * distance / mForwardVector[2],
                    mForwardVector[1] * distance / mForwardVector[2]);
        }
    }

    /**
     * Set the visibility of the menu bar.
     */
    public void setMenuBarVisible(boolean visible) {
        mMenuBarVisible = visible;
    }

    /**
     * Return true if menu bar is visible.
     */
    public boolean isMenuBarVisible() {
        return mMenuBarVisible;
    }
}
/**
 * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
 *   This file is part of the Smart Developer Hub Project:
 *     http://www.smartdeveloperhub.org/
 *
 *   Center for Open Middleware
 *     http://www.centeropenmiddleware.com/
 * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
 *   Copyright (C) 2015-2016 Center for Open Middleware.
 * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
 *   Licensed under the Apache License, Version 2.0 (the "License");
 *   you may not use this file except in compliance with the License.
 *   You may obtain a copy of the License at
 *
 *             http://www.apache.org/licenses/LICENSE-2.0
 *
 *   Unless required by applicable law or agreed to in writing, software
 *   distributed under the License is distributed on an "AS IS" BASIS,
 *   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *   See the License for the specific language governing permissions and
 *   limitations under the License.
 * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
 *   Artifact    : org.smartdeveloperhub.harvesters.ci.backend:ci-backend-core:0.3.0
 *   Bundle      : ci-backend-core-0.3.0.jar
 * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
 */
package org.smartdeveloperhub.harvesters.ci.backend.enrichment;

import java.io.IOException;
import java.lang.Thread.UncaughtExceptionHandler;
import java.net.URI;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.smartdeveloperhub.curator.connector.Connector;
import org.smartdeveloperhub.curator.connector.ConnectorException;
import org.smartdeveloperhub.curator.connector.EnrichmentRequest;
import org.smartdeveloperhub.curator.connector.EnrichmentResult;
import org.smartdeveloperhub.curator.connector.EnrichmentResultHandler;
import org.smartdeveloperhub.harvesters.util.concurrent.MemoizingScheduledExecutorService;
import org.smartdeveloperhub.harvesters.util.concurrent.MoreExecutors;

import com.google.common.base.MoreObjects;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.ThreadFactoryBuilder;

/**
 * Asynchronously submits enrichment requests for CI executions.
 *
 * <p>Each accepted {@link EnrichmentContext} becomes a {@link RequestJob} that
 * is (re)scheduled on a {@link MemoizingScheduledExecutorService}. When a job
 * fires, the single {@link Worker} thread resolves the target execution's
 * resource URI via a {@link ResolverService}; if resolution fails the job is
 * retried after a fixed delay, otherwise an {@link EnrichmentRequest} is
 * submitted through the curator {@link Connector}. Job outcomes are recorded
 * in {@link RequestorMetrics}.
 *
 * <p>Lifecycle: {@link #start()} connects the connector and launches the
 * worker; {@link #stop()} enqueues a termination sentinel, shuts the pool
 * down, and disconnects.
 */
final class EnrichmentRequestor {

	/**
	 * A schedulable unit of work wrapping one {@link EnrichmentContext}.
	 *
	 * <p>A job with a {@code null} context is the termination sentinel used by
	 * {@link Worker#triggerTermination()} (see {@link #requiresTermination()}).
	 * When run, the job hands itself to the worker's queue; {@link #cancel()}
	 * makes both {@link #run()} and {@link #retry(long, TimeUnit)} no-ops.
	 */
	final class RequestJob implements Runnable {

		// Identifier assigned by RequestorMetrics at creation time.
		private final long id;
		// Null only for the termination sentinel.
		private final EnrichmentContext context;

		// Number of times retry(..) has rescheduled this job.
		private long retries;
		// volatile: set by callers on other threads, read by run()/retry().
		private volatile boolean cancelled;

		private RequestJob(final EnrichmentContext context) {
			this.context=context;
			this.retries=0;
			// Registers the job with the metrics tracker and obtains its id.
			this.id=EnrichmentRequestor.this.metrics.createJob(context);
		}

		long id() {
			return this.id;
		}

		/** True iff this is the termination sentinel (no context attached). */
		boolean requiresTermination() {
			return this.context==null;
		}

		EnrichmentContext context() {
			return this.context;
		}

		/** Marks the job so that future run()/retry() invocations do nothing. */
		void cancel() {
			this.cancelled=true;
		}

		/**
		 * Reschedules this job on the requestor's executor after the given
		 * delay, unless it has been cancelled.
		 *
		 * @return the retry count after this attempt (unchanged if cancelled)
		 */
		long retry(final long duration, final TimeUnit unit) {
			if(!this.cancelled) {
				this.retries++;
				EnrichmentRequestor.this.executor.schedule(this,duration,unit);
			}
			return this.retries;
		}

		/** Short human-readable tag used in log messages. */
		String description() {
			return "#"+this.id+" ("+(this.context==null?"<termination>":this.context.pendingEnrichment())+")";
		}

		@Override
		public void run() {
			// Executed on the scheduler: simply move the job to the worker's queue.
			if(!this.cancelled) {
				EnrichmentRequestor.this.worker.queueJob(RequestJob.this);
			}
		}

		@Override
		public String toString() {
			return
				MoreObjects.
					toStringHelper(getClass()).
						add("id",this.id).
						add("retries",this.retries).
						add("context",this.context).
						toString();
		}

	}

	/**
	 * Single consumer thread that drains {@link RequestJob}s from a blocking
	 * queue and turns them into enrichment requests.
	 *
	 * <p>Termination is cooperative: {@link #triggerTermination()} flips the
	 * {@code terminated} flag and enqueues a sentinel job so a blocked
	 * {@code take()} wakes up; remaining queued jobs are then cancelled.
	 */
	private final class Worker implements Runnable {

		private final Connector connector;
		private final ResolverService resolver;
		private final BlockingQueue<RequestJob> queue;

		// volatile: written by triggerTermination() from another thread.
		private volatile boolean terminated;

		private Worker(final Connector connector, final ResolverService resolver) {
			this.connector=connector;
			this.resolver=resolver;
			this.queue=new LinkedBlockingDeque<RequestJob>();
			this.terminated=false;
		}

		@Override
		public void run() {
			try {
				LOGGER.debug("Starting Enrichment Requestor worker. Awaiting resolver availability...");
				// Block until the resolver reports readiness (or termination).
				awaitAvailability();
				LOGGER.debug("Resolver is available. Started processing queued requests...");
				processJobs();
				LOGGER.debug("Enrichment Requestor worker terminated");
			} catch (final InterruptedException e) {
				LOGGER.warn("Enrichment Requestor worker interrupted",e);
			}
		}

		/**
		 * Enqueues a job for processing. Regular jobs are rejected once
		 * termination has been requested; the termination sentinel itself is
		 * always accepted. The put is retried until it succeeds, swallowing
		 * interrupts (the queue is unbounded, so put should not block long).
		 */
		// NOTE(review): the terminated check and the put are not atomic; a job
		// observed as accepted here may still be cancelled by cancelPendingJobs().
		void queueJob(final RequestJob job) {
			if(!job.requiresTermination() && this.terminated) {
				LOGGER.info("Rejected request job {} for execution {}",job.description(),job.context().targetExecution().executionId());
				return;
			}
			while(true) {
				try {
					this.queue.put(job);
					LOGGER.trace("Queued job {}",job.description());
					break;
				} catch (final InterruptedException e) {
					LOGGER.info("Enrichment Requestor interrupted while awaiting for enqueueing job {}",job.description(),e);
				}
			}
		}

		/**
		 * Requests cooperative shutdown: sets the terminated flag and enqueues
		 * a sentinel job so that a blocked take() in processJobs() wakes up.
		 */
		void triggerTermination() {
			LOGGER.debug("Requested Enrichment Requestor worker termination.");
			this.terminated=true;
			final RequestJob job = new RequestJob(null);
			LOGGER.trace("Created job {}",job.description());
			queueJob(job);
		}

		/**
		 * Main loop: take jobs until termination is requested, then cancel
		 * whatever is still queued.
		 */
		private void processJobs() {
			while(!this.terminated) {
				try {
					final RequestJob job=this.queue.take();
					if(!job.requiresTermination()) {
						processJob(job);
					}
				} catch (final InterruptedException e) {
					// Ignore interruption. Worker can only be terminated via
					// the triggerTermination method
					LOGGER.trace("Interrupted while waiting for job",e);
					// NOTE(review): re-asserting the interrupt flag here makes the
					// next take() throw InterruptedException immediately, so an
					// external interrupt spins this loop until terminated is set —
					// confirm this is the intended behaviour.
					Thread.currentThread().interrupt();
				}
			}
			cancelPendingJobs();
		}

		/**
		 * Resolves the job's execution resource; retries later if the resolver
		 * cannot produce a URI yet, otherwise completes the job.
		 */
		private void processJob(final RequestJob job) {
			final URI executionResource =
				this.resolver.
					resolveExecution(
						job.context().targetExecution());
			if(executionResource==null) {
				retryJob(job);
			} else {
				completeJob(job, executionResource);
			}
		}

		// Fixed 5-second retry delay (see TODO on awaitAvailability about back-off).
		private void retryJob(final RequestJob job) {
			final long retries = job.retry(5,TimeUnit.SECONDS);
			LOGGER.
				trace(
					"Retrying job #{} ({}): could not resolve resource for execution {} ",
					job.id(),
					retries,
					job.context().targetExecution().executionId());
		}

		/**
		 * Builds and submits the enrichment request for a resolved execution
		 * resource, then records the outcome in the metrics, whether or not the
		 * submission succeeded.
		 */
		private void completeJob(final RequestJob job, final URI executionResource) {
			final EnrichmentContext ctx = job.context();
			boolean completed=false;
			try {
				submitEnrichmentRequest(ctx,UseCase.createRequest(executionResource,ctx));
				completed=true;
			} catch (final Exception e) {
				LOGGER.error("Could not process {} ({}). Full stacktrace follows",ctx.pendingEnrichment(),executionResource,e);
			} finally {
				// NOTE(review): "Completed "/"Failed " carry a trailing space and the
				// format string adds another — renders with a double space.
				LOGGER.trace("{} processing job {}",completed?"Completed ":"Failed ",job.description());
				EnrichmentRequestor.this.metrics.jobProcessed(job,completed);
			}
		}

		/**
		 * Sends the request through the curator connector; the asynchronous
		 * result is routed back into {@link #processEnrichmentResult}.
		 */
		private void submitEnrichmentRequest(final EnrichmentContext context, final EnrichmentRequest request) {
			try {
				LOGGER.trace("{} submitting {}",context,request);
				this.connector.requestEnrichment(
					request,
					new EnrichmentResultHandler() {
						@Override
						public void onResult(final EnrichmentResult result) {
							processEnrichmentResult(context,request,result);
						}
					}
				);
			} catch (final Exception e) {
				LOGGER.error("Could not submit {} related to {}. Full stacktrace follows",request,context,e);
			}
		}

		/** Drains and cancels every job still queued at termination time. */
		private void cancelPendingJobs() {
			final List<RequestJob> pendingJobs=Lists.newArrayList();
			this.queue.drainTo(pendingJobs);
			for(final RequestJob job:pendingJobs) {
				job.cancel();
			}
			logCancelledRequestJobs(pendingJobs);
		}

		// Trace-level accounting of cancelled jobs; guards the list-building
		// work behind isTraceEnabled().
		private void logCancelledRequestJobs(final List<RequestJob> pendingJobs) {
			if(LOGGER.isTraceEnabled()) {
				final List<URI> executionIds=Lists.newArrayList();
				for(final RequestJob job:pendingJobs) {
					if(!job.requiresTermination()) {
						executionIds.add(job.context().targetExecution().executionId());
					}
				}
				if(!executionIds.isEmpty()) {
					LOGGER.trace("Cancelled {} pending execution enrichment request jobs ({})",pendingJobs.size(),executionIds);
				} else {
					LOGGER.trace("All execution enrichment jobs were executed");
				}
			}
		}

		/**
		 * Callback for asynchronous enrichment results: converts the result
		 * into an {@link ExecutionEnrichment} and persists it via the service.
		 * Persistence failures are logged and swallowed.
		 */
		private void processEnrichmentResult(final EnrichmentContext context, final EnrichmentRequest request, final EnrichmentResult result) {
			LOGGER.debug("Processing enrichment result {} about {} ({})",result,request,context);
			final ExecutionEnrichment enrichment=UseCase.processResult(context,result);
			try {
				EnrichmentRequestor.this.service.addEnrichment(context,enrichment);
			} catch (final IOException e) {
				LOGGER.warn("Processing of enrichment result {} about {} ({}) failed. Full stacktrace follows",result,request,context,e);
			}
		}

		// TODO: Use an exponential back-off delay
		// Polls the resolver every 3000 ms until it is ready or termination is
		// requested.
		private void awaitAvailability() throws InterruptedException {
			while(!this.resolver.isReady() && !this.terminated) {
				TimeUnit.MILLISECONDS.sleep(3000);
			}
		}

	}

	private static final Logger LOGGER=LoggerFactory.getLogger(EnrichmentRequestor.class);

	private final Worker worker;
	// Scheduler used for delayed job (re)submission; also provides unwrap()
	// to map scheduled Runnables back to RequestJobs (see logAbortedRequestJobs).
	private final MemoizingScheduledExecutorService executor;
	private final EnrichmentService service;
	private final RequestorMetrics metrics;

	// Set by start(); guards the shutdown sequence in stop().
	private boolean started;

	EnrichmentRequestor(final EnrichmentService service, final Connector connector, final ResolverService resolver) {
		this.service = service;
		this.metrics = new RequestorMetrics();
		this.worker = new Worker(connector,resolver);
		final ThreadFactory threadFactory =
			new ThreadFactoryBuilder().
				setNameFormat("EnrichmentRequestor-worker-%d").
				setPriority(Thread.MAX_PRIORITY).
				setUncaughtExceptionHandler(
					new UncaughtExceptionHandler() {
						@Override
						public void uncaughtException(final Thread t, final Throwable e) {
							LOGGER.error("Requestor thread {} died unexpectedly",t,e);
						}
					}).
				build();
		// Two threads: one runs the Worker loop, the other fires scheduled jobs.
		this.executor=MoreExecutors.newMemoizingScheduledExecutorService(2, threadFactory);
		this.started=false;
	}

	/**
	 * Accepts a context for enrichment. Contexts that do not require a commit
	 * are ignored; accepted ones are scheduled to reach the worker after a
	 * one-second delay.
	 */
	void enqueueRequest(final EnrichmentContext context) {
		if(context.requiresCommit()) {
			final RequestJob job = new RequestJob(context);
			LOGGER.trace("Created job {}",job.description());
			job.retry(1,TimeUnit.SECONDS);
		}
	}

	RequestorMetrics metrics() {
		return this.metrics;
	}

	/**
	 * Connects the curator connector and launches the worker thread.
	 *
	 * @throws IOException if the connector cannot be initialized
	 */
	void start() throws IOException {
		LOGGER.info("Starting Enrichment Requestor...");
		try {
			this.worker.connector.connect();
			this.executor.submit(this.worker);
			this.started=true;
			LOGGER.info("Enrichment Requestor started.");
		} catch (final ConnectorException e) {
			LOGGER.error("Could not initialize the curator connector. Full stacktrace follows",e);
			throw new IOException("Could not initialize the curator connector",e);
		}
	}

	/**
	 * Orderly shutdown: asks the worker to terminate, shuts the pool down, and
	 * disconnects the connector. No-op (apart from logging) if never started.
	 *
	 * @throws IOException if the connector cannot be disconnected
	 */
	void stop() throws IOException {
		LOGGER.info("Stopping Enrichment Requestor...");
		if(this.started) {
			this.worker.triggerTermination();
			shutdownPoolGracefully();
			try {
				this.worker.connector.disconnect();
			} catch (final ConnectorException e) {
				LOGGER.error("Could not disconnect the curator connector. Full stacktrace follows",e);
				throw new IOException("Could not disconnect the curator connector",e);
			}
		}
		LOGGER.info("Enrichment Requestor stopped.");
	}

	// Forces shutdown of the scheduler and logs whatever jobs never ran.
	private void shutdownPoolGracefully() {
		if(!this.executor.isTerminated()) {
			final List<Runnable> unfinished = this.executor.shutdownNow();
			logAbortedRequestJobs(unfinished);
		}
	}

	// Trace-level accounting of jobs aborted by shutdownNow(); unwrap() maps
	// each scheduled Runnable back to its RequestJob (null if not one).
	private void logAbortedRequestJobs(final List<Runnable> unfinished) {
		if(LOGGER.isTraceEnabled()) {
			final List<URI> aborted=Lists.newArrayList();
			for(final Runnable runnable:unfinished) {
				final RequestJob job=this.executor.unwrap(runnable,RequestJob.class);
				if(job!=null && !job.requiresTermination()) {
					aborted.add(job.context().targetExecution().executionId());
				}
			}
			if(!aborted.isEmpty()) {
				LOGGER.trace("Aborted {} pending execution enrichment requests ({})",aborted.size(),aborted);
			} else {
				LOGGER.trace("All scheduled pending execution enrichment requests were queued.");
			}
		}
	}

}
// Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.analysis.util;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Iterables;
import com.google.common.eventbus.EventBus;
import com.google.devtools.build.lib.actions.Action;
import com.google.devtools.build.lib.actions.ActionAnalysisMetadata;
import com.google.devtools.build.lib.actions.ActionGraph;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.analysis.BlazeDirectories;
import com.google.devtools.build.lib.analysis.BuildView;
import com.google.devtools.build.lib.analysis.BuildView.AnalysisResult;
import com.google.devtools.build.lib.analysis.ConfiguredRuleClassProvider;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.analysis.RuleDefinition;
import com.google.devtools.build.lib.analysis.ServerDirectories;
import com.google.devtools.build.lib.analysis.buildinfo.BuildInfoFactory;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration;
import com.google.devtools.build.lib.analysis.config.BuildConfigurationCollection;
import com.google.devtools.build.lib.analysis.config.BuildOptions;
import com.google.devtools.build.lib.analysis.config.ConfigurationFragmentFactory;
import com.google.devtools.build.lib.analysis.configuredtargets.InputFileConfiguredTarget;
import com.google.devtools.build.lib.buildtool.BuildRequest.BuildRequestOptions;
import com.google.devtools.build.lib.clock.BlazeClock;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.cmdline.LabelSyntaxException;
import com.google.devtools.build.lib.cmdline.RepositoryName;
import com.google.devtools.build.lib.exec.ExecutionOptions;
import com.google.devtools.build.lib.packages.NativeAspectClass;
import com.google.devtools.build.lib.packages.PackageFactory;
import com.google.devtools.build.lib.packages.SkylarkSemanticsOptions;
import com.google.devtools.build.lib.packages.Target;
import com.google.devtools.build.lib.packages.util.MockToolsConfig;
import com.google.devtools.build.lib.pkgcache.LoadingOptions;
import com.google.devtools.build.lib.pkgcache.LoadingPhaseRunner;
import com.google.devtools.build.lib.pkgcache.LoadingResult;
import com.google.devtools.build.lib.pkgcache.PackageCacheOptions;
import com.google.devtools.build.lib.pkgcache.PackageManager;
import com.google.devtools.build.lib.pkgcache.PathPackageLocator;
import com.google.devtools.build.lib.rules.repository.RepositoryDelegatorFunction;
import com.google.devtools.build.lib.skyframe.BazelSkyframeExecutorConstants;
import com.google.devtools.build.lib.skyframe.ConfiguredTargetKey;
import com.google.devtools.build.lib.skyframe.PrecomputedValue;
import com.google.devtools.build.lib.skyframe.SequencedSkyframeExecutor;
import com.google.devtools.build.lib.skyframe.SkyframeExecutor;
import com.google.devtools.build.lib.skyframe.util.SkyframeExecutorTestUtils;
import com.google.devtools.build.lib.testutil.FoundationTestCase;
import com.google.devtools.build.lib.testutil.TestConstants;
import com.google.devtools.build.lib.testutil.TestConstants.InternalTestExecutionMode;
import com.google.devtools.build.lib.testutil.TestRuleClassProvider;
import com.google.devtools.build.lib.util.Preconditions;
import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor;
import com.google.devtools.build.lib.vfs.ModifiedFileSet;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.skyframe.SkyKey;
import com.google.devtools.common.options.InvocationPolicyEnforcer;
import com.google.devtools.common.options.Options;
import com.google.devtools.common.options.OptionsParser;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.UUID;
import org.junit.Before;

/**
 * Testing framework for tests of the analysis phase that uses the BuildView and LoadingPhaseRunner
 * APIs correctly (compared to {@link BuildViewTestCase}).
 *
 * <p>The intended usage pattern is to first call {@link #update} with the set of targets, and then
 * assert properties of the configured targets obtained from {@link #getConfiguredTarget}.
 *
 * <p>This class intentionally does not inherit from {@link BuildViewTestCase}; BuildViewTestCase
 * abuses the BuildView API in ways that are incompatible with the goals of this test, i.e. the
 * convenience methods provided there wouldn't work here.
 */
public abstract class AnalysisTestCase extends FoundationTestCase {
  // Thread count handed to BuildView.Options.loadingPhaseThreads in update().
  private static final int LOADING_PHASE_THREADS = 20;

  /** All the flags that can be passed to {@link BuildView#update}. */
  public enum Flag {
    KEEP_GOING,
    SKYFRAME_LOADING_PHASE,
    // Configurations that only include the fragments a target needs to properly analyze.
    TRIMMED_CONFIGURATIONS
  }

  /** Helper class to make it easy to enable and disable flags. */
  public static final class FlagBuilder {
    private final Set<Flag> flags = new HashSet<>();

    /** Adds the flag; returns {@code this} for chaining. */
    public FlagBuilder with(Flag flag) {
      flags.add(flag);
      return this;
    }

    /** Removes the flag; returns {@code this} for chaining. */
    public FlagBuilder without(Flag flag) {
      flags.remove(flag);
      return this;
    }

    public boolean contains(Flag flag) {
      return flags.contains(flag);
    }
  }

  protected BlazeDirectories directories;
  protected MockToolsConfig mockToolsConfig;
  protected AnalysisMock analysisMock;
  // Parsed by useConfiguration(); consumed when update() creates configurations.
  protected BuildOptions buildOptions;
  private OptionsParser optionsParser;
  protected PackageManager packageManager;
  private LoadingPhaseRunner loadingPhaseRunner;
  private BuildView buildView;

  // Note that these configurations are virtual (they use only VFS)
  private BuildConfigurationCollection masterConfig;
  // Result of the most recent update() call; null until then.
  private AnalysisResult analysisResult;

  protected SkyframeExecutor skyframeExecutor = null;
  protected ConfiguredRuleClassProvider ruleClassProvider;

  protected AnalysisTestUtil.DummyWorkspaceStatusActionFactory workspaceStatusActionFactory;
  private PathPackageLocator pkgLocator;

  /**
   * JUnit setup: builds the mock workspace, directories, and tools config, then installs the
   * mock's rule class provider (which in turn creates the Skyframe executor and build view).
   */
  @Before
  public final void createMocks() throws Exception {
    analysisMock = getAnalysisMock();
    pkgLocator = new PathPackageLocator(outputBase, ImmutableList.of(rootDirectory));
    directories =
        new BlazeDirectories(
            new ServerDirectories(outputBase, outputBase),
            rootDirectory,
            analysisMock.getProductName());
    workspaceStatusActionFactory =
        new AnalysisTestUtil.DummyWorkspaceStatusActionFactory(directories);
    mockToolsConfig = new MockToolsConfig(rootDirectory);
    analysisMock.setupMockClient(mockToolsConfig);
    analysisMock.setupMockWorkspaceFiles(directories.getEmbeddedBinariesRoot());

    useRuleClassProvider(analysisMock.createRuleClassProvider());
  }

  /** Creates the Skyframe executor used by this test; override to customize. */
  protected SkyframeExecutor createSkyframeExecutor(
      PackageFactory pkgFactory, ImmutableList<BuildInfoFactory> buildInfoFactories) {
    return SequencedSkyframeExecutor.create(
        pkgFactory,
        fileSystem,
        directories,
        workspaceStatusActionFactory,
        buildInfoFactories,
        ImmutableList.of(),
        input -> false,
        analysisMock.getSkyFunctions(directories),
        ImmutableList.of(),
        PathFragment.EMPTY_FRAGMENT,
        BazelSkyframeExecutorConstants.CROSS_REPOSITORY_LABEL_VIOLATION_STRATEGY,
        BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY,
        BazelSkyframeExecutorConstants.ACTION_ON_IO_EXCEPTION_READING_BUILD_FILE);
  }

  /**
   * Changes the rule class provider to be used for the loading and the analysis phase.
   *
   * <p>Rebuilds the package factory, Skyframe executor, package loading state,
   * loading phase runner, and build view from scratch, then applies the default
   * configuration via {@link #useConfiguration}.
   */
  protected void useRuleClassProvider(ConfiguredRuleClassProvider ruleClassProvider)
      throws Exception {
    this.ruleClassProvider = ruleClassProvider;
    PackageFactory pkgFactory =
        analysisMock
            .getPackageFactoryBuilderForTesting(directories)
            .build(ruleClassProvider, scratch.getFileSystem());
    skyframeExecutor = createSkyframeExecutor(pkgFactory, ruleClassProvider.getBuildInfoFactories());
    TestConstants.processSkyframeExecutorForTesting(skyframeExecutor);
    PackageCacheOptions packageCacheOptions = Options.getDefaults(PackageCacheOptions.class);
    packageCacheOptions.showLoadingProgress = true;
    packageCacheOptions.globbingThreads = 3;
    skyframeExecutor.preparePackageLoading(
        pkgLocator,
        packageCacheOptions,
        Options.getDefaults(SkylarkSemanticsOptions.class),
        ruleClassProvider.getDefaultsPackageContent(
            analysisMock.getInvocationPolicyEnforcer().getInvocationPolicy()),
        UUID.randomUUID(),
        ImmutableMap.<String, String>of(),
        ImmutableMap.<String, String>of(),
        new TimestampGranularityMonitor(BlazeClock.instance()));
    // No repository overrides in tests.
    skyframeExecutor.injectExtraPrecomputedValues(ImmutableList.of(PrecomputedValue.injected(
        RepositoryDelegatorFunction.REPOSITORY_OVERRIDES,
        ImmutableMap.<RepositoryName, PathFragment>of())));
    packageManager = skyframeExecutor.getPackageManager();
    loadingPhaseRunner = skyframeExecutor.getLoadingPhaseRunner(
        pkgFactory.getRuleClassNames(), defaultFlags().contains(Flag.SKYFRAME_LOADING_PHASE));
    buildView = new BuildView(directories, ruleClassProvider, skyframeExecutor, null);
    useConfiguration();
  }

  protected AnalysisMock getAnalysisMock() {
    return AnalysisMock.get();
  }

  protected InternalTestExecutionMode getInternalTestExecutionMode() {
    return InternalTestExecutionMode.NORMAL;
  }

  /**
   * Sets host and target configuration using the specified options, falling back to the default
   * options for unspecified ones, and recreates the build view.
   */
  protected final void useConfiguration(String... args) throws Exception {
    optionsParser = OptionsParser.newOptionsParser(Iterables.concat(Arrays.asList(
          ExecutionOptions.class,
          PackageCacheOptions.class,
          SkylarkSemanticsOptions.class,
          BuildRequestOptions.class,
          BuildView.Options.class),
        ruleClassProvider.getConfigurationOptions()));
    // Defaults applied before user args so args can override them.
    optionsParser.parse(new String[] {"--default_visibility=public" });
    optionsParser.parse(args);
    if (defaultFlags().contains(Flag.TRIMMED_CONFIGURATIONS)) {
      optionsParser.parse("--experimental_dynamic_configs=on");
    }
    InvocationPolicyEnforcer optionsPolicyEnforcer = analysisMock.getInvocationPolicyEnforcer();
    optionsPolicyEnforcer.enforce(optionsParser);

    buildOptions = ruleClassProvider.createBuildOptions(optionsParser);
  }

  /** Flags applied to every update(); subclasses override to change defaults. */
  protected FlagBuilder defaultFlags() {
    return new FlagBuilder();
  }

  /**
   * Returns the action that generates {@code artifact} in the last analysis result, or null if
   * there is none. Fails if the generating action is not a proper {@link Action}.
   */
  protected Action getGeneratingAction(Artifact artifact) {
    ensureUpdateWasCalled();
    ActionAnalysisMetadata action = analysisResult.getActionGraph().getGeneratingAction(artifact);
    if (action != null) {
      Preconditions.checkState(
          action instanceof Action, "%s is not a proper Action object", action.prettyPrint());
      return (Action) action;
    } else {
      return null;
    }
  }

  protected BuildConfigurationCollection getBuildConfigurationCollection() {
    return masterConfig;
  }

  /**
   * Returns the target configuration for the most recent build, as created in Blaze's
   * master configuration creation phase.
   */
  protected BuildConfiguration getTargetConfiguration() throws InterruptedException {
    return Iterables.getOnlyElement(masterConfig.getTargetConfigurations());
  }

  protected BuildConfiguration getHostConfiguration() {
    return masterConfig.getHostConfiguration();
  }

  protected final void ensureUpdateWasCalled() {
    Preconditions.checkState(analysisResult != null, "You must run update() first!");
  }

  /**
   * Update the BuildView: syncs the package cache; loads and analyzes the given labels.
   *
   * <p>Sequence: prepare package loading, invalidate everything under the root, run the loading
   * phase, create the master configurations from {@link #buildOptions}, then run analysis through
   * {@link BuildView#update}. Stores and returns the {@link AnalysisResult}.
   */
  protected AnalysisResult update(
      EventBus eventBus, FlagBuilder config, ImmutableList<String> aspects, String... labels)
      throws Exception {
    Set<Flag> flags = config.flags;

    LoadingOptions loadingOptions = Options.getDefaults(LoadingOptions.class);

    BuildView.Options viewOptions = optionsParser.getOptions(BuildView.Options.class);
    viewOptions.keepGoing = flags.contains(Flag.KEEP_GOING);
    viewOptions.loadingPhaseThreads = LOADING_PHASE_THREADS;

    PackageCacheOptions packageCacheOptions = optionsParser.getOptions(PackageCacheOptions.class);
    PathPackageLocator pathPackageLocator = PathPackageLocator.create(
        outputBase, packageCacheOptions.packagePath, reporter, rootDirectory, rootDirectory);
    packageCacheOptions.showLoadingProgress = true;
    packageCacheOptions.globbingThreads = 7;
    SkylarkSemanticsOptions skylarkSemanticsOptions =
        optionsParser.getOptions(SkylarkSemanticsOptions.class);
    skyframeExecutor.preparePackageLoading(
        pathPackageLocator,
        packageCacheOptions,
        skylarkSemanticsOptions,
        ruleClassProvider.getDefaultsPackageContent(
            analysisMock.getInvocationPolicyEnforcer().getInvocationPolicy()),
        UUID.randomUUID(),
        ImmutableMap.<String, String>of(),
        ImmutableMap.<String, String>of(),
        new TimestampGranularityMonitor(BlazeClock.instance()));
    // Force reloading so the test picks up any scratch-file changes.
    skyframeExecutor.invalidateFilesUnderPathForTesting(reporter,
        ModifiedFileSet.EVERYTHING_MODIFIED, rootDirectory);

    LoadingResult loadingResult =
        loadingPhaseRunner.execute(
            reporter,
            ImmutableList.copyOf(labels),
            PathFragment.EMPTY_FRAGMENT,
            loadingOptions,
            viewOptions.keepGoing,
            /*determineTests=*/false,
            /*callback=*/null);

    BuildRequestOptions requestOptions = optionsParser.getOptions(BuildRequestOptions.class);
    ImmutableSortedSet<String> multiCpu = ImmutableSortedSet.copyOf(requestOptions.multiCpus);
    masterConfig = skyframeExecutor.createConfigurations(
        reporter, ruleClassProvider.getConfigurationFragments(), buildOptions,
        multiCpu, false);
    analysisResult =
        buildView.update(
            loadingResult,
            masterConfig,
            aspects,
            viewOptions,
            AnalysisTestUtil.TOP_LEVEL_ARTIFACT_CONTEXT,
            reporter,
            eventBus);
    return analysisResult;
  }

  /** Convenience overload: no aspects. */
  protected AnalysisResult update(EventBus eventBus, FlagBuilder config, String... labels)
      throws Exception {
    return update(eventBus, config, /*aspects=*/ImmutableList.<String>of(), labels);
  }

  /** Convenience overload: fresh event bus, no aspects. */
  protected AnalysisResult update(FlagBuilder config, String... labels) throws Exception {
    return update(new EventBus(), config, /*aspects=*/ImmutableList.<String>of(), labels);
  }

  /**
   * Update the BuildView: syncs the package cache; loads and analyzes the given labels.
   */
  protected AnalysisResult update(String... labels) throws Exception {
    return update(new EventBus(), defaultFlags(), /*aspects=*/ImmutableList.<String>of(), labels);
  }

  /** Convenience overload: fresh event bus, default flags, explicit aspects. */
  protected AnalysisResult update(ImmutableList<String> aspects, String... labels)
      throws Exception {
    return update(new EventBus(), defaultFlags(), aspects, labels);
  }

  /** Looks up an already-loaded target; label syntax errors become test failures. */
  protected Target getTarget(String label) throws InterruptedException {
    try {
      return SkyframeExecutorTestUtils.getExistingTarget(skyframeExecutor,
          Label.parseAbsolute(label));
    } catch (LabelSyntaxException e) {
      throw new AssertionError(e);
    }
  }

  protected ConfiguredTarget getConfiguredTarget(String label, BuildConfiguration configuration) {
    ensureUpdateWasCalled();
    return getConfiguredTargetForSkyframe(label, configuration);
  }

  // Parses the label and asks Skyframe directly for the configured target.
  private ConfiguredTarget getConfiguredTargetForSkyframe(String label,
      BuildConfiguration configuration) {
    Label parsedLabel;
    try {
      parsedLabel = Label.parseAbsolute(label);
    } catch (LabelSyntaxException e) {
      throw new AssertionError(e);
    }
    return skyframeExecutor.getConfiguredTargetForTesting(reporter, parsedLabel, configuration);
  }

  /**
   * Returns the corresponding configured target, if it exists. Note that this will only return
   * anything useful after a call to update() with the same label.
   */
  protected ConfiguredTarget getConfiguredTarget(String label) throws InterruptedException {
    return getConfiguredTarget(label, getTargetConfiguration());
  }

  /**
   * Returns the corresponding configured target, if it exists. Note that this will only return
   * anything useful after a call to update() with the same label. The label passed in must
   * represent an input file.
   */
  protected InputFileConfiguredTarget getInputFileConfiguredTarget(String label) {
    return (InputFileConfiguredTarget) getConfiguredTarget(label, null);
  }

  protected boolean hasErrors(ConfiguredTarget configuredTarget) {
    return buildView.hasErrors(configuredTarget);
  }

  /**
   * Returns the derived artifact at {@code packageRelativePath} under the target configuration's
   * bin directory, owned by {@code owner}.
   */
  protected Artifact getBinArtifact(String packageRelativePath, ConfiguredTarget owner)
      throws InterruptedException {
    Label label = owner.getLabel();
    return buildView.getArtifactFactory().getDerivedArtifact(
        label.getPackageFragment().getRelative(packageRelativePath),
        getTargetConfiguration().getBinDirectory(label.getPackageIdentifier().getRepository()),
        new ConfiguredTargetKey(owner));
  }

  protected Set<SkyKey> getSkyframeEvaluatedTargetKeys() {
    return buildView.getSkyframeEvaluatedTargetKeysForTesting();
  }

  protected int getTargetsVisited() {
    return buildView.getTargetsVisited();
  }

  protected String getAnalysisError() {
    ensureUpdateWasCalled();
    return analysisResult.getError();
  }

  protected BuildView getView() {
    return buildView;
  }

  protected ActionGraph getActionGraph() {
    return skyframeExecutor.getActionGraph(reporter);
  }

  protected AnalysisResult getAnalysisResult() {
    return analysisResult;
  }

  /** Resets the stored result so ensureUpdateWasCalled() fails until the next update(). */
  protected void clearAnalysisResult() {
    analysisResult = null;
  }

  /**
   * Makes {@code rules} available in tests, in addition to all the rules available to Blaze at
   * running time (e.g., java_library).
   *
   * Also see {@link AnalysisTestCase#setRulesAndAspectsAvailableInTests(Iterable, Iterable)}.
   */
  protected final void setRulesAvailableInTests(RuleDefinition... rules) throws Exception {
    setRulesAndAspectsAvailableInTests(
        ImmutableList.<NativeAspectClass>of(),
        ImmutableList.copyOf(rules));
  }

  /**
   * Makes {@code aspects} and {@code rules} available in tests, in addition to
   * all the rules available to Blaze at running time (e.g., java_library).
   */
  protected final void setRulesAndAspectsAvailableInTests(
      Iterable<NativeAspectClass> aspects, Iterable<RuleDefinition> rules) throws Exception {
    ConfiguredRuleClassProvider.Builder builder = new ConfiguredRuleClassProvider.Builder();
    TestRuleClassProvider.addStandardRules(builder);
    for (NativeAspectClass aspect : aspects) {
      builder.addNativeAspectClass(aspect);
    }
    for (RuleDefinition rule : rules) {
      builder.addRuleDefinition(rule);
    }

    // Rebuild the whole analysis stack with the augmented provider, then
    // run an empty update so the new rules are immediately usable.
    useRuleClassProvider(builder.build());
    update();
  }

  /**
   * Makes custom configuration fragments available in tests.
   */
  protected final void setConfigFragmentsAvailableInTests(
      ConfigurationFragmentFactory... factories) throws Exception {
    ConfiguredRuleClassProvider.Builder builder = new ConfiguredRuleClassProvider.Builder();
    TestRuleClassProvider.addStandardRules(builder);
    for (ConfigurationFragmentFactory factory : factories) {
      builder.addConfigurationFragment(factory);
    }
    useRuleClassProvider(builder.build());
  }
}
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.psi.impl; import com.intellij.lang.injection.InjectedLanguageManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.util.text.StringUtil; import com.intellij.pom.java.LanguageLevel; import com.intellij.psi.*; import com.intellij.psi.augment.PsiAugmentProvider; import com.intellij.psi.codeStyle.CodeStyleManager; import com.intellij.psi.infos.MethodCandidateInfo; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.psi.util.PsiTypesUtil; import com.intellij.psi.util.PsiUtil; import com.intellij.util.IncorrectOperationException; import org.jetbrains.annotations.Nullable; import java.util.List; public class PsiDiamondTypeUtil { private static final Logger LOG = Logger.getInstance(PsiDiamondTypeUtil.class); private PsiDiamondTypeUtil() { } public static boolean canCollapseToDiamond(final PsiNewExpression expression, final PsiNewExpression context, @Nullable final PsiType expectedType) { return canCollapseToDiamond(expression, context, expectedType, false); } public static boolean canChangeContextForDiamond(final PsiNewExpression expression, final PsiType expectedType) { final PsiNewExpression copy = (PsiNewExpression)expression.copy(); return canCollapseToDiamond(copy, copy, expectedType, true); } private static boolean canCollapseToDiamond(final PsiNewExpression expression, final PsiNewExpression context, @Nullable final PsiType 
expectedType, boolean skipDiamonds) {
    // NOTE(review): the start of this method's signature is outside this chunk; from the call in
    // removeRedundantTypeArguments() below it appears to be
    // canCollapseToDiamond(PsiNewExpression expression, PsiExpression context, PsiType expectedType, boolean skipDiamonds)
    // — TODO confirm against the full file.
    // Diamond operator only exists since Java 7.
    if (PsiUtil.getLanguageLevel(context).isAtLeast(LanguageLevel.JDK_1_7)) {
      final PsiJavaCodeReferenceElement classReference = expression.getClassOrAnonymousClassReference();
      if (classReference != null) {
        final PsiReferenceParameterList parameterList = classReference.getParameterList();
        if (parameterList != null) {
          final PsiTypeElement[] typeElements = parameterList.getTypeParameterElements();
          if (typeElements.length > 0) {
            // Already a diamond: nothing to collapse (unless the caller asked to skip that check).
            if (!skipDiamonds && typeElements.length == 1 && typeElements[0].getType() instanceof PsiDiamondType) return false;
            final PsiDiamondTypeImpl.DiamondInferenceResult inferenceResult = PsiDiamondTypeImpl.resolveInferredTypes(expression, context);
            if (inferenceResult.getErrorMessage() == null) {
              final List<PsiType> types = inferenceResult.getInferredTypes();
              // Prefer the expected type's arguments; fall back to the explicit arguments written in source.
              PsiType[] typeArguments = null;
              if (expectedType instanceof PsiClassType) {
                typeArguments = ((PsiClassType)expectedType).getParameters();
              }
              if (typeArguments == null) {
                typeArguments = parameterList.getTypeArguments();
              }
              if (types.size() == typeArguments.length) {
                final PsiMethod method = expression.resolveMethod();
                final PsiElement resolve = classReference.resolve();
                if (resolve instanceof PsiClass) {
                  final PsiTypeParameter[] typeParameters = ((PsiClass)resolve).getTypeParameters();
                  // Collapsible iff re-resolving without explicit arguments infers the same types.
                  return areTypeArgumentsRedundant(typeArguments, context, true, method, typeParameters);
                }
              }
            }
          }
        }
      }
    }
    return false;
  }

  /**
   * Replaces the explicit type-argument list of a {@code new} expression with a diamond ({@code <>}).
   *
   * @param psiElement expected to be the {@link PsiReferenceParameterList} of the class reference;
   *                   returned unchanged when it is anything else
   * @return the replacement element (the diamond parameter list), or {@code psiElement} untouched
   */
  public static PsiElement replaceExplicitWithDiamond(PsiElement psiElement) {
    if (psiElement instanceof PsiReferenceParameterList) {
      // Parse a template "new a<>()" and steal its empty diamond parameter list.
      final PsiNewExpression expression =
        (PsiNewExpression)JavaPsiFacade.getElementFactory(psiElement.getProject()).createExpressionFromText("new a<>()", psiElement);
      final PsiJavaCodeReferenceElement classReference = expression.getClassReference();
      LOG.assertTrue(classReference != null);
      final PsiReferenceParameterList parameterList = classReference.getParameterList();
      LOG.assertTrue(parameterList != null);
      return psiElement.replace(parameterList);
    }
    return psiElement;
  }

  /**
   * Replaces a diamond with the explicit type arguments inferred for the enclosing {@code new} expression.
   *
   * @param element the diamond type element; its parent must be the class reference
   * @return the (possibly updated) enclosing code reference, or the parent when it is not a reference
   */
  public static PsiElement replaceDiamondWithExplicitTypes(PsiElement element) {
    final PsiElement parent = element.getParent();
    if (!(parent instanceof PsiJavaCodeReferenceElement)) {
      return parent;
    }
    final PsiJavaCodeReferenceElement javaCodeReferenceElement = (PsiJavaCodeReferenceElement) parent;
    PsiReferenceParameterList parameterList = javaCodeReferenceElement.getParameterList();
    if (parameterList == null) return javaCodeReferenceElement;
    // Build "Qualified.Name<T1,T2,...>" textually from the inferred types, then reparse it.
    final StringBuilder text = new StringBuilder();
    text.append(javaCodeReferenceElement.getQualifiedName());
    text.append('<');
    final PsiNewExpression newExpression = PsiTreeUtil.getParentOfType(element, PsiNewExpression.class);
    final PsiDiamondType.DiamondInferenceResult result = PsiDiamondTypeImpl.resolveInferredTypesNoCheck(newExpression, newExpression);
    text.append(StringUtil.join(result.getInferredTypes(), psiType -> psiType.getCanonicalText(), ","));
    text.append('>');
    final PsiElementFactory elementFactory = JavaPsiFacade.getElementFactory(element.getProject());
    final PsiJavaCodeReferenceElement newReference = elementFactory.createReferenceFromText(text.toString(), element);
    PsiReferenceParameterList newReferenceParameterList = newReference.getParameterList();
    LOG.assertTrue(newReferenceParameterList != null);
    // Swap in the explicit list and reformat the result in place.
    CodeStyleManager.getInstance(javaCodeReferenceElement.getProject()).reformat(parameterList.replace(newReferenceParameterList));
    return javaCodeReferenceElement;
  }

  /**
   * If {@code expr} is a {@code new} expression whose type argument list is a single diamond,
   * expands it to explicit type arguments; otherwise returns {@code expr} unchanged.
   */
  public static PsiExpression expandTopLevelDiamondsInside(PsiExpression expr) {
    if (expr instanceof PsiNewExpression) {
      final PsiJavaCodeReferenceElement classReference = ((PsiNewExpression)expr).getClassReference();
      if (classReference != null) {
        final PsiReferenceParameterList parameterList = classReference.getParameterList();
        if (parameterList != null) {
          final PsiTypeElement[] typeParameterElements = parameterList.getTypeParameterElements();
          if (typeParameterElements.length == 1 && typeParameterElements[0].getType() instanceof PsiDiamondType) {
            return (PsiExpression)replaceDiamondWithExplicitTypes(parameterList).getParent();
          }
        }
      }
    }
    return expr;
  }

  /**
   * Returns the canonical text of {@code type} with its type-argument section collapsed to {@code <>}
   * when the context's language level is Java 7 or higher; otherwise the full canonical text.
   */
  public static String getCollapsedType(PsiType type, PsiElement context) {
    String typeText = type.getCanonicalText();
    if (PsiUtil.isLanguageLevel7OrHigher(context)) {
      final int idx = typeText.indexOf('<');
      if (idx >= 0) {
        return typeText.substring(0, idx) + "<>";
      }
    }
    return typeText;
  }

  /**
   * Returns true when the expression's target type element (variable type or enclosing method's
   * return type) is provided by a {@link PsiAugmentProvider} — i.e. the declared type is inferred,
   * so this inspection must not rely on it.
   */
  private static boolean isAugmented(PsiExpression expression) {
    PsiElement gParent = PsiUtil.skipParenthesizedExprUp(expression.getParent());
    PsiTypeElement typeElement = null;
    if (gParent instanceof PsiVariable) {
      typeElement = ((PsiVariable)gParent).getTypeElement();
    }
    else if (gParent instanceof PsiReturnStatement) {
      PsiElement method = PsiTreeUtil.getParentOfType(gParent, PsiMethod.class, PsiLambdaExpression.class);
      typeElement = method instanceof PsiMethod ? ((PsiMethod)method).getReturnTypeElement() : null;
    }
    return typeElement != null && PsiAugmentProvider.getInferredType(typeElement) != null;
  }

  /**
   * Checks whether explicit type arguments can be removed without changing resolution: builds a
   * non-physical copy of the enclosing (top-level) call, erases the explicit arguments in the copy,
   * and verifies that inference produces the same types and the same resolved method.
   *
   * @param typeArguments  the explicit type arguments currently written in source
   * @param context        the call / method reference being inspected
   * @param constructorRef true when checking a {@code new} expression (constructor arguments)
   * @param method         resolved target, may be null
   * @param typeParameters type parameters the arguments are substituted for
   * @return true when the arguments are redundant and safe to remove
   */
  public static boolean areTypeArgumentsRedundant(PsiType[] typeArguments,
                                                  PsiExpression context,
                                                  boolean constructorRef,
                                                  @Nullable PsiMethod method,
                                                  PsiTypeParameter[] typeParameters) {
    try {
      final PsiElement copy;
      final PsiType typeByParent = PsiTypesUtil.getExpectedTypeByParent(context);
      if (typeByParent != null) {
        // Target type is declared by the parent (variable/return); if that declaration itself is
        // inferred by an augment provider we cannot reason about it — keep the arguments.
        if (isAugmented(context)) {
          return false;
        }
        copy = LambdaUtil.copyWithExpectedType(context, typeByParent);
      }
      else {
        // Mark the element so it can be located again inside the copied tree.
        final PsiExpressionList argumentList = context instanceof PsiCallExpression ? ((PsiCallExpression)context).getArgumentList() : null;
        final Object marker = new Object();
        PsiTreeUtil.mark(argumentList != null ? argumentList : context, marker);
        final PsiCall call = LambdaUtil.treeWalkUp(context);
        if (call != null) {
          final PsiCall callCopy = LambdaUtil.copyTopLevelCall(call);
          copy = callCopy != null ? PsiTreeUtil.releaseMark(callCopy, marker) : null;
        }
        else {
          // No enclosing call: copy the whole file. Bail out inside injected fragments.
          final InjectedLanguageManager injectedLanguageManager = InjectedLanguageManager.getInstance(context.getProject());
          if (injectedLanguageManager.getInjectionHost(context) != null) {
            return false;
          }
          final PsiFile containingFile = context.getContainingFile();
          final PsiFile fileCopy = (PsiFile)containingFile.copy();
          copy = PsiTreeUtil.releaseMark(fileCopy, marker);
          // Re-find the target method inside the copy so identity comparison works.
          if (method != null && method.getContainingFile() == containingFile) {
            final PsiElement startMethodElementInCopy = fileCopy.findElementAt(method.getTextOffset());
            method = PsiTreeUtil.getParentOfType(startMethodElementInCopy, PsiMethod.class);
            if (method == null) {
              //lombok generated builder
              return false;
            }
          }
        }
      }
      if (context instanceof PsiMethodReferenceExpression) {
        PsiMethodReferenceExpression methodRefCopy = PsiTreeUtil.getParentOfType(copy, PsiMethodReferenceExpression.class, false);
        if (methodRefCopy != null && !isInferenceEquivalent(typeArguments, typeParameters, method, methodRefCopy)) {
          return false;
        }
        return true;
      }
      final PsiCallExpression exprCopy = PsiTreeUtil.getParentOfType(copy, PsiCallExpression.class, false);
      if (exprCopy != null) {
        final PsiElementFactory elementFactory = JavaPsiFacade.getInstance(exprCopy.getProject()).getElementFactory();
        if (constructorRef) {
          if (!(exprCopy instanceof PsiNewExpression) || !isInferenceEquivalent(typeArguments, elementFactory, (PsiNewExpression)exprCopy)) {
            return false;
          }
        }
        else {
          LOG.assertTrue(method != null);
          if (!isInferenceEquivalent(typeArguments, elementFactory, exprCopy, method, typeParameters)) {
            return false;
          }
        }
      }
    }
    catch (IncorrectOperationException e) {
      LOG.info(e);
      return false;
    }
    return true;
  }

  /**
   * Method-reference variant: erases the qualifier's type arguments (or the explicit parameter list)
   * in the copy and checks that resolution and substitution are unchanged.
   */
  private static boolean isInferenceEquivalent(PsiType[] typeArguments,
                                               PsiTypeParameter[] typeParameters,
                                               PsiMethod method,
                                               PsiMethodReferenceExpression methodRefCopy) {
    final PsiElementFactory elementFactory = JavaPsiFacade.getInstance(methodRefCopy.getProject()).getElementFactory();
    PsiTypeElement qualifierType = methodRefCopy.getQualifierType();
    if (qualifierType != null) {
      // Strip type arguments from the qualifier type: Foo<T>::bar -> Foo::bar
      qualifierType.replace(elementFactory.createTypeElement(((PsiClassType)qualifierType.getType()).rawType()));
    }
    else {
      PsiReferenceParameterList parameterList = methodRefCopy.getParameterList();
      if (parameterList != null) {
        parameterList.delete();
      }
    }
    JavaResolveResult result = methodRefCopy.advancedResolve(false);
    if (method != null && result.getElement() != method) return false;
    final PsiSubstitutor psiSubstitutor = result.getSubstitutor();
    for (int i = 0; i < typeParameters.length; i++) {
      PsiTypeParameter typeParameter = typeParameters[i];
      final PsiType inferredType = psiSubstitutor.getSubstitutionMap().get(typeParameter);
      if (!typeArguments[i].equals(inferredType)) {
        return false;
      }
    }
    return checkParentApplicability(methodRefCopy);
  }

  /**
   * Method-call variant: replaces the copy's explicit type-argument list with an empty one
   * (borrowed from a template "foo()" call) and re-resolves.
   */
  private static boolean isInferenceEquivalent(PsiType[] typeArguments,
                                               PsiElementFactory elementFactory,
                                               PsiCallExpression exprCopy,
                                               PsiMethod method,
                                               PsiTypeParameter[] typeParameters) throws IncorrectOperationException {
    PsiReferenceParameterList list = ((PsiCallExpression)elementFactory.createExpressionFromText("foo()", null)).getTypeArgumentList();
    exprCopy.getTypeArgumentList().replace(list);
    final JavaResolveResult copyResult = exprCopy.resolveMethodGenerics();
    if (method != copyResult.getElement()) return false;
    final PsiSubstitutor psiSubstitutor = copyResult.getSubstitutor();
    for (int i = 0, length = typeParameters.length; i < length; i++) {
      PsiTypeParameter typeParameter = typeParameters[i];
      final PsiType inferredType = psiSubstitutor.getSubstitutionMap().get(typeParameter);
      if (!typeArguments[i].equals(inferredType)) {
        return false;
      }
      // Removing the argument would change (un)boxing of the return value — keep it.
      if (PsiUtil.resolveClassInType(method.getReturnType()) == typeParameter && PsiPrimitiveType.getUnboxedType(inferredType) != null) {
        return false;
      }
    }
    return checkParentApplicability(exprCopy);
  }

  /**
   * Constructor ({@code new}) variant: collapses the copy to a diamond and compares what the
   * diamond infers against the explicit arguments (wildcard bounds are accepted when compatible).
   */
  private static boolean isInferenceEquivalent(PsiType[] typeArguments,
                                               PsiElementFactory elementFactory,
                                               PsiNewExpression exprCopy) throws IncorrectOperationException {
    final PsiJavaCodeReferenceElement collapsedClassReference =
      ((PsiNewExpression)elementFactory.createExpressionFromText("new A<>()", null)).getClassOrAnonymousClassReference();
    LOG.assertTrue(collapsedClassReference != null);
    final PsiReferenceParameterList diamondParameterList = collapsedClassReference.getParameterList();
    LOG.assertTrue(diamondParameterList != null);
    final PsiJavaCodeReferenceElement classReference = exprCopy.getClassOrAnonymousClassReference();
    LOG.assertTrue(classReference != null);
    final PsiReferenceParameterList parameterList = classReference.getParameterList();
    LOG.assertTrue(parameterList != null);
    parameterList.replace(diamondParameterList);
    final PsiType[] inferredArgs = classReference.getParameterList().getTypeArguments();
    if (typeArguments.length != inferredArgs.length) {
      return false;
    }
    for (int i = 0; i < typeArguments.length; i++) {
      PsiType typeArgument = typeArguments[i];
      if (inferredArgs[i] instanceof PsiWildcardType) {
        final PsiWildcardType wildcardType = (PsiWildcardType)inferredArgs[i];
        final PsiType bound = wildcardType.getBound();
        if (bound != null) {
          if (wildcardType.isExtends()) {
            if (bound.isAssignableFrom(typeArgument)) continue;
          }
          else {
            if (typeArgument.isAssignableFrom(bound)) continue;
          }
        }
      }
      if (!typeArgument.equals(inferredArgs[i])) {
        return false;
      }
    }
    return checkParentApplicability(exprCopy);
  }

  /**
   * Walks up the chain of enclosing calls in the copy and verifies each one still resolves to an
   * applicable candidate after the type arguments were erased.
   */
  private static boolean checkParentApplicability(PsiExpression exprCopy) {
    while (exprCopy != null) {
      JavaResolveResult resolveResult = exprCopy instanceof PsiCallExpression ? PsiDiamondType.getDiamondsAwareResolveResult((PsiCall)exprCopy) : null;
      if (resolveResult instanceof MethodCandidateInfo && !((MethodCandidateInfo)resolveResult).isApplicable()) {
        return false;
      }
      exprCopy = PsiTreeUtil.getParentOfType(exprCopy, PsiCallExpression.class, true);
    }
    return true;
  }

  /**
   * Removes redundant type arguments which appear in any descendants of the supplied element.
   *
   * @param element element to start the replacement from
   */
  public static void removeRedundantTypeArguments(PsiElement element) {
    // Pass 1: collapse new-expressions to diamonds where inference reproduces the arguments.
    for (PsiNewExpression newExpression : PsiTreeUtil.collectElementsOfType(element, PsiNewExpression.class)) {
      PsiJavaCodeReferenceElement classReference = newExpression.getClassOrAnonymousClassReference();
      if (classReference != null && canCollapseToDiamond(newExpression, newExpression, null)) {
        replaceExplicitWithDiamond(classReference.getParameterList());
      }
    }
    // Pass 2: drop explicit type arguments on method calls where they are redundant.
    PsiElementFactory factory = JavaPsiFacade.getInstance(element.getProject()).getElementFactory();
    for (PsiMethodCallExpression call : PsiTreeUtil.collectElementsOfType(element, PsiMethodCallExpression.class)) {
      PsiType[] arguments = call.getTypeArguments();
      if (arguments.length == 0) continue;
      PsiMethod method = call.resolveMethod();
      if (method != null) {
        PsiTypeParameter[] parameters = method.getTypeParameters();
        if (arguments.length == parameters.length && areTypeArgumentsRedundant(arguments, call, false, method, parameters)) {
          PsiMethodCallExpression expr = (PsiMethodCallExpression)factory.createExpressionFromText("foo()", null);
          call.getTypeArgumentList().replace(expr.getTypeArgumentList());
        }
      }
    }
  }
}
/* * Copyright 2000-2014 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.fileEditor.impl; import com.intellij.openapi.Disposable; import com.intellij.openapi.actionSystem.CommonDataKeys; import com.intellij.openapi.actionSystem.DataProvider; import com.intellij.openapi.actionSystem.IdeActions; import com.intellij.openapi.actionSystem.PlatformDataKeys; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.command.CommandProcessor; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.colors.EditorColors; import com.intellij.openapi.editor.colors.EditorColorsManager; import com.intellij.openapi.fileEditor.*; import com.intellij.openapi.fileEditor.ex.FileEditorManagerEx; import com.intellij.openapi.fileEditor.ex.FileEditorProviderManager; import com.intellij.openapi.fileEditor.ex.IdeDocumentHistory; import com.intellij.openapi.project.DumbService; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.NullUtils; import com.intellij.openapi.util.Pair; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.openapi.wm.FocusWatcher; import com.intellij.ui.JBColor; import com.intellij.ui.PrevNextActionsDescriptor; import com.intellij.ui.SideBorder; import com.intellij.ui.TabbedPaneWrapper; import com.intellij.ui.tabs.UiDecorator; import com.intellij.util.ArrayUtil; import com.intellij.util.SmartList; 
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.border.EmptyBorder;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import java.awt.*;
import java.util.*;
import java.util.List;

/**
 * This class hides the internal structure of the UI component which represents
 * a set of opened editors. For example, one myEditor is represented by its
 * component; more than one myEditor is wrapped into a tabbed pane.
 *
 * @author Vladimir Kondratyev
 */
public abstract class EditorComposite implements Disposable {
  private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.fileEditor.impl.EditorComposite");

  /**
   * File for which composite is created
   */
  @NotNull private final VirtualFile myFile;

  /**
   * Whether the composite is pinned or not
   */
  private boolean myPinned;

  /**
   * Editors which are opened in the composite
   */
  protected FileEditor[] myEditors;

  /**
   * Initial timestamp of the file. It is used to implement the
   * "close non modified editors first" feature.
   */
  private final long myInitialFileTimeStamp;

  // Null while there is only a single editor; created lazily when a second editor is added.
  protected TabbedPaneWrapper myTabbedPaneWrapper;
  private final MyComponent myComponent;
  private final FocusWatcher myFocusWatcher;

  /**
   * Currently selected myEditor
   */
  private FileEditor mySelectedEditor;

  private final FileEditorManagerEx myFileEditorManager;
  // Per-editor containers for components stacked above / below the editor area.
  private final Map<FileEditor, JComponent> myTopComponents = new HashMap<FileEditor, JComponent>();
  private final Map<FileEditor, JComponent> myBottomComponents = new HashMap<FileEditor, JComponent>();

  /**
   * @param file              <code>file</code> for which composite is being constructed
   * @param editors           <code>editors</code> that should be placed into the composite
   * @param fileEditorManager owning manager; the composite registers itself for disposal with its project
   * @exception java.lang.IllegalArgumentException if <code>editors</code>
   * is <code>null</code> or contains <code>null</code> elements or the array is empty
   */
  EditorComposite(@NotNull final VirtualFile file,
                  @NotNull final FileEditor[] editors,
                  @NotNull final FileEditorManagerEx fileEditorManager) {
    myFile = file;
    myEditors = editors;
    if (NullUtils.hasNull(editors)) throw new IllegalArgumentException("Must not pass null editors in " + Arrays.asList(editors));
    myFileEditorManager = fileEditorManager;
    myInitialFileTimeStamp = myFile.getTimeStamp();
    Disposer.register(fileEditorManager.getProject(), this);

    if (editors.length > 1) {
      // Multiple editors: wrap them in a tabbed pane.
      myTabbedPaneWrapper = createTabbedPaneWrapper(editors);
      JComponent component = myTabbedPaneWrapper.getComponent();
      myComponent = new MyComponent(component, component);
    }
    else if (editors.length == 1) {
      myTabbedPaneWrapper = null;
      FileEditor editor = editors[0];
      myComponent = new MyComponent(createEditorComponent(editor), editor.getPreferredFocusedComponent());
    }
    else {
      throw new IllegalArgumentException("editors array cannot be empty");
    }

    mySelectedEditor = editors[0];
    myFocusWatcher = new FocusWatcher();
    myFocusWatcher.install(myComponent);

    // Forward selection changes of this composite's file to the editors (deselect/select notify)
    // and to provider/document-history bookkeeping.
    myFileEditorManager.addFileEditorManagerListener(new FileEditorManagerAdapter() {
      @Override
      public void selectionChanged(@NotNull final FileEditorManagerEvent event) {
        final VirtualFile oldFile = event.getOldFile();
        final VirtualFile newFile = event.getNewFile();
        if (Comparing.equal(oldFile, newFile) && Comparing.equal(getFile(), newFile)) {
          Runnable runnable = new Runnable() {
            @Override
            public void run() {
              final FileEditor oldEditor = event.getOldEditor();
              if (oldEditor != null) oldEditor.deselectNotify();
              final FileEditor newEditor = event.getNewEditor();
              if (newEditor != null) newEditor.selectNotify();
              ((FileEditorProviderManagerImpl)FileEditorProviderManager.getInstance()).providerSelected(EditorComposite.this);
              ((IdeDocumentHistoryImpl)IdeDocumentHistory.getInstance(myFileEditorManager.getProject())).onSelectionChanged();
            }
          };
          if (ApplicationManager.getApplication().isDispatchThread()) {
            CommandProcessor.getInstance().executeCommand(myFileEditorManager.getProject(), runnable, "Switch Active Editor", null);
          }
          else {
            runnable.run(); // not invoked by user
          }
        }
      }
    }, this);
  }

  /** Builds the bottom-placed tab pane holding one tab per editor. */
  @NotNull
  private TabbedPaneWrapper.AsJBTabs createTabbedPaneWrapper(FileEditor[] editors) {
    PrevNextActionsDescriptor descriptor = new PrevNextActionsDescriptor(IdeActions.ACTION_NEXT_EDITOR_TAB, IdeActions.ACTION_PREVIOUS_EDITOR_TAB);
    final TabbedPaneWrapper.AsJBTabs wrapper = new TabbedPaneWrapper.AsJBTabs(myFileEditorManager.getProject(), SwingConstants.BOTTOM, descriptor, this);

    wrapper.getTabs().getPresentation().setPaintBorder(0, 0, 0, 0).setTabSidePaintBorder(1).setGhostsAlwaysVisible(true)
      .setUiDecorator(new UiDecorator() {
        @Override
        @NotNull
        public UiDecoration getDecoration() {
          return new UiDecoration(null, new Insets(0, 8, 0, 8));
        }
      });
    wrapper.getTabs().getComponent().setBorder(new EmptyBorder(0, 0, 1, 0));

    for (FileEditor editor : editors) {
      // Reuse the already-built component when converting a single-editor composite to tabs.
      JComponent component = myEditors.length == 1 && editor == myEditors[0] ? (JComponent)myComponent.getComponent(0) : createEditorComponent(editor);
      wrapper.addTab(editor.getName(), component);
    }
    wrapper.addChangeListener(new MyChangeListener());

    return wrapper;
  }

  /** Wraps an editor's component with NORTH/SOUTH panels that host top/bottom add-on components. */
  private JComponent createEditorComponent(final FileEditor editor) {
    JPanel component = new JPanel(new BorderLayout());
    JComponent comp = editor.getComponent();
    if (!FileEditorManagerImpl.isDumbAware(editor)) {
      comp = DumbService.getInstance(myFileEditorManager.getProject()).wrapGently(comp, editor);
    }
    component.add(comp, BorderLayout.CENTER);

    JPanel topPanel = new TopBottomPanel();
    myTopComponents.put(editor, topPanel);
    component.add(topPanel, BorderLayout.NORTH);

    final JPanel bottomPanel = new TopBottomPanel();
    myBottomComponents.put(editor, bottomPanel);
    component.add(bottomPanel, BorderLayout.SOUTH);

    return component;
  }

  /**
   * @return whether myEditor composite is pinned
   */
  public boolean isPinned() {
    return myPinned;
  }

  /**
   * Sets new "pinned" state
   */
  void setPinned(final boolean pinned) {
    myPinned = pinned;
  }

  /** Publishes a selection-change event and records the selection in the owning window. */
  private void fireSelectedEditorChanged(final FileEditor oldSelectedEditor, final FileEditor newSelectedEditor) {
    if ((!EventQueue.isDispatchThread() || !myFileEditorManager.isInsideChange()) && !Comparing.equal(oldSelectedEditor, newSelectedEditor)) {
      myFileEditorManager.notifyPublisher(new Runnable() {
        @Override
        public void run() {
          final FileEditorManagerEvent event = new FileEditorManagerEvent(myFileEditorManager, myFile, oldSelectedEditor, myFile, newSelectedEditor);
          final FileEditorManagerListener publisher = myFileEditorManager.getProject().getMessageBus().syncPublisher(FileEditorManagerListener.FILE_EDITOR_MANAGER);
          publisher.selectionChanged(event);
        }
      });
      final JComponent component = newSelectedEditor.getComponent();
      final EditorWindowHolder holder = UIUtil.getParentOfType(EditorWindowHolder.class, component);
      if (holder != null) {
        ((FileEditorManagerImpl)myFileEditorManager).addSelectionRecord(myFile, holder.getEditorWindow());
      }
    }
  }

  /**
   * @return preferred focused component inside myEditor composite. Composite uses FocusWatcher to
   * track focus movement inside the myEditor.
   */
  @Nullable
  public JComponent getPreferredFocusedComponent() {
    if (mySelectedEditor == null) return null;

    final Component component = myFocusWatcher.getFocusedComponent();
    // Fall back to the editor's own preference when the last-focused component is unusable.
    if (!(component instanceof JComponent) || !component.isShowing() || !component.isEnabled() || !component.isFocusable()) {
      return getSelectedEditor().getPreferredFocusedComponent();
    }
    return (JComponent)component;
  }

  /**
   * @return file for which composite was created.
   */
  @NotNull
  public VirtualFile getFile() {
    return myFile;
  }

  public FileEditorManager getFileEditorManager() {
    return myFileEditorManager;
  }

  /**
   * @return initial time stamp of the file (on moment of creation of
   * the composite)
   */
  public long getInitialFileTimeStamp() {
    return myInitialFileTimeStamp;
  }

  /**
   * @return editors which are opened in the composite. <b>Do not modify
   * this array</b>.
   */
  @NotNull
  public FileEditor[] getEditors() {
    return myEditors;
  }

  @NotNull
  public List<JComponent> getTopComponents(@NotNull FileEditor editor) {
    return getTopBottomComponents(editor, true);
  }

  @NotNull
  public List<JComponent> getBottomComponents(@NotNull FileEditor editor) {
    return getTopBottomComponents(editor, false);
  }

  /** Unwraps the components previously added via add{Top,Bottom}Component for the given editor. */
  @NotNull
  private List<JComponent> getTopBottomComponents(@NotNull FileEditor editor, boolean top) {
    SmartList<JComponent> result = new SmartList<JComponent>();
    JComponent container = top ? myTopComponents.get(editor) : myBottomComponents.get(editor);
    for (Component each : container.getComponents()) {
      if (each instanceof TopBottomComponentWrapper) {
        result.add(((TopBottomComponentWrapper)each).getWrappee());
      }
    }
    return Collections.unmodifiableList(result);
  }

  public void addTopComponent(FileEditor editor, JComponent component) {
    manageTopOrBottomComponent(editor, component, true, false);
  }

  public void removeTopComponent(FileEditor editor, JComponent component) {
    manageTopOrBottomComponent(editor, component, true, true);
  }

  public void addBottomComponent(FileEditor editor, JComponent component) {
    manageTopOrBottomComponent(editor, component, false, false);
  }

  public void removeBottomComponent(FileEditor editor, JComponent component) {
    manageTopOrBottomComponent(editor, component, false, true);
  }

  /** Adds (wrapped) or removes a top/bottom add-on component and revalidates its container. */
  private void manageTopOrBottomComponent(FileEditor editor, JComponent component, boolean top, boolean remove) {
    final JComponent container = top ? myTopComponents.get(editor) : myBottomComponents.get(editor);
    assert container != null;

    if (remove) {
      // Components are stored wrapped, so remove the wrapper (the component's parent).
      container.remove(component.getParent());
    }
    else {
      container.add(new TopBottomComponentWrapper(component, top));
    }
    container.revalidate();
  }

  /**
   * @return currently selected myEditor.
   */
  @NotNull
  FileEditor getSelectedEditor() {
    return getSelectedEditorWithProvider().getFirst();
  }

  public boolean isDisposed() {
    return myTabbedPaneWrapper != null && myTabbedPaneWrapper.isDisposed();
  }

  /**
   * @return currently selected myEditor with its provider.
   */
  @NotNull
  public abstract Pair<FileEditor, FileEditorProvider> getSelectedEditorWithProvider();

  void setSelectedEditor(final int index) {
    if (myEditors.length == 1) {
      // nothing to do
      LOG.assertTrue(myTabbedPaneWrapper == null);
    }
    else {
      LOG.assertTrue(myTabbedPaneWrapper != null);
      myTabbedPaneWrapper.setSelectedIndex(index);
    }
  }

  /**
   * @return component which represents set of file editors in the UI
   */
  public JComponent getComponent() {
    return myComponent;
  }

  /**
   * @return <code>true</code> if the composite contains at least one
   * modified myEditor
   */
  public boolean isModified() {
    for (int i = myEditors.length - 1; i >= 0; i--) {
      if (myEditors[i].isModified()) {
        return true;
      }
    }
    return false;
  }

  /**
   * Handles changes of selected myEditor
   */
  private final class MyChangeListener implements ChangeListener {
    @Override
    public void stateChanged(ChangeEvent e) {
      FileEditor oldSelectedEditor = mySelectedEditor;
      LOG.assertTrue(oldSelectedEditor != null);
      int selectedIndex = myTabbedPaneWrapper.getSelectedIndex();
      LOG.assertTrue(selectedIndex != -1);
      mySelectedEditor = myEditors[selectedIndex];
      fireSelectedEditorChanged(oldSelectedEditor, mySelectedEditor);
    }
  }

  // Root panel of the composite; routes focus requests to the current focus component
  // and exposes file/editor context via DataProvider.
  private class MyComponent extends JPanel implements DataProvider {
    @Nullable
    private JComponent myFocusComponent;

    public MyComponent(@NotNull JComponent realComponent, @Nullable JComponent focusComponent) {
      super(new BorderLayout());
      myFocusComponent = focusComponent;
      add(realComponent, BorderLayout.CENTER);
    }

    void setComponent(JComponent newComponent) {
      add(newComponent, BorderLayout.CENTER);
      myFocusComponent = newComponent;
    }

    @Override
    public boolean requestFocusInWindow() {
      return myFocusComponent == null ? false : myFocusComponent.requestFocusInWindow();
    }

    @Override
    public void requestFocus() {
      if (myFocusComponent != null) {
        myFocusComponent.requestFocus();
      }
    }

    @Override
    public boolean requestDefaultFocus() {
      return myFocusComponent == null ? false : myFocusComponent.requestDefaultFocus();
    }

    @Override
    public final Object getData(String dataId) {
      if (PlatformDataKeys.FILE_EDITOR.is(dataId)) {
        return getSelectedEditor();
      }
      else if (CommonDataKeys.VIRTUAL_FILE.is(dataId)) {
        return myFile.isValid() ? myFile : null;
      }
      else if (CommonDataKeys.VIRTUAL_FILE_ARRAY.is(dataId)) {
        return myFile.isValid() ? new VirtualFile[]{myFile} : null;
      }
      else {
        // Delegate everything else to the focused component when it can provide data itself.
        JComponent component = getPreferredFocusedComponent();
        if (component instanceof DataProvider && component != this) {
          return ((DataProvider)component).getData(dataId);
        }
        else {
          return null;
        }
      }
    }
  }

  @Override
  public void dispose() {
    for (FileEditor editor : myEditors) {
      if (!Disposer.isDisposed(editor)) {
        Disposer.dispose(editor);
      }
    }
    myFocusWatcher.deinstall(myFocusWatcher.getTopComponent());
  }

  /** Appends an editor; converts the composite to a tabbed pane on the second editor. */
  void addEditor(@NotNull FileEditor editor) {
    ApplicationManager.getApplication().assertIsDispatchThread();
    FileEditor[] editors = ArrayUtil.append(myEditors, editor);
    if (myTabbedPaneWrapper == null) {
      myTabbedPaneWrapper = createTabbedPaneWrapper(editors);
      myComponent.setComponent(myTabbedPaneWrapper.getComponent());
    }
    else {
      JComponent component = createEditorComponent(editor);
      myTabbedPaneWrapper.addTab(editor.getName(), component);
    }
    // Reinstall the focus watcher so it covers the new component hierarchy.
    myFocusWatcher.deinstall(myFocusWatcher.getTopComponent());
    myFocusWatcher.install(myComponent);
    myEditors = editors;
  }

  // Vertical stack panel painted in the editor gutter's background color.
  private static class TopBottomPanel extends JPanel {
    private TopBottomPanel() {
      setLayout(new BoxLayout(this, BoxLayout.Y_AXIS));
    }

    @Override
    public Color getBackground() {
      Color color = EditorColorsManager.getInstance().getGlobalScheme().getColor(EditorColors.GUTTER_BACKGROUND);
      return color == null ? EditorColors.GUTTER_BACKGROUND.getDefaultColor() : color;
    }
  }

  // Wrapper drawing a tear-line separator on the side facing the editor.
  private static class TopBottomComponentWrapper extends JPanel {
    private final JComponent myWrappee;

    public TopBottomComponentWrapper(JComponent component, boolean top) {
      super(new BorderLayout());
      myWrappee = component;
      setOpaque(false);
      setBorder(new SideBorder(null, top ? SideBorder.BOTTOM : SideBorder.TOP) {
        @Override
        public Color getLineColor() {
          Color result = EditorColorsManager.getInstance().getGlobalScheme().getColor(EditorColors.TEARLINE_COLOR);
          return result == null ? JBColor.BLACK : result;
        }
      });
      add(component);
    }

    @NotNull
    public JComponent getWrappee() {
      return myWrappee;
    }
  }
}
/* Copyright 2006 by Sean Luke Licensed under the Academic Free License version 3.0 See the file "LICENSE" for more information */ package ec; import ec.util.Parameter; import java.io.*; import ec.util.*; /* * Individual.java * Created: Tue Aug 10 19:58:13 1999 */ /** * An Individual is an item in the EC population stew which is evaluated * and assigned a fitness which determines its likelihood of selection. * Individuals are created most commonly by the newIndividual(...) method * of the ec.Species class. * * <P>In general Individuals are immutable. That is, once they are created * their genetic material should not be modified. This protocol helps insure that they are * safe to read under multithreaded conditions. You can violate this protocol, * but try to do so when you know you have only have a single thread. * * <p>In addition to serialization for checkpointing, Individuals may read and write themselves to streams in three ways. * * <ul> * <li><b>writeIndividual(...,DataOutput)/readIndividual(...,DataInput)</b>&nbsp;&nbsp;&nbsp;This method * transmits or receives an individual in binary. It is the most efficient approach to sending * individuals over networks, etc. These methods write the evaluated flag and the fitness, then * call <b>readGenotype/writeGenotype</b>, which you must implement to write those parts of your * Individual special to your functions-- the default versions of readGenotype/writeGenotype throw errors. * You don't need to implement them if you don't plan on using read/writeIndividual. * * <li><b>printIndividual(...,PrintWriter)/readIndividual(...,LineNumberReader)</b>&nbsp;&nbsp;&nbsp;This * approach transmits or receives an indivdual in text encoded such that the individual is largely readable * by humans but can be read back in 100% by ECJ as well. To do this, these methods will encode numbers * using the <tt>ec.util.Code</tt> class. 
These methods are mostly used to write out populations to * files for inspection, slight modification, then reading back in later on. <b>readIndividual</b>reads * in the fitness and the evaluation flag, then calls <b>parseGenotype</b> to read in the remaining individual. * You are responsible for implementing parseGenotype: the Code class is there to help you. * <b>printIndividual</b> writes out the fitness and evaluation flag, then calls <b>genotypeToString</b> * and printlns the resultant string. You are responsible for implementing the genotypeToString method in such * a way that parseGenotype can read back in the individual println'd with genotypeToString. The default form * of genotypeToString simply calls <b>toString</b>, which you may override instead if you like. The default * form of <b>parseGenotype</b> throws an error. You are not required to implement these methods, but without * them you will not be able to write individuals to files in a simultaneously computer- and human-readable fashion. * * <li><b>printIndividualForHumans(...,PrintWriter)</b>&nbsp;&nbsp;&nbsp;This * approach prints an individual in a fashion intended for human consumption only. * <b>printIndividualForHumans</b> writes out the fitness and evaluation flag, then calls <b>genotypeToStringForHumans<b> * and printlns the resultant string. You are responsible for implementing the genotypeToStringForHumans method. * The default form of genotypeToStringForHumans simply calls <b>toString</b>, which you may override instead if you like * (though note that genotypeToString's default also calls toString). You should handle one of these methods properly * to ensure individuals can be printed by ECJ. * </ul> * * <p>Since individuals should be largely immutable, why is there a <b>readIndividual</b> method? * after all this method doesn't create a <i>new</i> individual -- it just erases the existing one. 
This is * largely historical; but the method is used underneath by the various <b>newIndividual</b> methods in Species, * which <i>do</i> create new individuals read from files. If you're trying to create a brand new individual * read from a file, look in Species. * * <p> Individuals are Comparable: if you sort Individuals, the FITTER individuals will appear EARLIER in a list or array. * * * @author Sean Luke * @version 1.0 */ public abstract class Individual implements Prototype, Comparable { /** A reasonable parameter base element for individuals*/ public static final String P_INDIVIDUAL = "individual"; /** A string appropriate to put in front of whether or not the individual has been printed. */ public static final String EVALUATED_PREAMBLE = "Evaluated: "; /** The fitness of the Individual. */ public Fitness fitness; /** The species of the Individual.*/ public Species species; /** Has the individual been evaluated and its fitness determined yet? */ public boolean evaluated; public Object clone() { try { Individual myobj = (Individual) (super.clone()); if (myobj.fitness != null) myobj.fitness = (Fitness)(fitness.clone()); return myobj; } catch (CloneNotSupportedException e) { throw new InternalError(); } // never happens } /** Returns the "size" of the individual. This is used for things like parsimony pressure. The default form of this method returns 0 -- if you care about parsimony pressure, you'll need to override the default to provide a more descriptive measure of size. */ public long size() { return 0; } /** Returns true if I am genetically "equal" to ind. This should mostly be interpreted as saying that we are of the same class and that we hold the same data. It should NOT be a pointer comparison. */ public abstract boolean equals(Object ind); /** Returns a hashcode for the individual, such that individuals which are equals(...) each other always return the same hash code. 
*/
    public abstract int hashCode();

    /**
     * Overridden here because hashCode() is not expected to return the pointer
     * to the object, so toString() can no longer rely on hashCode() for a
     * unique identifier.  Prints the class name, the identity hash, and the
     * genotype-based hashCode().  You're welcome to override this any way you
     * like to make the individual print out in a more lucid fashion.
     */
    public String toString() {
        return "" + this.getClass().getName() + "@"
            + System.identityHashCode(this) + "{" + hashCode() + "}";
    }

    /**
     * Prints the genotype of the Individual in a fashion readable by humans,
     * not intended to be parsed back in.  The fitness and evaluated flag are
     * not included.  The default simply calls toString(); you'll probably
     * want to override this.
     */
    public String genotypeToStringForHumans() {
        return toString();
    }

    /**
     * Prints the genotype of the Individual in a fashion intended to be
     * parsed back in via parseGenotype(...).  The fitness and evaluated flag
     * are not included.  The default simply calls toString(), which is almost
     * certainly wrong; you'll probably want to override this.
     */
    public String genotypeToString() {
        return toString();
    }

    /**
     * Sets up only those things shared in common with all other individuals
     * in the species; individual-specific state is filled in by
     * Species.newIndividual(...) and modified by breeders.
     *
     * @see Prototype#setup(EvolutionState,Parameter)
     */
    public void setup(final EvolutionState state, final Parameter base) {
        // Does nothing by default.
        // The Species attaches itself to this Individual after loading it
        // but before calling setup(...) on it.
    }

    /**
     * Prints the individual out in a pleasing way for humans: the evaluated
     * flag, then the fitness, then the human-readable genotype.
     */
    public void printIndividualForHumans(final EvolutionState state, final int log) {
        state.output.println(EVALUATED_PREAMBLE + Code.encode(evaluated), log);
        fitness.printFitnessForHumans(state, log);
        state.output.println(genotypeToStringForHumans(), log);
    }

    /**
     * Prints the individual out in a pleasing way for humans.
     *
     * @deprecated Verbosity no longer used.  Use
     *             {@link #printIndividualForHumans(EvolutionState, int)}.
     */
    @Deprecated
    public final void printIndividualForHumans(final EvolutionState state, final int log, final int verbosity) {
        printIndividualForHumans(state, log);
    }

    /**
     * Prints the individual in a way that can be read by computer: the
     * evaluated flag, then the fitness, then the machine-parseable genotype.
     */
    public void printIndividual(final EvolutionState state, final int log) {
        state.output.println(EVALUATED_PREAMBLE + Code.encode(evaluated), log);
        fitness.printFitness(state, log);
        state.output.println(genotypeToString(), log);
    }

    /**
     * Prints the individual in a way that can be read by computer.
     *
     * @deprecated Verbosity no longer used.  Use
     *             {@link #printIndividual(EvolutionState, int)}.
     */
    @Deprecated
    public final void printIndividual(final EvolutionState state, final int log, final int verbosity) {
        // BUG FIX: this previously called printIndividual(state, log,
        // Output.V_NO_GENERAL) -- i.e. itself -- causing infinite recursion
        // and a StackOverflowError.  Delegate to the two-argument form, as
        // the deprecated printIndividualForHumans(...) overload does.
        printIndividual(state, log);
    }

    /**
     * Prints the individual in a way that can be read by computer, writing
     * directly to a PrintWriter.  Prefer printIndividual(state, log); use
     * this only when you can't print through the Output facility.
     */
    public void printIndividual(final EvolutionState state, final PrintWriter writer) {
        writer.println(EVALUATED_PREAMBLE + Code.encode(evaluated));
        fitness.printFitness(state, writer);
        writer.println(genotypeToString());
    }

    /**
     * Reads in the individual from a form printed by printIndividual(),
     * erasing this Individual's previous state: first the evaluated flag,
     * then the fitness, then parseGenotype() (which you should implement).
     * The Species is not changed or attached here.  To <i>create</i> an
     * Individual from a stream, see the newIndividual() methods in Species.
     *
     * @throws IOException if the underlying reader fails
     */
    public void readIndividual(final EvolutionState state, final LineNumberReader reader) throws IOException {
        evaluated = Code.readBooleanWithPreamble(EVALUATED_PREAMBLE, state, reader);
        // Next, what's my fitness?
        fitness.readFitness(state, reader);
        // Next, read me in.
        parseGenotype(state, reader);
    }

    /**
     * Used only by the default readIndividual(state, reader); intended to be
     * overridden to parse the portion of the individual written by
     * genotypeToString().  The default exits the program with an
     * "unimplemented" error.
     */
    protected void parseGenotype(final EvolutionState state, final LineNumberReader reader) throws IOException {
        state.output.fatal("parseGenotype(EvolutionState, LineNumberReader) not implemented in " + this.getClass());
    }

    /**
     * Writes the binary form of the individual to a DataOutput (not Java
     * serialization): the evaluated flag, the fitness, then writeGenotype().
     * The Species is reattached later and must not be written.
     *
     * @throws IOException if the underlying output fails
     */
    public void writeIndividual(final EvolutionState state, final DataOutput dataOutput) throws IOException {
        dataOutput.writeBoolean(evaluated);
        fitness.writeFitness(state, dataOutput);
        writeGenotype(state, dataOutput);
    }

    /**
     * Writes the genotypic information to a DataOutput.  Called by
     * writeIndividual() and nothing else.  The default simply throws an
     * error; subclasses override this as appropriate.  For example, if your
     * genotype is an array of integers:
     * <pre><tt>
     * dataOutput.writeInt(integers.length);
     * for(int x=0;x&lt;integers.length;x++)
     *     dataOutput.writeInt(integers[x]);
     * </tt></pre>
     */
    public void writeGenotype(final EvolutionState state, final DataOutput dataOutput) throws IOException {
        state.output.fatal("writeGenotype(EvolutionState, DataOutput) not implemented in " + this.getClass());
    }

    /**
     * Reads the genotypic information from a DataInput, erasing the previous
     * genotype.  Called by readIndividual() and nothing else.  The default
     * simply throws an error; subclasses override this as appropriate.  For
     * example, if your genotype is an array of integers:
     * <pre><tt>
     * integers = new int[dataInput.readInt()];
     * for(int x=0;x&lt;integers.length;x++)
     *     integers[x] = dataInput.readInt();
     * </tt></pre>
     */
    public void readGenotype(final EvolutionState state, final DataInput dataInput) throws IOException {
        // Message fixed: the parameter is a DataInput, not a DataOutput
        // (the original message was a copy-paste from writeGenotype).
        state.output.fatal("readGenotype(EvolutionState, DataInput) not implemented in " + this.getClass());
    }

    /**
     * Reads the binary form of an individual from a DataInput (not Java
     * serialization), erasing this Individual's previous state: the
     * evaluated flag, the fitness, then readGenotype().  The Species is not
     * changed or attached here.
     *
     * @throws IOException if the underlying input fails
     */
    public void readIndividual(final EvolutionState state, final DataInput dataInput) throws IOException {
        evaluated = dataInput.readBoolean();
        fitness.readFitness(state, dataInput);
        readGenotype(state, dataInput);
    }

    /**
     * Returns the metric distance to another individual, if such a thing can
     * be measured.  The default, which isn't very helpful, returns 0 if the
     * individuals are equal and infinity if they are not.
     */
    public double distanceTo(Individual otherInd) {
        return (equals(otherInd) ? 0 : Double.POSITIVE_INFINITY);
    }

    /**
     * Returns -1 if I am BETTER in some way than the other Individual, 1 if
     * the other Individual is BETTER than me, and 0 if we are equivalent.
     * The default assumes BETTER means FITTER, by delegating to compareTo on
     * the fitnesses themselves.
     */
    public int compareTo(Object o) {
        Individual other = (Individual) o;
        return fitness.compareTo(other.fitness);
    }

    /**
     * Replaces myself with the other Individual while merging our evaluation
     * results together; may destroy the other Individual in the process.
     * Merges the old fitness (backwards) into the other's fitness, then
     * copies the other Individual (including the merged fitness) over myself
     * via a write/read round-trip.  Used when coevolution is combined with
     * distributed evaluation and trials from multiple remote sites must be
     * combined.  Override this if you keep trial state outside the fitness.
     */
    public void merge(EvolutionState state, Individual other) {
        // Merge the fitnesses backwards: merge my fitness INTO the other's.
        other.fitness.merge(state, fitness);

        // Now push the other Individual back onto us, including the merged
        // fitness, via an in-memory pipe -- a ridiculous hack.
        try {
            DataPipe p = new DataPipe();
            DataInputStream in = p.input;
            DataOutputStream out = p.output;

            other.writeIndividual(state, out);
            readIndividual(state, in);
        } catch (IOException e) {
            e.printStackTrace();
            state.output.fatal("Caught impossible IOException in Individual.merge(...).");
        }
    }
}
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.analysis.configuredtargets; import com.google.common.collect.ImmutableCollection; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.devtools.build.lib.analysis.AnalysisUtils; import com.google.devtools.build.lib.analysis.ConfiguredTarget; import com.google.devtools.build.lib.analysis.DefaultInfo; import com.google.devtools.build.lib.analysis.FileProvider; import com.google.devtools.build.lib.analysis.FilesToRunProvider; import com.google.devtools.build.lib.analysis.OutputGroupInfo; import com.google.devtools.build.lib.analysis.RunfilesProvider; import com.google.devtools.build.lib.analysis.TransitiveInfoProvider; import com.google.devtools.build.lib.analysis.VisibilityProvider; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.collect.nestedset.NestedSet; import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder; import com.google.devtools.build.lib.collect.nestedset.Order; import com.google.devtools.build.lib.events.Location; import com.google.devtools.build.lib.packages.InfoInterface; import com.google.devtools.build.lib.packages.PackageSpecification.PackageGroupContents; import com.google.devtools.build.lib.packages.Provider; import com.google.devtools.build.lib.skyframe.BuildConfigurationValue; import 
com.google.devtools.build.lib.skylarkinterface.SkylarkPrinter;
import com.google.devtools.build.lib.syntax.EvalException;
import com.google.devtools.build.lib.syntax.EvalUtils;
import com.google.devtools.build.lib.syntax.Mutability;
import com.google.devtools.build.lib.syntax.Printer;
import com.google.devtools.build.lib.syntax.SkylarkClassObject;
import com.google.devtools.build.lib.syntax.SkylarkType;
import com.google.devtools.build.lib.syntax.StarlarkSemantics;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import javax.annotation.Nullable;

/**
 * An abstract implementation of ConfiguredTarget in which all properties are assigned trivial
 * default values.
 */
public abstract class AbstractConfiguredTarget
    implements ConfiguredTarget, VisibilityProvider, SkylarkClassObject {
  private final Label label;
  private final BuildConfigurationValue.Key configurationKey;

  private final NestedSet<PackageGroupContents> visibility;

  // Cached on-demand default provider (built lazily in getDefaultProvider()).
  private final AtomicReference<DefaultInfo> defaultProvider = new AtomicReference<>();

  // Accessors for Skylark
  private static final String DATA_RUNFILES_FIELD = "data_runfiles";
  private static final String DEFAULT_RUNFILES_FIELD = "default_runfiles";

  // A set containing all field names which may be specially handled (and thus may not be
  // attributed to normal user-specified providers).
  private static final ImmutableSet<String> SPECIAL_FIELD_NAMES =
      ImmutableSet.of(
          LABEL_FIELD,
          FILES_FIELD,
          DEFAULT_RUNFILES_FIELD,
          DATA_RUNFILES_FIELD,
          FilesToRunProvider.SKYLARK_NAME,
          OutputGroupInfo.SKYLARK_NAME,
          RuleConfiguredTarget.ACTIONS_FIELD_NAME);

  /** Constructs a target with empty (private) visibility. */
  public AbstractConfiguredTarget(Label label, BuildConfigurationValue.Key configurationKey) {
    this(label, configurationKey, NestedSetBuilder.emptySet(Order.STABLE_ORDER));
  }

  /**
   * Constructs a target with the given label, configuration key, and visibility.
   * All three are stored as-is; no copies are made.
   */
  protected AbstractConfiguredTarget(
      Label label,
      BuildConfigurationValue.Key configurationKey,
      NestedSet<PackageGroupContents> visibility) {
    this.label = label;
    this.configurationKey = configurationKey;
    this.visibility = visibility;
  }

  @Override
  public final NestedSet<PackageGroupContents> getVisibility() {
    return visibility;
  }

  @Override
  public BuildConfigurationValue.Key getConfigurationKey() {
    return configurationKey;
  }

  @Override
  public Label getLabel() {
    return label;
  }

  @Override
  public String toString() {
    return "ConfiguredTarget(" + getLabel() + ", " + getConfigurationChecksum() + ")";
  }

  /**
   * Returns this object cast to {@code provider} if this class implements it, else null.
   * Subclasses that carry additional providers are expected to extend this behavior.
   */
  @Override
  public <P extends TransitiveInfoProvider> P getProvider(Class<P> provider) {
    AnalysisUtils.checkProvider(provider);
    if (provider.isAssignableFrom(getClass())) {
      return provider.cast(this);
    } else {
      return null;
    }
  }

  /**
   * Starlark field access with semantics enforcement: when
   * --incompatible_disable_target_provider_fields is set, only the special
   * field names in SPECIAL_FIELD_NAMES may be accessed via field syntax.
   */
  @Override
  public Object getValue(Location loc, StarlarkSemantics semantics, String name)
      throws EvalException {
    if (semantics.incompatibleDisableTargetProviderFields()
        && !SPECIAL_FIELD_NAMES.contains(name)) {
      throw new EvalException(
          loc,
          "Accessing providers via the field syntax on structs is "
              + "deprecated and will be removed soon. It may be temporarily re-enabled by setting "
              + "--incompatible_disable_target_provider_fields=false. See "
              + "https://github.com/bazelbuild/bazel/issues/9014 for details.");
    }
    return getValue(name);
  }

  /** Starlark field access without semantics enforcement; routes special names, else get(name). */
  @Override
  public Object getValue(String name) {
    switch (name) {
      case LABEL_FIELD:
        return getLabel();
      case RuleConfiguredTarget.ACTIONS_FIELD_NAME:
        // Depending on subclass, the 'actions' field will either be unsupported or of type
        // java.util.List, which needs to be converted to SkylarkList before being returned.
        Object result = get(name);
        if (result != null) {
          result = SkylarkType.convertToSkylark(result, (Mutability) null);
        }
        return result;
      default:
        return get(name);
    }
  }

  /**
   * Starlark indexing (target[SomeProvider]); only Provider keys are accepted.
   *
   * @throws EvalException if {@code key} is not a Provider, or the provider is absent
   */
  @Override
  public final Object getIndex(Object key, Location loc) throws EvalException {
    if (!(key instanceof Provider)) {
      throw new EvalException(loc, String.format(
          "Type Target only supports indexing by object constructors, got %s instead",
          EvalUtils.getDataTypeName(key)));
    }
    Provider constructor = (Provider) key;
    Object declaredProvider = get(constructor.getKey());
    if (declaredProvider != null) {
      return declaredProvider;
    }
    throw new EvalException(
        loc,
        Printer.format(
            "%r%s doesn't contain declared provider '%s'",
            this,
            getRuleClassString().isEmpty() ? "" : " (rule '" + getRuleClassString() + "')",
            constructor.getPrintableName()));
  }

  /** Starlark membership test (SomeProvider in target); only Provider keys are accepted. */
  @Override
  public boolean containsKey(Object key, Location loc) throws EvalException {
    if (!(key instanceof Provider)) {
      throw new EvalException(loc, String.format(
          "Type Target only supports querying by object constructors, got %s instead",
          EvalUtils.getDataTypeName(key)));
    }
    return get(((Provider) key).getKey()) != null;
  }

  // Returning null defers to the caller's default message for unknown fields.
  @Override
  public String getErrorMessageForUnknownField(String name) {
    return null;
  }

  /**
   * Lists the Starlark-visible field names: the fixed special fields, plus
   * "output_groups" only when present, plus subclass extras.
   */
  @Override
  public final ImmutableCollection<String> getFieldNames() {
    ImmutableList.Builder<String> result = ImmutableList.builder();
    result.addAll(ImmutableList.of(
        DATA_RUNFILES_FIELD,
        DEFAULT_RUNFILES_FIELD,
        LABEL_FIELD,
        FILES_FIELD,
        FilesToRunProvider.SKYLARK_NAME));
    if (get(OutputGroupInfo.SKYLARK_CONSTRUCTOR) != null) {
      result.add(OutputGroupInfo.SKYLARK_NAME);
    }
    addExtraSkylarkKeys(result::add);
    return result.build();
  }

  // Hook for subclasses to contribute additional Starlark field names.
  protected void addExtraSkylarkKeys(Consumer<String> result) {
  }

  // Lazily builds and caches the DefaultInfo from the target's runfiles/files
  // providers.  compareAndSet tolerates a racing duplicate build: only the
  // first wins, and the cached value is always returned.
  private DefaultInfo getDefaultProvider() {
    if (defaultProvider.get() == null) {
      defaultProvider.compareAndSet(
          null,
          DefaultInfo.build(
              getProvider(RunfilesProvider.class),
              getProvider(FileProvider.class),
              getProvider(FilesToRunProvider.class)));
    }
    return defaultProvider.get();
  }

  /** Returns a declared provider provided by this target. Only meant to be used from Skylark. */
  @Nullable
  @Override
  public final InfoInterface get(Provider.Key providerKey) {
    if (providerKey.equals(DefaultInfo.PROVIDER.getKey())) {
      return getDefaultProvider();
    }
    return rawGetSkylarkProvider(providerKey);
  }

  /** Implement in subclasses to get a skylark provider for a given {@code providerKey}. */
  @Nullable
  protected abstract InfoInterface rawGetSkylarkProvider(Provider.Key providerKey);

  // Subclasses with an associated rule class override this for error messages.
  public String getRuleClassString() {
    return "";
  }

  /**
   * Returns a value provided by this target. Only meant to be used from Skylark.
   * Special field names are answered from the cached DefaultInfo; everything
   * else is delegated to the subclass.
   */
  @Override
  public final Object get(String providerKey) {
    switch (providerKey) {
      case FILES_FIELD:
        return getDefaultProvider().getFiles();
      case DEFAULT_RUNFILES_FIELD:
        return getDefaultProvider().getDefaultRunfiles();
      case DATA_RUNFILES_FIELD:
        return getDefaultProvider().getDataRunfiles();
      case FilesToRunProvider.SKYLARK_NAME:
        return getDefaultProvider().getFilesToRun();
      case OutputGroupInfo.SKYLARK_NAME:
        return get(OutputGroupInfo.SKYLARK_CONSTRUCTOR);
      default:
        return rawGetSkylarkProvider(providerKey);
    }
  }

  /** Implement in subclasses to get a skylark provider for a given {@code providerKey}. */
  protected abstract Object rawGetSkylarkProvider(String providerKey);

  // All main target classes must override this method to provide more descriptive strings.
  // Exceptions are currently EnvironmentGroupConfiguredTarget and PackageGroupConfiguredTarget.
  @Override
  public void repr(SkylarkPrinter printer) {
    printer.append("<unknown target " + getLabel() + ">");
  }
}
package de.fu_berlin.agdb.crepe.algebra.operators.numeric;

import de.fu_berlin.agdb.crepe.algebra.Match;
import de.fu_berlin.agdb.crepe.algebra.Operator;
import de.fu_berlin.agdb.crepe.algebra.OperatorNotSupportedException;
import de.fu_berlin.agdb.crepe.data.Event;
import de.fu_berlin.agdb.crepe.data.IAttribute;
import de.fu_berlin.agdb.crepe.data.IEvent;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Objects;

import static de.fu_berlin.agdb.crepe.algebra.operators.numeric.NumericOperationType.*;
import static java.util.Objects.requireNonNull;

/**
 * Match that performs numeric operations.
 * <p>
 * Each operand is either a fixed {@link Object} or an {@link Operator} whose
 * matching event supplies the value of {@code attribute}; the {@link Mode}
 * records which combination of the two applies.
 *
 * @author Simon Kalt
 */
public class NumericOperation extends Match {
    @Nonnull
    private final NumericOperationType operation;
    @Nonnull
    private final String attribute;
    // final: assigned exactly once, in the private constructor that all
    // public constructors delegate to.
    @Nonnull
    private final Mode mode;
    private Operator firstOperator, secondOperator;
    private Object firstObject, secondObject;

    /**
     * Shared initialization; operand fields are filled in by the delegating
     * public constructors according to {@code mode}.
     */
    private NumericOperation(@Nonnull NumericOperationType operation, @Nonnull String attribute, @Nonnull Mode mode) {
        this.operation = requireNonNull(operation);
        this.attribute = requireNonNull(attribute);
        this.mode = requireNonNull(mode);
    }

    /**
     * Creates a numeric operation of the given type over two operators.
     *
     * @param operation      Type of the operation
     * @param attribute      attribute whose values the operation is performed on
     * @param firstOperator  First operand
     * @param secondOperator Second operand
     */
    public NumericOperation(@Nonnull NumericOperationType operation,
                            @Nonnull String attribute,
                            @Nonnull Operator firstOperator,
                            @Nonnull Operator secondOperator) {
        this(operation, attribute, Mode.OPERATORS);
        this.firstOperator = requireNonNull(firstOperator);
        this.secondOperator = requireNonNull(secondOperator);

        // For backwards compatibility:
        this.setChildren(firstOperator, secondOperator);
    }

    /**
     * Creates a numeric operation of the given type over an object and an operator.
     *
     * @param operation Type of the operation
     * @param attribute attribute whose values the operation is performed on
     * @param first     first operand
     * @param second    second operand
     */
    public NumericOperation(@Nonnull NumericOperationType operation,
                            @Nonnull String attribute,
                            @Nonnull Object first,
                            @Nonnull Operator second) {
        this(operation, attribute, Mode.OBJ_OP);
        this.firstObject = requireNonNull(first);
        this.secondOperator = requireNonNull(second);
    }

    /**
     * Creates a numeric operation of the given type over an operator and an object.
     *
     * @param operation Type of the operation
     * @param attribute attribute whose values the operation is performed on
     * @param first     first operand
     * @param second    second operand
     */
    public NumericOperation(@Nonnull NumericOperationType operation,
                            @Nonnull String attribute,
                            @Nonnull Operator first,
                            @Nonnull Object second) {
        this(operation, attribute, Mode.OP_OBJ);
        this.firstOperator = requireNonNull(first);
        this.secondObject = requireNonNull(second);
    }

    /**
     * Creates a numeric operation of the given type over two objects.
     *
     * @param operation Type of the operation
     * @param attribute attribute whose values the operation is performed on
     * @param first     first operand
     * @param second    second operand
     */
    public NumericOperation(@Nonnull NumericOperationType operation,
                            @Nonnull String attribute,
                            @Nonnull Object first,
                            @Nonnull Object second) {
        this(operation, attribute, Mode.OBJECTS);
        this.firstObject = requireNonNull(first);
        this.secondObject = requireNonNull(second);
    }

    /**
     * Creates an addition operation of the given operands.
     *
     * @param first  First operand
     * @param second Second operand
     */
    @Nonnull
    public static NumericOperation add(@Nonnull String attribute, @Nonnull Operator first, @Nonnull Operator second) {
        return new NumericOperation(ADD, attribute, first, second);
    }

    /**
     * Creates an addition operation of the given operands.
     *
     * @param first  First operand
     * @param second Second operand
     */
    @Nonnull
    public static NumericOperation add(@Nonnull String attribute, @Nonnull Object first, @Nonnull Operator second) {
        return new NumericOperation(ADD, attribute, first, second);
    }

    /**
     * Creates an addition operation of the given operands.
     *
     * @param first  First operand
     * @param second Second operand
     */
    @Nonnull
    public static NumericOperation add(@Nonnull String attribute, @Nonnull Operator first, @Nonnull Object second) {
        return new NumericOperation(ADD, attribute, first, second);
    }

    /**
     * Creates an addition operation of the given operands.
     *
     * @param first  First operand
     * @param second Second operand
     */
    @Nonnull
    public static NumericOperation add(@Nonnull String attribute, @Nonnull Object first, @Nonnull Object second) {
        return new NumericOperation(ADD, attribute, first, second);
    }

    /**
     * Creates a subtraction operation of the given operands.
     *
     * @param first  First operand
     * @param second Second operand
     */
    @Nonnull
    public static NumericOperation subtract(@Nonnull String attribute, @Nonnull Operator first, @Nonnull Operator second) {
        return new NumericOperation(SUBTRACT, attribute, first, second);
    }

    /**
     * Creates a subtraction operation of the given operands.
     *
     * @param first  First operand
     * @param second Second operand
     */
    @Nonnull
    public static NumericOperation subtract(@Nonnull String attribute, @Nonnull Object first, @Nonnull Operator second) {
        return new NumericOperation(SUBTRACT, attribute, first, second);
    }

    /**
     * Creates a subtraction operation of the given operands.
     *
     * @param first  First operand
     * @param second Second operand
     */
    @Nonnull
    public static NumericOperation subtract(@Nonnull String attribute, @Nonnull Operator first, @Nonnull Object second) {
        return new NumericOperation(SUBTRACT, attribute, first, second);
    }

    /**
     * Creates a subtraction operation of the given operands.
     *
     * @param first  First operand
     * @param second Second operand
     */
    @Nonnull
    public static NumericOperation subtract(@Nonnull String attribute, @Nonnull Object first, @Nonnull Object second) {
        return new NumericOperation(SUBTRACT, attribute, first, second);
    }

    /**
     * Creates a multiplication operation of the given operands.
     *
     * @param first  First operand
     * @param second Second operand
     */
    @Nonnull
    public static NumericOperation multiply(@Nonnull String attribute, @Nonnull Operator first, @Nonnull Operator second) {
        return new NumericOperation(MULTIPLY, attribute, first, second);
    }

    /**
     * Creates a multiplication operation of the given operands.
     *
     * @param first  First operand
     * @param second Second operand
     */
    @Nonnull
    public static NumericOperation multiply(@Nonnull String attribute, @Nonnull Object first, @Nonnull Operator second) {
        return new NumericOperation(MULTIPLY, attribute, first, second);
    }

    /**
     * Creates a multiplication operation of the given operands.
     *
     * @param first  First operand
     * @param second Second operand
     */
    @Nonnull
    public static NumericOperation multiply(@Nonnull String attribute, @Nonnull Operator first, @Nonnull Object second) {
        return new NumericOperation(MULTIPLY, attribute, first, second);
    }

    /**
     * Creates a multiplication operation of the given operands.
     *
     * @param first  First operand
     * @param second Second operand
     */
    @Nonnull
    public static NumericOperation multiply(@Nonnull String attribute, @Nonnull Object first, @Nonnull Object second) {
        return new NumericOperation(MULTIPLY, attribute, first, second);
    }

    /**
     * Creates a division operation of the given operands.
     *
     * @param first  First operand
     * @param second Second operand
     */
    @Nonnull
    public static NumericOperation divide(@Nonnull String attribute, @Nonnull Operator first, @Nonnull Operator second) {
        return new NumericOperation(DIVIDE, attribute, first, second);
    }

    /**
     * Creates a division operation of the given operands.
     *
     * @param first  First operand
     * @param second Second operand
     */
    @Nonnull
    public static NumericOperation divide(@Nonnull String attribute, @Nonnull Object first, @Nonnull Operator second) {
        return new NumericOperation(DIVIDE, attribute, first, second);
    }

    /**
     * Creates a division operation of the given operands.
     *
     * @param first  First operand
     * @param second Second operand
     */
    @Nonnull
    public static NumericOperation divide(@Nonnull String attribute, @Nonnull Operator first, @Nonnull Object second) {
        return new NumericOperation(DIVIDE, attribute, first, second);
    }

    /**
     * Creates a division operation of the given operands.
     *
     * @param first  First operand
     * @param second Second operand
     */
    @Nonnull
    public static NumericOperation divide(@Nonnull String attribute, @Nonnull Object first, @Nonnull Object second) {
        return new NumericOperation(DIVIDE, attribute, first, second);
    }

    // TODO: Move to more generic class?
    /**
     * Extracts the value of attribute {@code name} from the operator's
     * matching event.
     *
     * @throws OperatorNotSupportedException if the operator has no matching
     *         event or the event lacks the attribute
     */
    @Nullable
    public static Object getAttributeValue(@Nonnull Operator operator, String name) throws OperatorNotSupportedException {
        IEvent matchingEvent = operator.getMatchingEvent();
        if (matchingEvent == null)
            throw new OperatorNotSupportedException("Matching event is null.");

        IAttribute attr = matchingEvent
                .getAttributes()
                .get(name);

        if (attr == null)
            throw new OperatorNotSupportedException("Attribute not found");

        return attr.getValue();
    }

    /**
     * Resolves both operands (from operators and/or fixed objects, according
     * to {@code mode}), applies the operation, and publishes the result as a
     * new matching event carrying {@code attribute}.
     *
     * @return always {@code true} once a result event has been set
     * @throws OperatorNotSupportedException if an operand operator cannot
     *         supply an attribute value
     */
    @Override
    public boolean apply(IEvent event) throws OperatorNotSupportedException {
        Object first = firstObject;
        Object second = secondObject;

        switch (mode) {
            case OPERATORS:
                first = getAttributeValue(firstOperator, attribute);
                second = getAttributeValue(secondOperator, attribute);
                break;
            case OBJ_OP:
                second = getAttributeValue(secondOperator, attribute);
                break;
            case OP_OBJ:
                first = getAttributeValue(firstOperator, attribute);
                break;
            case OBJECTS:
                break;
        }

        Objects.requireNonNull(first, "First argument to numeric operation is null.");
        Objects.requireNonNull(second, "Second argument to numeric operation is null.");

        Number result = operation.applyObj(first, second);
        setMatchingEvent(new Event(attribute, result));

        return true;
    }

    @Override
    public boolean equals(@Nullable Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        NumericOperation that = (NumericOperation) o;

        if (operation != that.operation) return false;
        if (!attribute.equals(that.attribute)) return false;
        if (mode != that.mode) return false;
        if (!Objects.equals(firstOperator, that.firstOperator)) return false;
        if (!Objects.equals(secondOperator, that.secondOperator)) return false;
        if (!Objects.equals(firstObject, that.firstObject)) return false;
        return Objects.equals(secondObject, that.secondObject);
    }

    @Override
    public int hashCode() {
        // Includes mode for consistency with equals(), which compares it.
        return Objects.hash(operation, attribute, mode, firstOperator, secondOperator, firstObject, secondObject);
    }

    @Nonnull
    @Override
    public String toString() {
        StringBuilder result = new StringBuilder(50);

        result.append(operation.toString())
                .append("(");

        switch (mode) {
            case OPERATORS:
                result.append(firstOperator).append(", ").append(secondOperator);
                break;
            case OBJECTS:
                result.append(firstObject).append(", ").append(secondObject);
                break;
            case OBJ_OP:
                result.append(firstObject).append(", ").append(secondOperator);
                break;
            case OP_OBJ:
                result.append(firstOperator).append(", ").append(secondObject);
                break;
        }

        result.append(")");
        return result.toString();
    }

    // Which of the two operands are Operators vs. fixed Objects.
    private enum Mode {
        OPERATORS, OBJECTS, OBJ_OP, OP_OBJ
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.jackrabbit.oak.plugins.index.lucene.directory; import static com.google.common.collect.Sets.newHashSet; import static org.apache.commons.io.FileUtils.ONE_GB; import static org.apache.commons.io.FileUtils.ONE_MB; import static org.apache.jackrabbit.JcrConstants.JCR_DATA; import static org.apache.jackrabbit.oak.InitialContent.INITIAL_CONTENT; import static org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants.INDEX_DATA_CHILD_NAME; import static org.apache.jackrabbit.oak.plugins.index.lucene.directory.OakDirectory.PROP_BLOB_SIZE; import static org.apache.jackrabbit.oak.plugins.index.lucene.directory.OakDirectory.PROP_UNIQUE_KEY; import static org.apache.jackrabbit.oak.plugins.index.lucene.directory.OakDirectory.PROP_UNSAFE_FOR_ACTIVE_DELETION; import static org.apache.jackrabbit.oak.plugins.index.lucene.directory.OakDirectory.UNIQUE_KEY_SIZE; import static org.hamcrest.CoreMatchers.containsString; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import 
static org.junit.Assert.fail; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Random; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import com.google.common.collect.Iterables; import com.google.common.collect.Sets; import org.apache.commons.io.IOUtils; import org.apache.commons.io.input.NullInputStream; import org.apache.jackrabbit.core.data.FileDataStore; import org.apache.jackrabbit.oak.api.Blob; import org.apache.jackrabbit.oak.api.PropertyState; import org.apache.jackrabbit.oak.api.Type; import org.apache.jackrabbit.oak.plugins.blob.BlobStoreBlob; import org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreBlobStore; import org.apache.jackrabbit.oak.plugins.index.lucene.IndexDefinition; import org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants; import org.apache.jackrabbit.oak.plugins.memory.ArrayBasedBlob; import org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState; import org.apache.jackrabbit.oak.plugins.memory.PropertyStates; import org.apache.jackrabbit.oak.segment.SegmentNodeStore; import org.apache.jackrabbit.oak.segment.SegmentNodeStoreBuilders; import org.apache.jackrabbit.oak.segment.SegmentTestConstants; import org.apache.jackrabbit.oak.segment.file.FileStore; import org.apache.jackrabbit.oak.segment.file.FileStoreBuilder; import org.apache.jackrabbit.oak.spi.blob.GarbageCollectableBlobStore; import org.apache.jackrabbit.oak.spi.blob.MemoryBlobStore; import org.apache.jackrabbit.oak.spi.state.NodeBuilder; import org.apache.jackrabbit.oak.spi.state.NodeState; import org.apache.jackrabbit.oak.spi.state.ReadOnlyBuilder; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; 
import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.InputStreamDataInput; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; abstract public class OakDirectoryTestBase { @Rule public TemporaryFolder tempFolder = new TemporaryFolder(new File("target")); private Random rnd = new Random(); private NodeState root = INITIAL_CONTENT; protected NodeBuilder builder = root.builder(); int fileSize = IndexDefinition.DEFAULT_BLOB_SIZE * 2 + rnd.nextInt(1000); @Test public void writes_DefaultSetup() throws Exception{ Directory dir = createDir(builder, false, "/foo"); assertWrites(dir, IndexDefinition.DEFAULT_BLOB_SIZE); } @Test public void testCompatibility() throws Exception{ builder.setProperty(LuceneIndexConstants.BLOB_SIZE, OakBufferedIndexFile.DEFAULT_BLOB_SIZE); Directory dir = createDir(builder, false, "/foo"); byte[] data = assertWrites(dir, OakBufferedIndexFile.DEFAULT_BLOB_SIZE); NodeBuilder testNode = builder.child(INDEX_DATA_CHILD_NAME).child("test"); //Remove the size property to simulate old behaviour testNode.removeProperty(PROP_BLOB_SIZE); //Read should still work even if the size property is removed IndexInput i = dir.openInput("test", IOContext.DEFAULT); assertEquals(fileSize, i.length()); byte[] result = new byte[fileSize]; i.readBytes(result, 0, result.length); assertTrue(Arrays.equals(data, result)); } @Test //OAK-2388 public void testOverflow() throws Exception{ Directory dir = createDir(builder, false, "/foo"); NodeBuilder file = builder.child(INDEX_DATA_CHILD_NAME).child("test.txt"); int blobSize = 32768; int dataSize = 90844; file.setProperty(OakDirectory.PROP_BLOB_SIZE, blobSize); List<? 
super Blob> blobs = new ArrayList<Blob>(dataSize); for (int i = 0; i < dataSize; i++) { blobs.add(new ArrayBasedBlob(new byte[0])); } file.setProperty(PropertyStates.createProperty("jcr:data", blobs, Type.BINARIES)); IndexInput input = dir.openInput("test.txt", IOContext.DEFAULT); assertEquals((long) blobSize * (dataSize - 1), input.length()); } @Test public void saveListing() throws Exception{ builder.setProperty(LuceneIndexConstants.SAVE_DIR_LISTING, true); Directory dir = createDir(builder, false, "/foo"); Set<String> fileNames = newHashSet(); for (int i = 0; i < 10; i++) { String fileName = "foo" + i; createFile(dir, fileName); fileNames.add(fileName); } dir.close(); dir = createDir(builder, true, "/foo"); assertEquals(fileNames, newHashSet(dir.listAll())); } @Test public void skipSaveListingIfUnchanged() throws Exception{ builder.setProperty(LuceneIndexConstants.SAVE_DIR_LISTING, true); Directory dir = createDir(builder, false, "/foo"); Set<String> fileNames = newHashSet(); for (int i = 0; i < 10; i++) { String fileName = "foo" + i; createFile(dir, fileName); fileNames.add(fileName); } dir.close(); dir = createDir(new ReadOnlyBuilder(builder.getNodeState()), false, "/foo"); Set<String> files = newHashSet(dir.listAll()); dir.close(); assertEquals(fileNames, files); } // OAK-6562 @Test public void createOutputReInitsFile() throws Exception { builder.setProperty(LuceneIndexConstants.SAVE_DIR_LISTING, true); Directory dir = createDir(builder, false, "/foo"); final String fileName = "foo"; dir.createOutput(fileName, IOContext.DEFAULT); String firstUniqueKey = builder.getChildNode(INDEX_DATA_CHILD_NAME) .getChildNode(fileName).getString(PROP_UNIQUE_KEY); dir.createOutput(fileName, IOContext.DEFAULT); String secondUniqueKey = builder.getChildNode(INDEX_DATA_CHILD_NAME) .getChildNode(fileName).getString(PROP_UNIQUE_KEY); assertFalse("Unique key must change on re-incarnating output with same name", firstUniqueKey.equals(secondUniqueKey)); } byte[] 
assertWrites(Directory dir, int blobSize) throws IOException { byte[] data = randomBytes(fileSize); IndexOutput o = dir.createOutput("test", IOContext.DEFAULT); o.writeBytes(data, data.length); o.close(); assertTrue(dir.fileExists("test")); assertEquals(fileSize, dir.fileLength("test")); IndexInput i = dir.openInput("test", IOContext.DEFAULT); assertEquals(fileSize, i.length()); byte[] result = new byte[fileSize]; i.readBytes(result, 0, result.length); assertTrue(Arrays.equals(data, result)); NodeBuilder testNode = builder.child(INDEX_DATA_CHILD_NAME).child("test"); assertEquals(blobSize, testNode.getProperty(PROP_BLOB_SIZE).getValue(Type.LONG).longValue()); assertBlobSizeInWrite(testNode.getProperty(JCR_DATA), blobSize, fileSize); return data; } abstract void assertBlobSizeInWrite(PropertyState jcrData, int blobSize, int fileSize); private int createFile(Directory dir, String fileName) throws IOException { int size = rnd.nextInt(1000) + 1; byte[] data = randomBytes(size); IndexOutput o = dir.createOutput(fileName, IOContext.DEFAULT); o.writeBytes(data, data.length); o.close(); return size; } protected OakDirectory createDir(NodeBuilder builder, boolean readOnly, String indexPath){ return getOakDirectoryBuilder(builder, indexPath).setReadOnly(readOnly).build(); } byte[] randomBytes(int size) { byte[] data = new byte[size]; rnd.nextBytes(data); return data; } @Test public void testCloseOnOriginalIndexInput() throws Exception { Directory dir = createDir(builder, false, "/foo"); NodeBuilder file = builder.child(INDEX_DATA_CHILD_NAME).child("test.txt"); int dataSize = 1024; List<? 
super Blob> blobs = new ArrayList<Blob>(dataSize); for (int i = 0; i < dataSize; i++) { blobs.add(new ArrayBasedBlob(new byte[0])); } file.setProperty(PropertyStates.createProperty("jcr:data", blobs, Type.BINARIES)); IndexInput input = dir.openInput("test.txt", IOContext.DEFAULT); input.close(); assertClosed(input); } @Test public void testCloseOnClonedIndexInputs() throws Exception { Directory dir = createDir(builder, false, "/foo"); NodeBuilder file = builder.child(INDEX_DATA_CHILD_NAME).child("test.txt"); int dataSize = 1024; List<? super Blob> blobs = new ArrayList<Blob>(dataSize); for (int i = 0; i < dataSize; i++) { blobs.add(new ArrayBasedBlob(new byte[0])); } file.setProperty(PropertyStates.createProperty("jcr:data", blobs, Type.BINARIES)); IndexInput input = dir.openInput("test.txt", IOContext.DEFAULT); IndexInput clone1 = input.clone(); IndexInput clone2 = input.clone(); input.close(); assertClosed(input); assertClosed(clone1); assertClosed(clone2); } private void assertClosed(IndexInput input) throws IOException { try { input.length(); fail("cannot use IndexInput once closed"); } catch (AlreadyClosedException e) { // expected exception } try { input.seek(0); fail("cannot use IndexInput once closed"); } catch (AlreadyClosedException e) { // expected exception } try { input.getFilePointer(); fail("cannot use IndexInput once closed"); } catch (AlreadyClosedException e) { // expected exception } try { input.readInt(); fail("cannot use IndexInput once closed"); } catch (AlreadyClosedException e) { // expected exception } try { input.readShort(); fail("cannot use IndexInput once closed"); } catch (AlreadyClosedException e) { // expected exception } try { input.readLong(); fail("cannot use IndexInput once closed"); } catch (AlreadyClosedException e) { // expected exception } try { input.readByte(); fail("cannot use IndexInput once closed"); } catch (AlreadyClosedException e) { // expected exception } try { input.readString(); fail("cannot use IndexInput once 
closed"); } catch (AlreadyClosedException e) { // expected exception } try { input.readStringSet(); fail("cannot use IndexInput once closed"); } catch (AlreadyClosedException e) { // expected exception } try { input.readStringStringMap(); fail("cannot use IndexInput once closed"); } catch (AlreadyClosedException e) { // expected exception } try { input.readVInt(); fail("cannot use IndexInput once closed"); } catch (AlreadyClosedException e) { // expected exception } try { input.readVLong(); fail("cannot use IndexInput once closed"); } catch (AlreadyClosedException e) { // expected exception } try { input.readBytes(null, 0, 0); fail("cannot use IndexInput once closed"); } catch (AlreadyClosedException e) { // expected exception } try { input.readBytes(null, 0, 0, false); fail("cannot use IndexInput once closed"); } catch (AlreadyClosedException e) { // expected exception } } @Test public void largeFile() throws Exception{ FileStore store = FileStoreBuilder.fileStoreBuilder(tempFolder.getRoot()) .withMemoryMapping(false) .withBlobStore(getBlackHoleBlobStore()) .build(); SegmentNodeStore nodeStore = SegmentNodeStoreBuilders.builder(store).build(); IndexDefinition defn = new IndexDefinition(INITIAL_CONTENT, EmptyNodeState.EMPTY_NODE, "/foo"); Directory directory = getOakDirectoryBuilder(nodeStore.getRoot().builder(), defn).setReadOnly(false).build(); long expectedSize = ONE_GB * 2 + ONE_MB; String fileName = "test"; writeFile(directory, fileName, expectedSize); assertEquals(expectedSize, directory.fileLength(fileName)); IndexInput input = directory.openInput(fileName, IOContext.DEFAULT); readInputToEnd(expectedSize, input); store.close(); } @Test public void dirNameInExceptionMessage() throws Exception{ String indexPath = "/foo/bar"; Directory dir = createDir(builder, false, indexPath); try { dir.openInput("foo.txt", IOContext.DEFAULT); fail(); } catch (IOException e){ assertThat(e.getMessage(), containsString(indexPath)); } int fileSize = createFile(dir, "test.txt"); 
IndexInput in = dir.openInput("test.txt", IOContext.DEFAULT); try { in.seek(fileSize + 1); fail(); } catch (IOException e){ assertThat(e.getMessage(), containsString(indexPath)); } IndexInput in2 = dir.openInput("test.txt", IOContext.DEFAULT); try { byte[] data = new byte[fileSize + 1]; in2.readBytes(data, 0, fileSize + 1); fail(); } catch (IOException e){ assertThat(e.getMessage(), containsString(indexPath)); } } @Test public void dirNameInException_Writes() throws Exception{ FailOnDemandBlobStore blobStore = new FailOnDemandBlobStore(); FileStore store = FileStoreBuilder.fileStoreBuilder(tempFolder.getRoot()) .withMemoryMapping(false) .withBlobStore(blobStore) .build(); SegmentNodeStore nodeStore = SegmentNodeStoreBuilders.builder(store).build(); String indexPath = "/foo/bar"; int minFileSize = SegmentTestConstants.MEDIUM_LIMIT; int blobSize = minFileSize + 1000; builder = nodeStore.getRoot().builder(); builder.setProperty(LuceneIndexConstants.BLOB_SIZE, blobSize); Directory dir = createDir(builder, false, indexPath); blobStore.startFailing(); IndexOutput o = dir.createOutput("test1.txt", IOContext.DEFAULT); try{ o.writeBytes(randomBytes(blobSize + 10), blobSize + 10); fail(); } catch (IOException e){ assertThat(e.getMessage(), containsString(indexPath)); assertThat(e.getMessage(), containsString("test1.txt")); } store.close(); } @Test public void readOnlyDirectory() throws Exception{ Directory dir = getOakDirectoryBuilder(new ReadOnlyBuilder(builder.getNodeState()),"/foo") .setReadOnly(true).build(); assertEquals(0, dir.listAll().length); } @Test public void testDirty() throws Exception{ OakDirectory dir = createDir(builder, false, "/foo"); assertFalse(dir.isDirty()); createFile(dir, "a"); assertTrue(dir.isDirty()); dir.close(); dir = createDir(builder, false, "/foo"); assertFalse(dir.isDirty()); dir.openInput("a", IOContext.DEFAULT); assertFalse(dir.isDirty()); dir.deleteFile("a"); assertTrue(dir.isDirty()); dir.close(); } // OAK-6503 @Test public void 
dontMarkNonBlobStoreBlobsAsDeleted() throws Exception{ final String deletedBlobId = "blobIdentifier"; final String blobIdToString = "NeverEver-Ever-Ever-ShouldThisBeMarkedAsDeleted"; final int fileSize = 1; final AtomicBoolean identifiableBlob = new AtomicBoolean(false); IndexDefinition def = new IndexDefinition(root, builder.getNodeState(), "/foo"); BlobFactory factory = new BlobFactory() { @Override public Blob createBlob(InputStream in) throws IOException { ByteArrayOutputStream out = new ByteArrayOutputStream(); IOUtils.copy(in, out); byte[] data = out.toByteArray(); return new ArrayBasedBlob(data) { @Override public String getContentIdentity() { return identifiableBlob.get()?deletedBlobId:null; } @Override public String toString() { return blobIdToString; } }; } }; OakDirectory dir = getOakDirectoryBuilder(builder, def).setReadOnly(false) .with(factory). with( new ActiveDeletedBlobCollectorFactory.BlobDeletionCallback() { @Override public void deleted(String blobId, Iterable<String> ids) { assertEquals("Only blobs with content identity must be reported as deleted", deletedBlobId, blobId); } @Override public void commitProgress(IndexProgress indexProgress) { } @Override public boolean isMarkingForActiveDeletionUnsafe() { return false; } }) .build(); writeFile(dir, "file1", fileSize); writeFile(dir, "file2", fileSize); dir.deleteFile("file1"); identifiableBlob.set(true); dir.deleteFile("file2"); dir.close(); } // OAK-7066 @Test public void dontMarkInlinedBlobsFromDataStoreAsDeleted() throws Exception { IndexDefinition def = new IndexDefinition(root, builder.getNodeState(), "/foo"); final Set<String> deletedFiles = newHashSet(); FileDataStore fds = new FileDataStore(); fds.setMinRecordLength(48); fds.init(new File(tempFolder.getRoot(), "fdsRoot").getAbsolutePath()); DataStoreBlobStore dsbs = new DataStoreBlobStore(fds); BlobFactory factory = in -> new BlobStoreBlob(dsbs, dsbs.writeBlob(in)); OakDirectory dir = getOakDirectoryBuilder(builder, 
def).setReadOnly(false) .with(factory). with( new ActiveDeletedBlobCollectorFactory.BlobDeletionCallback() { @Override public void deleted(String blobId, Iterable<String> ids) { deletedFiles.add(Iterables.getLast(ids)); } @Override public void commitProgress(IndexProgress indexProgress) { } @Override public boolean isMarkingForActiveDeletionUnsafe() { return false; } }) .build(); writeFile(dir, "file1", 25); writeFile(dir, "file2", 50); dir.deleteFile("file1"); dir.deleteFile("file2"); dir.close(); assertFalse("file1 must be reported as deleted", deletedFiles.contains("file1")); assertTrue("file2 must be reported as deleted", deletedFiles.contains("file2")); } // OAK-7066 @Test public void markAllBlobsFromBlobStoreAsDeleted() throws Exception { IndexDefinition def = new IndexDefinition(root, builder.getNodeState(), "/foo"); final Set<String> deletedFiles = newHashSet(); MemoryBlobStore bs = new MemoryBlobStore(); bs.setBlockSizeMin(48); BlobFactory factory = in -> new BlobStoreBlob(bs, bs.writeBlob(in)); OakDirectory dir = getOakDirectoryBuilder(builder, def).setReadOnly(false) .with(factory). 
with( new ActiveDeletedBlobCollectorFactory.BlobDeletionCallback() { @Override public void deleted(String blobId, Iterable<String> ids) { deletedFiles.add(Iterables.getLast(ids)); } @Override public void commitProgress(IndexProgress indexProgress) { } @Override public boolean isMarkingForActiveDeletionUnsafe() { return false; } }) .build(); writeFile(dir, "file1", 25); writeFile(dir, "file2", 50); dir.deleteFile("file1"); dir.deleteFile("file2"); dir.close(); assertTrue("file1 must be reported as deleted", deletedFiles.contains("file1")); assertTrue("file2 must be reported as deleted", deletedFiles.contains("file2")); } // OAK-6950 @Test public void blobsCreatedWhenActiveDeletionIsUnsafe() throws Exception { final int fileSize = 1; IndexDefinition def = new IndexDefinition(root, builder.getNodeState(), "/foo"); BlobFactory factory = in -> { ByteArrayOutputStream out = new ByteArrayOutputStream(); IOUtils.copy(in, out); byte[] data = out.toByteArray(); return new ArrayBasedBlob(data); }; final AtomicBoolean markingForceActiveDeletionUnsafe = new AtomicBoolean(); OakDirectory dir = getOakDirectoryBuilder(builder, def).setReadOnly(false) .with(factory). 
with( new ActiveDeletedBlobCollectorFactory.BlobDeletionCallback() { @Override public void deleted(String blobId, Iterable<String> ids) { } @Override public void commitProgress(IndexProgress indexProgress) { } @Override public boolean isMarkingForActiveDeletionUnsafe() { return markingForceActiveDeletionUnsafe.get(); } }) .build(); // file1 created before marking was flagged as unsafe writeFile(dir, "file1", fileSize); markingForceActiveDeletionUnsafe.set(true); // file2 created after marking was flagged as unsafe writeFile(dir, "file2", fileSize); dir.close(); NodeBuilder dataBuilder = builder.getChildNode(INDEX_DATA_CHILD_NAME); assertNull("file1 must not get flagged to be unsafe to be actively deleted", dataBuilder.getChildNode("file1").getProperty(PROP_UNSAFE_FOR_ACTIVE_DELETION)); assertTrue("file2 must get flagged to be unsafe to be actively deleted", dataBuilder.getChildNode("file2").getProperty(PROP_UNSAFE_FOR_ACTIVE_DELETION).getValue(Type.BOOLEAN)); } // OAK-6950 @Test public void dontReportFilesMarkedUnsafeForActiveDeletion() throws Exception { AtomicInteger blobIdSuffix = new AtomicInteger(); IndexDefinition def = new IndexDefinition(root, builder.getNodeState(), "/foo"); BlobFactory factory = in -> { ByteArrayOutputStream out = new ByteArrayOutputStream(); IOUtils.copy(in, out); byte[] data = out.toByteArray(); return new ArrayBasedBlob(data) { @Override public String getContentIdentity() { return Long.toString(length() - UNIQUE_KEY_SIZE) + "-id-" + blobIdSuffix.get(); } }; }; final AtomicBoolean markingForceActiveDeletionUnsafe = new AtomicBoolean(); final Set<String> deletedBlobs = Sets.newHashSet(); OakDirectory dir = getOakDirectoryBuilder(builder, def).setReadOnly(false) .with(factory). 
with( new ActiveDeletedBlobCollectorFactory.BlobDeletionCallback() { @Override public void deleted(String blobId, Iterable<String> ids) { deletedBlobs.add(blobId); } @Override public void commitProgress(IndexProgress indexProgress) { } @Override public boolean isMarkingForActiveDeletionUnsafe() { return markingForceActiveDeletionUnsafe.get(); } }) .build(); // file1 created before marking was flagged as unsafe blobIdSuffix.set(1); writeFile(dir, "file1", fileSize); markingForceActiveDeletionUnsafe.set(true); // file2 created after marking was flagged as unsafe blobIdSuffix.set(1); writeFile(dir, "file2", fileSize); dir.deleteFile("file1"); dir.deleteFile("file2"); dir.close(); deletedBlobs.forEach(deletedBlob -> { assertTrue("Deleted blob id " + deletedBlob + " must belong to file1", deletedBlob.endsWith("-id-1")); }); } @Test public void blobFactory() throws Exception { final AtomicInteger numBlobs = new AtomicInteger(); final int fileSize = 1024; IndexDefinition def = new IndexDefinition(root, builder.getNodeState(), "/foo"); BlobFactory factory = new BlobFactory() { @Override public Blob createBlob(InputStream in) throws IOException { ByteArrayOutputStream out = new ByteArrayOutputStream(); IOUtils.copy(in, out); byte[] data = out.toByteArray(); assertEquals(fileSize + UNIQUE_KEY_SIZE, data.length); numBlobs.incrementAndGet(); return new ArrayBasedBlob(data); } }; OakDirectory dir = getOakDirectoryBuilder(builder, def).setReadOnly(false).with(factory).build(); numBlobs.set(0); writeFile(dir, "file", fileSize); assertEquals(1, numBlobs.get()); dir.close(); } @Test public void fileLength() throws Exception { final int fileSize = 1024; final String fileName = "file"; OakDirectory dir = createDir(builder, false, "/foo"); writeFile(dir, fileName, fileSize); assertEquals(fileSize, dir.fileLength(fileName)); try { dir.fileLength("unknown"); fail("must throw FileNotFoundException"); } catch (FileNotFoundException expected) { // expected } dir.close(); } static void 
readInputToEnd(long expectedSize, IndexInput input) throws IOException { int COPY_BUFFER_SIZE = 16384; byte[] copyBuffer = new byte[(int) ONE_MB]; long left = expectedSize; while (left > 0) { final int toCopy; if (left > COPY_BUFFER_SIZE) { toCopy = COPY_BUFFER_SIZE; } else { toCopy = (int) left; } input.readBytes(copyBuffer, 0, toCopy); left -= toCopy; } } static void writeFile(Directory directory, String fileName, long size) throws Exception{ IndexOutput o = directory.createOutput(fileName, IOContext.DEFAULT); o.copyBytes(new InputStreamDataInput(new NullInputStream(size)), size); o.close(); } OakDirectoryBuilder getOakDirectoryBuilder(NodeBuilder builder, String indexPath) { return getOakDirectoryBuilder(builder, new IndexDefinition(root, builder.getNodeState(), indexPath)); } abstract OakDirectoryBuilder getOakDirectoryBuilder(NodeBuilder builder, IndexDefinition indexDefinition); abstract MemoryBlobStore getBlackHoleBlobStore(); static class FailOnDemandBlobStore extends MemoryBlobStore { private boolean fail; @Override public String writeBlob(InputStream in) throws IOException { if (fail) { throw new IOException("Failing on demand"); } return super.writeBlob(in); } public void startFailing(){ fail = true; } public void reset(){ fail = false; } } static class OakDirectoryBuilder { private final NodeBuilder builder; private final IndexDefinition defn; private final boolean streamingEnabled; public OakDirectoryBuilder(NodeBuilder builder, IndexDefinition defn, boolean streamingEnabled) { this.builder = builder; this.defn = defn; this.streamingEnabled = streamingEnabled; } private boolean readOnly = false; public OakDirectoryBuilder setReadOnly(boolean readOnly) { this.readOnly = readOnly; return this; } private GarbageCollectableBlobStore blobStore = null; private OakDirectoryBuilder with(GarbageCollectableBlobStore blobStore) { this.blobStore = blobStore; return this; } private BlobFactory blobFactory = null; public OakDirectoryBuilder with(BlobFactory 
blobFactory) { this.blobFactory = blobFactory; return this; } private ActiveDeletedBlobCollectorFactory.BlobDeletionCallback blobDeletionCallback = ActiveDeletedBlobCollectorFactory.BlobDeletionCallback.NOOP; public OakDirectoryBuilder with(ActiveDeletedBlobCollectorFactory.BlobDeletionCallback blobDeletionCallback) { this.blobDeletionCallback = blobDeletionCallback; return this; } public OakDirectory build() { if (blobFactory == null) { blobFactory = blobStore != null ? BlobFactory.getBlobStoreBlobFactory(blobStore) : BlobFactory.getNodeBuilderBlobFactory(builder); } return new OakDirectory(builder, INDEX_DATA_CHILD_NAME, defn, readOnly, blobFactory, blobDeletionCallback, streamingEnabled); } } }
package com.fireflysource.net.websocket.common.decoder; import com.fireflysource.common.io.BufferUtils; import com.fireflysource.net.websocket.common.frame.Frame; import com.fireflysource.net.websocket.common.frame.WebSocketFrame; import com.fireflysource.net.websocket.common.model.IncomingFramesCapture; import com.fireflysource.net.websocket.common.model.OpCode; import com.fireflysource.net.websocket.common.model.WebSocketBehavior; import com.fireflysource.net.websocket.common.model.WebSocketPolicy; import org.junit.jupiter.api.Test; import java.nio.ByteBuffer; import static org.junit.jupiter.api.Assertions.assertEquals; /** * Collection of Example packets as found in <a href="https://tools.ietf.org/html/rfc6455#section-5.7">RFC 6455 Examples section</a> */ public class RFC6455ExamplesParserTest { @Test public void testFragmentedUnmaskedTextMessage() { WebSocketPolicy policy = new WebSocketPolicy(WebSocketBehavior.CLIENT); Parser parser = new UnitParser(policy); IncomingFramesCapture capture = new IncomingFramesCapture(); parser.setIncomingFramesHandler(capture); ByteBuffer buf = ByteBuffer.allocate(16); BufferUtils.clearToFill(buf); // Raw bytes as found in RFC 6455, Section 5.7 - Examples // A fragmented unmasked text message (part 1 of 2 "Hel") buf.put(new byte[] {(byte) 0x01, (byte) 0x03, 0x48, (byte) 0x65, 0x6c}); // Parse #1 BufferUtils.flipToFlush(buf, 0); parser.parse(buf); // part 2 of 2 "lo" (A continuation frame of the prior text message) BufferUtils.flipToFill(buf); buf.put(new byte[] {(byte) 0x80, 0x02, 0x6c, 0x6f}); // Parse #2 BufferUtils.flipToFlush(buf, 0); parser.parse(buf); capture.assertHasFrame(OpCode.TEXT, 1); capture.assertHasFrame(OpCode.CONTINUATION, 1); WebSocketFrame txt = capture.getFrames().poll(); String actual = BufferUtils.toUTF8String(txt.getPayload()); assertEquals("Hel", actual); txt = capture.getFrames().poll(); actual = BufferUtils.toUTF8String(txt.getPayload()); assertEquals("lo", actual); } @Test public void 
testSingleMaskedPongRequest() { ByteBuffer buf = ByteBuffer.allocate(16); // Raw bytes as found in RFC 6455, Section 5.7 - Examples // Unmasked Pong request buf.put(new byte[] {(byte) 0x8a, (byte) 0x85, 0x37, (byte) 0xfa, 0x21, 0x3d, 0x7f, (byte) 0x9f, 0x4d, 0x51, 0x58}); buf.flip(); WebSocketPolicy policy = new WebSocketPolicy(WebSocketBehavior.SERVER); Parser parser = new UnitParser(policy); IncomingFramesCapture capture = new IncomingFramesCapture(); parser.setIncomingFramesHandler(capture); parser.parse(buf); capture.assertHasFrame(OpCode.PONG, 1); WebSocketFrame pong = capture.getFrames().poll(); String actual = BufferUtils.toUTF8String(pong.getPayload()); assertEquals("Hello", actual); } @Test public void testSingleMaskedTextMessage() { ByteBuffer buf = ByteBuffer.allocate(16); // Raw bytes as found in RFC 6455, Section 5.7 - Examples // A single-frame masked text message buf.put(new byte[] {(byte) 0x81, (byte) 0x85, 0x37, (byte) 0xfa, 0x21, 0x3d, 0x7f, (byte) 0x9f, 0x4d, 0x51, 0x58}); buf.flip(); WebSocketPolicy policy = new WebSocketPolicy(WebSocketBehavior.SERVER); Parser parser = new UnitParser(policy); IncomingFramesCapture capture = new IncomingFramesCapture(); parser.setIncomingFramesHandler(capture); parser.parse(buf); capture.assertHasFrame(OpCode.TEXT, 1); WebSocketFrame txt = capture.getFrames().poll(); String actual = BufferUtils.toUTF8String(txt.getPayload()); assertEquals("Hello", actual); } @Test public void testSingleUnmasked256ByteBinaryMessage() { int dataSize = 256; ByteBuffer buf = ByteBuffer.allocate(dataSize + 10); // Raw bytes as found in RFC 6455, Section 5.7 - Examples // 256 bytes binary message in a single unmasked frame buf.put(new byte[] {(byte) 0x82, 0x7E}); buf.putShort((short) 0x01_00); // 16 bit size for (int i = 0; i < dataSize; i++) { buf.put((byte) 0x44); } buf.flip(); WebSocketPolicy policy = new WebSocketPolicy(WebSocketBehavior.CLIENT); Parser parser = new UnitParser(policy); IncomingFramesCapture capture = new 
IncomingFramesCapture(); parser.setIncomingFramesHandler(capture); parser.parse(buf); capture.assertHasFrame(OpCode.BINARY, 1); Frame bin = capture.getFrames().poll(); assertEquals(dataSize, bin.getPayloadLength()); ByteBuffer data = bin.getPayload(); assertEquals(dataSize, data.remaining()); for (int i = 0; i < dataSize; i++) { assertEquals((byte) 0x44, data.get(i)); } } @Test public void testSingleUnmasked64KByteBinaryMessage() { int dataSize = 1024 * 64; ByteBuffer buf = ByteBuffer.allocate((dataSize + 10)); // Raw bytes as found in RFC 6455, Section 5.7 - Examples // 64 Kbytes binary message in a single unmasked frame buf.put(new byte[] {(byte) 0x82, 0x7F}); buf.putLong(dataSize); // 64bit size for (int i = 0; i < dataSize; i++) { buf.put((byte) 0x77); } buf.flip(); WebSocketPolicy policy = new WebSocketPolicy(WebSocketBehavior.CLIENT); Parser parser = new UnitParser(policy); IncomingFramesCapture capture = new IncomingFramesCapture(); parser.setIncomingFramesHandler(capture); parser.parse(buf); capture.assertHasFrame(OpCode.BINARY, 1); Frame bin = capture.getFrames().poll(); assertEquals(dataSize, bin.getPayloadLength()); ByteBuffer data = bin.getPayload(); assertEquals(dataSize, data.remaining()); for (int i = 0; i < dataSize; i++) { assertEquals((byte) 0x77, data.get(i)); } } @Test public void testSingleUnmaskedPingRequest() { ByteBuffer buf = ByteBuffer.allocate(16); // Raw bytes as found in RFC 6455, Section 5.7 - Examples // Unmasked Ping request buf.put(new byte[] {(byte) 0x89, 0x05, 0x48, 0x65, 0x6c, 0x6c, 0x6f}); buf.flip(); WebSocketPolicy policy = new WebSocketPolicy(WebSocketBehavior.CLIENT); Parser parser = new UnitParser(policy); IncomingFramesCapture capture = new IncomingFramesCapture(); parser.setIncomingFramesHandler(capture); parser.parse(buf); capture.assertHasFrame(OpCode.PING, 1); WebSocketFrame ping = capture.getFrames().poll(); String actual = BufferUtils.toUTF8String(ping.getPayload()); assertEquals("Hello", actual); } @Test public void 
testSingleUnmaskedTextMessage() { ByteBuffer buf = ByteBuffer.allocate(16); // Raw bytes as found in RFC 6455, Section 5.7 - Examples // A single-frame unmasked text message buf.put(new byte[] {(byte) 0x81, 0x05, 0x48, 0x65, 0x6c, 0x6c, 0x6f}); buf.flip(); WebSocketPolicy policy = new WebSocketPolicy(WebSocketBehavior.CLIENT); Parser parser = new UnitParser(policy); IncomingFramesCapture capture = new IncomingFramesCapture(); parser.setIncomingFramesHandler(capture); parser.parse(buf); capture.assertHasFrame(OpCode.TEXT, 1); WebSocketFrame txt = capture.getFrames().poll(); String actual = BufferUtils.toUTF8String(txt.getPayload()); assertEquals("Hello", actual); } }
/** * Copyright (C) 2014 Esup Portail http://www.esup-portail.org * @Author (C) 2012 Julien Gribonvald <julien.gribonvald@recia.fr> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.esupportail.publisher.web.rest; import static org.assertj.core.api.Assertions.assertThat; import static org.hamcrest.CoreMatchers.hasItem; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.Matchers.hasSize; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put; import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import java.time.Instant; import java.time.LocalDate; import java.time.format.DateTimeFormatter; import java.time.temporal.ChronoUnit; import java.util.List; import java.util.Optional; import javax.annotation.PostConstruct; import javax.inject.Inject; import javax.persistence.EntityManager; import org.esupportail.publisher.Application; import 
org.esupportail.publisher.config.Constants; import org.esupportail.publisher.domain.AbstractItem; import org.esupportail.publisher.domain.News; import org.esupportail.publisher.domain.Organization; import org.esupportail.publisher.domain.QUser; import org.esupportail.publisher.domain.Redactor; import org.esupportail.publisher.domain.User; import org.esupportail.publisher.domain.enums.ItemStatus; import org.esupportail.publisher.repository.ItemRepository; import org.esupportail.publisher.repository.ObjTest; import org.esupportail.publisher.repository.OrganizationRepository; import org.esupportail.publisher.repository.RedactorRepository; import org.esupportail.publisher.repository.UserRepository; import org.esupportail.publisher.security.AuthoritiesConstants; import org.esupportail.publisher.security.CustomUserDetails; import org.esupportail.publisher.security.IPermissionService; import org.esupportail.publisher.service.ContentService; import org.esupportail.publisher.service.FileService; import org.esupportail.publisher.service.factories.UserDTOFactory; import org.esupportail.publisher.web.rest.dto.UserDTO; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.MockitoAnnotations; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.data.web.PageableHandlerMethodArgumentResolver; import org.springframework.http.MediaType; import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter; import org.springframework.security.authentication.TestingAuthenticationToken; import org.springframework.security.core.Authentication; import org.springframework.security.core.authority.SimpleGrantedAuthority; import org.springframework.security.core.context.SecurityContextHolder; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import org.springframework.test.context.web.WebAppConfiguration; import 
org.springframework.test.util.ReflectionTestUtils;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.transaction.annotation.Transactional;

import com.google.common.collect.Lists;

import org.springframework.validation.Validator;

/**
 * Test class for the ItemResource REST controller.
 *
 * <p>Exercises the CRUD endpoints under {@code /api/items} through a standalone
 * MockMvc setup, using a {@link News} instance as the concrete
 * {@link AbstractItem} payload.
 *
 * @see org.esupportail.publisher.web.rest.ItemResource
 */
@RunWith(SpringJUnit4ClassRunner.class)
@SpringBootTest(classes = Application.class)
@WebAppConfiguration
public class ItemResourceTest {

    private static final String DEFAULT_TITLE = "SAMPLE_TEXT";
    private static final String UPDATED_TITLE = "UPDATED_TEXT";

    private static final String DEFAULT_SUMMARY = "SAMPLE_TEXT";
    private static final String UPDATED_SUMMARY = "UPDATED_TEXT";

    private static final String DEFAULT_ENCLOSURE = "http://un.domaine.fr/path/file.jpg";
    private static final String UPDATED_ENCLOSURE = "http://deux.domaine.fr/path/media.png";

    private static final LocalDate DEFAULT_END_DATE = LocalDate.now().plusDays(2);
    private static final LocalDate UPDATED_END_DATE = LocalDate.now().plusMonths(1);

    private static final LocalDate DEFAULT_START_DATE = LocalDate.now();
    private static final LocalDate UPDATED_START_DATE = LocalDate.now().minusDays(1);

    private static final ItemStatus DEFAULT_STATUS = ItemStatus.DRAFT;
    private static final ItemStatus UPDATED_STATUS = ItemStatus.PENDING;

    private static final Boolean DEFAULT_RSS_ALLOWED = true;
    private static final Boolean UPDATED_RSS_ALLOWED = false;

    private static final Instant DEFAULT_VALIDATION_DATE = ObjTest.d1;
    private static final Instant UPDATED_VALIDATION_DATE = ObjTest.d1.plus(1, ChronoUnit.DAYS);

    private static final String DEFAULT_BODY = "SAMPLE_TEXT";
    private static final String UPDATED_BODY = "UPDATED_TEXT";

    @Inject
    private ItemRepository<AbstractItem> itemRepository;

    @Autowired
    private MappingJackson2HttpMessageConverter jacksonMessageConverter;

    @Autowired
    private PageableHandlerMethodArgumentResolver pageableArgumentResolver;

    @Autowired
    private Validator validator;

    @Autowired
    private EntityManager em;

    private MockMvc restNewsMockMvc;

    // Entity under test, (re)built fresh before each test in initTest().
    private AbstractItem item;

    @Inject
    private OrganizationRepository organizationRepository;
    private Organization organization;

    @Inject
    private RedactorRepository redactorRepository;
    private Redactor redactor;

    @Inject
    private ContentService contentService;

    @Inject
    private UserRepository userRepo;

    @Inject
    private UserDTOFactory userDTOFactory;

    private User user1;
    private User user2;
    private User user3;

    @Inject
    private IPermissionService permissionService;

    /**
     * Wires the resources under test by hand (standalone MockMvc, no full web
     * context) and authenticates as the "system" account with ADMIN authority,
     * so permission checks inside the endpoints pass.
     */
    @PostConstruct
    public void setup() {
        MockitoAnnotations.initMocks(this);
        ItemResource itemResource = new ItemResource();
        OrganizationResource organizationResource = new OrganizationResource();
        FileService fileservice = new FileService();
        RedactorResource redactorResource = new RedactorResource();
        ReflectionTestUtils.setField(itemResource, "itemRepository", itemRepository);
        ReflectionTestUtils.setField(itemResource, "permissionService", permissionService);
        ReflectionTestUtils.setField(itemResource, "fileService", fileservice);
        ReflectionTestUtils.setField(organizationResource, "organizationRepository", organizationRepository);
        ReflectionTestUtils.setField(itemResource, "contentService", contentService);
        ReflectionTestUtils.setField(redactorResource, "redactorRepository", redactorRepository);
        this.restNewsMockMvc = MockMvcBuilders.standaloneSetup(itemResource, organizationResource)
            .setCustomArgumentResolvers(pageableArgumentResolver)
            //.setControllerAdvice(exceptionTranslator)
            .setConversionService(TestUtil.createFormattingConversionService())
            .setMessageConverters(jacksonMessageConverter)
            .setValidator(validator)
            .build();
        Optional<User> optionalUser = userRepo.findOne(QUser.user.login.like("system"));
        User userPart = optionalUser.orElse(null);
        UserDTO userDTOPart = userDTOFactory.from(userPart);
        CustomUserDetails userDetails = new CustomUserDetails(userDTOPart, userPart,
            Lists.newArrayList(new SimpleGrantedAuthority(AuthoritiesConstants.ADMIN)));
        Authentication authentication = new TestingAuthenticationToken(userDetails, "password",
            Lists.newArrayList(userDetails.getAuthorities()));
        SecurityContextHolder.getContext().setAuthentication(authentication);
    }

    /**
     * Builds the fixture graph (organization, redactor, users) and a fully
     * populated News entity used as the item under test.
     */
    @Before
    public void initTest() {
        final String name = "NAME";
        organization = organizationRepository.saveAndFlush(ObjTest.newOrganization(name));
        redactor = redactorRepository.saveAndFlush(ObjTest.newRedactor(name));
        user1 = userRepo.findById(ObjTest.subject1).get();
        user2 = userRepo.findById(ObjTest.subject2).get();
        user3 = userRepo.findById(ObjTest.subject3).get();
        News news = new News();
        news.setTitle(DEFAULT_TITLE);
        news.setSummary(DEFAULT_SUMMARY);
        news.setEnclosure(DEFAULT_ENCLOSURE);
        news.setEndDate(DEFAULT_END_DATE);
        news.setStartDate(DEFAULT_START_DATE);
        news.setStatus(DEFAULT_STATUS);
        news.setValidatedDate(DEFAULT_VALIDATION_DATE);
        news.setValidatedBy(user1);
        news.setBody(DEFAULT_BODY);
        news.setRssAllowed(DEFAULT_RSS_ALLOWED);
        news.setOrganization(organization);
        news.setRedactor(redactor);
        item = news;
    }

    /** POST /api/items persists the News and echoes all fields back from the database. */
    @Test
    @Transactional
    public void createItem() throws Exception {
        // Validate the database is empty
        assertThat(itemRepository.findAll()).hasSize(0);

        // Create the Item
        restNewsMockMvc.perform(
            post("/api/items").contentType(TestUtil.APPLICATION_JSON_UTF8).content(
                TestUtil.convertObjectToJsonBytes(item))).andExpect(status().isCreated());

        // Validate the News in the database
        List<AbstractItem> items = itemRepository.findAll();
        assertThat(items).hasSize(1);
        // local renamed from "item" to avoid shadowing the fixture field
        AbstractItem savedItem = items.iterator().next();
        org.junit.Assert.assertThat(savedItem, instanceOf(News.class));
        News testNews = (News) savedItem;
        assertThat(testNews.getTitle()).isEqualTo(DEFAULT_TITLE);
        assertThat(testNews.getSummary()).isEqualTo(DEFAULT_SUMMARY);
        assertThat(testNews.getEnclosure()).isEqualTo(DEFAULT_ENCLOSURE);
        assertThat(testNews.getEndDate()).isEqualTo(DEFAULT_END_DATE);
        assertThat(testNews.getStartDate()).isEqualTo(DEFAULT_START_DATE);
        assertThat(testNews.getStatus()).isEqualTo(DEFAULT_STATUS);
        assertThat(testNews.getValidatedDate()).isEqualTo(DEFAULT_VALIDATION_DATE);
        assertThat(testNews.getValidatedBy().getLogin()).isEqualTo(ObjTest.subject1);
        assertThat(testNews.getBody()).isEqualTo(DEFAULT_BODY);
        assertThat(testNews.isRssAllowed()).isEqualTo(DEFAULT_RSS_ALLOWED);
        assertThat(testNews.getRedactor()).isEqualTo(redactor);
        assertThat(testNews.getOrganization()).isEqualTo(organization);
    }

    /** GET /api/items returns the persisted item with all serialized fields. */
    @Test
    @Transactional
    public void getAllItems() throws Exception {
        // Initialize the database
        itemRepository.saveAndFlush(item);

        // Get all the Items
        restNewsMockMvc
            .perform(get("/api/items")).andDo(print())
            .andExpect(status().isOk())
            .andExpect(content().contentType(MediaType.APPLICATION_JSON))
            .andExpect(jsonPath("$.[*].id").value(hasItem(item.getId().intValue())))
            .andExpect(jsonPath("$.[*].title").value(hasItem(DEFAULT_TITLE)))
            .andExpect(jsonPath("$.[*].summary").value(hasItem(DEFAULT_SUMMARY)))
            .andExpect(jsonPath("$.[*].enclosure").value(hasItem(DEFAULT_ENCLOSURE)))
            .andExpect(jsonPath("$.[*].endDate").value(hasItem(DEFAULT_END_DATE.toString())))
            .andExpect(jsonPath("$.[*].startDate").value(hasItem(DEFAULT_START_DATE.toString())))
            .andExpect(jsonPath("$.[*].status").value(hasItem(DEFAULT_STATUS.getName())))
            .andExpect(jsonPath("$.[*].validatedBy.subject.keyId").value(hasItem(ObjTest.subject1)))
            .andExpect(jsonPath("$.[*].validatedDate").value(
                hasItem(DEFAULT_VALIDATION_DATE.toString())))
            .andExpect(jsonPath("$.[*].body").value(hasItem(DEFAULT_BODY)))
            .andExpect(jsonPath("$.[*].rssAllowed").value(hasItem(DEFAULT_RSS_ALLOWED)))
            .andExpect(jsonPath("$.[*].organization.id").value(hasItem(organization.getId().intValue())))
            .andExpect(jsonPath("$.[*].redactor.id").value(hasItem(redactor.getId().intValue())));
    }

    /** GET /api/items filtered by status/ownership request attributes. */
    @Test
    @Transactional
    public void getAllItemsOfStatusOfUser() throws Exception {
        // Initialize the database
        itemRepository.saveAndFlush(item);

        // Get all the Items of Status
        restNewsMockMvc
            .perform(get("/api/items/").requestAttr("item_status", DEFAULT_STATUS.getId()).requestAttr("owned", true))
            .andExpect(status().isOk())
            .andExpect(content().contentType(MediaType.APPLICATION_JSON))
            .andExpect(jsonPath("$", hasSize(1)))
            .andExpect(jsonPath("$.[*].createdBy.subject.keyId").value(hasItem(Constants.SYSTEM_ACCOUNT)))
            .andExpect(jsonPath("$.[*].id").value(hasItem(item.getId().intValue())))
            .andExpect(jsonPath("$.[*].title").value(hasItem(DEFAULT_TITLE)))
            .andExpect(jsonPath("$.[*].summary").value(hasItem(DEFAULT_SUMMARY)))
            .andExpect(jsonPath("$.[*].enclosure").value(hasItem(DEFAULT_ENCLOSURE)))
            .andExpect(jsonPath("$.[*].endDate").value(hasItem(DEFAULT_END_DATE.toString())))
            .andExpect(jsonPath("$.[*].startDate").value(hasItem(DEFAULT_START_DATE.toString())))
            .andExpect(jsonPath("$.[*].status").value(hasItem(DEFAULT_STATUS.getName())))
            .andExpect(jsonPath("$.[*].validatedBy.subject.keyId").value(hasItem(ObjTest.subject1)))
            .andExpect(jsonPath("$.[*].validatedDate").value(
                hasItem(DEFAULT_VALIDATION_DATE.toString())))
            .andExpect(jsonPath("$.[*].body").value(hasItem(DEFAULT_BODY)))
            // FIX: "$.[*]" yields a JSON array, so the expected boolean must be wrapped
            // in hasItem(...) like every other assertion here; a bare .value(boolean)
            // compares an array against a boolean and cannot match.
            .andExpect(jsonPath("$.[*].rssAllowed").value(hasItem(DEFAULT_RSS_ALLOWED)))
            .andExpect(jsonPath("$.[*].organization.id").value(hasItem(organization.getId().intValue())))
            .andExpect(jsonPath("$.[*].redactor.id").value(hasItem(redactor.getId().intValue())));
    }

    /** GET /api/items/{id} returns the single item with all serialized fields. */
    @Test
    @Transactional
    public void getItems() throws Exception {
        // Initialize the database
        itemRepository.saveAndFlush(item);

        // Get the item
        restNewsMockMvc
            .perform(get("/api/items/{id}", item.getId()))
            .andExpect(status().isOk())
            .andExpect(content().contentType(MediaType.APPLICATION_JSON))
            .andExpect(jsonPath("$.id").value(item.getId().intValue()))
            .andExpect(jsonPath("$.title").value(DEFAULT_TITLE))
            .andExpect(jsonPath("$.summary").value(DEFAULT_SUMMARY))
            .andExpect(jsonPath("$.enclosure").value(DEFAULT_ENCLOSURE))
            .andExpect(jsonPath("$.endDate").value(DEFAULT_END_DATE.toString()))
            .andExpect(jsonPath("$.startDate").value(DEFAULT_START_DATE.toString()))
            .andExpect(jsonPath("$.status").value(DEFAULT_STATUS.getName()))
            .andExpect(jsonPath("$.validatedBy.subject.keyId").value(ObjTest.subject1))
            .andExpect(jsonPath("$.validatedDate").value(
                DEFAULT_VALIDATION_DATE.toString()))
            .andExpect(jsonPath("$.body").value(DEFAULT_BODY))
            .andExpect(jsonPath("$.rssAllowed").value(DEFAULT_RSS_ALLOWED))
            .andExpect(jsonPath("$.organization.id").value(organization.getId().intValue()))
            .andExpect(jsonPath("$.redactor.id").value(redactor.getId().intValue()));
    }

    /** GET /api/items/{id} for an absent id answers 404. */
    @Test
    @Transactional
    public void getNonExistingItem() throws Exception {
        // Get the news
        restNewsMockMvc.perform(get("/api/items/{id}", 1L)).andExpect(status().isNotFound());
    }

    /** PUT /api/items updates every mutable field of the persisted item. */
    @Test
    @Transactional
    public void updateItem() throws Exception {
        // Initialize the database
        itemRepository.saveAndFlush(item);
        int databaseSizeBeforeUpdate = itemRepository.findAll().size();

        AbstractItem updatedItem = itemRepository.findById(item.getId()).get();
        // Detach so the in-memory changes below don't get auto-flushed before the PUT.
        em.detach(updatedItem);

        // Update the news
        updatedItem.setTitle(UPDATED_TITLE);
        updatedItem.setSummary(UPDATED_SUMMARY);
        updatedItem.setEnclosure(UPDATED_ENCLOSURE);
        updatedItem.setEndDate(UPDATED_END_DATE);
        updatedItem.setStartDate(UPDATED_START_DATE);
        updatedItem.setStatus(UPDATED_STATUS);
        updatedItem.setRssAllowed(UPDATED_RSS_ALLOWED);
        updatedItem.setValidatedDate(UPDATED_VALIDATION_DATE);
        updatedItem.setValidatedBy(user2);
        ((News) updatedItem).setBody(UPDATED_BODY);

        restNewsMockMvc.perform(
            put("/api/items").contentType(TestUtil.APPLICATION_JSON_UTF8).content(
                TestUtil.convertObjectToJsonBytes(updatedItem))).andDo(print()).andExpect(status().isOk());

        // Validate the News in the database
        List<AbstractItem> items = itemRepository.findAll();
        assertThat(items).hasSize(databaseSizeBeforeUpdate);
        // local renamed from "item" to avoid shadowing the fixture field
        AbstractItem reloadedItem = items.get((items.size() - 1));
        org.junit.Assert.assertThat(reloadedItem, instanceOf(News.class));
        News testNews = (News) reloadedItem;
        assertThat(testNews.getTitle()).isEqualTo(UPDATED_TITLE);
        assertThat(testNews.getSummary()).isEqualTo(UPDATED_SUMMARY);
        assertThat(testNews.getEnclosure()).isEqualTo(UPDATED_ENCLOSURE);
        assertThat(testNews.getEndDate()).isEqualTo(UPDATED_END_DATE);
        assertThat(testNews.getStartDate()).isEqualTo(UPDATED_START_DATE);
        assertThat(testNews.getStatus()).isEqualTo(UPDATED_STATUS);
        assertThat(testNews.getValidatedDate()).isEqualTo(UPDATED_VALIDATION_DATE);
        assertThat(testNews.getValidatedBy().getLogin()).isEqualTo(user2.getLogin());
        assertThat(testNews.getBody()).isEqualTo(UPDATED_BODY);
        assertThat(testNews.isRssAllowed()).isEqualTo(UPDATED_RSS_ALLOWED);
        assertThat(testNews.getRedactor()).isEqualTo(redactor);
        assertThat(testNews.getOrganization()).isEqualTo(organization);
    }

    /** DELETE /api/items/{id} removes the item from the database. */
    @Test
    @Transactional
    public void deleteItem() throws Exception {
        // Initialize the database
        itemRepository.saveAndFlush(item);
        int databaseSizeBeforeDelete = itemRepository.findAll().size();

        // Get the news
        restNewsMockMvc.perform(delete("/api/items/{id}", item.getId()).accept(TestUtil.APPLICATION_JSON_UTF8))
            .andExpect(status().isOk());

        // Validate the database is empty
        List<AbstractItem> items = itemRepository.findAll();
        assertThat(items).hasSize(databaseSizeBeforeDelete - 1);
    }
}
/* * Licensed to Metamarkets Group Inc. (Metamarkets) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. Metamarkets licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package io.druid.segment; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import io.druid.granularity.QueryGranularities; import io.druid.granularity.QueryGranularity; import io.druid.java.util.common.Pair; import io.druid.query.Druids; import io.druid.query.QueryRunner; import io.druid.query.Result; import io.druid.query.TestQueryRunners; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.aggregation.DoubleMaxAggregatorFactory; import io.druid.query.aggregation.DoubleMinAggregatorFactory; import io.druid.query.aggregation.DoubleSumAggregatorFactory; import io.druid.query.aggregation.PostAggregator; import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory; import io.druid.query.aggregation.post.ArithmeticPostAggregator; import io.druid.query.aggregation.post.ConstantPostAggregator; import io.druid.query.aggregation.post.FieldAccessPostAggregator; import io.druid.query.filter.DimFilter; import io.druid.query.search.SearchResultValue; import io.druid.query.search.search.SearchHit; import 
io.druid.query.search.search.SearchQuery;
import io.druid.query.spec.MultipleIntervalSegmentSpec;
import io.druid.query.spec.QuerySegmentSpec;
import io.druid.query.timeboundary.TimeBoundaryQuery;
import io.druid.query.timeboundary.TimeBoundaryResultValue;
import io.druid.query.timeseries.TimeseriesQuery;
import io.druid.query.timeseries.TimeseriesResultValue;
import io.druid.query.topn.TopNQuery;
import io.druid.query.topn.TopNQueryBuilder;
import io.druid.query.topn.TopNResultValue;
import org.joda.time.DateTime;
import org.joda.time.Interval;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Tests querying over appended incremental-index segments whose source
 * intervals overlap or leave gaps.  Three segment layouts are built in
 * {@link #setUp()} and each query type (time boundary, timeseries, topN,
 * search) is run against them with hand-computed expected results.
 *
 * NOTE(review): class is {@code @Ignore}d — presumably the append fixtures
 * are stale; confirm before re-enabling.
 */
@Ignore
public class AppendTest
{
  // Full metric set, including the HLL "quality_uniques" sketch.
  private static final AggregatorFactory[] METRIC_AGGS = new AggregatorFactory[]{
      new DoubleSumAggregatorFactory("index", "index"),
      new CountAggregatorFactory("count"),
      new HyperUniquesAggregatorFactory("quality_uniques", "quality")
  };
  // Same metrics but without the uniques sketch — used for the first segment
  // of each pair so the tests cover schema differences between appended parts.
  private static final AggregatorFactory[] METRIC_AGGS_NO_UNIQ = new AggregatorFactory[]{
      new DoubleSumAggregatorFactory("index", "index"),
      new CountAggregatorFactory("count")
  };

  final String dataSource = "testing";
  final QueryGranularity allGran = QueryGranularities.ALL;
  final String dimensionValue = "dimension";
  final String valueValue = "value";
  final String marketDimension = "market";
  final String qualityDimension = "quality";
  final String placementDimension = "placement";
  final String placementishDimension = "placementish";
  final String indexMetric = "index";
  final CountAggregatorFactory rowsCount = new CountAggregatorFactory("rows");
  final DoubleSumAggregatorFactory indexDoubleSum = new DoubleSumAggregatorFactory("index", "index");
  final HyperUniquesAggregatorFactory uniques = new HyperUniquesAggregatorFactory("uniques", "quality_uniques");
  final ConstantPostAggregator constant = new ConstantPostAggregator("const", 1L);
  final FieldAccessPostAggregator rowsPostAgg = new FieldAccessPostAggregator("rows", "rows");
  final FieldAccessPostAggregator indexPostAgg = new FieldAccessPostAggregator("index", "index");
  // addRowsIndexConstant = const(1) + rows + index; used as a sanity post-agg in most tests.
  final ArithmeticPostAggregator addRowsIndexConstant = new ArithmeticPostAggregator(
      "addRowsIndexConstant", "+", Lists.newArrayList(constant, rowsPostAgg, indexPostAgg)
  );
  final List<AggregatorFactory> commonAggregators = Arrays.asList(rowsCount, indexDoubleSum, uniques);

  // Interval wide enough to cover all test data.
  final QuerySegmentSpec fullOnInterval = new MultipleIntervalSegmentSpec(
      Arrays.asList(new Interval("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z"))
  );

  private Segment segment;
  private Segment segment2;
  private Segment segment3;

  /**
   * Builds the three appended-segment layouts described by the ASCII diagrams
   * below; each layout exercises a different overlap/gap pattern.
   */
  @Before
  public void setUp() throws Exception
  {
    // (1, 2) cover overlapping segments of the form
    // |------|
    //     |--------|
    QueryableIndex appendedIndex = SchemalessIndex.getAppendedIncrementalIndex(
        Arrays.asList(
            new Pair<String, AggregatorFactory[]>("append.json.1", METRIC_AGGS_NO_UNIQ),
            new Pair<String, AggregatorFactory[]>("append.json.2", METRIC_AGGS)
        ),
        Arrays.asList(
            new Interval("2011-01-12T00:00:00.000Z/2011-01-16T00:00:00.000Z"),
            new Interval("2011-01-14T22:00:00.000Z/2011-01-16T00:00:00.000Z")
        )
    );
    segment = new QueryableIndexSegment(null, appendedIndex);

    // (3, 4) cover overlapping segments of the form
    // |------------|
    //     |-----|
    QueryableIndex append2 = SchemalessIndex.getAppendedIncrementalIndex(
        Arrays.asList(
            new Pair<String, AggregatorFactory[]>("append.json.3", METRIC_AGGS_NO_UNIQ),
            new Pair<String, AggregatorFactory[]>("append.json.4", METRIC_AGGS)
        ),
        Arrays.asList(
            new Interval("2011-01-12T00:00:00.000Z/2011-01-16T00:00:00.000Z"),
            new Interval("2011-01-13T00:00:00.000Z/2011-01-14T00:00:00.000Z")
        )
    );
    segment2 = new QueryableIndexSegment(null, append2);

    // (5, 6, 7) test gaps that can be created in data because of rows being discounted
    // |-------------|
    //   |---|
    //          |---|
    QueryableIndex append3 = SchemalessIndex.getAppendedIncrementalIndex(
        Arrays.asList(
            new Pair<String, AggregatorFactory[]>("append.json.5", METRIC_AGGS),
            new Pair<String, AggregatorFactory[]>("append.json.6", METRIC_AGGS),
            new Pair<String, AggregatorFactory[]>("append.json.7", METRIC_AGGS)
        ),
        Arrays.asList(
            new Interval("2011-01-12T00:00:00.000Z/2011-01-22T00:00:00.000Z"),
            new Interval("2011-01-13T00:00:00.000Z/2011-01-16T00:00:00.000Z"),
            new Interval("2011-01-18T00:00:00.000Z/2011-01-21T00:00:00.000Z")
        )
    );
    segment3 = new QueryableIndexSegment(null, append3);
  }

  /** Time-boundary over segment (1,2): max time comes from the overlapping second part. */
  @Test
  public void testTimeBoundary()
  {
    List<Result<TimeBoundaryResultValue>> expectedResults = Arrays.asList(
        new Result<TimeBoundaryResultValue>(
            new DateTime("2011-01-12T00:00:00.000Z"),
            new TimeBoundaryResultValue(
                ImmutableMap.of(
                    TimeBoundaryQuery.MIN_TIME,
                    new DateTime("2011-01-12T00:00:00.000Z"),
                    TimeBoundaryQuery.MAX_TIME,
                    new DateTime("2011-01-15T02:00:00.000Z")
                )
            )
        )
    );

    TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder()
                                    .dataSource(dataSource)
                                    .build();
    QueryRunner runner = TestQueryRunners.makeTimeBoundaryQueryRunner(segment);
    HashMap<String,Object> context = new HashMap<String, Object>();
    TestHelper.assertExpectedResults(expectedResults, runner.run(query, context));
  }

  /** Time-boundary over segment (3,4). */
  @Test
  public void testTimeBoundary2()
  {
    List<Result<TimeBoundaryResultValue>> expectedResults = Arrays.asList(
        new Result<TimeBoundaryResultValue>(
            new DateTime("2011-01-12T00:00:00.000Z"),
            new TimeBoundaryResultValue(
                ImmutableMap.of(
                    TimeBoundaryQuery.MIN_TIME,
                    new DateTime("2011-01-12T00:00:00.000Z"),
                    TimeBoundaryQuery.MAX_TIME,
                    new DateTime("2011-01-15T00:00:00.000Z")
                )
            )
        )
    );

    TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder()
                                    .dataSource(dataSource)
                                    .build();
    QueryRunner runner = TestQueryRunners.makeTimeBoundaryQueryRunner(segment2);
    HashMap<String,Object> context = new HashMap<String, Object>();
    TestHelper.assertExpectedResults(expectedResults, runner.run(query, context));
  }

  /** Full-interval timeseries over segment (1,2). */
  @Test
  public void testTimeSeries()
  {
    List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
        new Result<TimeseriesResultValue>(
            new DateTime("2011-01-12T00:00:00.000Z"),
            new TimeseriesResultValue(
                ImmutableMap.<String, Object>builder()
                            .put("rows", 8L)
                            .put("index", 700.0D)
                            .put("addRowsIndexConstant", 709.0D)
                            .put("uniques", 1.0002442201269182D)
                            .put("maxIndex", 100.0D)
                            .put("minIndex", 0.0D)
                            .build()
            )
        )
    );

    TimeseriesQuery query = makeTimeseriesQuery();
    QueryRunner runner = TestQueryRunners.makeTimeSeriesQueryRunner(segment);
    HashMap<String,Object> context = new HashMap<String, Object>();
    TestHelper.assertExpectedResults(expectedResults, runner.run(query, context));
  }

  /** Full-interval timeseries over segment (3,4); no uniques expected (sketch-less part dominates). */
  @Test
  public void testTimeSeries2()
  {
    List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
        new Result<TimeseriesResultValue>(
            new DateTime("2011-01-12T00:00:00.000Z"),
            new TimeseriesResultValue(
                ImmutableMap.<String, Object>builder()
                            .put("rows", 7L)
                            .put("index", 500.0D)
                            .put("addRowsIndexConstant", 508.0D)
                            .put("uniques", 0.0D)
                            .put("maxIndex", 100.0D)
                            .put("minIndex", 0.0D)
                            .build()
            )
        )
    );

    TimeseriesQuery query = makeTimeseriesQuery();
    QueryRunner runner = TestQueryRunners.makeTimeSeriesQueryRunner(segment2);
    HashMap<String,Object> context = new HashMap<String, Object>();
    TestHelper.assertExpectedResults(expectedResults, runner.run(query, context));
  }

  /** Timeseries filtered on market in (spot, total_market) over segment (1,2). */
  @Test
  public void testFilteredTimeSeries()
  {
    List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
        new Result<TimeseriesResultValue>(
            new DateTime("2011-01-12T00:00:00.000Z"),
            new TimeseriesResultValue(
                ImmutableMap.<String, Object>builder()
                            .put("rows", 5L)
                            .put("index", 500.0D)
                            .put("addRowsIndexConstant", 506.0D)
                            .put("uniques", 1.0002442201269182D)
                            .put("maxIndex", 100.0D)
                            .put("minIndex", 100.0D)
                            .build()
            )
        )
    );

    TimeseriesQuery query = makeFilteredTimeseriesQuery();
    QueryRunner runner = TestQueryRunners.makeTimeSeriesQueryRunner(segment);
    HashMap<String,Object> context = new HashMap<String, Object>();
    TestHelper.assertExpectedResults(expectedResults, runner.run(query, context));
  }

  /** Same filtered timeseries over segment (3,4). */
  @Test
  public void testFilteredTimeSeries2()
  {
    List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
        new Result<TimeseriesResultValue>(
            new DateTime("2011-01-12T00:00:00.000Z"),
            new TimeseriesResultValue(
                ImmutableMap.<String, Object>builder()
                            .put("rows", 4L)
                            .put("index", 400.0D)
                            .put("addRowsIndexConstant", 405.0D)
                            .put("uniques", 0.0D)
                            .put("maxIndex", 100.0D)
                            .put("minIndex", 100.0D)
                            .build()
            )
        )
    );

    TimeseriesQuery query = makeFilteredTimeseriesQuery();
    QueryRunner runner = TestQueryRunners.makeTimeSeriesQueryRunner(segment2);
    HashMap<String,Object> context = new HashMap<String, Object>();
    TestHelper.assertExpectedResults(expectedResults, runner.run(query, context));
  }

  /** TopN on market over segment (1,2); the null-market bucket uses a mutable HashMap
      because ImmutableMap rejects null values. */
  @Test
  public void testTopNSeries()
  {
    List<Result<TopNResultValue>> expectedResults = Arrays.asList(
        new Result<TopNResultValue>(
            new DateTime("2011-01-12T00:00:00.000Z"),
            new TopNResultValue(
                Arrays.<Map<String, Object>>asList(
                    ImmutableMap.<String, Object>builder()
                                .put("market", "spot")
                                .put("rows", 3L)
                                .put("index", 300.0D)
                                .put("addRowsIndexConstant", 304.0D)
                                .put("uniques", 0.0D)
                                .put("maxIndex", 100.0)
                                .put("minIndex", 100.0)
                                .build(),
                    new HashMap<String, Object>()
                    {{
                        put("market", null);
                        put("rows", 3L);
                        put("index", 200.0D);
                        put("addRowsIndexConstant", 204.0D);
                        put("uniques", 0.0D);
                        put("maxIndex", 100.0);
                        put("minIndex", 0.0);
                    }},
                    ImmutableMap.<String, Object>builder()
                                .put("market", "total_market")
                                .put("rows", 2L)
                                .put("index", 200.0D)
                                .put("addRowsIndexConstant", 203.0D)
                                .put("uniques", 1.0002442201269182D)
                                .put("maxIndex", 100.0D)
                                .put("minIndex", 100.0D)
                                .build()
                )
            )
        )
    );

    TopNQuery query = makeTopNQuery();
    QueryRunner runner = TestQueryRunners.makeTopNQueryRunner(segment);
    HashMap<String,Object> context = new HashMap<String, Object>();
    TestHelper.assertExpectedResults(expectedResults, runner.run(query, context));
  }

  /** TopN on market over segment (3,4). */
  @Test
  public void testTopNSeries2()
  {
    List<Result<TopNResultValue>> expectedResults = Arrays.asList(
        new Result<TopNResultValue>(
            new DateTime("2011-01-12T00:00:00.000Z"),
            new TopNResultValue(
                Arrays.<Map<String, Object>>asList(
                    ImmutableMap.<String, Object>builder()
                                .put("market", "total_market")
                                .put("rows", 3L)
                                .put("index", 300.0D)
                                .put("addRowsIndexConstant", 304.0D)
                                .put("uniques", 0.0D)
                                .put("maxIndex", 100.0D)
                                .put("minIndex", 100.0D)
                                .build(),
                    new HashMap<String, Object>()
                    {{
                        put("market", null);
                        put("rows", 3L);
                        put("index", 100.0D);
                        put("addRowsIndexConstant", 104.0D);
                        put("uniques", 0.0D);
                        put("maxIndex", 100.0);
                        put("minIndex", 0.0);
                    }},
                    ImmutableMap.<String, Object>builder()
                                .put("market", "spot")
                                .put("rows", 1L)
                                .put("index", 100.0D)
                                .put("addRowsIndexConstant", 102.0D)
                                .put("uniques", 0.0D)
                                .put("maxIndex", 100.0)
                                .put("minIndex", 100.0)
                                .build()
                )
            )
        )
    );

    TopNQuery query = makeTopNQuery();
    QueryRunner runner = TestQueryRunners.makeTopNQueryRunner(segment2);
    HashMap<String,Object> context = new HashMap<String, Object>();
    TestHelper.assertExpectedResults(expectedResults, runner.run(query, context));
  }

  /** Filtered TopN (market=spot AND placement=preferred) over segment (1,2). */
  @Test
  public void testFilteredTopNSeries()
  {
    List<Result<TopNResultValue>> expectedResults = Arrays.asList(
        new Result<TopNResultValue>(
            new DateTime("2011-01-12T00:00:00.000Z"),
            new TopNResultValue(
                Arrays.<Map<String, Object>>asList(
                    ImmutableMap.<String, Object>builder()
                                .put("market", "spot")
                                .put("rows", 1L)
                                .put("index", 100.0D)
                                .put("addRowsIndexConstant", 102.0D)
                                .put("uniques", 0.0D)
                                .put("maxIndex", 100.0)
                                .put("minIndex", 100.0)
                                .build()
                )
            )
        )
    );

    TopNQuery query = makeFilteredTopNQuery();
    QueryRunner runner = TestQueryRunners.makeTopNQueryRunner(segment);
    HashMap<String,Object> context = new HashMap<String, Object>();
    TestHelper.assertExpectedResults(expectedResults, runner.run(query, context));
  }

  /** Same filtered TopN over segment (3,4) — no matching rows, empty result expected. */
  @Test
  public void testFilteredTopNSeries2()
  {
    List<Result<TopNResultValue>> expectedResults = Arrays.asList(
        new Result<TopNResultValue>(
            new DateTime("2011-01-12T00:00:00.000Z"),
            new TopNResultValue(
                Lists.<Map<String, Object>>newArrayList()
            )
        )
    );

    TopNQuery query = makeFilteredTopNQuery();
    QueryRunner runner = TestQueryRunners.makeTopNQueryRunner(segment2);
    HashMap<String,Object> context = new HashMap<String, Object>();
    TestHelper.assertExpectedResults(expectedResults, runner.run(query, context));
  }

  /** Search for "a" over segment (1,2). */
  @Test
  public void testSearch()
  {
    List<Result<SearchResultValue>> expectedResults = Arrays.asList(
        new Result<SearchResultValue>(
            new DateTime("2011-01-12T00:00:00.000Z"),
            new SearchResultValue(
                Arrays.<SearchHit>asList(
                    new SearchHit(placementishDimension, "a"),
                    new SearchHit(qualityDimension, "automotive"),
                    new SearchHit(placementDimension, "mezzanine"),
                    new SearchHit(marketDimension, "total_market")
                )
            )
        )
    );

    SearchQuery query = makeSearchQuery();
    QueryRunner runner = TestQueryRunners.makeSearchQueryRunner(segment);
    HashMap<String,Object> context = new HashMap<String, Object>();
    TestHelper.assertExpectedResults(expectedResults, runner.run(query, context));
  }

  /** Search for "a" over the overlapping segment (3,4) — fewer hits survive the overlap. */
  @Test
  public void testSearchWithOverlap()
  {
    List<Result<SearchResultValue>> expectedResults = Arrays.asList(
        new Result<SearchResultValue>(
            new DateTime("2011-01-12T00:00:00.000Z"),
            new SearchResultValue(
                Arrays.<SearchHit>asList(
                    new SearchHit(placementishDimension, "a"),
                    new SearchHit(placementDimension, "mezzanine"),
                    new SearchHit(marketDimension, "total_market")
                )
            )
        )
    );

    SearchQuery query = makeSearchQuery();
    QueryRunner runner = TestQueryRunners.makeSearchQueryRunner(segment2);
    HashMap<String,Object> context = new HashMap<String, Object>();
    TestHelper.assertExpectedResults(expectedResults, runner.run(query, context));
  }

  /** Search for "a" excluding market=spot over segment (1,2). */
  @Test
  public void testFilteredSearch()
  {
    List<Result<SearchResultValue>> expectedResults = Arrays.asList(
        new Result<SearchResultValue>(
            new DateTime("2011-01-12T00:00:00.000Z"),
            new SearchResultValue(
                Arrays.<SearchHit>asList(
                    new SearchHit(placementDimension, "mezzanine"),
                    new SearchHit(marketDimension, "total_market")
                )
            )
        )
    );

    SearchQuery query = makeFilteredSearchQuery();
    QueryRunner runner = TestQueryRunners.makeSearchQueryRunner(segment);
    HashMap<String,Object> context = new HashMap<String, Object>();
    TestHelper.assertExpectedResults(expectedResults, runner.run(query, context));
  }

  /** Search for "a" excluding market=spot over segment (3,4). */
  @Test
  public void testFilteredSearch2()
  {
    List<Result<SearchResultValue>> expectedResults = Arrays.asList(
        new Result<SearchResultValue>(
            new DateTime("2011-01-12T00:00:00.000Z"),
            new SearchResultValue(
                Arrays.<SearchHit>asList(
                    new SearchHit(placementishDimension, "a"),
                    new SearchHit(placementDimension, "mezzanine"),
                    new SearchHit(marketDimension, "total_market")
                )
            )
        )
    );

    SearchQuery query = makeFilteredSearchQuery();
    QueryRunner runner = TestQueryRunners.makeSearchQueryRunner(segment2);
    HashMap<String,Object> context = new HashMap<String, Object>();
    TestHelper.assertExpectedResults(expectedResults, runner.run(query, context));
  }

  /** Rows discounted by appending must stay filterable; runs on the gapped segment (5,6,7). */
  @Test
  public void testRowFiltering()
  {
    List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
        new Result<TimeseriesResultValue>(
            new DateTime("2011-01-12T00:00:00.000Z"),
            new TimeseriesResultValue(
                ImmutableMap.<String, Object>builder()
                            .put("rows", 5L)
                            .put("index", 500.0D)
                            .put("addRowsIndexConstant", 506.0D)
                            .put("uniques", 0.0D)
                            .put("maxIndex", 100.0D)
                            .put("minIndex", 100.0D)
                            .build()
            )
        )
    );

    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                  .dataSource(dataSource)
                                  .granularity(allGran)
                                  .intervals(fullOnInterval)
                                  .filters(marketDimension, "breakstuff")
                                  .aggregators(
                                      Lists.<AggregatorFactory>newArrayList(
                                          Iterables.concat(
                                              commonAggregators,
                                              Lists.newArrayList(
                                                  new DoubleMaxAggregatorFactory("maxIndex", "index"),
                                                  new DoubleMinAggregatorFactory("minIndex", "index")
                                              )
                                          )
                                      )
                                  )
                                  .postAggregators(Arrays.<PostAggregator>asList(addRowsIndexConstant))
                                  .build();
    QueryRunner runner = TestQueryRunners.makeTimeSeriesQueryRunner(segment3);
    HashMap<String,Object> context = new HashMap<String, Object>();
    TestHelper.assertExpectedResults(expectedResults, runner.run(query, context));
  }

  /** Unfiltered full-interval timeseries with common aggs plus max/min of "index". */
  private TimeseriesQuery makeTimeseriesQuery()
  {
    return Druids.newTimeseriesQueryBuilder()
                 .dataSource(dataSource)
                 .granularity(allGran)
                 .intervals(fullOnInterval)
                 .aggregators(
                     Lists.<AggregatorFactory>newArrayList(
                         Iterables.concat(
                             commonAggregators,
                             Lists.newArrayList(
                                 new DoubleMaxAggregatorFactory("maxIndex", "index"),
                                 new DoubleMinAggregatorFactory("minIndex", "index")
                             )
                         )
                     )
                 )
                 .postAggregators(Arrays.<PostAggregator>asList(addRowsIndexConstant))
                 .build();
  }

  /** Timeseries restricted to market in (spot, total_market). */
  private TimeseriesQuery makeFilteredTimeseriesQuery()
  {
    return Druids.newTimeseriesQueryBuilder()
                 .dataSource(dataSource)
                 .granularity(allGran)
                 .intervals(fullOnInterval)
                 .filters(
                     Druids.newOrDimFilterBuilder()
                           .fields(
                               Arrays.<DimFilter>asList(
                                   Druids.newSelectorDimFilterBuilder()
                                         .dimension(marketDimension)
                                         .value("spot")
                                         .build(),
                                   Druids.newSelectorDimFilterBuilder()
                                         .dimension(marketDimension)
                                         .value("total_market")
                                         .build()
                               )
                           ).build()
                 )
                 .aggregators(
                     Lists.<AggregatorFactory>newArrayList(
                         Iterables.concat(
                             commonAggregators,
                             Lists.newArrayList(
                                 new DoubleMaxAggregatorFactory("maxIndex", "index"),
                                 new DoubleMinAggregatorFactory("minIndex", "index")
                             )
                         )
                     )
                 )
                 .postAggregators(Arrays.<PostAggregator>asList(addRowsIndexConstant))
                 .build();
  }

  /** Top-3 markets ranked by the "index" metric. */
  private TopNQuery makeTopNQuery()
  {
    return new TopNQueryBuilder()
        .dataSource(dataSource)
        .granularity(allGran)
        .dimension(marketDimension)
        .metric(indexMetric)
        .threshold(3)
        .intervals(fullOnInterval)
        .aggregators(
            Lists.<AggregatorFactory>newArrayList(
                Iterables.concat(
                    commonAggregators,
                    Lists.newArrayList(
                        new DoubleMaxAggregatorFactory("maxIndex", "index"),
                        new DoubleMinAggregatorFactory("minIndex", "index")
                    )
                )
            )
        )
        .postAggregators(Arrays.<PostAggregator>asList(addRowsIndexConstant))
        .build();
  }

  /** Top-3 markets restricted to market=spot AND placement=preferred. */
  private TopNQuery makeFilteredTopNQuery()
  {
    return new TopNQueryBuilder()
        .dataSource(dataSource)
        .granularity(allGran)
        .dimension(marketDimension)
        .metric(indexMetric)
        .threshold(3)
        .filters(
            Druids.newAndDimFilterBuilder()
                  .fields(
                      Arrays.<DimFilter>asList(
                          Druids.newSelectorDimFilterBuilder()
                                .dimension(marketDimension)
                                .value("spot")
                                .build(),
                          Druids.newSelectorDimFilterBuilder()
                                .dimension(placementDimension)
                                .value("preferred")
                                .build()
                      )
                  ).build()
        )
        .intervals(fullOnInterval)
        .aggregators(
            Lists.<AggregatorFactory>newArrayList(
                Iterables.concat(
                    commonAggregators,
                    Lists.newArrayList(
                        new DoubleMaxAggregatorFactory("maxIndex", "index"),
                        new DoubleMinAggregatorFactory("minIndex", "index")
                    )
                )
            )
        )
        .postAggregators(Arrays.<PostAggregator>asList(addRowsIndexConstant))
        .build();
  }

  /** Full-interval search for the string "a" across all dimensions. */
  private SearchQuery makeSearchQuery()
  {
    return Druids.newSearchQueryBuilder()
                 .dataSource(dataSource)
                 .granularity(allGran)
                 .intervals(fullOnInterval)
                 .query("a")
                 .build();
  }

  /** Search for "a" excluding rows where market=spot. */
  private SearchQuery makeFilteredSearchQuery()
  {
    return Druids.newSearchQueryBuilder()
                 .dataSource(dataSource)
                 .filters(
                     Druids.newNotDimFilterBuilder()
                           .field(
                               Druids.newSelectorDimFilterBuilder()
                                     .dimension(marketDimension)
                                     .value("spot")
                                     .build()
                           ).build()
                 )
                 .granularity(allGran)
                 .intervals(fullOnInterval)
                 .query("a")
                 .build();
  }
}
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/datalabeling/v1beta1/operations.proto package com.google.cloud.datalabeling.v1beta1; /** * * * <pre> * Metadata of a LabelImageClassification operation. * </pre> * * Protobuf type {@code google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata} */ public final class LabelImageClassificationOperationMetadata extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata) LabelImageClassificationOperationMetadataOrBuilder { private static final long serialVersionUID = 0L; // Use LabelImageClassificationOperationMetadata.newBuilder() to construct. 
private LabelImageClassificationOperationMetadata( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private LabelImageClassificationOperationMetadata() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new LabelImageClassificationOperationMetadata(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private LabelImageClassificationOperationMetadata( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig.Builder subBuilder = null; if (basicConfig_ != null) { subBuilder = basicConfig_.toBuilder(); } basicConfig_ = input.readMessage( com.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(basicConfig_); basicConfig_ = subBuilder.buildPartial(); } break; } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.datalabeling.v1beta1.Operations 
.internal_static_google_cloud_datalabeling_v1beta1_LabelImageClassificationOperationMetadata_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.datalabeling.v1beta1.Operations .internal_static_google_cloud_datalabeling_v1beta1_LabelImageClassificationOperationMetadata_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata.class, com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata.Builder .class); } public static final int BASIC_CONFIG_FIELD_NUMBER = 1; private com.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig basicConfig_; /** * * * <pre> * Basic human annotation config used in labeling request. * </pre> * * <code>.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig basic_config = 1;</code> * * @return Whether the basicConfig field is set. */ @java.lang.Override public boolean hasBasicConfig() { return basicConfig_ != null; } /** * * * <pre> * Basic human annotation config used in labeling request. * </pre> * * <code>.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig basic_config = 1;</code> * * @return The basicConfig. */ @java.lang.Override public com.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig getBasicConfig() { return basicConfig_ == null ? com.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig.getDefaultInstance() : basicConfig_; } /** * * * <pre> * Basic human annotation config used in labeling request. 
* </pre> * * <code>.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig basic_config = 1;</code> */ @java.lang.Override public com.google.cloud.datalabeling.v1beta1.HumanAnnotationConfigOrBuilder getBasicConfigOrBuilder() { return getBasicConfig(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (basicConfig_ != null) { output.writeMessage(1, getBasicConfig()); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (basicConfig_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getBasicConfig()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata)) { return super.equals(obj); } com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata other = (com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata) obj; if (hasBasicConfig() != other.hasBasicConfig()) return false; if (hasBasicConfig()) { if (!getBasicConfig().equals(other.getBasicConfig())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasBasicConfig()) { hash = (37 * hash) + BASIC_CONFIG_FIELD_NUMBER; hash = (53 * hash) + getBasicConfig().hashCode(); } hash = (29 * 
hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata parseFrom(com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata parseFrom( 
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Metadata of a LabelImageClassification operation. * </pre> * * Protobuf type {@code * google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata) com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadataOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.datalabeling.v1beta1.Operations .internal_static_google_cloud_datalabeling_v1beta1_LabelImageClassificationOperationMetadata_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.datalabeling.v1beta1.Operations .internal_static_google_cloud_datalabeling_v1beta1_LabelImageClassificationOperationMetadata_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata.class, com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata .Builder.class); } // Construct using // com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); if (basicConfigBuilder_ == 
null) { basicConfig_ = null; } else { basicConfig_ = null; basicConfigBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.datalabeling.v1beta1.Operations .internal_static_google_cloud_datalabeling_v1beta1_LabelImageClassificationOperationMetadata_descriptor; } @java.lang.Override public com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata getDefaultInstanceForType() { return com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata .getDefaultInstance(); } @java.lang.Override public com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata build() { com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata buildPartial() { com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata result = new com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata(this); if (basicConfigBuilder_ == null) { result.basicConfig_ = basicConfig_; } else { result.basicConfig_ = basicConfigBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( 
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata) { return mergeFrom( (com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata other) { if (other == com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata .getDefaultInstance()) return this; if (other.hasBasicConfig()) { mergeBasicConfig(other.getBasicConfig()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private com.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig basicConfig_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig, 
com.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig.Builder, com.google.cloud.datalabeling.v1beta1.HumanAnnotationConfigOrBuilder> basicConfigBuilder_; /** * * * <pre> * Basic human annotation config used in labeling request. * </pre> * * <code>.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig basic_config = 1;</code> * * @return Whether the basicConfig field is set. */ public boolean hasBasicConfig() { return basicConfigBuilder_ != null || basicConfig_ != null; } /** * * * <pre> * Basic human annotation config used in labeling request. * </pre> * * <code>.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig basic_config = 1;</code> * * @return The basicConfig. */ public com.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig getBasicConfig() { if (basicConfigBuilder_ == null) { return basicConfig_ == null ? com.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig.getDefaultInstance() : basicConfig_; } else { return basicConfigBuilder_.getMessage(); } } /** * * * <pre> * Basic human annotation config used in labeling request. * </pre> * * <code>.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig basic_config = 1;</code> */ public Builder setBasicConfig( com.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig value) { if (basicConfigBuilder_ == null) { if (value == null) { throw new NullPointerException(); } basicConfig_ = value; onChanged(); } else { basicConfigBuilder_.setMessage(value); } return this; } /** * * * <pre> * Basic human annotation config used in labeling request. 
* </pre> * * <code>.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig basic_config = 1;</code> */ public Builder setBasicConfig( com.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig.Builder builderForValue) { if (basicConfigBuilder_ == null) { basicConfig_ = builderForValue.build(); onChanged(); } else { basicConfigBuilder_.setMessage(builderForValue.build()); } return this; } /** * * * <pre> * Basic human annotation config used in labeling request. * </pre> * * <code>.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig basic_config = 1;</code> */ public Builder mergeBasicConfig( com.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig value) { if (basicConfigBuilder_ == null) { if (basicConfig_ != null) { basicConfig_ = com.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig.newBuilder(basicConfig_) .mergeFrom(value) .buildPartial(); } else { basicConfig_ = value; } onChanged(); } else { basicConfigBuilder_.mergeFrom(value); } return this; } /** * * * <pre> * Basic human annotation config used in labeling request. * </pre> * * <code>.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig basic_config = 1;</code> */ public Builder clearBasicConfig() { if (basicConfigBuilder_ == null) { basicConfig_ = null; onChanged(); } else { basicConfig_ = null; basicConfigBuilder_ = null; } return this; } /** * * * <pre> * Basic human annotation config used in labeling request. * </pre> * * <code>.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig basic_config = 1;</code> */ public com.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig.Builder getBasicConfigBuilder() { onChanged(); return getBasicConfigFieldBuilder().getBuilder(); } /** * * * <pre> * Basic human annotation config used in labeling request. 
* </pre> * * <code>.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig basic_config = 1;</code> */ public com.google.cloud.datalabeling.v1beta1.HumanAnnotationConfigOrBuilder getBasicConfigOrBuilder() { if (basicConfigBuilder_ != null) { return basicConfigBuilder_.getMessageOrBuilder(); } else { return basicConfig_ == null ? com.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig.getDefaultInstance() : basicConfig_; } } /** * * * <pre> * Basic human annotation config used in labeling request. * </pre> * * <code>.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig basic_config = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig, com.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig.Builder, com.google.cloud.datalabeling.v1beta1.HumanAnnotationConfigOrBuilder> getBasicConfigFieldBuilder() { if (basicConfigBuilder_ == null) { basicConfigBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig, com.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig.Builder, com.google.cloud.datalabeling.v1beta1.HumanAnnotationConfigOrBuilder>( getBasicConfig(), getParentForChildren(), isClean()); basicConfig_ = null; } return basicConfigBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata) } // @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata) private static final com.google.cloud.datalabeling.v1beta1 .LabelImageClassificationOperationMetadata DEFAULT_INSTANCE; 
static { DEFAULT_INSTANCE = new com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata(); } public static com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<LabelImageClassificationOperationMetadata> PARSER = new com.google.protobuf.AbstractParser<LabelImageClassificationOperationMetadata>() { @java.lang.Override public LabelImageClassificationOperationMetadata parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new LabelImageClassificationOperationMetadata(input, extensionRegistry); } }; public static com.google.protobuf.Parser<LabelImageClassificationOperationMetadata> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<LabelImageClassificationOperationMetadata> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
/******************************************************************************* * Copyright 2018 Ivan Shubin http://galenframework.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package com.galenframework.speclang2.pagespec; import com.galenframework.parser.ExpectWord; import com.galenframework.parser.Expectations; import com.galenframework.parser.StructNode; import com.galenframework.parser.SyntaxException; import com.galenframework.specs.page.CorrectionsRect; import com.galenframework.page.Page; import com.galenframework.specs.page.Locator; import com.galenframework.parser.StringCharReader; import com.galenframework.utils.GalenUtils; import java.util.*; import static java.lang.String.format; public class ObjectDefinitionProcessor implements StructNodeProcessor { public static final String GROUPED = "@grouped"; private final PageSpecHandler pageSpecHandler; private static final String CORRECTIONS_SYMBOL = "@"; public ObjectDefinitionProcessor(PageSpecHandler pageSpecHandler) { this.pageSpecHandler = pageSpecHandler; } private Stack<List<String>> groupStack = new Stack<>(); public List<StructNode> process(StringCharReader reader, StructNode structNode) { if (!reader.getTheRest().isEmpty()) { throw new SyntaxException(structNode.getPlace(), "Objects definition does not take any arguments"); } if (structNode.getChildNodes() != null) { groupStack = new Stack<>(); for (StructNode childNode : 
structNode.getChildNodes()) { parseItem(childNode); } } return Collections.emptyList(); } private void parseItem(StructNode objectNode) { parseItem(objectNode, null, null); } private void parseItem(StructNode objectNode, String parentName, Locator parentLocator) { processObject(objectNode, parentName, parentLocator); } private void processObject(StructNode objectNode, String parentName, Locator parentLocator) { StringCharReader reader = new StringCharReader(pageSpecHandler.processExpressionsIn(objectNode).getName()); String objectName = reader.readWord(); if (parentName != null) { objectName = parentName + "." + objectName; } String locatorText = null; List<String> groups = null; CorrectionsRect corrections = null; while(reader.hasMore()) { String word = expectCorrectionsOrId(objectNode, reader, objectName); if (word.equals(CORRECTIONS_SYMBOL)) { corrections = Expectations.corrections().read(reader); } else if (word.equals(GROUPED)) { groups = parseInlineGroupsInBrackets(reader); } else { locatorText = word + reader.getTheRest(); reader.moveToTheEnd(); } } if (locatorText == null) { throw new SyntaxException("Missing locator"); } Locator locator = readLocatorFromString(objectNode, objectName, locatorText.trim()); locator.setCorrections(corrections); if (parentLocator != null) { locator.setParent(parentLocator); } if (objectName.contains("*")) { addMultiObjectsToSpec(objectNode, objectName, locator, groups); } else { addObjectToSpec(objectNode, objectName, locator, groups); } } private List<String> parseInlineGroupsInBrackets(StringCharReader reader) { if (reader.firstNonWhiteSpaceSymbol() == '(') { reader.readUntilSymbol('('); return GalenUtils.fromCommaSeparated(reader.readUntilSymbol(')')); } else { throw new SyntaxException("Missing '(' for group definitions"); } } private void addObjectToSpec(StructNode objectNode, String objectName, Locator locator, List<String> groupsForThisObject) { if (!objectName.matches("[0-9a-zA-Z_\\.\\-]*")) { throw new 
SyntaxException("Invalid object name: " + objectName); } pageSpecHandler.addObjectToSpec(objectName, locator); List<String> allCurrentGroups = getAllCurrentGroups(); if (allCurrentGroups != null && !allCurrentGroups.isEmpty()) { pageSpecHandler.applyGroupsToObject(objectName, allCurrentGroups); } if (groupsForThisObject != null && !groupsForThisObject.isEmpty()) { pageSpecHandler.applyGroupsToObject(objectName, groupsForThisObject); } if (objectNode.getChildNodes() != null && objectNode.getChildNodes().size() > 0) { for (StructNode subObjectNode : objectNode.getChildNodes()) { parseItem(pageSpecHandler.processExpressionsIn(subObjectNode), objectName, locator); } } } private void addMultiObjectsToSpec(StructNode objectNode, String objectName, Locator locator, List<String> groupsForThisObject) { Page page = pageSpecHandler.getPage(); int count = page.getObjectCount(locator); for (int index = 1; index <= count; index++) { addObjectToSpec(objectNode, objectName.replace("*", Integer.toString(index)), new Locator(locator.getLocatorType(), locator.getLocatorValue(), index).withParent(locator.getParent()).withCorrections(locator.getCorrections()), groupsForThisObject); } } private Locator readLocatorFromString(StructNode structNode, String objectName, String locatorText) { if (locatorText.isEmpty()) { throw new SyntaxException(structNode.getPlace(), "Missing locator for object \"" + objectName + "\""); } StringCharReader reader = new StringCharReader(locatorText); String firstWord = reader.readWord(); String locatorValue = reader.getTheRest().trim(); if ("id".equals(firstWord) || "css".equals(firstWord) || "xpath".equals(firstWord)) { return createLocator(objectName, firstWord, locatorValue); } else { return identifyLocator(locatorText); } } private Locator identifyLocator(String locatorText) { if (locatorText.startsWith("/")) { return new Locator("xpath", locatorText); } else { return new Locator("css", locatorText); } } private Locator createLocator(String objectName, 
String type, String value) { if (value == null || value.isEmpty()) { throw new SyntaxException("Locator for object \"" + objectName + "\" is not defined correctly"); } return new Locator(type, value); } private String expectCorrectionsOrId(StructNode structNode, StringCharReader reader, String objectName) { String word = new ExpectWord().stopOnTheseSymbols('(').read(reader).trim(); if (word.isEmpty()) { throw new SyntaxException(structNode.getPlace(), format("Missing locator for object \"%s\"", objectName)); } return word; } private List<String> getAllCurrentGroups() { List<String> allCurrentGroups = new LinkedList<>(); Iterator<List<String>> it = groupStack.iterator(); while(it.hasNext()) { for (String groupName : it.next()) { if (!allCurrentGroups.contains(groupName)) { allCurrentGroups.add(groupName); } } } return allCurrentGroups; } }