gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * Copyright (c) 2010-2013 Evolveum
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.evolveum.midpoint.web.page.admin.configuration.component;

import com.evolveum.midpoint.prism.PrismObject;
import com.evolveum.midpoint.schema.result.OperationResult;
import com.evolveum.midpoint.task.api.Task;
import com.evolveum.midpoint.web.component.AjaxButton;
import com.evolveum.midpoint.web.component.data.TablePanel;
import com.evolveum.midpoint.web.component.data.column.*;
import com.evolveum.midpoint.web.component.input.DropDownChoicePanel;
import com.evolveum.midpoint.web.component.input.ListMultipleChoicePanel;
import com.evolveum.midpoint.web.component.input.TextPanel;
import com.evolveum.midpoint.web.component.prism.InputPanel;
import com.evolveum.midpoint.web.component.util.Editable;
import com.evolveum.midpoint.web.component.util.ListDataProvider;
import com.evolveum.midpoint.web.component.util.LoadableModel;
import com.evolveum.midpoint.web.component.util.SimplePanel;
import com.evolveum.midpoint.web.page.admin.configuration.dto.*;
import com.evolveum.midpoint.web.util.InfoTooltipBehavior;
import com.evolveum.midpoint.web.util.WebMiscUtil;
import com.evolveum.midpoint.xml.ns._public.common.common_3.*;
import org.apache.commons.lang.StringUtils;
import org.apache.wicket.AttributeModifier;
import org.apache.wicket.Component;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.form.AjaxFormComponentUpdatingBehavior;
import org.apache.wicket.ajax.form.OnChangeAjaxBehavior;
import org.apache.wicket.behavior.AttributeAppender;
import org.apache.wicket.extensions.markup.html.repeater.data.table.DataTable;
import org.apache.wicket.extensions.markup.html.repeater.data.table.IColumn;
import org.apache.wicket.extensions.markup.html.repeater.data.table.ISortableDataProvider;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.form.*;
import org.apache.wicket.model.AbstractReadOnlyModel;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.Model;
import org.apache.wicket.model.PropertyModel;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

/**
 * Admin-configuration panel for editing the system logging configuration:
 * root logger level/appender, per-logger rows (standard/component/class loggers),
 * audit logging options, profiling options, and appender definitions.
 * Backed by a {@link LoggingDto} loaded from the system configuration object.
 *
 * @author lazyman
 */
public class LoggingConfigPanel extends SimplePanel<LoggingDto> {

    private static final String DOT_CLASS = LoggingConfigPanel.class.getName() + ".";
    private static final String OPERATION_LOAD_LOGGING_CONFIGURATION = DOT_CLASS + "loadLoggingConfiguration";

    // Wicket component ids (must match the corresponding markup file)
    private static final String ID_LOGGERS_TABLE = "loggersTable";
    private static final String ID_ROOT_LEVEL = "rootLevel";
    private static final String ID_ROOT_APPENDER = "rootAppender";
    private static final String ID_TABLE_APPENDERS = "appendersTable";
    private static final String ID_BUTTON_ADD_CONSOLE_APPENDER = "addConsoleAppender";
    private static final String ID_BUTTON_ADD_FILE_APPENDER = "addFileAppender";
    private static final String ID_BUTTON_DELETE_APPENDER = "deleteAppender";
    private static final String ID_BUTTON_ADD_STANDARD_LOGGER = "addStandardLogger";
    private static final String ID_DUMP_INTERVAL_TOOLTIP = "dumpIntervalTooltip";

    public LoggingConfigPanel(String id) {
        // Model is created lazily via createModel(), hence null here.
        super(id, null);
    }

    /**
     * Creates the panel model; non-eager LoadableModel so the logging
     * configuration is fetched only when first accessed.
     */
    @Override
    public IModel<LoggingDto> createModel() {
        return new LoadableModel<LoggingDto>(false) {

            @Override
            protected LoggingDto load() {
                return initLoggingModel();
            }
        };
    }

    /**
     * Loads the logging section of the system configuration object and wraps it
     * in a {@link LoggingDto}. On failure the operation result is shown to the
     * user and an empty DTO is returned, so the panel still renders.
     */
    private LoggingDto initLoggingModel() {
        LoggingDto dto = null;
        OperationResult result = new OperationResult(OPERATION_LOAD_LOGGING_CONFIGURATION);
        try {
            Task task = getPageBase().createSimpleTask(OPERATION_LOAD_LOGGING_CONFIGURATION);
            PrismObject<SystemConfigurationType> config = getPageBase().getModelService().getObject(
                    SystemConfigurationType.class, SystemObjectsType.SYSTEM_CONFIGURATION.value(), null,
                    task, result);
            SystemConfigurationType systemConfiguration = config.asObjectable();
            LoggingConfigurationType logging = systemConfiguration.getLogging();

            dto = new LoggingDto(config, logging);
            result.recordSuccess();
        } catch (Exception ex) {
            result.recordFatalError("Couldn't load logging configuration.", ex);
        }

        if (!result.isSuccess()) {
            getPageBase().showResult(result);
        }

        if (dto == null) {
            // Fall back to an empty configuration so the UI is still usable.
            dto = new LoggingDto();
        }

        return dto;
    }

    @Override
    protected void initLayout() {
        initLoggers();
        initAudit();
        initProfiling();
        initAppenders();
    }

    /**
     * Builds the loggers table plus the add-standard/component/class-logger and
     * delete-logger buttons.
     */
    private void initLoggers() {
        initRoot();

        ISortableDataProvider<LoggerConfiguration, String> provider = new ListDataProvider<>(this,
                new PropertyModel<List<LoggerConfiguration>>(getModel(), "loggers"));
        TablePanel table = new TablePanel<>(ID_LOGGERS_TABLE, provider, initLoggerColumns());
        table.setOutputMarkupId(true);
        table.setShowPaging(true);
        add(table);

        AjaxButton addStandardLogger = new AjaxButton(ID_BUTTON_ADD_STANDARD_LOGGER,
                createStringResource("LoggingConfigPanel.button.addStandardLogger")) {

            @Override
            public void onClick(AjaxRequestTarget target) {
                addStandardLoggerPerformed(target);
            }
        };
        add(addStandardLogger);

        AjaxButton addComponentLogger = new AjaxButton("addComponentLogger",
                createStringResource("LoggingConfigPanel.button.addComponentLogger")) {

            @Override
            public void onClick(AjaxRequestTarget target) {
                addComponentLoggerPerformed(target);
            }
        };
        add(addComponentLogger);

        AjaxButton addClassLogger = new AjaxButton("addClassLogger",
                createStringResource("LoggingConfigPanel.button.addClassLogger")) {

            @Override
            public void onClick(AjaxRequestTarget target) {
                addClassLoggerPerformed(target);
            }
        };
        add(addClassLogger);

        AjaxButton deleteLogger = new AjaxButton("deleteLogger",
                createStringResource("LoggingConfigPanel.button.deleteLogger")) {

            @Override
            public void onClick(AjaxRequestTarget target) {
                deleteLoggerPerformed(target);
            }
        };
        add(deleteLogger);
    }

    /**
     * Builds the root-logger level and root-appender drop-downs. Changing the
     * root appender refreshes the loggers table (inherited appender display
     * depends on it).
     */
    private void initRoot() {
        DropDownChoice<LoggingLevelType> rootLevel = new DropDownChoice<>(ID_ROOT_LEVEL,
                new PropertyModel<LoggingLevelType>(getModel(), LoggingDto.F_ROOT_LEVEL),
                WebMiscUtil.createReadonlyModelFromEnum(LoggingLevelType.class));
        add(rootLevel);

        DropDownChoice<String> rootAppender = new DropDownChoice<>(ID_ROOT_APPENDER,
                new PropertyModel<String>(getModel(), LoggingDto.F_ROOT_APPENDER), createAppendersListModel());
        rootAppender.setNullValid(true);
        rootAppender.add(new OnChangeAjaxBehavior() {

            @Override
            protected void onUpdate(AjaxRequestTarget target) {
                rootAppenderChangePerformed(target);
            }
        });
        add(rootAppender);
    }

    /** Builds the audit-logging controls (enable flags plus audit appender choice). */
    private void initAudit() {
        CheckBox auditLog = new CheckBox("auditLog", new PropertyModel<Boolean>(getModel(), "auditLog"));
        add(auditLog);

        CheckBox auditDetails = new CheckBox("auditDetails", new PropertyModel<Boolean>(getModel(), "auditDetails"));
        add(auditDetails);

        DropDownChoice<String> auditAppender = new DropDownChoice<>("auditAppender", new PropertyModel<String>(
                getModel(), "auditAppender"), createAppendersListModel());
        auditAppender.setNullValid(true);
        add(auditAppender);
    }

    /**
     * Builds the profiling controls: entry/exit profiling level and appender,
     * per-subsystem profiling checkboxes, and the dump interval field.
     */
    private void initProfiling() {
        //Entry-Exit profiling init
        DropDownChoice<ProfilingLevel> profilingLevel = new DropDownChoice<>("profilingLevel",
                new PropertyModel<ProfilingLevel>(getModel(), "profilingLevel"),
                WebMiscUtil.createReadonlyModelFromEnum(ProfilingLevel.class),
                new EnumChoiceRenderer<ProfilingLevel>(this));
        add(profilingLevel);

        DropDownChoice<String> profilingAppender = new DropDownChoice<>("profilingAppender",
                new PropertyModel<String>(getModel(), "profilingAppender"), createAppendersListModel());
        profilingAppender.setNullValid(true);
        add(profilingAppender);

        //Subsystem and general profiling init
        CheckBox requestFilter = new CheckBox("requestFilter",
                new PropertyModel<Boolean>(getModel(), "requestFilter"));
        CheckBox performanceStatistics = new CheckBox("performanceStatistics",
                new PropertyModel<Boolean>(getModel(), "performanceStatistics"));
        CheckBox subsystemModel = new CheckBox("subsystemModel",
                new PropertyModel<Boolean>(getModel(), "subsystemModel"));
        CheckBox subsystemRepository = new CheckBox("subsystemRepository",
                new PropertyModel<Boolean>(getModel(), "subsystemRepository"));
        CheckBox subsystemProvisioning = new CheckBox("subsystemProvisioning",
                new PropertyModel<Boolean>(getModel(), "subsystemProvisioning"));
        CheckBox subsystemUcf = new CheckBox("subsystemUcf",
                new PropertyModel<Boolean>(getModel(), "subsystemUcf"));
        CheckBox subsystemResourceObjectChangeListener = new CheckBox("subsystemResourceObjectChangeListener",
                new PropertyModel<Boolean>(getModel(), "subsystemResourceObjectChangeListener"));
        CheckBox subsystemTaskManager = new CheckBox("subsystemTaskManager",
                new PropertyModel<Boolean>(getModel(), "subsystemTaskManager"));
        CheckBox subsystemWorkflow = new CheckBox("subsystemWorkflow",
                new PropertyModel<Boolean>(getModel(), "subsystemWorkflow"));
        add(requestFilter);
        add(performanceStatistics);
        add(subsystemModel);
        add(subsystemRepository);
        add(subsystemProvisioning);
        add(subsystemUcf);
        add(subsystemResourceObjectChangeListener);
        add(subsystemTaskManager);
        add(subsystemWorkflow);

        TextField<Integer> dumpInterval = new TextField<>("dumpInterval",
                new PropertyModel<Integer>(getModel(), "dumpInterval"));
        add(dumpInterval);

        Label dumpIntervalTooltip = new Label(ID_DUMP_INTERVAL_TOOLTIP);
        dumpIntervalTooltip.add(new InfoTooltipBehavior());
        add(dumpIntervalTooltip);
    }

    /** Appends a new (editable) standard logger row and refreshes the table. */
    private void addStandardLoggerPerformed(AjaxRequestTarget target) {
        LoggingDto dto = getModel().getObject();
        StandardLogger logger = new StandardLogger(new ClassLoggerConfigurationType());
        logger.setEditing(true);
        dto.getLoggers().add(logger);

        TablePanel loggersTable = getLoggersTable();
        adjustLoggersTablePage(loggersTable, dto);
        target.add(getLoggersTable());
    }

    /** Appends a new (editable) component logger row and refreshes the table. */
    private void addComponentLoggerPerformed(AjaxRequestTarget target) {
        LoggingDto dto = getModel().getObject();
        ComponentLogger logger = new ComponentLogger(new ClassLoggerConfigurationType());
        logger.setEditing(true);
        dto.getLoggers().add(logger);

        TablePanel loggersTable = getLoggersTable();
        adjustLoggersTablePage(loggersTable, dto);
        target.add(loggersTable);
    }

    /** Appends a new (editable) class logger row and refreshes the table. */
    private void addClassLoggerPerformed(AjaxRequestTarget target) {
        LoggingDto dto = getModel().getObject();
        ClassLogger logger = new ClassLogger(new ClassLoggerConfigurationType());
        logger.setEditing(true);
        dto.getLoggers().add(logger);

        TablePanel loggersTable = getLoggersTable();
        adjustLoggersTablePage(loggersTable, dto);
        target.add(getLoggersTable());
    }

    /**
     * After a row was added, jumps to the last page when the new row started a
     * new page (page size 10), so the freshly added editable row is visible.
     */
    private void adjustLoggersTablePage(TablePanel loggersTable, LoggingDto dto) {
        if (loggersTable != null && dto.getLoggers().size() % 10 == 1 && dto.getLoggers().size() != 1) {
            DataTable table = loggersTable.getDataTable();

            if (table != null) {
                table.setCurrentPage((long) (dto.getLoggers().size() / 10));
            }
        }
    }

    private TablePanel getLoggersTable() {
        return (TablePanel) get(ID_LOGGERS_TABLE);
    }

    private TablePanel getAppendersTable() {
        return (TablePanel) get(ID_TABLE_APPENDERS);
    }

    /**
     * Builds the loggers table columns: selection checkbox, logger name
     * (editor depends on the row subtype), level, and appender list.
     */
    private List<IColumn<LoggerConfiguration, String>> initLoggerColumns() {
        List<IColumn<LoggerConfiguration, String>> columns = new ArrayList<>();
        IColumn column = new CheckBoxHeaderColumn<LoggerConfiguration>();
        columns.add(column);

        //name editing column
        columns.add(new EditableLinkColumn<LoggerConfiguration>(
                createStringResource("LoggingConfigPanel.logger"), "name") {

            @Override
            protected Component createInputPanel(String componentId, final IModel<LoggerConfiguration> model) {
                // Editor widget depends on the concrete logger type:
                // StandardLogger/ComponentLogger get an enum drop-down,
                // plain ClassLogger gets a free-text field.
                if (model.getObject() instanceof StandardLogger) {
                    DropDownChoicePanel dropDownChoicePanel = new DropDownChoicePanel(componentId,
                            new PropertyModel(model, "logger"),
                            WebMiscUtil.createReadonlyModelFromEnum(StandardLoggerType.class),
                            new IChoiceRenderer<StandardLoggerType>() {

                                @Override
                                public Object getDisplayValue(StandardLoggerType item) {
                                    return createStringResource("StandardLoggerType." + item).getString();
                                }

                                @Override
                                public String getIdValue(StandardLoggerType item, int index) {
                                    return Integer.toString(index);
                                }
                            });
                    FormComponent<StandardLoggerType> input = dropDownChoicePanel.getBaseFormComponent();
                    input.add(new LoggerValidator());
                    input.add(new EmptyOnBlurAjaxFormUpdatingBehaviour());

                    return dropDownChoicePanel;
                } else if (model.getObject() instanceof ComponentLogger) {
                    DropDownChoicePanel dropDownChoicePanel = new DropDownChoicePanel(componentId,
                            new PropertyModel(model, "component"),
                            WebMiscUtil.createReadonlyModelFromEnum(LoggingComponentType.class),
                            new IChoiceRenderer<LoggingComponentType>() {

                                @Override
                                public Object getDisplayValue(LoggingComponentType item) {
                                    return getComponenLoggerDisplayValue(item);
                                }

                                @Override
                                public String getIdValue(LoggingComponentType item, int index) {
                                    return Integer.toString(index);
                                }
                            });
                    FormComponent<LoggingComponentType> input = dropDownChoicePanel.getBaseFormComponent();
                    input.add(new LoggerValidator());
                    input.add(new EmptyOnBlurAjaxFormUpdatingBehaviour());

                    return dropDownChoicePanel;
                } else {
                    TextPanel textPanel = new TextPanel<>(componentId, new PropertyModel<String>(model,
                            getPropertyExpression()));
                    FormComponent input = textPanel.getBaseFormComponent();
                    input.add(new AttributeAppender("style", "width: 100%"));
                    input.add(new EmptyOnBlurAjaxFormUpdatingBehaviour());
                    input.add(new InputStringValidator());
                    return textPanel;
                }
            }

            @Override
            public void onClick(AjaxRequestTarget target, IModel<LoggerConfiguration> rowModel) {
                loggerEditPerformed(target, rowModel);
            }
        });

        //level editing column
        columns.add(new EditableLinkColumn<LoggerConfiguration>(
                createStringResource("LoggingConfigPanel.loggersLevel"), "level") {

            @Override
            protected Component createInputPanel(String componentId, IModel<LoggerConfiguration> model) {
                DropDownChoicePanel dropDownChoicePanel = new DropDownChoicePanel(componentId,
                        new PropertyModel(model, getPropertyExpression()),
                        WebMiscUtil.createReadonlyModelFromEnum(LoggingLevelType.class));
                FormComponent<LoggingLevelType> input = dropDownChoicePanel.getBaseFormComponent();
                input.add(new EmptyOnBlurAjaxFormUpdatingBehaviour());
                input.add(new LevelValidator());
                return dropDownChoicePanel;
            }

            @Override
            public void onClick(AjaxRequestTarget target, IModel<LoggerConfiguration> rowModel) {
                loggerEditPerformed(target, rowModel);
            }

            @Override
            protected IModel<String> createLinkModel(IModel<LoggerConfiguration> rowModel) {
                LoggerConfiguration configuration = rowModel.getObject();
                return WebMiscUtil.createLocalizedModelForEnum(configuration.getLevel(), getPageBase());
            }
        });

        //appender editing column
        columns.add(new EditableLinkColumn<LoggerConfiguration>(
                createStringResource("LoggingConfigPanel.loggersAppender"), "appenders") {

            @Override
            protected IModel<String> createLinkModel(IModel<LoggerConfiguration> rowModel) {
                final LoggerConfiguration configuration = rowModel.getObject();

                // No explicit appenders means the logger inherits the root appender.
                if (configuration.getAppenders().isEmpty()) {
                    return createStringResource("LoggingConfigPanel.appenders.Inherit");
                } else {
                    return new LoadableModel<String>() {

                        @Override
                        protected String load() {
                            // Render the appender names as a comma-separated list.
                            StringBuilder builder = new StringBuilder();
                            for (String appender : configuration.getAppenders()) {
                                if (configuration.getAppenders().indexOf(appender) != 0) {
                                    builder.append(", ");
                                }
                                builder.append(appender);
                            }

                            return builder.toString();
                        }
                    };
                }
            }

            @Override
            protected InputPanel createInputPanel(String componentId, IModel<LoggerConfiguration> model) {
                IModel<Map<String, String>> options = new Model(null);
                ListMultipleChoicePanel panel = new ListMultipleChoicePanel<>(componentId,
                        new PropertyModel<List<String>>(model, getPropertyExpression()),
                        createNewLoggerAppendersListModel(), new IChoiceRenderer<String>() {

                    @Override
                    public String getDisplayValue(String o) {
                        return o;
                    }

                    @Override
                    public String getIdValue(String o, int index) {
                        return Integer.toString(index);
                    }
                }, options);

                FormComponent<AppenderConfigurationType> input = panel.getBaseFormComponent();
                input.add(new EmptyOnChangeAjaxFormUpdatingBehavior());
                return panel;
            }

            public void onClick(AjaxRequestTarget target, IModel<LoggerConfiguration> rowModel) {
                loggerEditPerformed(target, rowModel);
            }
        });

        return columns;
    }

    // NOTE(review): method name has a typo ("Componen" -> "Component"); private,
    // so a rename would be safe but is out of scope for a docs-only pass.
    private String getComponenLoggerDisplayValue(LoggingComponentType item) {
        return createStringResource("LoggingComponentType." + item).getString();
    }

    /**
     * Model with the appender names offered to a logger row editor; the current
     * root appender is excluded (it is already inherited by default).
     */
    private IModel<List<String>> createNewLoggerAppendersListModel() {
        return new AbstractReadOnlyModel<List<String>>() {

            @Override
            public List<String> getObject() {
                List<String> list = new ArrayList<>();

                LoggingDto dto = getModel().getObject();
                for (AppenderConfiguration appender : dto.getAppenders()) {
                    if (!appender.getName().equals(dto.getRootAppender())) {
                        list.add(appender.getName());
                    }
                }

                return list;
            }
        };
    }

    /** Model with the names of all configured appenders. */
    private IModel<List<String>> createAppendersListModel() {
        return new AbstractReadOnlyModel<List<String>>() {

            @Override
            public List<String> getObject() {
                List<String> list = new ArrayList<>();

                LoggingDto dto = getModel().getObject();
                for (AppenderConfiguration appender : dto.getAppenders()) {
                    list.add(appender.getName());
                }

                return list;
            }
        };
    }

    /** Removes all selected logger rows and refreshes the table. */
    private void deleteLoggerPerformed(AjaxRequestTarget target) {
        Iterator<LoggerConfiguration> iterator = getModel().getObject().getLoggers().iterator();
        while (iterator.hasNext()) {
            LoggerConfiguration item = iterator.next();
            if (item.isSelected()) {
                iterator.remove();
            }
        }
        target.add(getLoggersTable());
    }

    /**
     * Builds the appenders table plus the add-console/file-appender and
     * delete-appender buttons.
     */
    private void initAppenders() {
        ISortableDataProvider<AppenderConfiguration, String> provider = new ListDataProvider<>(
                this, new PropertyModel<List<AppenderConfiguration>>(getModel(), LoggingDto.F_APPENDERS));

        TablePanel table = new TablePanel<>(ID_TABLE_APPENDERS, provider, initAppenderColumns());
        table.setOutputMarkupId(true);
        table.setShowPaging(false);
        add(table);

        AjaxButton addConsoleAppender = new AjaxButton(ID_BUTTON_ADD_CONSOLE_APPENDER,
                createStringResource("LoggingConfigPanel.button.addConsoleAppender")) {

            @Override
            public void onClick(AjaxRequestTarget target) {
                addConsoleAppenderPerformed(target);
            }
        };
        add(addConsoleAppender);

        AjaxButton addFileAppender = new AjaxButton(ID_BUTTON_ADD_FILE_APPENDER,
                createStringResource("LoggingConfigPanel.button.addFileAppender")) {

            @Override
            public void onClick(AjaxRequestTarget target) {
                addFileAppenderPerformed(target);
            }
        };
        add(addFileAppender);

        AjaxButton deleteAppender = new AjaxButton(ID_BUTTON_DELETE_APPENDER,
                createStringResource("LoggingConfigPanel.button.deleteAppender")) {

            @Override
            public void onClick(AjaxRequestTarget target) {
                deleteAppenderPerformed(target);
            }
        };
        add(deleteAppender);
    }

    /**
     * Builds the appenders table columns. File-specific columns (path, pattern,
     * max history/size) are editable only for {@link FileAppenderConfig} rows
     * via {@link FileAppenderColumn}.
     */
    private List<IColumn<AppenderConfiguration, String>> initAppenderColumns() {
        List<IColumn<AppenderConfiguration, String>> columns = new ArrayList<>();

        IColumn column = new CheckBoxHeaderColumn<AppenderConfiguration>();
        columns.add(column);

        //name columns (editable)
        column = new EditableLinkColumn<AppenderConfiguration>(
                createStringResource("LoggingConfigPanel.appenders.name"), "name") {

            @Override
            public void onClick(AjaxRequestTarget target, IModel<AppenderConfiguration> rowModel) {
                appenderEditPerformed(target, rowModel);
            }

            @Override
            protected Component createInputPanel(String componentId, IModel<AppenderConfiguration> model) {
                TextPanel<String> panel = new TextPanel<String>(componentId,
                        new PropertyModel(model, getPropertyExpression()));
                panel.getBaseFormComponent().add(new EmptyOnBlurAjaxFormUpdatingBehaviour());
                panel.add(new InputStringValidator());
                return panel;
            }
        };
        columns.add(column);

        //pattern column (editable)
        column = new EditablePropertyColumn(
                createStringResource("LoggingConfigPanel.appenders.pattern"), "pattern") {

            @Override
            protected InputPanel createInputPanel(String componentId, IModel model) {
                InputPanel panel = super.createInputPanel(componentId, model);
                panel.getBaseFormComponent().add(new EmptyOnBlurAjaxFormUpdatingBehaviour());
                panel.add(new InputStringValidator());
                return panel;
            }
        };
        columns.add(column);

        //file path column (editable)
        column = new FileAppenderColumn(
                createStringResource("LoggingConfigPanel.appenders.filePath"), "filePath");
        columns.add(column);

        //file pattern column (editable)
        column = new FileAppenderColumn(
                createStringResource("LoggingConfigPanel.appenders.filePattern"), "filePattern");
        columns.add(column);

        //max history column (editable)
        column = new FileAppenderColumn(
                createStringResource("LoggingConfigPanel.appenders.maxHistory"), "maxHistory") {

            @Override
            protected InputPanel createInputPanel(String componentId, IModel model) {
                TextPanel panel = new TextPanel<>(componentId,
                        new PropertyModel<String>(model, getPropertyExpression()));
                FormComponent component = panel.getBaseFormComponent();
                component.add(new AttributeModifier("size", 5));
                component.add(new EmptyOnBlurAjaxFormUpdatingBehaviour());
                return panel;
            }
        };
        columns.add(column);

        //max file size column (editable)
        column = new FileAppenderColumn(
                createStringResource("LoggingConfigPanel.appenders.maxFileSize"), "maxFileSize") {

            @Override
            protected InputPanel createInputPanel(String componentId, IModel model) {
                TextPanel<String> panel = new TextPanel<>(componentId,
                        new PropertyModel<String>(model, getPropertyExpression()));
                FormComponent component = panel.getBaseFormComponent();
                component.add(new AttributeModifier("size", 5));
                component.add(new EmptyOnBlurAjaxFormUpdatingBehaviour());
                component.add(new InputStringValidator());
                return panel;
            }
        };
        columns.add(column);

        // "appending" flag column; kept disabled (read-only) in the UI.
        CheckBoxColumn check = new EditableCheckboxColumn(
                createStringResource("LoggingConfigPanel.appenders.appending"), "appending") {

            @Override
            protected InputPanel createInputPanel(String componentId, IModel model) {
                InputPanel panel = super.createInputPanel(componentId, model);
                panel.getBaseFormComponent().add(new EmptyOnBlurAjaxFormUpdatingBehaviour());
                panel.add(new InputStringValidator());
                return panel;
            }
        };
        check.setEnabled(false);
        columns.add(check);

        return columns;
    }

    /**
     * Adapts a {@link FilterConfiguration} name to a {@link LoggingComponentType}
     * value (null when the name is empty).
     */
    private IModel<LoggingComponentType> createFilterModel(final IModel<FilterConfiguration> model) {
        return new Model<LoggingComponentType>() {

            @Override
            public LoggingComponentType getObject() {
                String name = model.getObject().getName();
                if (StringUtils.isEmpty(name)) {
                    return null;
                }

                return LoggingComponentType.fromValue(name);
            }

            @Override
            public void setObject(LoggingComponentType object) {
                model.getObject().setName(object.name());
            }
        };
    }

    /** Switches a logger row to editing mode and refreshes the loggers table. */
    private void loggerEditPerformed(AjaxRequestTarget target, IModel<LoggerConfiguration> rowModel) {
        LoggerConfiguration config = rowModel.getObject();
        config.setEditing(true);
        target.add(getLoggersTable());
    }

    /** Ajax behavior that only pushes the form value to the model on change. */
    private static class EmptyOnChangeAjaxFormUpdatingBehavior extends AjaxFormComponentUpdatingBehavior {

        public EmptyOnChangeAjaxFormUpdatingBehavior() {
            super("onChange");
        }

        @Override
        protected void onUpdate(AjaxRequestTarget target) {
        }
    }

    /** Ajax behavior that only pushes the form value to the model on blur. */
    private static class EmptyOnBlurAjaxFormUpdatingBehaviour extends AjaxFormComponentUpdatingBehavior {

        public EmptyOnBlurAjaxFormUpdatingBehaviour() {
            super("onBlur");
        }

        @Override
        protected void onUpdate(AjaxRequestTarget target) {
        }
    }

    /**
     * Property column that is editable only for {@link FileAppenderConfig}
     * rows; console appenders show the plain value.
     */
    private static class FileAppenderColumn<T extends Editable> extends EditablePropertyColumn<T> {

        private FileAppenderColumn(IModel<String> displayModel, String propertyExpression) {
            super(displayModel, propertyExpression);
        }

        @Override
        protected boolean isEditing(IModel<T> rowModel) {
            return super.isEditing(rowModel) && (rowModel.getObject() instanceof FileAppenderConfig);
        }

        @Override
        protected InputPanel createInputPanel(String componentId, IModel iModel) {
            InputPanel panel = super.createInputPanel(componentId, iModel);
            panel.getBaseFormComponent().add(new EmptyOnBlurAjaxFormUpdatingBehaviour());
            panel.add(new InputStringValidator());
            return panel;
        }
    }

    /** Refreshes the loggers table after the root appender selection changed. */
    private void rootAppenderChangePerformed(AjaxRequestTarget target) {
        target.add(getLoggersTable());
    }

    /** Appends a new (editable) console appender row and refreshes the table. */
    private void addConsoleAppenderPerformed(AjaxRequestTarget target) {
        LoggingDto dto = getModel().getObject();
        AppenderConfiguration appender = new AppenderConfiguration(new AppenderConfigurationType());
        appender.setEditing(true);
        dto.getAppenders().add(appender);

        target.add(getAppendersTable());
    }

    /** Appends a new (editable) file appender row and refreshes the table. */
    private void addFileAppenderPerformed(AjaxRequestTarget target) {
        LoggingDto dto = getModel().getObject();
        FileAppenderConfig appender = new FileAppenderConfig(new FileAppenderConfigurationType());
        appender.setEditing(true);
        dto.getAppenders().add(appender);

        target.add(getAppendersTable());
    }

    /** Removes all selected appender rows and refreshes the table. */
    private void deleteAppenderPerformed(AjaxRequestTarget target) {
        Iterator<AppenderConfiguration> iterator = getModel().getObject().getAppenders().iterator();
        while (iterator.hasNext()) {
            AppenderConfiguration item = iterator.next();
            if (item.isSelected()) {
                iterator.remove();
            }
        }
        target.add(getAppendersTable());
    }

    /** Switches an appender row to editing mode and refreshes the appenders table. */
    private void appenderEditPerformed(AjaxRequestTarget target, IModel<AppenderConfiguration> model) {
        AppenderConfiguration config = model.getObject();
        config.setEditing(true);
        target.add(getAppendersTable());
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Autogenerated by Thrift Compiler (0.7.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING */ package backtype.storm.generated; import org.apache.commons.lang.builder.HashCodeBuilder; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class ComponentCommon implements org.apache.thrift7.TBase<ComponentCommon, ComponentCommon._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("ComponentCommon"); private static final org.apache.thrift7.protocol.TField INPUTS_FIELD_DESC = new org.apache.thrift7.protocol.TField("inputs", org.apache.thrift7.protocol.TType.MAP, (short)1); private static final org.apache.thrift7.protocol.TField STREAMS_FIELD_DESC = new org.apache.thrift7.protocol.TField("streams", org.apache.thrift7.protocol.TType.MAP, (short)2); private static 
final org.apache.thrift7.protocol.TField PARALLELISM_HINT_FIELD_DESC = new org.apache.thrift7.protocol.TField("parallelism_hint", org.apache.thrift7.protocol.TType.I32, (short)3); private static final org.apache.thrift7.protocol.TField JSON_CONF_FIELD_DESC = new org.apache.thrift7.protocol.TField("json_conf", org.apache.thrift7.protocol.TType.STRING, (short)4); private Map<GlobalStreamId,Grouping> inputs; // required private Map<String,StreamInfo> streams; // required private int parallelism_hint; // required private String json_conf; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift7.TFieldIdEnum { INPUTS((short)1, "inputs"), STREAMS((short)2, "streams"), PARALLELISM_HINT((short)3, "parallelism_hint"), JSON_CONF((short)4, "json_conf"); private static final Map<String, _Fields> byName = new HashMap<String, _Fields>(); static { for (_Fields field : EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. */ public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 1: // INPUTS return INPUTS; case 2: // STREAMS return STREAMS; case 3: // PARALLELISM_HINT return PARALLELISM_HINT; case 4: // JSON_CONF return JSON_CONF; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. */ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found. 
*/
  // NOTE(review): Thrift-generated accessor. Reverse lookup of an enum constant
  // by its thrift-schema field name; returns null when the name is unknown.
  public static _Fields findByName(String name) {
    return byName.get(name);
  }

  private final short _thriftId;
  private final String _fieldName;

  _Fields(short thriftId, String fieldName) {
    _thriftId = thriftId;
    _fieldName = fieldName;
  }

  public short getThriftFieldId() {
    return _thriftId;
  }

  public String getFieldName() {
    return _fieldName;
  }
}

// isset id assignments
// Bit index used to record whether the optional primitive parallelism_hint
// has been explicitly assigned (primitives cannot be null-checked).
private static final int __PARALLELISM_HINT_ISSET_ID = 0;
private BitSet __isset_bit_vector = new BitSet(1);

// Immutable field metadata describing this struct's wire schema; registered
// with the thrift runtime in the static initializer below.
public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
static {
  Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
  // inputs: required map<GlobalStreamId, Grouping>
  tmpMap.put(_Fields.INPUTS, new org.apache.thrift7.meta_data.FieldMetaData("inputs", org.apache.thrift7.TFieldRequirementType.REQUIRED,
      new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
          new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, GlobalStreamId.class),
          new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, Grouping.class))));
  // streams: required map<string, StreamInfo>
  tmpMap.put(_Fields.STREAMS, new org.apache.thrift7.meta_data.FieldMetaData("streams", org.apache.thrift7.TFieldRequirementType.REQUIRED,
      new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
          new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING),
          new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, StreamInfo.class))));
  // parallelism_hint: optional i32
  tmpMap.put(_Fields.PARALLELISM_HINT, new org.apache.thrift7.meta_data.FieldMetaData("parallelism_hint", org.apache.thrift7.TFieldRequirementType.OPTIONAL,
      new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.I32)));
  // json_conf: optional string
  tmpMap.put(_Fields.JSON_CONF, new org.apache.thrift7.meta_data.FieldMetaData("json_conf", org.apache.thrift7.TFieldRequirementType.OPTIONAL,
      new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
  metaDataMap = Collections.unmodifiableMap(tmpMap);
  org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(ComponentCommon.class, metaDataMap);
}

public ComponentCommon() {
}

// Convenience constructor covering the two required fields.
public ComponentCommon(
  Map<GlobalStreamId,Grouping> inputs,
  Map<String,StreamInfo> streams)
{
  this();
  this.inputs = inputs;
  this.streams = streams;
}

/**
 * Performs a deep copy on <i>other</i>.
 */
public ComponentCommon(ComponentCommon other) {
  __isset_bit_vector.clear();
  __isset_bit_vector.or(other.__isset_bit_vector);
  if (other.is_set_inputs()) {
    // Deep-copy both keys and values: GlobalStreamId and Grouping are mutable structs.
    Map<GlobalStreamId,Grouping> __this__inputs = new HashMap<GlobalStreamId,Grouping>();
    for (Map.Entry<GlobalStreamId, Grouping> other_element : other.inputs.entrySet()) {
      GlobalStreamId other_element_key = other_element.getKey();
      Grouping other_element_value = other_element.getValue();
      GlobalStreamId __this__inputs_copy_key = new GlobalStreamId(other_element_key);
      Grouping __this__inputs_copy_value = new Grouping(other_element_value);
      __this__inputs.put(__this__inputs_copy_key, __this__inputs_copy_value);
    }
    this.inputs = __this__inputs;
  }
  if (other.is_set_streams()) {
    // String keys are immutable and can be shared; StreamInfo values are deep-copied.
    Map<String,StreamInfo> __this__streams = new HashMap<String,StreamInfo>();
    for (Map.Entry<String, StreamInfo> other_element : other.streams.entrySet()) {
      String other_element_key = other_element.getKey();
      StreamInfo other_element_value = other_element.getValue();
      String __this__streams_copy_key = other_element_key;
      StreamInfo __this__streams_copy_value = new StreamInfo(other_element_value);
      __this__streams.put(__this__streams_copy_key, __this__streams_copy_value);
    }
    this.streams = __this__streams;
  }
  this.parallelism_hint = other.parallelism_hint;
  if (other.is_set_json_conf()) {
    this.json_conf = other.json_conf;
  }
}

public ComponentCommon deepCopy() {
  return new ComponentCommon(this);
}

// Resets every field to its unset/default state.
@Override
public void clear() {
  this.inputs = null;
  this.streams = null;
  set_parallelism_hint_isSet(false);
  this.parallelism_hint = 0;
  this.json_conf = null;
}

public int get_inputs_size() {
  return (this.inputs == null) ? 0 : this.inputs.size();
}

// Lazily creates the inputs map on first insertion.
public void put_to_inputs(GlobalStreamId key, Grouping val) {
  if (this.inputs == null) {
    this.inputs = new HashMap<GlobalStreamId,Grouping>();
  }
  this.inputs.put(key, val);
}

public Map<GlobalStreamId,Grouping> get_inputs() {
  return this.inputs;
}

public void set_inputs(Map<GlobalStreamId,Grouping> inputs) {
  this.inputs = inputs;
}

public void unset_inputs() {
  this.inputs = null;
}

/** Returns true if field inputs is set (has been assigned a value) and false otherwise */
public boolean is_set_inputs() {
  return this.inputs != null;
}

public void set_inputs_isSet(boolean value) {
  if (!value) {
    this.inputs = null;
  }
}

public int get_streams_size() {
  return (this.streams == null) ? 0 : this.streams.size();
}

// Lazily creates the streams map on first insertion.
public void put_to_streams(String key, StreamInfo val) {
  if (this.streams == null) {
    this.streams = new HashMap<String,StreamInfo>();
  }
  this.streams.put(key, val);
}

public Map<String,StreamInfo> get_streams() {
  return this.streams;
}

public void set_streams(Map<String,StreamInfo> streams) {
  this.streams = streams;
}

public void unset_streams() {
  this.streams = null;
}

/** Returns true if field streams is set (has been assigned a value) and false otherwise */
public boolean is_set_streams() {
  return this.streams != null;
}

public void set_streams_isSet(boolean value) {
  if (!value) {
    this.streams = null;
  }
}

public int get_parallelism_hint() {
  return this.parallelism_hint;
}

public void set_parallelism_hint(int parallelism_hint) {
  this.parallelism_hint = parallelism_hint;
  set_parallelism_hint_isSet(true);
}

public void unset_parallelism_hint() {
  __isset_bit_vector.clear(__PARALLELISM_HINT_ISSET_ID);
}

/** Returns true if field parallelism_hint is set (has been assigned a value) and false otherwise */
public boolean is_set_parallelism_hint() {
  return __isset_bit_vector.get(__PARALLELISM_HINT_ISSET_ID);
}

public void set_parallelism_hint_isSet(boolean value) {
  __isset_bit_vector.set(__PARALLELISM_HINT_ISSET_ID, value);
}

public String get_json_conf() {
  return this.json_conf;
}

public void set_json_conf(String json_conf) {
  this.json_conf = json_conf;
}

public void unset_json_conf() {
  this.json_conf = null;
}

/** Returns true if field json_conf is set (has been assigned a value) and false otherwise */
public boolean is_set_json_conf() {
  return this.json_conf != null;
}

public void set_json_conf_isSet(boolean value) {
  if (!value) {
    this.json_conf = null;
  }
}

// Generic field mutator used by the thrift runtime; a null value unsets the field.
public void setFieldValue(_Fields field, Object value) {
  switch (field) {
  case INPUTS:
    if (value == null) {
      unset_inputs();
    } else {
      set_inputs((Map<GlobalStreamId,Grouping>)value);
    }
    break;

  case STREAMS:
    if (value == null) {
      unset_streams();
    } else {
      set_streams((Map<String,StreamInfo>)value);
    }
    break;

  case PARALLELISM_HINT:
    if (value == null) {
      unset_parallelism_hint();
    } else {
      set_parallelism_hint((Integer)value);
    }
    break;

  case JSON_CONF:
    if (value == null) {
      unset_json_conf();
    } else {
      set_json_conf((String)value);
    }
    break;

  }
}

// Generic field accessor used by the thrift runtime; primitives are boxed.
public Object getFieldValue(_Fields field) {
  switch (field) {
  case INPUTS:
    return get_inputs();

  case STREAMS:
    return get_streams();

  case PARALLELISM_HINT:
    return Integer.valueOf(get_parallelism_hint());

  case JSON_CONF:
    return get_json_conf();

  }
  throw new IllegalStateException();
}

/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
  if (field == null) {
    throw new IllegalArgumentException();
  }

  switch (field) {
  case INPUTS:
    return is_set_inputs();
  case STREAMS:
    return is_set_streams();
  case PARALLELISM_HINT:
    return is_set_parallelism_hint();
  case JSON_CONF:
    return is_set_json_conf();
  }
  throw new IllegalStateException();
}

@Override
public boolean equals(Object that) {
  if (that == null)
    return false;
  if (that instanceof ComponentCommon)
    return this.equals((ComponentCommon)that);
  return false;
}

// Field-by-field equality: two structs are equal only when each field's
// set-state matches and, when set, the values are equal.
public boolean equals(ComponentCommon that) {
  if (that == null)
    return false;

  boolean this_present_inputs = true && this.is_set_inputs();
  boolean that_present_inputs = true && that.is_set_inputs();
  if (this_present_inputs || that_present_inputs) {
    if (!(this_present_inputs && that_present_inputs))
      return false;
    if (!this.inputs.equals(that.inputs))
      return false;
  }

  boolean this_present_streams = true && this.is_set_streams();
  boolean that_present_streams = true && that.is_set_streams();
  if (this_present_streams || that_present_streams) {
    if (!(this_present_streams && that_present_streams))
      return false;
    if (!this.streams.equals(that.streams))
      return false;
  }

  boolean this_present_parallelism_hint = true && this.is_set_parallelism_hint();
  boolean that_present_parallelism_hint = true && that.is_set_parallelism_hint();
  if (this_present_parallelism_hint || that_present_parallelism_hint) {
    if (!(this_present_parallelism_hint && that_present_parallelism_hint))
      return false;
    if (this.parallelism_hint != that.parallelism_hint)
      return false;
  }

  boolean this_present_json_conf = true && this.is_set_json_conf();
  boolean that_present_json_conf = true && that.is_set_json_conf();
  if (this_present_json_conf || that_present_json_conf) {
    if (!(this_present_json_conf && that_present_json_conf))
      return false;
    if (!this.json_conf.equals(that.json_conf))
      return false;
  }

  return true;
}

// Hash folds in each field's set-state plus (when set) its value, mirroring equals().
@Override
public int hashCode() {
  HashCodeBuilder builder = new HashCodeBuilder();

  boolean present_inputs = true && (is_set_inputs());
  builder.append(present_inputs);
  if (present_inputs)
    builder.append(inputs);

  boolean present_streams = true && (is_set_streams());
  builder.append(present_streams);
  if (present_streams)
    builder.append(streams);

  boolean present_parallelism_hint = true && (is_set_parallelism_hint());
  builder.append(present_parallelism_hint);
  if (present_parallelism_hint)
    builder.append(parallelism_hint);

  boolean present_json_conf = true && (is_set_json_conf());
  builder.append(present_json_conf);
  if (present_json_conf)
    builder.append(json_conf);

  return builder.toHashCode();
}

// Orders by field set-state first, then by field value, in schema order.
public int compareTo(ComponentCommon other) {
  if (!getClass().equals(other.getClass())) {
    return getClass().getName().compareTo(other.getClass().getName());
  }

  int lastComparison = 0;
  ComponentCommon typedOther = (ComponentCommon)other;

  lastComparison = Boolean.valueOf(is_set_inputs()).compareTo(typedOther.is_set_inputs());
  if (lastComparison != 0) {
    return lastComparison;
  }
  if (is_set_inputs()) {
    lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.inputs, typedOther.inputs);
    if (lastComparison != 0) {
      return lastComparison;
    }
  }
  lastComparison = Boolean.valueOf(is_set_streams()).compareTo(typedOther.is_set_streams());
  if (lastComparison != 0) {
    return lastComparison;
  }
  if (is_set_streams()) {
    lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.streams, typedOther.streams);
    if (lastComparison != 0) {
      return lastComparison;
    }
  }
  lastComparison = Boolean.valueOf(is_set_parallelism_hint()).compareTo(typedOther.is_set_parallelism_hint());
  if (lastComparison != 0) {
    return lastComparison;
  }
  if (is_set_parallelism_hint()) {
    lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.parallelism_hint, typedOther.parallelism_hint);
    if (lastComparison != 0) {
      return lastComparison;
    }
  }
  lastComparison = Boolean.valueOf(is_set_json_conf()).compareTo(typedOther.is_set_json_conf());
  if (lastComparison != 0) {
    return lastComparison;
  }
  if (is_set_json_conf()) {
    lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.json_conf, typedOther.json_conf);
    if (lastComparison != 0) {
      return lastComparison;
    }
  }
  return 0;
}

public _Fields fieldForId(int fieldId) {
  return _Fields.findByThriftId(fieldId);
}

// Deserializes this struct from the protocol; unknown/mistyped fields are
// skipped so newer writers stay readable. Ends by validating required fields.
public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
  org.apache.thrift7.protocol.TField field;
  iprot.readStructBegin();
  while (true)
  {
    field = iprot.readFieldBegin();
    if (field.type == org.apache.thrift7.protocol.TType.STOP) {
      break;
    }
    switch (field.id) {
      case 1: // INPUTS
        if (field.type == org.apache.thrift7.protocol.TType.MAP) {
          {
            org.apache.thrift7.protocol.TMap _map12 = iprot.readMapBegin();
            this.inputs = new HashMap<GlobalStreamId,Grouping>(2*_map12.size);
            for (int _i13 = 0; _i13 < _map12.size; ++_i13)
            {
              GlobalStreamId _key14; // required
              Grouping _val15; // required
              _key14 = new GlobalStreamId();
              _key14.read(iprot);
              _val15 = new Grouping();
              _val15.read(iprot);
              this.inputs.put(_key14, _val15);
            }
            iprot.readMapEnd();
          }
        } else {
          org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
        }
        break;
      case 2: // STREAMS
        if (field.type == org.apache.thrift7.protocol.TType.MAP) {
          {
            org.apache.thrift7.protocol.TMap _map16 = iprot.readMapBegin();
            this.streams = new HashMap<String,StreamInfo>(2*_map16.size);
            for (int _i17 = 0; _i17 < _map16.size; ++_i17)
            {
              String _key18; // required
              StreamInfo _val19; // required
              _key18 = iprot.readString();
              _val19 = new StreamInfo();
              _val19.read(iprot);
              this.streams.put(_key18, _val19);
            }
            iprot.readMapEnd();
          }
        } else {
          org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
        }
        break;
      case 3: // PARALLELISM_HINT
        if (field.type == org.apache.thrift7.protocol.TType.I32) {
          this.parallelism_hint = iprot.readI32();
          set_parallelism_hint_isSet(true);
        } else {
          org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
        }
        break;
      case 4: // JSON_CONF
        if (field.type == org.apache.thrift7.protocol.TType.STRING) {
          this.json_conf = iprot.readString();
        } else {
          org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
        }
        break;
      default:
        org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
    }
    iprot.readFieldEnd();
  }
  iprot.readStructEnd();
  validate();
}

// Serializes this struct; optional fields are written only when set.
public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
  validate();

  oprot.writeStructBegin(STRUCT_DESC);
  if (this.inputs != null) {
    oprot.writeFieldBegin(INPUTS_FIELD_DESC);
    {
      oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRUCT, org.apache.thrift7.protocol.TType.STRUCT, this.inputs.size()));
      for (Map.Entry<GlobalStreamId, Grouping> _iter20 : this.inputs.entrySet())
      {
        _iter20.getKey().write(oprot);
        _iter20.getValue().write(oprot);
      }
      oprot.writeMapEnd();
    }
    oprot.writeFieldEnd();
  }
  if (this.streams != null) {
    oprot.writeFieldBegin(STREAMS_FIELD_DESC);
    {
      oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRING, org.apache.thrift7.protocol.TType.STRUCT, this.streams.size()));
      for (Map.Entry<String, StreamInfo> _iter21 : this.streams.entrySet())
      {
        oprot.writeString(_iter21.getKey());
        _iter21.getValue().write(oprot);
      }
      oprot.writeMapEnd();
    }
    oprot.writeFieldEnd();
  }
  if (is_set_parallelism_hint()) {
    oprot.writeFieldBegin(PARALLELISM_HINT_FIELD_DESC);
    oprot.writeI32(this.parallelism_hint);
    oprot.writeFieldEnd();
  }
  if (this.json_conf != null) {
    if (is_set_json_conf()) {
      oprot.writeFieldBegin(JSON_CONF_FIELD_DESC);
      oprot.writeString(this.json_conf);
      oprot.writeFieldEnd();
    }
  }
  oprot.writeFieldStop();
  oprot.writeStructEnd();
}

// Human-readable dump; optional fields appear only when set.
@Override
public String toString() {
  StringBuilder sb = new StringBuilder("ComponentCommon(");
  boolean first = true;

  sb.append("inputs:");
  if (this.inputs == null) {
    sb.append("null");
  } else {
    sb.append(this.inputs);
  }
  first = false;
  if (!first) sb.append(", ");
  sb.append("streams:");
  if (this.streams == null) {
    sb.append("null");
  } else {
    sb.append(this.streams);
  }
  first = false;
  if (is_set_parallelism_hint()) {
    if (!first) sb.append(", ");
    sb.append("parallelism_hint:");
    sb.append(this.parallelism_hint);
    first = false;
  }
  if (is_set_json_conf()) {
    if (!first) sb.append(", ");
    sb.append("json_conf:");
    if (this.json_conf == null) {
      sb.append("null");
    } else {
      sb.append(this.json_conf);
    }
    first = false;
  }
  sb.append(")");
  return sb.toString();
}

// Throws if either required field (inputs, streams) is unset.
public void validate() throws org.apache.thrift7.TException {
  // check for required fields
  if (!is_set_inputs()) {
    throw new org.apache.thrift7.protocol.TProtocolException("Required field 'inputs' is unset! Struct:" + toString());
  }

  if (!is_set_streams()) {
    throw new org.apache.thrift7.protocol.TProtocolException("Required field 'streams' is unset! Struct:" + toString());
  }

}

// Java serialization hooks delegate to the compact thrift protocol.
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
  try {
    write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
  } catch (org.apache.thrift7.TException te) {
    throw new java.io.IOException(te);
  }
}

private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
  try {
    // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
    // Re-create the bit vector because deserialization bypasses field initializers.
    __isset_bit_vector = new BitSet(1);
    read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
  } catch (org.apache.thrift7.TException te) {
    throw new java.io.IOException(te);
  }
}

}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.builder; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.concurrent.atomic.AtomicInteger; import org.apache.camel.CamelContext; import org.apache.camel.Channel; import org.apache.camel.DelegateProcessor; import org.apache.camel.Endpoint; import org.apache.camel.Exchange; import org.apache.camel.Processor; import org.apache.camel.Producer; import org.apache.camel.Route; import org.apache.camel.TestSupport; import org.apache.camel.impl.DefaultCamelContext; import org.apache.camel.impl.engine.DefaultRoute; import org.apache.camel.processor.ChoiceProcessor; import org.apache.camel.processor.EvaluateExpressionProcessor; import org.apache.camel.processor.FilterProcessor; import org.apache.camel.processor.MulticastProcessor; import org.apache.camel.processor.Pipeline; import org.apache.camel.processor.RecipientList; import org.apache.camel.processor.SendProcessor; import org.apache.camel.processor.Splitter; import org.apache.camel.processor.ThreadsProcessor; import org.apache.camel.processor.errorhandler.DeadLetterChannel; import org.apache.camel.processor.idempotent.IdempotentConsumer; import 
org.apache.camel.support.processor.idempotent.MemoryIdempotentRepository;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertEquals;

/**
 * Verifies that {@link RouteBuilder} DSL definitions are materialized into the
 * expected runtime processor graph (SendProcessor, FilterProcessor,
 * ChoiceProcessor, etc.). Each buildXxx() method defines a route; the matching
 * testXxx() method walks the created {@link Route} and asserts its shape.
 */
public class RouteBuilderTest extends TestSupport {
    protected Processor myProcessor = new MyProcessor();
    protected DelegateProcessor interceptor1;
    protected DelegateProcessor interceptor2;

    protected CamelContext createCamelContext() {
        // disable stream cache otherwise too much hassle in this unit test to
        // filter the stream cache
        // in all the assertion codes
        DefaultCamelContext ctx = new DefaultCamelContext();
        ctx.setStreamCaching(Boolean.FALSE);
        return ctx;
    }

    // Simplest possible route: direct:a -> direct:b with a dead-letter error handler.
    protected List<Route> buildSimpleRoute() throws Exception {
        // START SNIPPET: e1
        RouteBuilder builder = new RouteBuilder() {
            public void configure() {
                errorHandler(deadLetterChannel("mock:error"));

                from("direct:a").to("direct:b");
            }
        };
        // END SNIPPET: e1
        return getRouteList(builder);
    }

    @Test
    public void testSimpleRoute() throws Exception {
        List<Route> routes = buildSimpleRoute();

        assertEquals(1, routes.size(), "Number routes created");
        for (Route route : routes) {
            Endpoint key = route.getEndpoint();
            assertEquals("direct://a", key.getEndpointUri(), "From endpoint");

            DefaultRoute consumer = assertIsInstanceOf(DefaultRoute.class, route);
            Channel channel = unwrapChannel(consumer.getProcessor());

            SendProcessor sendProcessor = assertIsInstanceOf(SendProcessor.class, channel.getNextProcessor());
            assertEquals("direct://b", sendProcessor.getDestination().getEndpointUri(), "Endpoint URI");
        }
    }

    // Route guarded by a header predicate (foo == bar).
    protected List<Route> buildSimpleRouteWithHeaderPredicate() throws Exception {
        // START SNIPPET: e2
        RouteBuilder builder = new RouteBuilder() {
            public void configure() {
                errorHandler(deadLetterChannel("mock:error"));

                from("direct:a").filter(header("foo").isEqualTo("bar")).to("direct:b");
            }
        };
        // END SNIPPET: e2
        return getRouteList(builder);
    }

    @Test
    public void testSimpleRouteWithHeaderPredicate() throws Exception {
        List<Route> routes = buildSimpleRouteWithHeaderPredicate();

        log.debug("Created routes: " + routes);

        assertEquals(1, routes.size(), "Number routes created");
        for (Route route : routes) {
            Endpoint key = route.getEndpoint();
            assertEquals("direct://a", key.getEndpointUri(), "From endpoint");

            DefaultRoute consumer = assertIsInstanceOf(DefaultRoute.class, route);
            Channel channel = unwrapChannel(consumer.getProcessor());

            // Expect filter -> send; the filter wraps the SendProcessor inside its own channel.
            FilterProcessor filterProcessor = assertIsInstanceOf(FilterProcessor.class, channel.getNextProcessor());
            SendProcessor sendProcessor = assertIsInstanceOf(SendProcessor.class, unwrapChannel(filterProcessor).getNextProcessor());
            assertEquals("direct://b", sendProcessor.getDestination().getEndpointUri(), "Endpoint URI");
        }
    }

    // Content-based router: two when() clauses plus an otherwise().
    protected List<Route> buildSimpleRouteWithChoice() throws Exception {
        // START SNIPPET: e3
        RouteBuilder builder = new RouteBuilder() {
            public void configure() {
                errorHandler(deadLetterChannel("mock:error"));

                from("direct:a").choice().when(header("foo").isEqualTo("bar")).to("direct:b")
                        .when(header("foo").isEqualTo("cheese")).to("direct:c").otherwise().to("direct:d");
            }
        };
        // END SNIPPET: e3
        return getRouteList(builder);
    }

    @Test
    public void testSimpleRouteWithChoice() throws Exception {
        List<Route> routes = buildSimpleRouteWithChoice();

        log.debug("Created routes: " + routes);

        assertEquals(1, routes.size(), "Number routes created");
        for (Route route : routes) {
            Endpoint key = route.getEndpoint();
            assertEquals("direct://a", key.getEndpointUri(), "From endpoint");

            DefaultRoute consumer = assertIsInstanceOf(DefaultRoute.class, route);
            Channel channel = unwrapChannel(consumer.getProcessor());

            ChoiceProcessor choiceProcessor = assertIsInstanceOf(ChoiceProcessor.class, channel.getNextProcessor());
            List<FilterProcessor> filters = choiceProcessor.getFilters();
            assertEquals(2, filters.size(), "Should be two when clauses");

            Processor filter1 = filters.get(0);
            assertSendTo(unwrapChannel(((FilterProcessor) filter1).getProcessor()).getNextProcessor(), "direct://b");
            Processor filter2 = filters.get(1);
            assertSendTo(unwrapChannel(((FilterProcessor) filter2).getProcessor()).getNextProcessor(), "direct://c");
            assertSendTo(unwrapChannel(choiceProcessor.getOtherwise()).getNextProcessor(), "direct://d");
        }
    }

    // Route ending in a user-supplied Processor instance.
    protected List<Route> buildCustomProcessor() throws Exception {
        // START SNIPPET: e4
        myProcessor = new Processor() {
            public void process(Exchange exchange) {
                log.debug("Called with exchange: " + exchange);
            }
        };

        RouteBuilder builder = new RouteBuilder() {
            public void configure() {
                errorHandler(deadLetterChannel("mock:error"));

                from("direct:a").process(myProcessor);
            }
        };
        // END SNIPPET: e4
        return getRouteList(builder);
    }

    @Test
    public void testCustomProcessor() throws Exception {
        List<Route> routes = buildCustomProcessor();

        assertEquals(1, routes.size(), "Number routes created");
        for (Route route : routes) {
            Endpoint key = route.getEndpoint();
            assertEquals("direct://a", key.getEndpointUri(), "From endpoint");
        }
    }

    // Custom processor behind a header filter.
    protected List<Route> buildCustomProcessorWithFilter() throws Exception {
        // START SNIPPET: e5
        RouteBuilder builder = new RouteBuilder() {
            public void configure() {
                errorHandler(deadLetterChannel("mock:error"));

                from("direct:a").filter(header("foo").isEqualTo("bar")).process(myProcessor);
            }
        };
        // END SNIPPET: e5
        return getRouteList(builder);
    }

    @Test
    public void testCustomProcessorWithFilter() throws Exception {
        List<Route> routes = buildCustomProcessorWithFilter();

        log.debug("Created routes: " + routes);

        assertEquals(1, routes.size(), "Number routes created");
        for (Route route : routes) {
            Endpoint key = route.getEndpoint();
            assertEquals("direct://a", key.getEndpointUri(), "From endpoint");
        }
    }

    // Wire-tap style multicast to a tap endpoint and the main destination.
    protected List<Route> buildWireTap() throws Exception {
        // START SNIPPET: e6
        RouteBuilder builder = new RouteBuilder() {
            public void configure() {
                errorHandler(deadLetterChannel("mock:error"));

                from("direct:a").multicast().to("direct:tap", "direct:b");
            }
        };
        // END SNIPPET: e6
        return getRouteList(builder);
    }

    @Test
    public void testWireTap() throws Exception {
        List<Route> routes = buildWireTap();

        log.debug("Created routes: " + routes);

        assertEquals(1, routes.size(), "Number routes created");
        for (Route route : routes) {
            Endpoint key = route.getEndpoint();
            assertEquals("direct://a", key.getEndpointUri(), "From endpoint");

            DefaultRoute consumer = assertIsInstanceOf(DefaultRoute.class, route);
            Channel channel = unwrapChannel(consumer.getProcessor());

            MulticastProcessor multicastProcessor = assertIsInstanceOf(MulticastProcessor.class, channel.getNextProcessor());
            List<Processor> endpoints = new ArrayList<>(multicastProcessor.getProcessors());
            assertEquals(2, endpoints.size(), "Should have 2 endpoints");

            assertSendToProcessor(unwrapChannel(endpoints.get(0)).getNextProcessor(), "direct://tap");
            assertSendToProcessor(unwrapChannel(endpoints.get(1)).getNextProcessor(), "direct://b");
        }
    }

    // Route with two delegate-processor interceptors in the pipeline.
    protected List<Route> buildRouteWithInterceptor() throws Exception {
        interceptor1 = new org.apache.camel.support.processor.DelegateProcessor() {
        };

        interceptor2 = new MyInterceptorProcessor();

        RouteBuilder builder = new RouteBuilder() {
            public void configure() {
                errorHandler(deadLetterChannel("mock:error"));

                from("direct:a").process(interceptor1).process(interceptor2).to("direct:d");
            }
        };
        return getRouteList(builder);
    }

    @Test
    public void testRouteWithInterceptor() throws Exception {
        List<Route> routes = buildRouteWithInterceptor();

        log.debug("Created routes: " + routes);

        assertEquals(1, routes.size(), "Number routes created");
        for (Route route : routes) {
            Endpoint key = route.getEndpoint();
            assertEquals("direct://a", key.getEndpointUri(), "From endpoint");

            DefaultRoute consumer = assertIsInstanceOf(DefaultRoute.class, route);
            Pipeline line = assertIsInstanceOf(Pipeline.class, unwrap(consumer.getProcessor()));
            assertEquals(3, line.next().size());
            // last should be our seda
            List<Processor> processors = new ArrayList<>(line.next());
            Processor sendTo = assertIsInstanceOf(SendProcessor.class, unwrapChannel(processors.get(2)).getNextProcessor());
            assertSendTo(sendTo, "direct://d");
        }
    }

    @Test
    public void testComplexExpressions() throws Exception {
        // START SNIPPET: e7
        RouteBuilder builder = new RouteBuilder() {
            public void configure() {
                errorHandler(deadLetterChannel("mock:error"));

                from("direct:a").filter(header("foo").isEqualTo(123)).to("direct:b");
            }
        };
        // END SNIPPET: e7

        List<Route> routes = getRouteList(builder);
        log.debug("Created routes: " + routes);

        assertEquals(1, routes.size(), "Number routes created");
        for (Route route : routes) {
            Endpoint key = route.getEndpoint();
            assertEquals("direct://a", key.getEndpointUri(), "From endpoint");
        }
    }

    // Multicast to a fixed list of recipients.
    protected List<Route> buildStaticRecipientList() throws Exception {
        // START SNIPPET: multicast
        RouteBuilder builder = new RouteBuilder() {
            public void configure() {
                errorHandler(deadLetterChannel("mock:error"));

                from("direct:a").multicast().to("direct:b", "direct:c", "direct:d");
            }
        };
        // END SNIPPET: multicast
        return getRouteList(builder);
    }

    // Recipient list computed at runtime from the "foo" header.
    protected List<Route> buildDynamicRecipientList() throws Exception {
        // START SNIPPET: e9
        RouteBuilder builder = new RouteBuilder() {
            public void configure() {
                errorHandler(deadLetterChannel("mock:error"));

                from("direct:a").recipientList(header("foo"));
            }
        };
        // END SNIPPET: e9
        return getRouteList(builder);
    }

    @Test
    public void testRouteDynamicReceipentList() throws Exception {
        List<Route> routes = buildDynamicRecipientList();

        log.debug("Created routes: " + routes);

        assertEquals(1, routes.size(), "Number routes created");
        for (Route route : routes) {
            Endpoint key = route.getEndpoint();
            assertEquals("direct://a", key.getEndpointUri(), "From endpoint");

            DefaultRoute consumer = assertIsInstanceOf(DefaultRoute.class, route);
            Channel channel = unwrapChannel(consumer.getProcessor());

            Pipeline line = assertIsInstanceOf(Pipeline.class, channel.getNextProcessor());
            Iterator<?> it = line.next().iterator();

            // EvaluateExpressionProcessor should be wrapped in error handler
            Object first = it.next();
            first = assertIsInstanceOf(DeadLetterChannel.class, first).getOutput();
            assertIsInstanceOf(EvaluateExpressionProcessor.class, first);

            // and the second should NOT be wrapped in error handler
            Object second = it.next();
            assertIsInstanceOf(RecipientList.class, second);
        }
    }

    // Splitter tokenizing the body on newlines.
    protected List<Route> buildSplitter() throws Exception {
        // START SNIPPET: splitter
        RouteBuilder builder = new RouteBuilder() {
            public void configure() {
                errorHandler(deadLetterChannel("mock:error"));

                from("direct:a").split(bodyAs(String.class).tokenize("\n")).to("direct:b");
            }
        };
        // END SNIPPET: splitter
        return getRouteList(builder);
    }

    @Test
    public void testSplitter() throws Exception {
        List<Route> routes = buildSplitter();

        log.debug("Created routes: " + routes);

        assertEquals(1, routes.size(), "Number routes created");
        for (Route route : routes) {
            Endpoint key = route.getEndpoint();
            assertEquals("direct://a", key.getEndpointUri(), "From endpoint");

            DefaultRoute consumer = assertIsInstanceOf(DefaultRoute.class, route);
            Channel channel = unwrapChannel(consumer.getProcessor());
            assertIsInstanceOf(Splitter.class, channel.getNextProcessor());
        }
    }

    // Idempotent consumer keyed on the myMessageId header, memory-backed.
    protected List<Route> buildIdempotentConsumer() throws Exception {
        // START SNIPPET: idempotent
        RouteBuilder builder = new RouteBuilder() {
            public void configure() {
                errorHandler(deadLetterChannel("mock:error"));

                from("direct:a")
                        .idempotentConsumer(header("myMessageId"),
                                MemoryIdempotentRepository.memoryIdempotentRepository(200))
                        .to("direct:b");
            }
        };
        // END SNIPPET: idempotent
        return getRouteList(builder);
    }

    @Test
    public void testIdempotentConsumer() throws Exception {
        List<Route> routes = buildIdempotentConsumer();

        log.debug("Created routes: " + routes);

        assertEquals(1, routes.size(), "Number routes created");
        for (Route route : routes) {
            Endpoint key = route.getEndpoint();
            assertEquals("direct://a", key.getEndpointUri(), "From endpoint");

            DefaultRoute consumer = assertIsInstanceOf(DefaultRoute.class, route);
            Channel channel = unwrapChannel(consumer.getProcessor());

            IdempotentConsumer idempotentConsumer = assertIsInstanceOf(IdempotentConsumer.class, channel.getNextProcessor());
            assertEquals("header(myMessageId)", idempotentConsumer.getMessageIdExpression().toString(), "messageIdExpression");

            assertIsInstanceOf(MemoryIdempotentRepository.class, idempotentConsumer.getIdempotentRepository());
            SendProcessor sendProcessor = assertIsInstanceOf(SendProcessor.class, unwrapChannel(idempotentConsumer.getProcessor()).getNextProcessor());
            assertEquals("direct://b", sendProcessor.getDestination().getEndpointUri(), "Endpoint URI");
        }
    }

    // Threads DSL (pool 5..10) followed by two mock endpoints.
    protected List<Route> buildThreads() throws Exception {
        // START SNIPPET: e10
        RouteBuilder builder = new RouteBuilder() {
            public void configure() {
                errorHandler(deadLetterChannel("mock:error"));

                from("direct:a").threads(5, 10).to("mock:a").to("mock:b");
            }
        };
        // END SNIPPET: e10
        return getRouteList(builder);
    }

    @Test
    public void testThreads() throws Exception {
        List<Route> routes = buildThreads();

        log.debug("Created routes: " + routes);

        assertEquals(1, routes.size(), "Number routes created");
        for (Route route : routes) {
            Endpoint key = route.getEndpoint();
            assertEquals("direct://a", key.getEndpointUri(), "From endpoint");

            DefaultRoute consumer = assertIsInstanceOf(DefaultRoute.class, route);
            Pipeline line = assertIsInstanceOf(Pipeline.class, unwrap(consumer.getProcessor()));
            Iterator<Processor> it = line.next().iterator();

            assertIsInstanceOf(ThreadsProcessor.class, unwrapChannel(it.next()).getNextProcessor());
            assertIsInstanceOf(SendProcessor.class, unwrapChannel(it.next()).getNextProcessor());
            assertIsInstanceOf(SendProcessor.class, unwrapChannel(it.next()).getNextProcessor());
        }
    }

    // Asserts the processor is a SendProcessor targeting the given URI,
    // unwrapping any error handler first.
    protected void assertSendTo(Processor processor, String uri) {
        if (!(processor instanceof SendProcessor)) {
            processor = unwrapErrorHandler(processor);
        }

        SendProcessor sendProcessor = assertIsInstanceOf(SendProcessor.class, processor);
        assertEquals(uri, sendProcessor.getDestination().getEndpointUri(), "Endpoint URI");
    }

    // Like assertSendTo but also accepts a raw Producer wrapper.
    protected void assertSendToProcessor(Processor processor, String uri) {
        if (!(processor instanceof Producer)) {
            processor = unwrapErrorHandler(processor);
        }

        if (processor instanceof SendProcessor) {
            assertSendTo(processor, uri);
        } else {
            Producer producer = assertIsInstanceOf(Producer.class, processor);
            assertEquals(uri, producer.getEndpoint().getEndpointUri(), "Endpoint URI");
        }
    }

    /**
     * By default routes should be wrapped in the {@link DeadLetterChannel} so lets unwrap that and return the actual
     * processor
     */
    protected Processor getProcessorWithoutErrorHandler(Route route) {
        DefaultRoute consumerRoute = assertIsInstanceOf(DefaultRoute.class, route);
        Processor processor = unwrap(consumerRoute.getProcessor());
        return unwrapErrorHandler(processor);
    }

    protected Processor unwrapErrorHandler(Processor processor) {
        if (processor instanceof DeadLetterChannel) {
            DeadLetterChannel deadLetter = (DeadLetterChannel) processor;
            return deadLetter.getOutput();
        } else {
            return processor;
        }
    }

    protected Processor unwrapDelegateProcessor(Processor processor) {
        if (processor instanceof DelegateProcessor) {
            DelegateProcessor delegate = (DelegateProcessor) processor;
            return delegate.getProcessor();
        } else {
            return processor;
        }
    }

    @Test
    public void testCorrectNumberOfRoutes() throws Exception {
        RouteBuilder builder = new RouteBuilder() {
            public void configure() throws Exception {
                errorHandler(deadLetterChannel("mock:error"));

                from("direct:start").to("direct:in");

                from("direct:in").to("mock:result");
            }
        };

        List<Route> routes = getRouteList(builder);

        assertEquals(2, routes.size());
    }

    @Test
    public void testLifecycleInterceptor() throws Exception {
        AtomicInteger before = new AtomicInteger();
        AtomicInteger after = new AtomicInteger();

        RouteBuilder builder = new RouteBuilder() {
            public void configure() throws Exception {
            }
        };

        builder.addLifecycleInterceptor(new RouteBuilderLifecycleStrategy() {
            @Override
            public void beforeConfigure(RouteBuilder builder) {
                before.incrementAndGet();
            }

            @Override
            public void afterConfigure(RouteBuilder builder) {
                after.incrementAndGet();
            }
        });

        DefaultCamelContext context = new DefaultCamelContext();
        context.addRoutes(builder);

        // Each lifecycle hook must fire exactly once around configure().
        assertEquals(1, before.get());
        assertEquals(1, after.get());
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.falcon.regression; import org.apache.falcon.entity.v0.EntityType; import org.apache.falcon.entity.v0.feed.ActionType; import org.apache.falcon.entity.v0.feed.ClusterType; import org.apache.falcon.regression.Entities.FeedMerlin; import org.apache.falcon.regression.core.bundle.Bundle; import org.apache.falcon.regression.core.helpers.ColoHelper; import org.apache.falcon.regression.core.util.OozieUtil; import org.apache.falcon.regression.core.util.AssertUtil; import org.apache.falcon.regression.core.util.TimeUtil; import org.apache.falcon.regression.core.util.HadoopUtil; import org.apache.falcon.regression.core.util.BundleUtil; import org.apache.falcon.regression.core.util.InstanceUtil; import org.apache.falcon.regression.core.util.OSUtil; import org.apache.falcon.regression.core.util.Util; import org.apache.falcon.regression.testHelper.BaseTestClass; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.security.authentication.client.AuthenticationException; import org.apache.log4j.Logger; import org.apache.oozie.client.CoordinatorAction; import org.apache.oozie.client.OozieClient; import org.apache.oozie.client.OozieClientException; 
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;

import javax.xml.bind.JAXBException;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.List;

/**
 * This test submits and schedules feed and then check for replication.
 * On adding further late data it checks whether the data has been replicated correctly in the given late cut-off time.
 * Assuming that late frequency set in server is 3 minutes. Although value can be changed according to requirement.
 */
@Test(groups = "embedded")
public class FeedLateRerunTest extends BaseTestClass {

    // Source (cluster1) and target (cluster2) colos used for feed replication.
    private ColoHelper cluster1 = servers.get(0);
    private ColoHelper cluster2 = servers.get(1);
    private FileSystem cluster1FS = serverFS.get(0);
    private FileSystem cluster2FS = serverFS.get(1);
    // Oozie client of the target colo; replication coordinators run there.
    private OozieClient cluster2OC = serverOC.get(1);
    // HDFS layout for this test: data lands under .../source and is replicated to .../target.
    private String baseTestDir = cleanAndGetTestDir();
    private String feedDataLocation = baseTestDir + "/source" + MINUTE_DATE_PATTERN;
    private String targetPath = baseTestDir + "/target";
    private String targetDataLocation = targetPath + MINUTE_DATE_PATTERN;
    private static final Logger LOGGER = Logger.getLogger(FeedLateRerunTest.class);
    // Resolved at runtime: first missing-dependency path on the source colo, and the
    // corresponding replicated path on the target colo.
    private String source = null;
    private String target = null;

    /**
     * Reads the feed-replication bundle and prepares one uniquely named bundle per colo.
     *
     * @throws JAXBException on bundle unmarshalling problems
     * @throws IOException   on bundle read problems
     */
    @BeforeMethod(alwaysRun = true)
    public void setUp() throws JAXBException, IOException {
        Bundle bundle = BundleUtil.readFeedReplicationBundle();
        bundles[0] = new Bundle(bundle, cluster1);
        bundles[1] = new Bundle(bundle, cluster2);
        bundles[0].generateUniqueBundle(this);
        bundles[1].generateUniqueBundle(this);
    }

    /** Removes all entities created by this test class. */
    @AfterMethod(alwaysRun = true)
    public void tearDown() {
        removeTestClassEntities();
    }

    /**
     * Submits and schedules a replication feed (cluster1 as source, cluster2 as target),
     * creates its missing dependencies, adds late data, waits for the late rerun and
     * finally verifies that source and target hold the same number of files.
     *
     * @param dataFlag whether the initial (non-late) dependency folder also gets a data file
     */
    @Test(dataProvider = "dataFlagProvider")
    public void testLateRerun(boolean dataFlag)
        throws URISyntaxException, AuthenticationException, InterruptedException, IOException,
        OozieClientException, JAXBException {
        Bundle.submitCluster(bundles[0], bundles[1]);
        String startTime = TimeUtil.getTimeWrtSystemTime(0);
        String endTime = TimeUtil.addMinsToTime(startTime, 30);
        LOGGER.info("Time range between : " + startTime + " and " + endTime);

        //configure feed
        FeedMerlin feed = new FeedMerlin(bundles[0].getDataSets().get(0));
        feed.setFilePath(feedDataLocation);
        //erase all clusters from feed definition
        feed.clearFeedClusters();
        //set cluster1 as source
        feed.addFeedCluster(
            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
                .withRetention("days(1000000)", ActionType.DELETE)
                .withValidity(startTime, endTime)
                .withClusterType(ClusterType.SOURCE)
                .build());
        //set cluster2 as target
        feed.addFeedCluster(
            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
                .withRetention("days(1000000)", ActionType.DELETE)
                .withValidity(startTime, endTime)
                .withClusterType(ClusterType.TARGET)
                .withDataLocation(targetDataLocation)
                .build());
        String entityName = feed.getName();

        //submit and schedule feed
        AssertUtil.assertSucceeded(prism.getFeedHelper().submitAndSchedule(feed.toString()));

        //check if coordinator exists
        InstanceUtil.waitTillInstancesAreCreated(cluster2OC, feed.toString(), 0);
        Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster2OC, entityName, "REPLICATION"), 1);

        //Finding bundleId of replicated instance on target
        String bundleId = OozieUtil.getLatestBundleID(cluster2OC, entityName, EntityType.FEED);

        //Finding and creating missing dependencies
        List<String> missingDependencies = getAndCreateDependencies(
            cluster1FS, cluster1.getPrefix(), cluster2OC, bundleId, dataFlag, entityName);

        // Only the first missing dependency is used for the replication check below.
        int count = 1;
        for (String location : missingDependencies) {
            if (count==1) {
                source = location;
                count++;
            }
        }
        // Strip the "host:8020" authority part so that only the HDFS path remains.
        source=splitPathFromIp(source, "8020");
        LOGGER.info("source : " + source);
        target = source.replace("source", "target");
        LOGGER.info("target : " + target);

        /* Sleep for some time ( as is defined in runtime property of server ).
           Let the instance rerun and then it should succeed.*/
        int sleepMins = 8;
        for(int i=0; i < sleepMins; i++) {
            LOGGER.info("Waiting...");
            TimeUtil.sleepSeconds(60);
        }

        String bundleID = OozieUtil.getLatestBundleID(cluster2OC, entityName, EntityType.FEED);
        // Exactly one retry attempt is expected to have happened for the late rerun.
        OozieUtil.validateRetryAttempts(cluster2OC, bundleID, EntityType.FEED, 1);

        //check if data has been replicated correctly
        List<Path> cluster1ReplicatedData = HadoopUtil
            .getAllFilesRecursivelyHDFS(cluster1FS, new Path(HadoopUtil.cutProtocol(source)));
        List<Path> cluster2ReplicatedData = HadoopUtil
            .getAllFilesRecursivelyHDFS(cluster2FS, new Path(HadoopUtil.cutProtocol(target)));
        AssertUtil.checkForListSizes(cluster1ReplicatedData, cluster2ReplicatedData);
    }

    /**
     * Strips a "host:port" authority from a path-like string.
     * E.g. for src = "hdfs://host:8020/falcon/..." and port = "8020" the string is split on
     * ':' and the segment starting with the port is kept, with the port itself removed,
     * yielding "/falcon/...". If no segment starts with the port, src is returned unchanged.
     *
     * @param src  path possibly containing a "host:port" prefix
     * @param port port number to strip, as a string
     * @return path without the authority part, or src itself if the port was not found
     */
    private String splitPathFromIp(String src, String port) {
        String reqSrc, tempSrc = "";
        if (src.contains(":")) {
            String[] tempPath = src.split(":");
            for (String aTempPath : tempPath) {
                if (aTempPath.startsWith(port)) {
                    tempSrc = aTempPath;
                }
            }
        }
        if (tempSrc.isEmpty()) {
            reqSrc = src;
        } else {
            reqSrc=tempSrc.replace(port, "");
        }
        return reqSrc;
    }

    /* prismHelper1 - source colo, prismHelper2 - target colo */
    /**
     * Polls the target coordinator for missing dependencies (up to ~5 minutes), creates the
     * corresponding folders on the source file system, optionally seeds the first folder with
     * data, waits for the first replication instance to succeed, and finally drops late data
     * into the first dependency folder to trigger a late rerun.
     *
     * @param sourceFS   file system of the source colo where folders/data are created
     * @param prefix     path prefix of the source colo
     * @param targetOC   Oozie client of the target colo
     * @param bundleId   bundle whose missing dependencies are queried
     * @param dataFlag   whether to put a data file into the first dependency folder up front
     * @param entityName name of the scheduled feed
     * @return the list of missing-dependency paths reported by Oozie (never null)
     */
    private List<String> getAndCreateDependencies(FileSystem sourceFS, String prefix,
                                                  OozieClient targetOC, String bundleId,
                                                  boolean dataFlag, String entityName)
        throws OozieClientException, IOException {
        List<String> missingDependencies = OozieUtil.getMissingDependencies(targetOC, bundleId);
        // Retry for a while; the coordinator action may not have materialized yet.
        for (int i = 0; i < 10 && missingDependencies == null; ++i) {
            TimeUtil.sleepSeconds(30);
            LOGGER.info("sleeping...");
            missingDependencies = OozieUtil.getMissingDependencies(targetOC, bundleId);
        }
        Assert.assertNotNull(missingDependencies, "Missing dependencies not found.");
        //print missing dependencies
        for (String dependency : missingDependencies) {
            LOGGER.info("dependency from job: " + dependency);
        }
        // Creating missing dependencies
        HadoopUtil.createFolders(sourceFS, prefix, missingDependencies);
        //Adding data to empty folders depending on dataFlag
        if (dataFlag) {
            // Seed only the first dependency folder with data.
            int tempCount = 1;
            for (String location : missingDependencies) {
                if (tempCount==1) {
                    LOGGER.info("Transferring data to : " + location);
                    HadoopUtil.copyDataToFolder(sourceFS, location,
                        OSUtil.concat(OSUtil.NORMAL_INPUT, "dataFile.xml"));
                    tempCount++;
                }
            }
        }
        //replication should start, wait while it ends
        InstanceUtil.waitTillInstanceReachState(targetOC, entityName, 1,
            CoordinatorAction.Status.SUCCEEDED, EntityType.FEED);
        // Adding data for late rerun (again, only into the first dependency folder).
        int tempCounter = 1;
        for (String dependency : missingDependencies) {
            if (tempCounter==1) {
                LOGGER.info("Transferring late data to : " + dependency);
                HadoopUtil.copyDataToFolder(sourceFS, dependency,
                    OSUtil.concat(OSUtil.NORMAL_INPUT, "dataFile.properties"));
            }
            tempCounter++;
        }
        return missingDependencies;
    }

    /** Runs the test once with initial data present and once with empty dependency folders. */
    @DataProvider(name = "dataFlagProvider")
    private Object[][] dataFlagProvider() {
        return new Object[][] {
            new Object[] {true, },
            new Object[] {false, },
        };
    }
}
/*******************************************************************************
 * Copyright (C) 2015 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 ******************************************************************************/

package com.google.cloud.dataflow.sdk.util.common.worker;

import static com.google.common.base.Preconditions.checkNotNull;

import com.google.cloud.dataflow.sdk.util.common.ElementByteSizeObservableIterable;
import com.google.cloud.dataflow.sdk.util.common.ElementByteSizeObservableIterator;
import com.google.cloud.dataflow.sdk.util.common.PeekingReiterator;
import com.google.cloud.dataflow.sdk.util.common.Reiterable;
import com.google.cloud.dataflow.sdk.util.common.Reiterator;

import java.util.Arrays;
import java.util.Iterator;
import java.util.NoSuchElementException;

import javax.annotation.Nullable;

/**
 * An iterator through KeyGroupedShuffleEntries.
 *
 * <p>Groups consecutive ShuffleEntries that share a key into one
 * KeyGroupedShuffleEntries; values of a group are exposed lazily via a
 * {@link Reiterable} rather than eagerly materialized.
 */
public abstract class GroupingShuffleEntryIterator
    implements Iterator<KeyGroupedShuffleEntries> {
  /** The iterator through the underlying shuffle records. */
  private PeekingReiterator<ShuffleEntry> shuffleIterator;

  /**
   * The key of the most recent KeyGroupedShuffleEntries returned by
   * {@link #next}, if any.
   *
   * <p>If currentKeyBytes is non-null, then it's the key for the last entry
   * returned by {@link #next}, and all incoming entries with that key should
   * be skipped over by this iterator (since this iterator is iterating over
   * keys, not the individual values associated with a given key).
   *
   * <p>If currentKeyBytes is null, and shuffleIterator.hasNext(), then the
   * key of shuffleIterator.next() is the key of the next
   * KeyGroupedShuffleEntries to return from {@link #next}.
   */
  @Nullable private byte[] currentKeyBytes = null;

  /**
   * Constructs a GroupingShuffleEntryIterator, given a Reiterator
   * over ungrouped ShuffleEntries, assuming the ungrouped
   * ShuffleEntries for a given key are consecutive.
   */
  public GroupingShuffleEntryIterator(
      Reiterator<ShuffleEntry> shuffleIterator) {
    // Wrap the raw iterator so that every entry read reports its byte size
    // to notifyElementRead(), then add peeking so keys can be inspected
    // without consuming entries.
    this.shuffleIterator =
        new PeekingReiterator<>(
            new ProgressTrackingReiterator<>(
                shuffleIterator,
                new ProgressTrackerGroup<ShuffleEntry>() {
                  @Override
                  protected void report(ShuffleEntry entry) {
                    notifyElementRead(entry.length());
                  }
                }.start()));
  }

  /**
   * Notifies observers about a new ShuffleEntry (key and value, not
   * key and iterable of values) read.
   */
  protected abstract void notifyElementRead(long byteSize);

  @Override
  public boolean hasNext() {
    // Skip any leftover entries of the previously returned key before
    // checking for more input.
    advanceIteratorToNextKey();
    return shuffleIterator.hasNext();
  }

  @Override
  public KeyGroupedShuffleEntries next() {
    if (!hasNext()) {
      throw new NoSuchElementException();
    }
    // Peek (do not consume) the first entry of the new group; the returned
    // ValuesIterator is responsible for iterating the group's entries.
    ShuffleEntry entry = shuffleIterator.peek();
    currentKeyBytes = entry.getKey();
    return new KeyGroupedShuffleEntries(
        entry.getPosition(), currentKeyBytes,
        new ValuesIterable(new ValuesIterator(currentKeyBytes)));
  }

  @Override
  public void remove() {
    throw new UnsupportedOperationException();
  }

  /**
   * Consumes and discards all remaining entries whose key equals
   * currentKeyBytes, then clears currentKeyBytes. No-op when no group is
   * currently open (currentKeyBytes == null).
   */
  private void advanceIteratorToNextKey() {
    if (currentKeyBytes == null) {
      return;
    }
    while (shuffleIterator.hasNext()) {
      ShuffleEntry entry = shuffleIterator.peek();
      if (!Arrays.equals(entry.getKey(), currentKeyBytes)) {
        break;
      }
      shuffleIterator.next();
    }
    currentKeyBytes = null;
  }

  /**
   * Reiterable over one group's entries; each iteration starts from a copy of
   * the same base ValuesIterator.
   */
  private static class ValuesIterable
      extends ElementByteSizeObservableIterable<ShuffleEntry, ValuesIterator>
      implements Reiterable<ShuffleEntry> {
    private final ValuesIterator base;

    public ValuesIterable(ValuesIterator base) {
      this.base = checkNotNull(base);
    }

    @Override
    public ValuesIterator createIterator() {
      return base.copy();
    }
  }

  /**
   * Provides the {@link Reiterator} used to iterate through the
   * shuffle entries of a KeyGroupedShuffleEntries.
   */
  private class ValuesIterator
      extends ElementByteSizeObservableIterator<ShuffleEntry>
      implements Reiterator<ShuffleEntry> {
    // N.B. This class is *not* static; it maintains a reference to its
    // enclosing KeyGroupedShuffleEntriesIterator instance so that it can update
    // that instance's shuffleIterator as an optimization.

    private final byte[] valueKeyBytes;
    private final PeekingReiterator<ShuffleEntry> valueShuffleIterator;
    private final ProgressTracker<ShuffleEntry> tracker;
    // Caches the result of the key comparison done in hasNext() so next()
    // doesn't repeat it; reset to false once the entry is consumed.
    private boolean nextKnownValid = false;

    public ValuesIterator(byte[] valueKeyBytes) {
      this.valueKeyBytes = checkNotNull(valueKeyBytes);
      this.valueShuffleIterator = shuffleIterator.copy();
      // N.B. The ProgressTrackerGroup captures the reference to the original
      // ValuesIterator for a given values iteration, which happens to be
      // exactly what we want, since this is also the ValuesIterator whose
      // base Observable has the references to all of the Observers watching
      // the iteration.  Copied ValuesIterator instances do *not* have these
      // Observers, but that's fine, since the derived ProgressTracker
      // instances reference the ProgressTrackerGroup, which references the
      // original ValuesIterator, which does have them.
      this.tracker = new ProgressTrackerGroup<ShuffleEntry>() {
        @Override
        protected void report(ShuffleEntry entry) {
          notifyValueReturned(entry.length());
        }
      }.start();
    }

    /** Copy constructor backing {@link #copy()}; shares no mutable iterator state. */
    private ValuesIterator(ValuesIterator it) {
      this.valueKeyBytes = it.valueKeyBytes;
      this.valueShuffleIterator = it.valueShuffleIterator.copy();
      this.tracker = it.tracker.copy();
      this.nextKnownValid = it.nextKnownValid;
    }

    @Override
    public boolean hasNext() {
      if (nextKnownValid) {
        return true;
      }
      if (!valueShuffleIterator.hasNext()) {
        return false;
      }
      ShuffleEntry entry = valueShuffleIterator.peek();
      nextKnownValid = Arrays.equals(entry.getKey(), valueKeyBytes);

      // Opportunistically update the parent KeyGroupedShuffleEntriesIterator,
      // potentially allowing it to skip a large number of key/value pairs
      // with this key.
      if (!nextKnownValid && valueKeyBytes == currentKeyBytes) {
        shuffleIterator = valueShuffleIterator.copy();
        currentKeyBytes = null;
      }
      return nextKnownValid;
    }

    @Override
    public ShuffleEntry next() {
      if (!hasNext()) {
        throw new NoSuchElementException();
      }
      ShuffleEntry entry = valueShuffleIterator.next();
      nextKnownValid = false;
      tracker.saw(entry);
      return entry;
    }

    @Override
    public void remove() {
      throw new UnsupportedOperationException();
    }

    @Override
    public ValuesIterator copy() {
      return new ValuesIterator(this);
    }
  }
}
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.ui.spoon.delegates; import java.util.ArrayList; import java.util.List; import org.apache.commons.vfs.FileObject; import org.eclipse.swt.SWT; import org.eclipse.swt.browser.LocationListener; import org.eclipse.swt.browser.OpenWindowListener; import org.eclipse.swt.browser.WindowEvent; import org.eclipse.swt.custom.CTabFolder; import org.eclipse.swt.widgets.Composite; import org.pentaho.di.cluster.SlaveServer; import org.pentaho.di.core.Const; import org.pentaho.di.core.EngineMetaInterface; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleFileException; import org.pentaho.di.core.gui.SpoonInterface; import org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.job.JobMeta; import org.pentaho.di.repository.ObjectRevision; import org.pentaho.di.repository.RepositoryOperation; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.ui.core.gui.GUIResource; import org.pentaho.di.ui.repository.RepositorySecurityUI; import org.pentaho.di.ui.spoon.Spoon; import 
org.pentaho.di.ui.spoon.SpoonBrowser;
import org.pentaho.di.ui.spoon.TabItemInterface;
import org.pentaho.di.ui.spoon.TabMapEntry;
import org.pentaho.di.ui.spoon.TabMapEntry.ObjectType;
import org.pentaho.di.ui.spoon.job.JobGraph;
import org.pentaho.di.ui.spoon.trans.TransGraph;
import org.pentaho.ui.util.Launch;
import org.pentaho.ui.util.Launch.Status;
import org.pentaho.xul.swt.tab.TabItem;
import org.pentaho.xul.swt.tab.TabSet;

/**
 * Spoon delegate that manages the UI tabs (transformation graphs, job graphs, browsers, ...):
 * opening, closing, renaming, selecting and looking up tabs and their associated metadata.
 */
public class SpoonTabsDelegate extends SpoonDelegate {
  private static Class<?> PKG = Spoon.class; // for i18n purposes, needed by Translator2!!

  /**
   * This contains a list of the tab map entries
   */
  private List<TabMapEntry> tabMap;

  public SpoonTabsDelegate( Spoon spoon ) {
    super( spoon );
    tabMap = new ArrayList<TabMapEntry>();
  }

  /**
   * Handles the closing of a tab: optionally prompts to save unsaved changes, then cleans up
   * the resources (transformation, job, browser or plain composite) behind the tab.
   *
   * @param item the tab item being closed
   * @return true if the tab may be closed, false if the user cancelled
   * @throws KettleException on repository/security errors
   */
  public boolean tabClose( TabItem item ) throws KettleException {
    // Try to find the tab-item that's being closed.
    List<TabMapEntry> collection = new ArrayList<TabMapEntry>();
    collection.addAll( tabMap );

    boolean createPerms = !RepositorySecurityUI
        .verifyOperations( Spoon.getInstance().getShell(), Spoon.getInstance().getRepository(), false,
            RepositoryOperation.MODIFY_TRANSFORMATION, RepositoryOperation.MODIFY_JOB );

    boolean close = true;
    for ( TabMapEntry entry : collection ) {
      if ( item.equals( entry.getTabItem() ) ) {
        TabItemInterface itemInterface = entry.getObject();

        // Can we close this tab? Only allow users with create content perms to save
        if ( !itemInterface.canBeClosed() && createPerms ) {
          int reply = itemInterface.showChangedWarning();
          if ( reply == SWT.YES ) {
            close = itemInterface.applyChanges();
          } else {
            if ( reply == SWT.CANCEL ) {
              close = false;
            } else {
              close = true;
            }
          }
        }

        // Also clean up the log/history associated with this transformation/job
        //
        if ( close ) {
          if ( entry.getObject() instanceof TransGraph ) {
            TransMeta transMeta = (TransMeta) entry.getObject().getManagedObject();
            spoon.delegates.trans.closeTransformation( transMeta );
            spoon.refreshTree();
          } else if ( entry.getObject() instanceof JobGraph ) {
            JobMeta jobMeta = (JobMeta) entry.getObject().getManagedObject();
            spoon.delegates.jobs.closeJob( jobMeta );
            spoon.refreshTree();
          } else if ( entry.getObject() instanceof SpoonBrowser ) {
            spoon.closeSpoonBrowser();
            spoon.refreshTree();
          } else if ( entry.getObject() instanceof Composite ) {
            Composite comp = (Composite) entry.getObject();
            if ( comp != null && !comp.isDisposed() ) {
              comp.dispose();
            }
          }
        }

        break;
      }
    }

    return close;
  }

  /**
   * Removes the given entry from the tab map and disposes its (still live) tab item.
   */
  public void removeTab( TabMapEntry tabMapEntry ) {
    for ( TabMapEntry entry : getTabs() ) {
      if ( tabMapEntry.equals( entry ) ) {
        tabMap.remove( tabMapEntry );
      }
    }
    if ( !tabMapEntry.getTabItem().isDisposed() ) {
      tabMapEntry.getTabItem().dispose();
    }
  }

  /**
   * @return a defensive copy of the current tab map entries
   */
  public List<TabMapEntry> getTabs() {
    List<TabMapEntry> list = new ArrayList<TabMapEntry>();
    list.addAll( tabMap );
    return list;
  }

  /**
   * @param tabItem the SWT tab item to look up
   * @return the entry mapped to the given tab item, or null if none matches
   */
  public TabMapEntry getTab( TabItem tabItem ) {
    for ( TabMapEntry tabMapEntry : tabMap ) {
      if ( tabMapEntry.getTabItem().equals( tabItem ) ) {
        return tabMapEntry;
      }
    }
    return null;
  }

  /**
   * @return the transformation/job metadata shown in the currently selected tab,
   *         or null when no tab is selected or the tab holds neither
   */
  public EngineMetaInterface getActiveMeta() {
    TabSet tabfolder = spoon.tabfolder;
    if ( tabfolder == null ) {
      return null;
    }
    TabItem tabItem = tabfolder.getSelected();
    if ( tabItem == null ) {
      return null;
    }

    // What transformation is in the active tab?
    // TransLog, TransGraph & TransHist contain the same transformation
    //
    TabMapEntry mapEntry = getTab( tabfolder.getSelected() );
    EngineMetaInterface meta = null;
    if ( mapEntry != null ) {
      if ( mapEntry.getObject() instanceof TransGraph ) {
        meta = ( mapEntry.getObject() ).getMeta();
      }
      if ( mapEntry.getObject() instanceof JobGraph ) {
        meta = ( mapEntry.getObject() ).getMeta();
      }
    }
    return meta;
  }

  /** Builds the tab title used for a slave-server monitoring tab. */
  public String makeSlaveTabName( SlaveServer slaveServer ) {
    return "Slave server: " + slaveServer.getName();
  }

  public boolean addSpoonBrowser( String name, String urlString ) {
    return addSpoonBrowser( name, urlString, true, null );
  }

  public boolean addSpoonBrowser( String name, String urlString, LocationListener listener ) {
    boolean ok = addSpoonBrowser( name, urlString, true, listener );
    return ok;
  }

  /**
   * Opens (or re-selects) a browser tab showing the given URL or HTML content.
   *
   * @param name      tab title; also used to find an already open browser tab
   * @param urlString URL to open, or raw content when isURL is false
   * @param isURL     whether urlString is a URL (enables external-browser fallback on failure)
   * @param listener  optional location listener for the embedded browser, may be null
   * @return true if the tab was shown (internally or via external browser), false on failure
   */
  public boolean addSpoonBrowser( String name, String urlString, boolean isURL, LocationListener listener ) {
    TabSet tabfolder = spoon.tabfolder;

    try {
      // OK, now we have the HTML, create a new browser tab.

      // See if there already is a tab for this browser
      // If no, add it
      // If yes, select that tab
      //
      TabMapEntry tabMapEntry = findTabMapEntry( name, ObjectType.BROWSER );
      if ( tabMapEntry == null ) {
        CTabFolder cTabFolder = tabfolder.getSwtTabset();
        final SpoonBrowser browser = new SpoonBrowser( cTabFolder, spoon, urlString, isURL, true, listener );
        // Route pop-up windows requested by the page back into the same embedded browser.
        browser.getBrowser().addOpenWindowListener( new OpenWindowListener() {
          @Override
          public void open( WindowEvent event ) {
            if ( event.required ) {
              event.browser = browser.getBrowser();
            }
          }
        } );
        TabItem tabItem = new TabItem( tabfolder, name, name );
        tabItem.setImage( GUIResource.getInstance().getImageLogoSmall() );
        tabItem.setControl( browser.getComposite() );

        tabMapEntry =
            new TabMapEntry( tabItem, isURL ? urlString : null, name, null, null, browser, ObjectType.BROWSER );
        tabMap.add( tabMapEntry );
      }
      int idx = tabfolder.indexOf( tabMapEntry.getTabItem() );

      // keep the focus on the graph
      tabfolder.setSelected( idx );
      return true;
    } catch ( Throwable e ) {
      boolean ok = false;
      if ( isURL ) {
        // Retry to show the welcome page in an external browser.
        //
        Status status = Launch.openURL( urlString );
        ok = status.equals( Status.Success );
      }
      if ( !ok ) {
        // Log an error
        //
        log.logError( "Unable to open browser tab", e );
        return false;
      } else {
        return true;
      }
    }
  }

  /**
   * Finds a live (non-disposed) tab entry by its visible title and object type.
   *
   * @return the matching entry, or null if none found
   */
  public TabMapEntry findTabMapEntry( String tabItemText, ObjectType objectType ) {
    for ( TabMapEntry entry : tabMap ) {
      if ( entry.getTabItem().isDisposed() ) {
        continue;
      }
      if ( objectType == entry.getObjectType()
          && entry.getTabItem().getText().equalsIgnoreCase( tabItemText ) ) {
        return entry;
      }
    }
    return null;
  }

  /**
   * Finds a live tab entry whose managed object equals the given one (same class and equal).
   *
   * @return the matching entry, or null if none found
   */
  public TabMapEntry findTabMapEntry( Object managedObject ) {
    for ( TabMapEntry entry : tabMap ) {
      if ( entry.getTabItem().isDisposed() ) {
        continue;
      }
      Object entryManagedObj = entry.getObject().getManagedObject();
      // make sure they are the same class before comparing them
      if ( entryManagedObj != null && managedObject != null ) {
        if ( entryManagedObj.getClass().equals( managedObject.getClass() ) ) {
          if ( entryManagedObj.equals( managedObject ) ) {
            return entry;
          }
        }
      }
    }
    return null;
  }

  /**
   * Finds the tab for the transformation that matches the metadata provided (either the file must be the same or the
   * repository id).
   *
   * @param trans
   *          Transformation metadata to look for
   * @return Tab with transformation open whose metadata matches {@code trans} or {@code null} if no tab exists.
   * @throws KettleFileException
   *           If there is a problem loading the file object for an open transformation with an invalid a filename.
   */
  public TabMapEntry findTabForTransformation( TransMeta trans ) throws KettleFileException {
    // File for the transformation we're looking for. It will be loaded upon first request.
    FileObject transFile = null;
    for ( TabMapEntry entry : tabMap ) {
      if ( entry == null || entry.getTabItem().isDisposed() ) {
        continue;
      }
      if ( trans.getFilename() != null && entry.getFilename() != null ) {
        // If the entry has a file name it is the same as trans iff. they originated from the same files
        FileObject entryFile = KettleVFS.getFileObject( entry.getFilename() );
        if ( transFile == null ) {
          transFile = KettleVFS.getFileObject( trans.getFilename() );
        }
        if ( entryFile.equals( transFile ) ) {
          return entry;
        }
      } else if ( trans.getObjectId() != null && entry.getObject() != null ) {
        EngineMetaInterface meta = entry.getObject().getMeta();
        if ( meta != null && trans.getObjectId().equals( meta.getObjectId() ) ) {
          // If the transformation has an object id and the entry shares the same id they are the same
          return entry;
        }
      }
    }
    // No tabs for the transformation exist and are not disposed
    return null;
  }

  /**
   * Rename the tabs
   */
  public void renameTabs() {
    List<TabMapEntry> list = new ArrayList<TabMapEntry>( tabMap );
    for ( TabMapEntry entry : list ) {
      if ( entry.getTabItem().isDisposed() ) {
        // this should not be in the map, get rid of it.
        // BUGFIX: previously this called tabMap.remove( entry.getObjectName() ), which passed
        // a String to List<TabMapEntry>.remove(Object); no element ever matched, so disposed
        // tabs were never purged from the map. Remove the entry itself instead.
        tabMap.remove( entry );
        continue;
      }

      Object managedObject = entry.getObject().getManagedObject();
      if ( managedObject != null ) {
        if ( entry.getObject() instanceof TransGraph ) {
          TransMeta transMeta = (TransMeta) managedObject;
          String tabText = makeTabName( transMeta, entry.isShowingLocation() );
          entry.getTabItem().setText( tabText );
          String toolTipText = BaseMessages.getString( PKG, "Spoon.TabTrans.Tooltip", tabText );
          if ( Const.isWindows() && !Const.isEmpty( transMeta.getFilename() ) ) {
            toolTipText += Const.CR + Const.CR + transMeta.getFilename();
          }
          entry.getTabItem().setToolTipText( toolTipText );
        } else if ( entry.getObject() instanceof JobGraph ) {
          JobMeta jobMeta = (JobMeta) managedObject;
          entry.getTabItem().setText( makeTabName( jobMeta, entry.isShowingLocation() ) );
          String toolTipText =
              BaseMessages.getString( PKG, "Spoon.TabJob.Tooltip",
                  makeTabName( jobMeta, entry.isShowingLocation() ) );
          if ( Const.isWindows() && !Const.isEmpty( jobMeta.getFilename() ) ) {
            toolTipText += Const.CR + Const.CR + jobMeta.getFilename();
          }
          entry.getTabItem().setToolTipText( toolTipText );
        }
      }
    }
    spoon.setShellText();
  }

  public void addTab( TabMapEntry entry ) {
    tabMap.add( entry );
  }

  /**
   * Builds the display name for a transformation/job tab, optionally prefixed with its
   * file or repository location and suffixed with its revision.
   *
   * @param transMeta    metadata of the transformation or job
   * @param showLocation whether to include location and revision information
   * @return the tab name to display
   */
  public String makeTabName( EngineMetaInterface transMeta, boolean showLocation ) {
    if ( Const.isEmpty( transMeta.getName() ) && Const.isEmpty( transMeta.getFilename() ) ) {
      return Spoon.STRING_TRANS_NO_NAME;
    }

    if ( Const.isEmpty( transMeta.getName() )
        || spoon.delegates.trans.isDefaultTransformationName( transMeta.getName() ) ) {
      transMeta.nameFromFilename();
    }

    String name = "";

    if ( showLocation ) {
      if ( !Const.isEmpty( transMeta.getFilename() ) ) {
        // Regular file...
        //
        name += transMeta.getFilename() + " : ";
      } else {
        // Repository object...
        //
        name += transMeta.getRepositoryDirectory().getPath() + " : ";
      }
    }

    name += transMeta.getName();
    if ( showLocation ) {
      ObjectRevision version = transMeta.getObjectRevision();
      if ( version != null ) {
        name += " : r" + version.getName();
      }
    }
    return name;
  }

  /**
   * Reacts to a tab being selected: refreshes core objects for trans/job tabs,
   * gives focus to the selected graph and refreshes the tree and shell title.
   */
  public void tabSelected( TabItem item ) {
    ArrayList<TabMapEntry> collection = new ArrayList<TabMapEntry>( tabMap );

    // See which core objects to show
    //
    for ( TabMapEntry entry : collection ) {
      boolean isTrans = ( entry.getObject() instanceof TransGraph );
      if ( item.equals( entry.getTabItem() ) ) {
        if ( isTrans || entry.getObject() instanceof JobGraph ) {
          EngineMetaInterface meta = entry.getObject().getMeta();
          if ( meta != null ) {
            meta.setInternalKettleVariables();
          }
          if ( spoon.getCoreObjectsState() != SpoonInterface.STATE_CORE_OBJECTS_SPOON ) {
            spoon.refreshCoreObjects();
          }
        }
        if ( entry.getObject() instanceof JobGraph ) {
          ( (JobGraph) entry.getObject() ).setFocus();
        } else if ( entry.getObject() instanceof TransGraph ) {
          ( (TransGraph) entry.getObject() ).setFocus();
        }
        break;
      }
    }

    // Also refresh the tree
    spoon.refreshTree();
    spoon.setShellText(); // calls also enableMenus() and markTabsChanged()
  }
}
package atomicstryker.dynamiclights.client.modules; import java.util.ArrayList; import java.util.EnumSet; import java.util.Iterator; import java.util.List; import net.minecraft.client.Minecraft; import net.minecraft.client.entity.EntityOtherPlayerMP; import net.minecraft.entity.Entity; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.item.ItemStack; import net.minecraftforge.common.Configuration; import net.minecraftforge.common.Property; import atomicstryker.dynamiclights.client.DynamicLights; import atomicstryker.dynamiclights.client.IDynamicLightSource; import atomicstryker.dynamiclights.client.ItemConfigHelper; import cpw.mods.fml.client.FMLClientHandler; import cpw.mods.fml.common.ITickHandler; import cpw.mods.fml.common.Mod; import cpw.mods.fml.common.Mod.EventHandler; import cpw.mods.fml.common.TickType; import cpw.mods.fml.common.event.FMLInitializationEvent; import cpw.mods.fml.common.event.FMLPreInitializationEvent; import cpw.mods.fml.common.registry.TickRegistry; import cpw.mods.fml.relauncher.Side; /** * * @author AtomicStryker * * Offers Dynamic Light functionality to Player Entities that aren't the client. * Handheld Items and Armor can give off Light through this Module. * */ @Mod(modid = "DynamicLights_otherPlayers", name = "Dynamic Lights Other Player Light", version = "1.0.4", dependencies = "required-after:DynamicLights") public class PlayerOthersLightSource { private Minecraft mcinstance; private long nextUpdate; private long updateInterval; private ArrayList<OtherPlayerAdapter> trackedPlayers; private Thread thread; private boolean threadRunning; private ItemConfigHelper itemsMap; @EventHandler public void preInit(FMLPreInitializationEvent evt) { Configuration config = new Configuration(evt.getSuggestedConfigurationFile()); config.load(); Property itemsList = config.get(Configuration.CATEGORY_GENERAL, "LightItems", "50,89=12,348=10,91,327,76=10,331=10,314=14"); itemsList.comment = "Item IDs that shine light while held. 
Armor Items also work when worn. [ONLY ON OTHERS] Syntax: ItemID[-MetaValue]:LightValue, seperated by commas"; itemsMap = new ItemConfigHelper(itemsList.getString(), 15); Property updateI = config.get(Configuration.CATEGORY_GENERAL, "update Interval", 1000); updateI.comment = "Update Interval time for all other player entities in milliseconds. The lower the better and costlier."; updateInterval = updateI.getInt(); config.save(); } @EventHandler public void load(FMLInitializationEvent evt) { mcinstance = FMLClientHandler.instance().getClient(); nextUpdate = System.currentTimeMillis(); trackedPlayers = new ArrayList<OtherPlayerAdapter>(); threadRunning = false; TickRegistry.registerTickHandler(new TickHandler(), Side.CLIENT); } private class TickHandler implements ITickHandler { private final EnumSet<TickType> ticks; public TickHandler() { ticks = EnumSet.of(TickType.CLIENT); } @Override public void tickStart(EnumSet<TickType> type, Object... tickData) { } @SuppressWarnings("unchecked") @Override public void tickEnd(EnumSet<TickType> type, Object... tickData) { if (mcinstance.theWorld != null && System.currentTimeMillis() > nextUpdate && !DynamicLights.globalLightsOff()) { nextUpdate = System.currentTimeMillis() + updateInterval; if (!threadRunning) { thread = new OtherPlayerChecker(mcinstance.theWorld.loadedEntityList); thread.setPriority(Thread.MIN_PRIORITY); thread.start(); threadRunning = true; } } } @Override public EnumSet<TickType> ticks() { return ticks; } @Override public String getLabel() { return "DynamicLights_otherPlayers"; } } private int getLightFromItemStack(ItemStack stack) { if (stack != null) { int r = itemsMap.retrieveValue(stack.itemID, stack.getItemDamage()); return r < 0 ? 
0 : r; } return 0; } private class OtherPlayerChecker extends Thread { private final Object[] list; public OtherPlayerChecker(List<Entity> input) { list = input.toArray(); } @Override public void run() { ArrayList<OtherPlayerAdapter> newList = new ArrayList<OtherPlayerAdapter>(); Entity ent; for (Object o : list) { ent = (Entity) o; // Loop all loaded Entities, find alive and valid other Player Entities if (ent instanceof EntityOtherPlayerMP && ent.isEntityAlive()) { // now find them in the already tracked player adapters boolean found = false; Iterator<OtherPlayerAdapter> iter = trackedPlayers.iterator(); OtherPlayerAdapter adapter = null; while (iter.hasNext()) { adapter = iter.next(); if (adapter.getAttachmentEntity().equals(ent)) // already tracked! { adapter.onTick(); // execute a tick newList.add(adapter); // put them in the new list found = true; iter.remove(); // remove them from the old break; } } if (!found) // wasnt already tracked { // make new, tick, put in new list adapter = new OtherPlayerAdapter((EntityPlayer) ent); adapter.onTick(); newList.add(adapter); } } } // any remaining adapters were not in the loaded entities. The main Dynamic Lights mod will kill them. trackedPlayers = newList; threadRunning = false; } } private class OtherPlayerAdapter implements IDynamicLightSource { private EntityPlayer player; private int lightLevel; private boolean enabled; public OtherPlayerAdapter(EntityPlayer p) { lightLevel = 0; enabled = false; player = p; } /** * Since they are IDynamicLightSource instances, they will already receive updates! Why do we need * to do this? Because Player Entities can change equipment and we really don't want this method * in an onUpdate tick, way too expensive. So we put it in a seperate Thread! 
*/ public void onTick() { int prevLight = lightLevel; lightLevel = getLightFromItemStack(player.getCurrentEquippedItem()); for (ItemStack armor : player.inventory.armorInventory) { lightLevel = Math.max(lightLevel, getLightFromItemStack(armor)); } if (prevLight != 0 && lightLevel != prevLight) { lightLevel = 0; } else { if (player.isBurning()) { lightLevel = 15; } } if (!enabled && lightLevel > 8) { enableLight(); } else if (enabled && lightLevel < 9) { disableLight(); } } private void enableLight() { DynamicLights.addLightSource(this); enabled = true; } private void disableLight() { DynamicLights.removeLightSource(this); enabled = false; } @Override public Entity getAttachmentEntity() { return player; } @Override public int getLightLevel() { return lightLevel; } } }
/* * Copyright (C) 2007 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.dx.dex.file; import com.android.dex.SizeOf; import com.android.dx.rop.annotation.Annotations; import com.android.dx.rop.annotation.AnnotationsList; import com.android.dx.rop.code.AccessFlags; import com.android.dx.rop.cst.Constant; import com.android.dx.rop.cst.CstArray; import com.android.dx.rop.cst.CstFieldRef; import com.android.dx.rop.cst.CstMethodRef; import com.android.dx.rop.cst.CstString; import com.android.dx.rop.cst.CstType; import com.android.dx.rop.type.StdTypeList; import com.android.dx.rop.type.TypeList; import com.android.dx.util.AnnotatedOutput; import com.android.dx.util.Hex; import com.android.dx.util.Writers; import java.io.PrintWriter; import java.io.Writer; import java.util.ArrayList; /** * Representation of a Dalvik class, which is basically a set of * members (fields or methods) along with a few more pieces of * information. 
 */
public final class ClassDefItem extends IndexedItem {
    /** {@code non-null;} type constant for this class */
    private final CstType thisClass;

    /** access flags */
    private final int accessFlags;

    /**
     * {@code null-ok;} superclass or {@code null} if this class is a/the
     * root class
     */
    private final CstType superclass;

    /** {@code null-ok;} list of implemented interfaces */
    private TypeListItem interfaces;

    /** {@code null-ok;} source file name or {@code null} if unknown */
    private final CstString sourceFile;

    /** {@code non-null;} associated class data object */
    private final ClassDataItem classData;

    /**
     * {@code null-ok;} item wrapper for the static values, initialized
     * in {@link #addContents}
     */
    private EncodedArrayItem staticValuesItem;

    /** {@code non-null;} annotations directory */
    private AnnotationsDirectoryItem annotationsDirectory;

    /**
     * Constructs an instance. Its sets of members and annotations are
     * initially empty.
     *
     * @param thisClass {@code non-null;} type constant for this class
     * @param accessFlags access flags
     * @param superclass {@code null-ok;} superclass or {@code null} if
     * this class is a/the root class
     * @param interfaces {@code non-null;} list of implemented interfaces
     * @param sourceFile {@code null-ok;} source file name or
     * {@code null} if unknown
     */
    public ClassDefItem(CstType thisClass, int accessFlags,
            CstType superclass, TypeList interfaces, CstString sourceFile) {
        if (thisClass == null) {
            throw new NullPointerException("thisClass == null");
        }

        /*
         * TODO: Maybe check accessFlags and superclass, at
         * least for easily-checked stuff?
         */

        if (interfaces == null) {
            throw new NullPointerException("interfaces == null");
        }

        this.thisClass = thisClass;
        this.accessFlags = accessFlags;
        this.superclass = superclass;
        // an empty interface list is represented as no list at all in the dex file
        this.interfaces = (interfaces.size() == 0) ? null : new TypeListItem(interfaces);
        this.sourceFile = sourceFile;
        this.classData = new ClassDataItem(thisClass);
        this.staticValuesItem = null;
        this.annotationsDirectory = new AnnotationsDirectoryItem();
    }

    /** {@inheritDoc} */
    @Override
    public ItemType itemType() {
        return ItemType.TYPE_CLASS_DEF_ITEM;
    }

    /** {@inheritDoc} */
    @Override
    public int writeSize() {
        return SizeOf.CLASS_DEF_ITEM;
    }

    /** {@inheritDoc} */
    @Override
    public void addContents(DexFile file) {
        // Intern every constant and item this class refers to, so that all
        // referenced sections are populated before offsets get assigned.
        TypeIdsSection typeIds = file.getTypeIds();
        MixedItemSection byteData = file.getByteData();
        MixedItemSection wordData = file.getWordData();
        MixedItemSection typeLists = file.getTypeLists();
        StringIdsSection stringIds = file.getStringIds();

        typeIds.intern(thisClass);

        if (!classData.isEmpty()) {
            MixedItemSection classDataSection = file.getClassData();
            classDataSection.add(classData);

            CstArray staticValues = classData.getStaticValuesConstant();
            if (staticValues != null) {
                staticValuesItem = byteData.intern(new EncodedArrayItem(staticValues));
            }
        }

        if (superclass != null) {
            typeIds.intern(superclass);
        }

        if (interfaces != null) {
            interfaces = typeLists.intern(interfaces);
        }

        if (sourceFile != null) {
            stringIds.intern(sourceFile);
        }

        if (! annotationsDirectory.isEmpty()) {
            if (annotationsDirectory.isInternable()) {
                annotationsDirectory = wordData.intern(annotationsDirectory);
            } else {
                wordData.add(annotationsDirectory);
            }
        }
    }

    /** {@inheritDoc} */
    @Override
    public void writeTo(DexFile file, AnnotatedOutput out) {
        boolean annotates = out.annotates();

        // Resolve all indices/offsets; absent parts are encoded as -1 (indices)
        // or 0 (offsets), matching the dex class_def_item format.
        TypeIdsSection typeIds = file.getTypeIds();
        int classIdx = typeIds.indexOf(thisClass);
        int superIdx = (superclass == null) ? -1 : typeIds.indexOf(superclass);
        int interOff = OffsettedItem.getAbsoluteOffsetOr0(interfaces);
        int annoOff = annotationsDirectory.isEmpty() ? 0 : annotationsDirectory.getAbsoluteOffset();
        int sourceFileIdx = (sourceFile == null) ? -1 : file.getStringIds().indexOf(sourceFile);
        int dataOff = classData.isEmpty()? 0 : classData.getAbsoluteOffset();
        int staticValuesOff = OffsettedItem.getAbsoluteOffsetOr0(staticValuesItem);

        if (annotates) {
            out.annotate(0, indexString() + ' ' + thisClass.toHuman());
            out.annotate(4, " class_idx: " + Hex.u4(classIdx));
            out.annotate(4, " access_flags: " + AccessFlags.classString(accessFlags));
            out.annotate(4, " superclass_idx: " + Hex.u4(superIdx) + " // " + ((superclass == null) ? "<none>" : superclass.toHuman()));
            out.annotate(4, " interfaces_off: " + Hex.u4(interOff));
            if (interOff != 0) {
                TypeList list = interfaces.getList();
                int sz = list.size();
                for (int i = 0; i < sz; i++) {
                    out.annotate(0, " " + list.getType(i).toHuman());
                }
            }
            out.annotate(4, " source_file_idx: " + Hex.u4(sourceFileIdx) + " // " + ((sourceFile == null) ? "<none>" : sourceFile.toHuman()));
            out.annotate(4, " annotations_off: " + Hex.u4(annoOff));
            out.annotate(4, " class_data_off: " + Hex.u4(dataOff));
            out.annotate(4, " static_values_off: " + Hex.u4(staticValuesOff));
        }

        out.writeInt(classIdx);
        out.writeInt(accessFlags);
        out.writeInt(superIdx);
        out.writeInt(interOff);
        out.writeInt(sourceFileIdx);
        out.writeInt(annoOff);
        out.writeInt(dataOff);
        out.writeInt(staticValuesOff);
    }

    /**
     * Gets the constant corresponding to this class.
     *
     * @return {@code non-null;} the constant
     */
    public CstType getThisClass() {
        return thisClass;
    }

    /**
     * Gets the superclass.
     *
     * @return {@code null-ok;} the superclass or {@code null} if
     * this class is a/the root class
     */
    public CstType getSuperclass() {
        return superclass;
    }

    /**
     * Gets the list of interfaces implemented.
     *
     * @return {@code non-null;} the interfaces list
     */
    public TypeList getInterfaces() {
        if (interfaces == null) {
            return StdTypeList.EMPTY;
        }

        return interfaces.getList();
    }

    /**
     * Gets the source file name.
     *
     * @return {@code null-ok;} the source file name or {@code null} if unknown
     */
    public CstString getSourceFile() {
        return sourceFile;
    }

    /**
     * Adds a static field.
     *
     * @param field {@code non-null;} the field to add
     * @param value {@code null-ok;} initial value for the field, if any
     */
    public void addStaticField(EncodedField field, Constant value) {
        classData.addStaticField(field, value);
    }

    /**
     * Adds an instance field.
     *
     * @param field {@code non-null;} the field to add
     */
    public void addInstanceField(EncodedField field) {
        classData.addInstanceField(field);
    }

    /**
     * Adds a direct ({@code static} and/or {@code private}) method.
     *
     * @param method {@code non-null;} the method to add
     */
    public void addDirectMethod(EncodedMethod method) {
        classData.addDirectMethod(method);
    }

    /**
     * Adds a virtual method.
     *
     * @param method {@code non-null;} the method to add
     */
    public void addVirtualMethod(EncodedMethod method) {
        classData.addVirtualMethod(method);
    }

    /**
     * Gets all the methods in this class. The returned list is not linked
     * in any way to the underlying lists contained in this instance, but
     * the objects contained in the list are shared.
     *
     * @return {@code non-null;} list of all methods
     */
    public ArrayList<EncodedMethod> getMethods() {
        return classData.getMethods();
    }

    /**
     * Sets the direct annotations on this class. These are annotations
     * made on the class, per se, as opposed to on one of its members.
     * It is only valid to call this method at most once per instance.
     *
     * @param annotations {@code non-null;} annotations to set for this class
     * @param dexFile {@code non-null;} dex output
     */
    public void setClassAnnotations(Annotations annotations, DexFile dexFile) {
        annotationsDirectory.setClassAnnotations(annotations, dexFile);
    }

    /**
     * Adds a field annotations item to this class.
     *
     * @param field {@code non-null;} field in question
     * @param annotations {@code non-null;} associated annotations to add
     * @param dexFile {@code non-null;} dex output
     */
    public void addFieldAnnotations(CstFieldRef field, Annotations annotations, DexFile dexFile) {
        annotationsDirectory.addFieldAnnotations(field, annotations, dexFile);
    }

    /**
     * Adds a method annotations item to this class.
     *
     * @param method {@code non-null;} method in question
     * @param annotations {@code non-null;} associated annotations to add
     * @param dexFile {@code non-null;} dex output
     */
    public void addMethodAnnotations(CstMethodRef method, Annotations annotations, DexFile dexFile) {
        annotationsDirectory.addMethodAnnotations(method, annotations, dexFile);
    }

    /**
     * Adds a parameter annotations item to this class.
     *
     * @param method {@code non-null;} method in question
     * @param list {@code non-null;} associated list of annotation sets to add
     * @param dexFile {@code non-null;} dex output
     */
    public void addParameterAnnotations(CstMethodRef method, AnnotationsList list, DexFile dexFile) {
        annotationsDirectory.addParameterAnnotations(method, list, dexFile);
    }

    /**
     * Gets the method annotations for a given method, if any. This is
     * meant for use by debugging / dumping code.
     *
     * @param method {@code non-null;} the method
     * @return {@code null-ok;} the method annotations, if any
     */
    public Annotations getMethodAnnotations(CstMethodRef method) {
        return annotationsDirectory.getMethodAnnotations(method);
    }

    /**
     * Gets the parameter annotations for a given method, if any. This is
     * meant for use by debugging / dumping code.
     *
     * @param method {@code non-null;} the method
     * @return {@code null-ok;} the parameter annotations, if any
     */
    public AnnotationsList getParameterAnnotations(CstMethodRef method) {
        return annotationsDirectory.getParameterAnnotations(method);
    }
}
package it.unibz.krdb.obda.owlrefplatform.core.basicoperations; /* * #%L * ontop-reformulation-core * %% * Copyright (C) 2009 - 2014 Free University of Bozen-Bolzano * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import it.unibz.krdb.obda.model.AlgebraOperatorPredicate; import it.unibz.krdb.obda.model.BooleanOperationPredicate; import it.unibz.krdb.obda.model.CQIE; import it.unibz.krdb.obda.model.Constant; import it.unibz.krdb.obda.model.Function; import it.unibz.krdb.obda.model.Term; import it.unibz.krdb.obda.model.OBDADataFactory; import it.unibz.krdb.obda.model.Predicate.COL_TYPE; import it.unibz.krdb.obda.model.Variable; import it.unibz.krdb.obda.model.impl.OBDADataFactoryImpl; import it.unibz.krdb.obda.model.impl.OBDAVocabulary; import it.unibz.krdb.obda.model.impl.TermUtils; import java.security.InvalidParameterException; import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; import java.util.Set; public class DatalogNormalizer { private final static OBDADataFactory fac = OBDADataFactoryImpl.getInstance(); /*** * This expands all AND trees into individual comparison atoms in the body * of the query. Nested AND trees inside Join or LeftJoin atoms are not * touched. 
* * @param query * @return */ public static void unfoldANDTrees(CQIE query) { List<Function> body = query.getBody(); /* Collecting all necessary conditions */ for (int i = 0; i < body.size(); i++) { Function currentAtom = body.get(i); if (currentAtom.getFunctionSymbol() == OBDAVocabulary.AND) { body.remove(i); body.addAll(getUnfolderAtomList(currentAtom)); } } } /*** * This expands all Join that can be directly added as conjuncts to a * query's body. Nested Join trees inside left joins are not touched. * * @param query * @return */ public static void unfoldJoinTrees(CQIE query) { List<Function> body = query.getBody(); unfoldJoinTrees(body, true); } /*** * This expands all Join that can be directly added as conjuncts to a * query's body. Nested Join trees inside left joins are not touched. * <p> * In addition, we will remove any Join atoms that only contain one single * data atom, i.e., the join is not a join, but a table reference with * conditions. These kind of atoms can result from the partial evaluation * process and should be eliminated. The elimination takes all the atoms in * the join (the single data atom plus possibly extra boolean conditions and * adds them to the node that is the parent of the join). 
 *
 * @param body the (possibly nested) list of atoms to expand in place; raw List
 *             because it holds either Function atoms or Terms of algebra atoms
 * @param isJoin whether the list belongs to a JOIN node (true) or to the top
 *               level / a LEFTJOIN branch (false)
 */
private static void unfoldJoinTrees(List body, boolean isJoin) {
    /* Collecting all necessary conditions */
    for (int i = 0; i < body.size(); i++) {
        Function currentAtom = (Function) body.get(i);
        if (!currentAtom.isAlgebraFunction())
            continue;
        if (currentAtom.getFunctionSymbol() == OBDAVocabulary.SPARQL_LEFTJOIN)
            unfoldJoinTrees(currentAtom.getTerms(), false);
        if (currentAtom.getFunctionSymbol() == OBDAVocabulary.SPARQL_JOIN) {
            unfoldJoinTrees(currentAtom.getTerms(), true);
            int dataAtoms = countDataItems(currentAtom.getTerms());
            // hoist the join's children into the parent when allowed, or when
            // the "join" is really just a single table reference with conditions
            if (isJoin || dataAtoms == 1) {
                body.remove(i);
                // inserting at i in reverse order preserves the original order
                for (int j = currentAtom.getTerms().size() - 1; j >= 0; j--) {
                    Term term = currentAtom.getTerm(j);
                    Function asAtom = (Function)term;
                    if (!body.contains(asAtom))
                        body.add(i, asAtom);
                }
                // re-examine slot i: a hoisted child might itself be a join
                i -= 1;
            }
        }
    }
}

/**
 * Rebuilds the body so every JOIN is binary (left-deep), starting from the top level.
 *
 * @param query the rule whose body is folded in place
 */
public static void foldJoinTrees(CQIE query) {
    List<Function> body = query.getBody();
    foldJoinTrees(body, false);
}

/**
 * Recursive worker for {@link #foldJoinTrees(CQIE)}: turns an n-ary join into a
 * left-deep chain of binary joins.
 *
 * @param atoms the atom list to fold in place (raw List: Functions or Terms)
 * @param isJoin whether this list is the child list of a JOIN node
 */
private static void foldJoinTrees(List atoms, boolean isJoin) {
    List<Function> dataAtoms = new LinkedList<>();
    List<Function> booleanAtoms = new LinkedList<>();

    /*
     * Collecting all data and boolean atoms for later processing. Calling
     * recursively fold Join trees on any algebra function.
     */
    for (Object o : atoms) {
        Function atom = (Function) o;
        if (atom.isBooleanFunction()) {
            booleanAtoms.add(atom);
        } else {
            dataAtoms.add(atom);
            if (atom.getFunctionSymbol() == OBDAVocabulary.SPARQL_LEFTJOIN)
                foldJoinTrees(atom.getTerms(), false);
            if (atom.getFunctionSymbol() == OBDAVocabulary.SPARQL_JOIN)
                foldJoinTrees(atom.getTerms(), true);
        }
    }

    if (!isJoin || dataAtoms.size() <= 2)
        return;

    /*
     * We process all atoms in dataAtoms to make only BINARY joins. Taking
     * two at a time and replacing them for JOINs, until only two are left.
     * All boolean conditions of the original join go into the first join
     * generated. It always merges from the left to the right.
     */
    while (dataAtoms.size() > 2) {
        Function joinAtom = fac.getSPARQLJoin(dataAtoms.remove(0), dataAtoms.remove(0));
        joinAtom.getTerms().addAll(booleanAtoms);
        booleanAtoms.clear();
        dataAtoms.add(0, joinAtom);
    }
    atoms.clear();
    atoms.addAll(dataAtoms);
}

/***
 * Counts the number of data atoms in this list of terms. Not recursive.
 *
 * @param terms list whose elements are all expected to be Function atoms
 * @return the number of non-boolean (data or algebra) atoms
 */
public static int countDataItems(List<? extends Term> terms) {
    int count = 0;
    for (Term currentTerm : terms) {
        Function currentAtom = (Function)currentTerm;
        if (!currentAtom.isBooleanFunction())
            count += 1;
    }
    return count;
}

/***
 * This method introduces new variable names in each data atom and
 * equalities to account for JOIN operations. This method is called before
 * generating SQL queries and allows to avoid cross refrences in nested
 * JOINs, which generate wrong ON or WHERE conditions.
 *
 * @param query the rule to normalize in place
 */
public static void pullOutEqualities(CQIE query) {
    Substitution substitutions = new SubstitutionImpl();
    int[] newVarCounter = { 1 };

    Set<Function> booleanAtoms = new HashSet<>();
    List<Function> equalities = new LinkedList<>();
    pullOutEqualities(query.getBody(), substitutions, equalities, newVarCounter, false);
    List<Function> body = query.getBody();
    body.addAll(equalities);

    /*
     * All new variables have been generated, the substitutions also, we
     * need to apply them to the equality atoms and to the head of the
     * query.
     */
    SubstitutionUtilities.applySubstitution(query, substitutions, false);
}

/***
 * Adds a trivial equality to a LeftJoin in case the left join doesn't have
 * at least one boolean condition. This is necessary to have syntactically
 * correct LeftJoins in SQL.
 *
 * @param leftJoin the algebra atom to (recursively) pad with a trivial 1=1 condition
 */
private static void addMinimalEqualityToLeftJoin(Function leftJoin) {
    int booleanAtoms = 0;
    boolean isLeftJoin = leftJoin.isAlgebraFunction();
    for (Term term : leftJoin.getTerms()) {
        Function f = (Function) term;
        if (f.isAlgebraFunction()) {
            addMinimalEqualityToLeftJoin(f);
        }
        if (f.isBooleanFunction())
            booleanAtoms += 1;
    }
    if (isLeftJoin && booleanAtoms == 0) {
        Function trivialEquality = fac.getFunctionEQ(fac.getConstantLiteral("1", COL_TYPE.INTEGER),
                fac.getConstantLiteral("1", COL_TYPE.INTEGER));
        leftJoin.getTerms().add(trivialEquality);
    }
}

/**
 * Applies {@link #addMinimalEqualityToLeftJoin(Function)} to every algebra atom
 * in the query's body.
 *
 * @param query the rule to pad in place
 */
public static void addMinimalEqualityToLeftJoin(CQIE query) {
    for (Function f : query.getBody()) {
        if (f.isAlgebraFunction()) {
            addMinimalEqualityToLeftJoin(f);
        }
    }
}

/***
 * This method introduces new variable names in each data atom and
 * equalities to account for JOIN operations. This method is called before
 * generating SQL queries and allows to avoid cross references in nested
 * JOINs, which generate wrong ON or WHERE conditions.
 *
 * @param currentTerms atoms of the current level, modified in place (raw List)
 * @param substitutions accumulates variable renamings applied at the end by the caller
 * @param eqList collects generated equality atoms; flushed into currentTerms after each atom
 * @param newVarCounter single-cell counter used to mint fresh variable names
 * @param isLeftJoin NOTE(review): currently unread in this body — kept for the
 *                   recursive call sites; confirm before removing
 */
private static void pullOutEqualities(List currentTerms, Substitution substitutions,
        List<Function> eqList, int[] newVarCounter, boolean isLeftJoin) {

    for (int i = 0; i < currentTerms.size(); i++) {

        Term term = (Term) currentTerms.get(i);

        /*
         * We don't expect any functions as terms, data atoms will only have
         * variables or constants at this level. This method is only called
         * exactly before generating the SQL query.
         */
        if (!(term instanceof Function))
            throw new RuntimeException("Unexpected term found while normalizing (pulling out equalities) the query.");

        Function atom = (Function) term;
        List<Term> subterms = atom.getTerms();

        if (atom.isAlgebraFunction()) {
            if (atom.getFunctionSymbol() == OBDAVocabulary.SPARQL_LEFTJOIN)
                pullOutEqualities(subterms, substitutions, eqList, newVarCounter, true);
            else
                pullOutEqualities(subterms, substitutions, eqList, newVarCounter, false);

        } else if (atom.isBooleanFunction()) {
            continue;
        }

        // rename/substitute variables
        for (int j = 0; j < subterms.size(); j++) {
            Term subTerm = subterms.get(j);
            if (subTerm instanceof Variable) {

                Variable var1 = (Variable) subTerm;
                Variable var2 = (Variable) substitutions.get(var1);

                if (var2 == null) {
                    /*
                     * No substitution exists, hence, no action but generate
                     * a new variable and register in the substitutions, and
                     * replace the current value with a fresh one.
                     */
                    var2 = fac.getVariable(var1.getName() + "f" + newVarCounter[0]);

                    substitutions.put(var1, var2);
                    subterms.set(j, var2);

                } else {

                    /*
                     * There already exists one, so we generate a fresh,
                     * replace the current value, and add an equality
                     * between the substitution and the new value.
                     */

                    if (atom.isDataFunction()) {
                        Variable newVariable = fac.getVariable(var1.getName() + newVarCounter[0]);

                        subterms.set(j, newVariable);
                        Function equality = fac.getFunctionEQ(var2, newVariable);
                        eqList.add(equality);

                    } else { // if its not data function, just replace
                        // variable
                        subterms.set(j, var2);
                    }
                }
                newVarCounter[0] += 1;
            } else if (subTerm instanceof Constant) {
                /*
                 * This case was necessary for query 7 in BSBM
                 */

                /**
                 * A('c') Replacing the constant with a fresh variable x and
                 * adding an quality atom ,e.g., A(x), x = 'c'
                 */
                // only relevant if in data function?
                if (atom.isDataFunction()) {
                    Variable var = fac.getVariable("f" + newVarCounter[0]);
                    newVarCounter[0] += 1;
                    Function equality = fac.getFunctionEQ(var, subTerm);
                    subterms.set(j, var);
                    eqList.add(equality);
                }
            }
        }
        // splice the generated equalities right after the current atom and
        // jump over them so they are not re-processed by this loop
        currentTerms.addAll(i + 1, eqList);
        i = i + eqList.size();
        eqList.clear();
    }
}

// Saturate equalities list to explicitly state JOIN conditions and
// therefore avoid having
// to rely on DBMS for nested JOIN optimisations (PostgreSQL case for BSBM
// Q3)
private static void saturateEqualities(Set<Function> boolSet) {
    // partition the terms of all EQ atoms into equivalence classes...
    List<Set<Term>> equalitySets = new ArrayList<>();
    Iterator<Function> iter = boolSet.iterator();
    while (iter.hasNext()) {
        Function eq = iter.next();
        if (eq.getFunctionSymbol() != OBDAVocabulary.EQ)
            continue;
        Term v1 = eq.getTerm(0);
        Term v2 = eq.getTerm(1);
        if (equalitySets.size() == 0) {
            Set<Term> firstSet = new LinkedHashSet<>();
            firstSet.add(v1);
            firstSet.add(v2);
            equalitySets.add(firstSet);
            continue;
        }
        for (int k = 0; k < equalitySets.size(); k++) {
            Set<Term> set = equalitySets.get(k);
            if (set.contains(v1)) {
                set.add(v2);
                continue;
            }
            if (set.contains(v2)) {
                set.add(v1);
                continue;
            }
            if (k == equalitySets.size() - 1) {
                Set<Term> newSet = new LinkedHashSet<>();
                newSet.add(v1);
                newSet.add(v2);
                equalitySets.add(newSet);
                break;
            }
        }
    }

    // ...then emit the pairwise closure of every equivalence class
    for (int k = 0; k < equalitySets.size(); k++) {
        List<Term> varList = new ArrayList<>(equalitySets.get(k));
        for (int i = 0; i < varList.size() - 1; i++) {
            for (int j = i + 1; j < varList.size(); j++) {
                Function equality = fac.getFunctionEQ(varList.get(i), varList.get(j));
                boolSet.add(equality);
            }
        }
    }
}

/****
 * Gets all the variables that are defined in this list of atoms, recursing
 * into algebra atoms and skipping boolean conditions.
 *
 * @param atoms atoms of one level (each element is expected to be a Function)
 * @return the set of variables referenced by data/algebra atoms in the list
 */
private static Set<Variable> getDefinedVariables(List<Term> atoms) {
    Set<Variable> currentLevelVariables = new HashSet<>();
    for (Term l : atoms) {
        Function atom = (Function) l;
        if (atom.isBooleanFunction()) {
            continue;
        } else if (atom.isAlgebraFunction()) {
            currentLevelVariables.addAll(getDefinedVariables(atom.getTerms()));
        } else {
            TermUtils.addReferencedVariablesTo(currentLevelVariables, atom);
        }
    }
    return currentLevelVariables;
}

/***
 * Collects all the variables that appear in all other branches (these are
 * atoms in the list of atoms) except in focusBranch.
 * <p>
 * Variables are considered problematic because they are out of the scope of
 * focusBranch. There are not visible in an SQL algebra tree.
 * <p>
 * Note that this method should only be called after calling pushEqualities
 * and pullOutEqualities on the CQIE. This is to assure that there are no
 * transitive equalities to take care of and that each variable in a data
 * atom is unique.
 *
 * @param atoms atoms of the current level
 * @param focusBranch index of the branch to exclude
 * @return variables defined by every branch except focusBranch
 */
private static Set<Variable> getProblemVariablesForBranchN(List<Term> atoms, int focusBranch) {
    Set<Variable> currentLevelVariables = new HashSet<>();
    for (int i = 0; i < atoms.size(); i++) {
        if (i == focusBranch)
            continue;
        Function atom = (Function) atoms.get(i);
        if (atom.isDataFunction()) {
            TermUtils.addReferencedVariablesTo(currentLevelVariables, atom);
        } else if (atom.isAlgebraFunction()) {
            currentLevelVariables.addAll(getDefinedVariables(atom.getTerms()));
        } else {
            // noop
        }
    }
    return currentLevelVariables;
}

/***
 * Pulls boolean conditions that reference out-of-scope variables up from
 * nested Join/LeftJoin atoms towards the level where all their variables are
 * defined, saturating equalities along the way.
 *
 * @param query the rule to normalize in place
 */
public static void pullUpNestedReferences(CQIE query) {

    List<Function> body = query.getBody();

    Function head = query.getHead();

    /*
     * This set is only for reference
     */
    Set<Variable> currentLevelVariables = new HashSet<>();
    /*
     * This set will be modified in the process
     */
    Set<Function> resultingBooleanConditions = new HashSet<>();

    /*
     * Analyze each atom that is a Join or LeftJoin, the process will
     * replace everything needed.
     */
    int[] freshVariableCount = { 0 };
    pullUpNestedReferences(body, head, currentLevelVariables, resultingBooleanConditions, freshVariableCount);

    /*
     * Adding any remaining boolean conditions to the top level.
     */
    for (Function condition : resultingBooleanConditions)
        body.add(condition);
}

/**
 * Recursive worker for {@link #pullUpNestedReferences(CQIE)}.
 *
 * @param currentLevelAtoms atoms of this nesting level, modified in place (raw List)
 * @param head query head — NOTE(review): currently unread in this body; passed
 *             through recursion only, confirm before removing
 * @param problemVariables variables defined at upper levels / sibling branches,
 *                         i.e. out of scope for this level
 * @param booleanConditions conditions being floated upwards; shared across the recursion
 * @param freshVariableCount NOTE(review): never incremented here — confirm before removing
 */
private static void pullUpNestedReferences(List currentLevelAtoms, Function head, Set<Variable> problemVariables,
        Set<Function> booleanConditions, int[] freshVariableCount) {

    /*
     * Call recursively on each atom that is a Join or a LeftJoin passing
     * the variables of this level
     */
    for (int focusBranch = 0; focusBranch < currentLevelAtoms.size(); focusBranch++) {
        Function atom = (Function) currentLevelAtoms.get(focusBranch);
        if (!(atom.getFunctionSymbol() instanceof AlgebraOperatorPredicate))
            continue;
        // System.out
        // .println("======================== INTO ALGEBRA =====================");

        List<Term> terms = atom.getTerms();

        Set<Variable> nestedProblemVariables = new HashSet<Variable>();

        nestedProblemVariables.addAll(problemVariables);
        nestedProblemVariables.addAll(getProblemVariablesForBranchN(currentLevelAtoms, focusBranch));

        pullUpNestedReferences(terms, head, nestedProblemVariables, booleanConditions, freshVariableCount);
    }

    // Here we need to saturate Equalities
    saturateEqualities(booleanConditions);

    /*
     * Add the resulting equalities that belong to the current level. An
     * equality belongs to this level if ALL its variables are defined at
     * the current level and not at the upper levels.
     */
    Set<Function> removedBooleanConditions = new HashSet<>();
    // System.out.println("Checking boolean conditions: "
    // + booleanConditions.size());
    for (Function equality : booleanConditions) {
        Set<Variable> atomVariables = new HashSet<>();
        TermUtils.addReferencedVariablesTo(atomVariables, equality);

        boolean belongsToThisLevel = true;
        for (Variable var : atomVariables) {
            if (!problemVariables.contains(var))
                continue;
            belongsToThisLevel = false;
        }
        if (!belongsToThisLevel)
            continue;

        currentLevelAtoms.add(equality);
        removedBooleanConditions.add(equality);
    }
    booleanConditions.removeAll(removedBooleanConditions);

    /*
     * Review the atoms of the current level and generate any variables,
     * equalities needed at this level (no further recursive calls).
     * Generate new variables for each variable that appears at this level,
     * and also appears at a top level. We do this only for data atoms.
     *
     * We do this by creating a substitution for each of the, and then
     * applying the substitution. We also add an equality for each
     * substitution we created.
     */

    /*
     * Review the current boolean atoms; if they refer to upper level
     * variables then remove them from the current level and add them to the
     * equalities set for the upper level.
     * 
     * If one contains at least 1 variable that is mentioned in an upper
     * level, then this condition is removed from the current level and
     * moved forward by adding it to the booleanConditions set.
     */
    for (int index = 0; index < currentLevelAtoms.size(); index++) {
        // System.out.println(index);
        // System.out.println(currentLevelAtoms.size());
        Term l = (Term) currentLevelAtoms.get(index);
        Function atom = (Function) l;
        // System.out
        // .println(atom.getFunctionSymbol().getClass() + " " + atom);
        if (!(atom.getFunctionSymbol() instanceof BooleanOperationPredicate))
            continue;
        Set<Variable> variables = new HashSet<>();
        TermUtils.addReferencedVariablesTo(variables, atom);
        boolean belongsUp = false;

        search: for (Variable var : variables) {
            if (problemVariables.contains(var)) {

                // /*
                // * looking for an equality that might indicate that in
                // fact,
                // * the atom doesn't belong up because there is an equality
                // * at this level that mentiones the "unsafe variable" and
                // a
                // * "safe variable" at the same time. (this pattern happens
                // * due to the call to
                // DatalogNormalizer.pullOutEqualities()
                // * that happens before pullingUp
                // */
                // for (int idx2 = 0; idx2 < currentLevelAtoms.size();
                // idx2++) {
                // NewLiteral l2 = (NewLiteral) currentLevelAtoms
                // .get(idx2);
                // if (!(l2 instanceof Function))
                // continue;
                // Function f2 = (Function) l2;
                // if (f2.getPredicate() != OBDAVocabulary.EQ)
                // continue;
                // List<NewLiteral> equalityVariables = f2.getTerms();
                // if (equalityVariables.contains(var)) {
                // // NewLiteral var2 = equalityVariables.get(0);
                // // if (!(var2 instanceof Variable))
                // // continue;
                // if (!(problemVariables
                // .containsAll(equalityVariables))) {
                // /*
                // * we found that var is acutally safe, there is
                // * an equality that bounds it to a data atom in
                // * the current level
                // */
                // continue search;
                // }
                // }
                //
                // }

                belongsUp = true;
                break;
            }
        }

        if (!belongsUp)
            continue;

        // Belongs up, removing and pushing up
        // System.out.println("REMOVED!!!!");

        currentLevelAtoms.remove(index);
        index -= 1;
        booleanConditions.add(atom);
    }
}

/***
 * Takes an AND atom and breaks it into a list of individual condition
 * atoms.
* * @param atom * @return */ public static List<Function> getUnfolderAtomList(Function atom) { if (atom.getFunctionSymbol() != OBDAVocabulary.AND) { throw new InvalidParameterException(); } List<Term> innerFunctionalTerms = new LinkedList<>(); for (Term term : atom.getTerms()) { innerFunctionalTerms.addAll(getUnfolderTermList((Function) term)); } List<Function> newatoms = new LinkedList<Function>(); for (Term innerterm : innerFunctionalTerms) { Function f = (Function) innerterm; Function newatom = fac.getFunction(f.getFunctionSymbol(), f.getTerms()); newatoms.add(newatom); } return newatoms; } /*** * Takes an AND atom and breaks it into a list of individual condition * atoms. * * @param atom * @return */ public static List<Term> getUnfolderTermList(Function term) { List<Term> result = new LinkedList<>(); if (term.getFunctionSymbol() != OBDAVocabulary.AND) { result.add(term); } else { List<Term> terms = term.getTerms(); for (Term currentterm : terms) { if (currentterm instanceof Function) { result.addAll(getUnfolderTermList((Function) currentterm)); } else { result.add(currentterm); } } } return result; } // THE FOLLOWING COMMENT IS TAKEN FROM THE CODE ABOVE, THE FUNCTIONALITY IT // DESCRIBES // WAS IMPLEMENTED BELLOW /* * Here we collect boolean atoms that have conditions of atoms on the left * of left joins. These cannot be put in the conditions of LeftJoin(...) * atoms as inside terms, since these conditions have to be applied no * matter what. Keeping them there makes them "optional", i.e., or else * return NULL. Hence these conditions have to be pulled up to the nearest * JOIN in the upper levels in the branches. 
The pullOutEqualities method
 * will do this; however, if some are still remaining by the time it
 * finishes, we must add them to the body of the CQIE as normal conditions
 * to the query (WHERE clauses).
 */
public static void pullOutLeftJoinConditions(CQIE query) {
    Set<Function> booleanAtoms = new HashSet<>();
    Set<Function> tempBooleans = new HashSet<>();
    List<Function> body = query.getBody();

    pullOutLJCond(body, booleanAtoms, false, tempBooleans, false);
    // Conditions that could not be attached to any LeftJoin become plain
    // WHERE-style conditions on the rule body.
    body.addAll(booleanAtoms);
}

/**
 * Walks one level of the term tree, pulling boolean conditions out of the
 * left branch of LEFT JOINs (they must hold unconditionally, so they cannot
 * stay inside the join's ON clause).
 *
 * @param currentTerms          atoms of the level being processed; rewritten
 *                              in place at the end of the method
 * @param leftConditionBooleans accumulator for conditions that must be pushed
 *                              all the way to the top (WHERE clause)
 * @param isLeftJoin            true when currentTerms is the argument list of
 *                              a LEFT JOIN
 * @param currentBooleans       conditions pulled out of the most recent
 *                              nested call; re-inserted after the second data
 *                              atom of a LEFT JOIN
 * @param isSecondJoin          true when this level is NOT the first operand
 *                              of its parent, in which case pulled conditions
 *                              stop at the parent instead of going to the top
 */
private static void pullOutLJCond(List currentTerms, Set<Function> leftConditionBooleans,
        boolean isLeftJoin, Set<Function> currentBooleans, boolean isSecondJoin) {
    boolean firstDataAtomFound = false;
    boolean secondDataAtomFound = false;
    boolean is2 = false;
    // Work on a copy so removals do not disturb the iteration below.
    List tempTerms = new LinkedList<>();
    tempTerms.addAll(currentTerms);
    Set<Function> tempConditionBooleans = new HashSet<>();

    if (currentTerms.size() == 0) {
        /*
         * This can happen when there are mappings with no body (facts).
         */
        return;
    }

    Term firstT = (Term) currentTerms.get(0);
    if (!(firstT instanceof Function))
        throw new RuntimeException("Unexpected term found while normalizing (pulling out conditions) the query.");

    for (int i = 0; i < currentTerms.size(); i++) {
        Function atom = (Function) currentTerms.get(i);
        List<Term> subterms = atom.getTerms();

        // If we are in a left join then pull out boolean conditions that
        // correspond to the first data atom.
        if (atom.isDataFunction() || atom.isAlgebraFunction()) {
            // If an atom is a Join/LeftJoin then recurse into it; otherwise
            // it is a data function and is handled below.
            if (atom.isAlgebraFunction()) {
                if (i != 0)
                    is2 = true; // not the first operand of this level
                if (atom.getFunctionSymbol() == OBDAVocabulary.SPARQL_LEFTJOIN)
                    pullOutLJCond(subterms, leftConditionBooleans, true, currentBooleans, is2);
                else
                    pullOutLJCond(subterms, leftConditionBooleans, false, currentBooleans, is2);
            }

            // If the first data atom was found already then this is the second.
            if (firstDataAtomFound)
                secondDataAtomFound = true;

            // If both are false then this is the first data atom.
            if (!firstDataAtomFound && !secondDataAtomFound) {
                firstDataAtomFound = true;
            }

            // Conditions pulled from a nested call re-enter after the second
            // operand of the enclosing LEFT JOIN.
            if (secondDataAtomFound && isLeftJoin) {
                tempTerms.addAll(currentBooleans);
            }
        } else {
            // It is a boolean atom.
            if (firstDataAtomFound && !secondDataAtomFound) {
                // Conditions on the left operand need to be pulled out of the
                // LEFT JOIN's ON clause (they must hold unconditionally).
                if (isLeftJoin) {
                    tempTerms.remove(atom);
                    // currentTerms.remove(atom);
                    // i--;
                    tempConditionBooleans.add(atom);
                    // leftConditionBooleans.add(atom);
                }
            }
        }
    }

    // tempTerms is currentTerms with the "bad" boolean conditions removed;
    // now propagate the removed conditions and rewrite the current level.
    //
    // If we are at the top-level Left Join, push booleans into the WHERE
    // clause; otherwise push them into the upper Left Join's ON clause.
    // If we are in a Join that is a second operand, do not push them all
    // the way up.
    if (!isSecondJoin) {
        leftConditionBooleans.addAll(tempConditionBooleans);
    }

    currentTerms.clear();
    currentTerms.addAll(tempTerms);
    // currentTerms.addAll(currentBooleans);
    currentBooleans.clear();
    currentBooleans.addAll(tempConditionBooleans);
}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.nifi.processors.kafka.pubsub;

import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processors.kafka.pubsub.util.MockRecordParser;
import org.apache.nifi.processors.kafka.pubsub.util.MockRecordWriter;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.serialization.RecordSetWriterFactory;
import org.apache.nifi.serialization.record.RecordFieldType;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.Before;
import org.junit.Test;

/**
 * Unit tests for the {@code ConsumeKafkaRecord_0_10} processor. No real Kafka
 * broker is used: the processor's {@code createConsumerPool} hook is overridden
 * to return a Mockito-mocked {@code ConsumerPool}/{@code ConsumerLease} pair,
 * so the tests cover property validation and the poll/commit/close interaction
 * pattern only.
 */
public class TestConsumeKafkaRecord_0_10 {

    // Mocks standing in for the real Kafka connection.
    private ConsumerLease mockLease = null;
    private ConsumerPool mockConsumerPool = null;
    private TestRunner runner;

    /**
     * Builds a TestRunner around a processor subclass that hands back the
     * mocked pool, and registers mock record reader/writer services.
     */
    @Before
    public void setup() throws InitializationException {
        mockLease = mock(ConsumerLease.class);
        mockConsumerPool = mock(ConsumerPool.class);

        // Override the factory hook so no real consumer pool is created.
        ConsumeKafkaRecord_0_10 proc = new ConsumeKafkaRecord_0_10() {
            @Override
            protected ConsumerPool createConsumerPool(final ProcessContext context, final ComponentLog log) {
                return mockConsumerPool;
            }
        };

        runner = TestRunners.newTestRunner(proc);
        runner.setProperty(KafkaProcessorUtils.BOOTSTRAP_SERVERS, "okeydokey:1234");

        // Record reader service with a simple two-field schema.
        final String readerId = "record-reader";
        final MockRecordParser readerService = new MockRecordParser();
        readerService.addSchemaField("name", RecordFieldType.STRING);
        readerService.addSchemaField("age", RecordFieldType.INT);
        runner.addControllerService(readerId, readerService);
        runner.enableControllerService(readerService);

        // Matching record writer service.
        final String writerId = "record-writer";
        final RecordSetWriterFactory writerService = new MockRecordWriter("name, age");
        runner.addControllerService(writerId, writerService);
        runner.enableControllerService(writerService);

        runner.setProperty(ConsumeKafkaRecord_0_10.RECORD_READER, readerId);
        runner.setProperty(ConsumeKafkaRecord_0_10.RECORD_WRITER, writerId);
    }

    /**
     * Dynamic Kafka client properties must be validated: a deserializer class
     * name must resolve, and ENABLE_AUTO_COMMIT must be "false" (the processor
     * manages commits itself).
     */
    @Test
    public void validateCustomValidatorSettings() throws Exception {
        runner.setProperty(ConsumeKafkaRecord_0_10.TOPICS, "foo");
        runner.setProperty(ConsumeKafkaRecord_0_10.GROUP_ID, "foo");
        runner.setProperty(ConsumeKafkaRecord_0_10.AUTO_OFFSET_RESET, ConsumeKafkaRecord_0_10.OFFSET_EARLIEST);
        runner.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
        runner.assertValid();
        // "Foo" is not a loadable class, so validation must fail.
        runner.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "Foo");
        runner.assertNotValid();
        runner.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
        runner.assertValid();
        runner.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
        runner.assertValid();
        // Auto-commit must stay disabled; "true" is rejected.
        runner.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
        runner.assertNotValid();
    }

    /**
     * GROUP_ID is required and must contain at least one non-whitespace
     * character; each invalid form must produce the expected message.
     */
    @Test
    public void validatePropertiesValidation() throws Exception {
        runner.setProperty(ConsumeKafkaRecord_0_10.TOPICS, "foo");
        runner.setProperty(ConsumeKafkaRecord_0_10.GROUP_ID, "foo");
        runner.setProperty(ConsumeKafkaRecord_0_10.AUTO_OFFSET_RESET, ConsumeKafkaRecord_0_10.OFFSET_EARLIEST);

        runner.removeProperty(ConsumeKafkaRecord_0_10.GROUP_ID);
        try {
            runner.assertValid();
            fail();
        } catch (AssertionError e) {
            assertTrue(e.getMessage().contains("invalid because Group ID is required"));
        }

        runner.setProperty(ConsumeKafkaRecord_0_10.GROUP_ID, "");
        try {
            runner.assertValid();
            fail();
        } catch (AssertionError e) {
            assertTrue(e.getMessage().contains("must contain at least one character that is not white space"));
        }

        runner.setProperty(ConsumeKafkaRecord_0_10.GROUP_ID, "  ");
        try {
            runner.assertValid();
            fail();
        } catch (AssertionError e) {
            assertTrue(e.getMessage().contains("must contain at least one character that is not white space"));
        }
    }

    /**
     * Happy path with a topic-name list: the lease is polled while
     * continuePolling() is true (twice), then committed and closed once.
     */
    @Test
    public void validateGetAllMessages() throws Exception {
        String groupName = "validateGetAllMessages";

        when(mockConsumerPool.obtainConsumer(anyObject(), anyObject())).thenReturn(mockLease);
        when(mockLease.continuePolling()).thenReturn(Boolean.TRUE, Boolean.TRUE, Boolean.FALSE);
        when(mockLease.commit()).thenReturn(Boolean.TRUE);

        runner.setProperty(ConsumeKafkaRecord_0_10.TOPICS, "foo,bar");
        runner.setProperty(ConsumeKafkaRecord_0_10.GROUP_ID, groupName);
        runner.setProperty(ConsumeKafkaRecord_0_10.AUTO_OFFSET_RESET, ConsumeKafkaRecord_0_10.OFFSET_EARLIEST);
        runner.run(1, false);

        verify(mockConsumerPool, times(1)).obtainConsumer(anyObject(), anyObject());
        verify(mockLease, times(3)).continuePolling();
        verify(mockLease, times(2)).poll();
        verify(mockLease, times(1)).commit();
        verify(mockLease, times(1)).close();
        verifyNoMoreInteractions(mockConsumerPool);
        verifyNoMoreInteractions(mockLease);
    }

    /**
     * Same happy path, but with TOPIC_TYPE=pattern and a regex topic spec.
     */
    @Test
    public void validateGetAllMessagesPattern() throws Exception {
        String groupName = "validateGetAllMessagesPattern";

        when(mockConsumerPool.obtainConsumer(anyObject(), anyObject())).thenReturn(mockLease);
        when(mockLease.continuePolling()).thenReturn(Boolean.TRUE, Boolean.TRUE, Boolean.FALSE);
        when(mockLease.commit()).thenReturn(Boolean.TRUE);

        runner.setProperty(ConsumeKafkaRecord_0_10.TOPICS, "(fo.*)|(ba)");
        runner.setProperty(ConsumeKafkaRecord_0_10.TOPIC_TYPE, "pattern");
        runner.setProperty(ConsumeKafkaRecord_0_10.GROUP_ID, groupName);
        runner.setProperty(ConsumeKafkaRecord_0_10.AUTO_OFFSET_RESET, ConsumeKafkaRecord_0_10.OFFSET_EARLIEST);
        runner.run(1, false);

        verify(mockConsumerPool, times(1)).obtainConsumer(anyObject(), anyObject());
        verify(mockLease, times(3)).continuePolling();
        verify(mockLease, times(2)).poll();
        verify(mockLease, times(1)).commit();
        verify(mockLease, times(1)).close();
        verifyNoMoreInteractions(mockConsumerPool);
        verifyNoMoreInteractions(mockLease);
    }

    /**
     * When commit() fails, the lease is still polled once and closed once —
     * the interaction pattern must not change.
     */
    @Test
    public void validateGetErrorMessages() throws Exception {
        String groupName = "validateGetErrorMessages";

        when(mockConsumerPool.obtainConsumer(anyObject(), anyObject())).thenReturn(mockLease);
        when(mockLease.continuePolling()).thenReturn(true, false);
        when(mockLease.commit()).thenReturn(Boolean.FALSE);

        runner.setProperty(ConsumeKafkaRecord_0_10.TOPICS, "foo,bar");
        runner.setProperty(ConsumeKafkaRecord_0_10.GROUP_ID, groupName);
        runner.setProperty(ConsumeKafkaRecord_0_10.AUTO_OFFSET_RESET, ConsumeKafkaRecord_0_10.OFFSET_EARLIEST);
        runner.run(1, false);

        verify(mockConsumerPool, times(1)).obtainConsumer(anyObject(), anyObject());
        verify(mockLease, times(2)).continuePolling();
        verify(mockLease, times(1)).poll();
        verify(mockLease, times(1)).commit();
        verify(mockLease, times(1)).close();
        verifyNoMoreInteractions(mockConsumerPool);
        verifyNoMoreInteractions(mockLease);
    }

    /**
     * SASL_PLAINTEXT requires a JAAS service name; principal and keytab must
     * be set together and the keytab must point at an existing file.
     * Expression-language references for all three must also validate.
     */
    @Test
    public void testJaasConfiguration() throws Exception {
        runner.setProperty(ConsumeKafkaRecord_0_10.TOPICS, "foo");
        runner.setProperty(ConsumeKafkaRecord_0_10.GROUP_ID, "foo");
        runner.setProperty(ConsumeKafkaRecord_0_10.AUTO_OFFSET_RESET, ConsumeKafkaRecord_0_10.OFFSET_EARLIEST);

        runner.setProperty(KafkaProcessorUtils.SECURITY_PROTOCOL, KafkaProcessorUtils.SEC_SASL_PLAINTEXT);
        runner.assertNotValid();

        runner.setProperty(KafkaProcessorUtils.JAAS_SERVICE_NAME, "kafka");
        runner.assertValid();

        // Principal without a usable keytab is invalid.
        runner.setProperty(KafkaProcessorUtils.USER_PRINCIPAL, "nifi@APACHE.COM");
        runner.assertNotValid();

        runner.setProperty(KafkaProcessorUtils.USER_KEYTAB, "not.A.File");
        runner.assertNotValid();

        runner.setProperty(KafkaProcessorUtils.USER_KEYTAB, "src/test/resources/server.properties");
        runner.assertValid();

        runner.setVariable("keytab", "src/test/resources/server.properties");
        runner.setVariable("principal", "nifi@APACHE.COM");
        runner.setVariable("service", "kafka");

        runner.setProperty(KafkaProcessorUtils.USER_PRINCIPAL, "${principal}");
        // NOTE(review): the trailing "s" in "${keytab}s" makes this resolve to
        // a path that should not exist ("...server.propertiess"), yet the test
        // expects assertValid() — looks like an accidental keystroke; confirm
        // against the validator's behavior before changing.
        runner.setProperty(KafkaProcessorUtils.USER_KEYTAB, "${keytab}s");
        runner.setProperty(KafkaProcessorUtils.JAAS_SERVICE_NAME, "${service}");
        runner.assertValid();
    }

}
/**
 * ********************************************************************************
 * Copyright (c) 2011, Monnet Project All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer. * Redistributions in binary
 * form must reproduce the above copyright notice, this list of conditions and
 * the following disclaimer in the documentation and/or other materials provided
 * with the distribution. * Neither the name of the Monnet Project nor the names
 * of its contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE MONNET PROJECT BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 * *******************************************************************************
 */
package eu.monnetproject.math.sparse;

import eu.monnetproject.math.sparse.Vectors.Factory;
import it.unimi.dsi.fastutil.ints.AbstractIntSet;
import it.unimi.dsi.fastutil.ints.IntIterator;
import it.unimi.dsi.fastutil.ints.IntSet;
import java.util.AbstractSet;
import java.util.Arrays;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NoSuchElementException;
import java.util.Set;

/**
 * A dense vector of primitive {@code int}s backed by a plain array,
 * implementing the project's {@code Vector<Integer>} interface. Fractional
 * results of the {@code double} mutators are truncated by the implicit
 * narrowing of compound assignment on an {@code int[]}.
 *
 * @author John McCrae
 */
public class IntVector implements Vector<Integer> {

    // Backing storage; shared (NOT copied) when the array constructor is used.
    private final int[] data;

    /** Creates a zero-filled vector of length {@code n}. */
    public IntVector(int n) {
        this.data = new int[n];
    }

    /** Wraps the given array directly — the caller's array is not copied. */
    public IntVector(int[] data) {
        this.data = data;
    }

    /** Returns the live backing array (mutations write through). */
    public int[] data() {
        return data;
    }

    /** Convenience varargs factory wrapping the given values. */
    public static IntVector make(int... data) {
        return new IntVector(data);
    }

    @Override
    public double doubleValue(int idx) {
        return data[idx];
    }

    @Override
    public int intValue(int idx) {
        return data[idx];
    }

    @Override
    public Integer value(int idx) {
        return data[idx];
    }

    // All put/add/sub/multiply/divide overloads return the PREVIOUS value
    // where the interface requires a return, and mutate in place.

    @Override
    public Integer put(Integer idx, Integer n) {
        final int rval = data[idx];
        data[idx] = n.intValue();
        return rval;
    }

    @Override
    public double put(int idx, double value) {
        double r = data[idx];
        data[idx] = (int) value; // truncates toward zero
        return r;
    }

    @Override
    public int put(int idx, int value) {
        int r = data[idx];
        data[idx] = value;
        return r;
    }

    @Override
    public int add(int idx, int val) {
        int r = data[idx];
        data[idx] += val;
        return r;
    }

    @Override
    public void sub(int idx, int val) {
        data[idx] -= val;
    }

    @Override
    public void multiply(int idx, int val) {
        data[idx] *= val;
    }

    @Override
    public void divide(int idx, int val) {
        data[idx] /= val; // integer division
    }

    @Override
    public double add(int idx, double val) {
        double r = data[idx];
        data[idx] += val; // compound assignment narrows the result to int
        return r;
    }

    @Override
    public void sub(int idx, double val) {
        data[idx] -= val;
    }

    @Override
    public void multiply(int idx, double val) {
        data[idx] *= val;
    }

    @Override
    public void divide(int idx, double val) {
        data[idx] /= val;
    }

    /**
     * Element-wise addition; fast path for another IntVector, otherwise only
     * the other vector's explicitly-stored entries are applied.
     */
    @Override
    public <M extends Number> void add(Vector<M> vector) {
        assert (vector.length() == data.length);
        if (vector instanceof IntVector) {
            final int[] data2 = ((IntVector) vector).data;
            for (int i = 0; i < data.length; i++) {
                data[i] += data2[i];
            }
        } else {
            for (Map.Entry<Integer, M> e : vector.entrySet()) {
                data[e.getKey()] += e.getValue().intValue();
            }
        }
    }

    /** Element-wise subtraction; mirrors {@link #add(Vector)}. */
    @Override
    public <M extends Number> void sub(Vector<M> vector) {
        assert (vector.length() == data.length);
        if (vector instanceof IntVector) {
            final int[] data2 = ((IntVector) vector).data;
            for (int i = 0; i < data.length; i++) {
                data[i] -= data2[i];
            }
        } else {
            for (Map.Entry<Integer, M> e : vector.entrySet()) {
                data[e.getKey()] -= e.getValue().intValue();
            }
        }
    }

    /** Scales every element by {@code n} (result narrowed to int). */
    @Override
    public void multiply(double n) {
        for (int i = 0; i < data.length; i++) {
            data[i] *= n;
        }
    }

    /**
     * Dot product. Three strategies: int-exact for IntVector, sparse
     * (entry-set) when the other vector's default value is 0, dense otherwise.
     */
    @Override
    public <M extends Number> double innerProduct(Vector<M> y) {
        assert (y.length() == data.length);
        if (y instanceof IntVector) {
            final IntVector y2 = (IntVector) y;
            int innerProduct = 0;
            for (int i = 0; i < data.length; i++) {
                innerProduct += data[i] * y2.data[i];
            }
            return innerProduct;
        } else if (y.defaultValue().doubleValue() == 0.0) {
            // Unstored entries are zero and contribute nothing.
            double innerProduct = 0.0;
            for (Map.Entry<Integer, M> e : y.entrySet()) {
                innerProduct += data[e.getKey()] * e.getValue().doubleValue();
            }
            return innerProduct;
        } else {
            double innerProduct = 0.0;
            for (int i = 0; i < data.length; i++) {
                innerProduct += data[i] * y.doubleValue(i);
            }
            return innerProduct;
        }
    }

    /**
     * Outer product this ⊗ y, materialized as the matrix type selected by
     * {@code using} (int array, double array, or sparse).
     */
    @Override
    @SuppressWarnings("unchecked")
    public <M extends Number, O extends Number> Matrix<O> outerProduct(Vector<M> y, Factory<O> using) {
        if (using == Vectors.AS_INTS) {
            int[][] data2 = new int[data.length][y.length()];
            for (int i = 0; i < data.length; i++) {
                for (int j = 0; j < y.length(); j++) {
                    data2[i][j] = data[i] * y.intValue(j);
                }
            }
            return (Matrix<O>) new IntArrayMatrix(data2);
        } else if (using == Vectors.AS_REALS) {
            double[][] data2 = new double[data.length][y.length()];
            for (int i = 0; i < data.length; i++) {
                for (int j = 0; j < y.length(); j++) {
                    data2[i][j] = y.doubleValue(j) * data[i];
                }
            }
            return (Matrix<O>) new DoubleArrayMatrix(data2);
        } else {
            final SparseMatrix<O> matrix = new SparseMatrix<O>(data.length, y.length(), using);
            for (int i = 0; i < data.length; i++) {
                for (Map.Entry<Integer, M> e : y.entrySet()) {
                    matrix.set(i, e.getKey(), e.getValue().doubleValue() * data[i]);
                }
            }
            return matrix;
        }
    }

    /** Live entry view over ALL indices (including zeros). */
    @Override
    public Set<Entry<Integer, Integer>> entrySet() {
        return new IntArraySet();
    }

    /** Copies the data into a fresh double[]. */
    @Override
    public double[] toDoubleArray() {
        double[] dataDouble = new double[data.length];
        for (int i = 0; i < data.length; i++) {
            dataDouble[i] = data[i];
        }
        return dataDouble;
    }

    /** Number of NON-ZERO entries — not the vector length. */
    @Override
    public int size() {
        int size = 0;
        for (int i = 0; i < data.length; i++) {
            if (data[i] != 0) {
                size++;
            }
        }
        return size;
    }

    /** Euclidean (L2) norm. */
    @Override
    public double norm() {
        double norm = 0.0;
        for (int i = 0; i < data.length; i++) {
            norm += data[i] * data[i];
        }
        return Math.sqrt(norm);
    }

    @Override
    public Integer defaultValue() {
        return 0;
    }

    @Override
    public Factory<Integer> factory() {
        return Vectors.AS_INTS;
    }

    @Override
    public int length() {
        return data.length;
    }

    /** Sparse "idx=value" comma-separated rendering of non-zero entries. */
    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder();
        for (int i = 0; i < data.length; i++) {
            if (data[i] != 0) {
                if (sb.length() > 0) {
                    sb.append(",");
                }
                sb.append(i).append("=").append(data[i]);
            }
        }
        return sb.toString();
    }

    /**
     * Parses either a dense comma-separated list of n values or a sparse
     * "idx=value" list (as produced by {@link #toString()}) into a vector of
     * length {@code n}.
     *
     * NOTE(review): the regex "\\[\\]\\s" matches the literal 3-char sequence
     * '[' ']' whitespace, not "any of [, ], whitespace" — the character class
     * "[\\[\\]\\s]" was probably intended. Confirm against the serialized
     * format before changing.
     *
     * @throws VectorFormatException on malformed entries or too many values
     */
    public static IntVector fromString(String s, int n) throws VectorFormatException {
        final int[] data = new int[n];
        final String[] ss = s.split(",");
        if (ss.length == n) {
            // Dense form, but individual entries may still be "idx=value".
            for (int i = 0; i < n; i++) {
                if (ss[i].contains("=")) {
                    final String[] sss = ss[i].split("=");
                    if (sss.length != 2) {
                        throw new VectorFormatException("Too many =s: " + ss[i]);
                    }
                    try {
                        data[Integer.parseInt(sss[0].replaceAll("\\[\\]\\s", ""))] = Integer.parseInt(sss[1].replaceAll("\\[\\]\\s", ""));
                    } catch (NumberFormatException x) {
                        throw new VectorFormatException(x);
                    }
                } else {
                    try {
                        data[i] = Integer.parseInt(ss[i].replaceAll("\\[\\]\\s", ""));
                    } catch (NumberFormatException x) {
                        throw new VectorFormatException(x);
                    }
                }
            }
        } else if (ss.length < n) {
            // Sparse form: every entry must be "idx=value".
            // NOTE(review): this loop runs i up to n but indexes ss[i], while
            // ss.length < n — it will throw ArrayIndexOutOfBoundsException for
            // any genuinely sparse input; "i < ss.length" looks intended.
            for (int i = 0; i < n; i++) {
                final String[] sss = ss[i].split("=");
                if (sss.length != 2) {
                    throw new VectorFormatException("Too many or too few =s: " + ss[i]);
                }
                try {
                    data[Integer.parseInt(sss[0].replaceAll("\\[\\]\\s", ""))] = Integer.parseInt(sss[1].replaceAll("\\[\\]\\s", ""));
                } catch (NumberFormatException x) {
                    throw new VectorFormatException(x);
                }
            }
        } else {
            throw new VectorFormatException("Real vector too long");
        }
        return new IntVector(data);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        final IntVector other = (IntVector) obj;
        if (!Arrays.equals(this.data, other.data)) {
            return false;
        }
        return true;
    }

    @Override
    public int hashCode() {
        int hash = 5;
        hash = 97 * hash + Arrays.hashCode(this.data);
        return hash;
    }

    /** Deep copy (the backing array is duplicated). */
    @Override
    public Vector<Integer> clone() {
        return new IntVector(Arrays.copyOf(data, data.length));
    }

    /**
     * Entry-set view over every index; setValue writes through to the backing
     * array. Non-static: holds a reference to the enclosing vector.
     */
    private class IntArraySet extends AbstractSet<Map.Entry<Integer, Integer>> {

        @Override
        public Iterator<Map.Entry<Integer, Integer>> iterator() {
            return new Iterator<Entry<Integer, Integer>>() {
                int n = 0; // next index to yield

                @Override
                public boolean hasNext() {
                    return n < data.length;
                }

                @Override
                public Entry<Integer, Integer> next() {
                    if (n < data.length) {
                        final int m = n++;
                        return new Map.Entry<Integer, Integer>() {
                            @Override
                            public Integer getKey() {
                                return m;
                            }

                            @Override
                            public Integer getValue() {
                                return data[m];
                            }

                            @Override
                            public Integer setValue(Integer value) {
                                final int old = data[m];
                                data[m] = value.intValue();
                                return old;
                            }
                        };
                    } else {
                        throw new NoSuchElementException();
                    }
                }

                @Override
                public void remove() {
                    throw new UnsupportedOperationException("Not supported.");
                }
            };
        }

        @Override
        public int size() {
            return data.length;
        }
    }

    /**
     * NOTE(review): this passes {@code size()} (the count of NON-ZERO
     * entries), so the returned key set is {0 .. size()-1}, not the full
     * index range — {@code length()} looks intended. Verify against callers.
     */
    @Override
    public IntSet keySet() {
        return new IntStreamSet(size());
    }

    /** Immutable set of the integers 0 (inclusive) to N (exclusive). */
    private static class IntStreamSet extends AbstractIntSet {

        private final int N;

        public IntStreamSet(int N) {
            this.N = N;
        }

        @Override
        public IntIterator iterator() {
            return new IntIterator() {
                private int n; // next value to yield

                @Override
                public boolean hasNext() {
                    return n < N;
                }

                @Override
                public Integer next() {
                    return n++;
                }

                @Override
                public void remove() {
                    throw new UnsupportedOperationException("Not mutable.");
                }

                @Override
                public int nextInt() {
                    return n++;
                }

                @Override
                public int skip(int m) {
                    int rval = Math.min(N - n, m);
                    // NOTE(review): advances by m even when rval was clamped,
                    // overshooting past N; "n += rval" looks intended.
                    n += m;
                    return rval;
                }
            };
        }

        @Override
        public int size() {
            return N;
        }
    }

    /** True iff {@code idx} is a valid index into this vector. */
    @Override
    public boolean containsKey(int idx) {
        return idx >= 0 && idx < length();
    }

    /** Sum of all elements (int arithmetic; may overflow for large values). */
    @Override
    public Integer sum() {
        int i = 0;
        for (int j : data) {
            i += j;
        }
        return i;
    }

    /** Copies the half-open range [offset, offset+length) into a new vector. */
    @Override
    public Vector<Integer> subvector(int offset, int length) {
        int[] sd = Arrays.copyOfRange(data, offset, offset + length);
        return new IntVector(sd);
    }
}
/*
 * Copyright (c) 2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.wso2.carbon.bpel.ui.bpel2svg;

import java.util.HashMap;
import java.util.Locale;
import java.util.Map;

import org.wso2.carbon.bpel.ui.bpel2svg.impl.AssignImpl;
import org.wso2.carbon.bpel.ui.bpel2svg.impl.CompensateImpl;
import org.wso2.carbon.bpel.ui.bpel2svg.impl.CompensateScopeImpl;
import org.wso2.carbon.bpel.ui.bpel2svg.impl.ElseIfImpl;
import org.wso2.carbon.bpel.ui.bpel2svg.impl.*;
import org.wso2.carbon.bpel.ui.bpel2svg.impl.FlowImpl;
import org.wso2.carbon.bpel.ui.bpel2svg.impl.ForEachImpl;
import org.wso2.carbon.bpel.ui.bpel2svg.impl.IfImpl;
import org.wso2.carbon.bpel.ui.bpel2svg.impl.InvokeImpl;
import org.wso2.carbon.bpel.ui.bpel2svg.impl.OnAlarmImpl;
import org.wso2.carbon.bpel.ui.bpel2svg.impl.OnEventImpl;
import org.wso2.carbon.bpel.ui.bpel2svg.impl.OnMessageImpl;
import org.wso2.carbon.bpel.ui.bpel2svg.impl.ProcessImpl;
import org.wso2.carbon.bpel.ui.bpel2svg.impl.ReThrowImpl;
import org.wso2.carbon.bpel.ui.bpel2svg.impl.ReceiveImpl;
import org.wso2.carbon.bpel.ui.bpel2svg.impl.RepeatUntilImpl;
import org.wso2.carbon.bpel.ui.bpel2svg.impl.ReplyImpl;
import org.wso2.carbon.bpel.ui.bpel2svg.impl.PickImpl;
import org.wso2.carbon.bpel.ui.bpel2svg.impl.WhileImpl;

/**
 * Manages the BPEL tag names and the icon locations used when rendering a
 * BPEL process as SVG.
 *
 * <p>Exposed as a lazily created singleton via {@link #getInstance()}; the
 * accessor is synchronized so concurrent first calls cannot create two
 * instances. The activity-class-name to icon mappings that were previously
 * expressed as long {@code equalsIgnoreCase} if/else chains are held in
 * static lookup maps keyed by the lower-cased class name, which preserves the
 * case-insensitive match while making lookup O(1).
 */
public class BPEL2SVGFactory {

    // Constants
    // START_TAGS
    public final static String ASSIGN_START_TAG = "assign";
    public final static String CATCH_START_TAG = "catch";
    public final static String CATCHALL_START_TAG = "catchAll";
    public final static String COMPENSATESCOPE_START_TAG = "compensateScope";
    public final static String COMPENSATE_START_TAG = "compensate";
    public final static String COMPENSATIONHANDLER_START_TAG = "compensationHandler";
    public final static String ELSE_START_TAG = "else";
    public final static String ELSEIF_START_TAG = "elseif";
    public final static String EVENTHANDLER_START_TAG = "eventHandlers";
    public final static String EXIT_START_TAG = "exit";
    public final static String FAULTHANDLER_START_TAG = "faultHandlers";
    public final static String FLOW_START_TAG = "flow";
    public final static String FOREACH_START_TAG = "forEach";
    public final static String IF_START_TAG = "if";
    public final static String INVOKE_START_TAG = "invoke";
    public final static String ONALARM_START_TAG = "onAlarm";
    public final static String ONEVENT_START_TAG = "onEvent";
    public final static String ONMESSAGE_START_TAG = "onMessage";
    public final static String PICK_START_TAG = "pick";
    public final static String PROCESS_START_TAG = "process";
    public final static String RECEIVE_START_TAG = "receive";
    public final static String REPEATUNTIL_START_TAG = "repeatUntil";
    public final static String REPLY_START_TAG = "reply";
    public final static String RETHROW_START_TAG = "rethrow";
    public final static String SCOPE_START_TAG = "scope";
    public final static String SEQUENCE_START_TAG = "sequence";
    public final static String SOURCE_START_TAG = "source";
    public final static String SOURCES_START_TAG = "sources";
    public final static String TARGET_START_TAG = "target";
    public final static String TARGETS_START_TAG = "targets";
    public final static String TERMINATIONHANDLER_START_TAG = "terminationHandler";
    public final static String THROW_START_TAG = "throw";
    public final static String WAIT_START_TAG = "wait";
    public final static String WHILE_START_TAG = "while";

    // END_TAGS
    public final static String ASSIGN_END_TAG = "/assign";
    public final static String CATCH_END_TAG = "/catch";
    public final static String CATCHALL_END_TAG = "/catchAll";
    public final static String COMPENSATESCOPE_END_TAG = "/compensateScope";
    public final static String COMPENSATE_END_TAG = "/compensate";
    public final static String COMPENSATIONHANDLER_END_TAG = "/compensationHandler";
    public final static String ELSE_END_TAG = "/else";
    public final static String ELSEIF_END_TAG = "/elseif";
    public final static String EVENTHANDLER_END_TAG = "/eventHandlers";
    public final static String EXIT_END_TAG = "/exit";
    public final static String FAULTHANDLER_END_TAG = "/faultHandlers";
    public final static String FLOW_END_TAG = "/flow";
    public final static String FOREACH_END_TAG = "/forEach";
    public final static String IF_END_TAG = "/if";
    public final static String INVOKE_END_TAG = "/invoke";
    public final static String ONMESSAGE_END_TAG = "/onMessage";
    public final static String ONALARM_END_TAG = "/onAlarm";
    public final static String ONEVENT_END_TAG = "/onEvent";
    public final static String PICK_END_TAG = "/pick";
    public final static String PROCESS_END_TAG = "/process";
    public final static String RECEIVE_END_TAG = "/receive";
    public final static String REPEATUNTIL_END_TAG = "/repeatUntil";
    public final static String REPLY_END_TAG = "/reply";
    public final static String RETHROW_END_TAG = "/rethrow";
    public final static String SCOPE_END_TAG = "/scope";
    public final static String SEQUENCE_END_TAG = "/sequence";
    public final static String SOURCE_END_TAG = "/source";
    public final static String SOURCES_END_TAG = "/sources";
    public final static String TARGET_END_TAG = "/target";
    public final static String TARGETS_END_TAG = "/targets";
    public final static String TERMINATIONHANDLER_END_TAG = "/terminationHandler";
    public final static String THROW_END_TAG = "/throw";
    public final static String WAIT_END_TAG = "/wait";
    public final static String WHILE_END_TAG = "/while";
    public final static String SINGLE_LINE_END_TAG = "/>";

    public final static int TEXT_ADJUST = 10;

    /** Start-of-activity icon, keyed by the lower-cased activity implementation class name. */
    private static final Map<String, String> START_ICONS = new HashMap<String, String>();

    /** End-of-activity icon (composite activities only), keyed the same way. */
    private static final Map<String, String> END_ICONS = new HashMap<String, String>();

    static {
        START_ICONS.put(iconKey(AssignImpl.class), BPEL2SVGIcons.ASSIGN_ICON);
        START_ICONS.put(iconKey(ElseIfImpl.class), BPEL2SVGIcons.ELSEIF_ICON);
        START_ICONS.put(iconKey(CompensateImpl.class), BPEL2SVGIcons.COMPENSATE_ICON);
        START_ICONS.put(iconKey(CompensateScopeImpl.class), BPEL2SVGIcons.COMPENSATESCOPE_ICON);
        START_ICONS.put(iconKey(ExitImpl.class), BPEL2SVGIcons.EXIT_ICON);
        START_ICONS.put(iconKey(FlowImpl.class), BPEL2SVGIcons.FLOW_ICON);
        START_ICONS.put(iconKey(ForEachImpl.class), BPEL2SVGIcons.FOREACH_ICON);
        START_ICONS.put(iconKey(IfImpl.class), BPEL2SVGIcons.IF_ICON);
        START_ICONS.put(iconKey(InvokeImpl.class), BPEL2SVGIcons.INVOKE_ICON);
        START_ICONS.put(iconKey(OnAlarmImpl.class), BPEL2SVGIcons.ONALARM_ICON);
        START_ICONS.put(iconKey(OnEventImpl.class), BPEL2SVGIcons.ONEVENT_ICON);
        START_ICONS.put(iconKey(OnMessageImpl.class), BPEL2SVGIcons.ONMESSAGE_ICON);
        START_ICONS.put(iconKey(PickImpl.class), BPEL2SVGIcons.PICK_ICON);
        START_ICONS.put(iconKey(ProcessImpl.class), BPEL2SVGIcons.PROCESS_ICON);
        START_ICONS.put(iconKey(ReceiveImpl.class), BPEL2SVGIcons.RECEIVE_ICON);
        START_ICONS.put(iconKey(RepeatUntilImpl.class), BPEL2SVGIcons.REPEATUNTIL_ICON);
        START_ICONS.put(iconKey(ReplyImpl.class), BPEL2SVGIcons.REPLY_ICON);
        START_ICONS.put(iconKey(ReThrowImpl.class), BPEL2SVGIcons.RETHROW_ICON);
        START_ICONS.put(iconKey(ScopeImpl.class), BPEL2SVGIcons.SCOPE_ICON);
        START_ICONS.put(iconKey(ThrowImpl.class), BPEL2SVGIcons.THROW_ICON);
        START_ICONS.put(iconKey(WaitImpl.class), BPEL2SVGIcons.WAIT_ICON);
        START_ICONS.put(iconKey(WhileImpl.class), BPEL2SVGIcons.WHILE_ICON);

        END_ICONS.put(iconKey(FlowImpl.class), BPEL2SVGIcons.FLOW_ICON);
        END_ICONS.put(iconKey(ForEachImpl.class), BPEL2SVGIcons.FOREACH_ICON);
        END_ICONS.put(iconKey(IfImpl.class), BPEL2SVGIcons.IF_ICON);
        END_ICONS.put(iconKey(PickImpl.class), BPEL2SVGIcons.PICK_ICON);
        END_ICONS.put(iconKey(ProcessImpl.class), BPEL2SVGIcons.PROCESS_ICON);
        END_ICONS.put(iconKey(RepeatUntilImpl.class), BPEL2SVGIcons.REPEATUNTIL_ICON);
        END_ICONS.put(iconKey(ScopeImpl.class), BPEL2SVGIcons.SCOPE_ICON);
        END_ICONS.put(iconKey(WhileImpl.class), BPEL2SVGIcons.WHILE_ICON);
    }

    /** Normalizes a class to the map key used for case-insensitive lookup. */
    private static String iconKey(Class<?> activityClass) {
        // Locale.ENGLISH avoids locale-sensitive lower-casing surprises
        // (e.g. the Turkish dotless-i) for these ASCII class names.
        return activityClass.getName().toLowerCase(Locale.ENGLISH);
    }

    // Properties
    public String iconSource = "images/bpel2svg";

    private static BPEL2SVGFactory instance = null;

    /**
     * Returns the shared factory instance, creating it on first use.
     *
     * <p>Synchronized so that two threads racing on the first call cannot each
     * observe {@code instance == null} and create separate instances (the
     * original unsynchronized check-then-assign was not thread-safe).
     */
    public static synchronized BPEL2SVGFactory getInstance() {
        if (instance == null) {
            instance = new BPEL2SVGFactory();
        }
        return instance;
    }

    public LayoutManager layoutManager = null;

    /**
     * Returns the layout manager, lazily creating a default one on first use.
     * NOTE(review): this per-instance lazy init is not synchronized, matching
    * the original behavior; confirm callers are single-threaded if that matters.
     */
    public LayoutManager getLayoutManager() {
        if (layoutManager == null) {
            layoutManager = new LayoutManager();
        }
        return layoutManager;
    }

    public void setLayoutManager(LayoutManager layoutManager) {
        this.layoutManager = layoutManager;
    }

    private String iconExtension = ".png";

    public String getIconExtension() {
        return iconExtension;
    }

    public void setIconExtension(String iconExtension) {
        this.iconExtension = iconExtension;
    }

    // Methods

    /**
     * Returns the start icon for the given activity implementation class name,
     * matched case-insensitively, or {@code null} when the activity is unknown
     * or {@code null}.
     *
     * @param activity fully qualified activity implementation class name
     * @return icon path constant from {@link BPEL2SVGIcons}, or {@code null}
     */
    public String getIconPath(String activity) {
        if (activity == null) {
            return null;
        }
        return START_ICONS.get(activity.toLowerCase(Locale.ENGLISH));
    }

    /**
     * Returns the end icon for the given composite activity implementation
     * class name (flow, forEach, if, pick, process, repeatUntil, scope, while),
     * matched case-insensitively, or {@code null} when not applicable.
     *
     * @param activity fully qualified activity implementation class name
     * @return icon path constant from {@link BPEL2SVGIcons}, or {@code null}
     */
    public String getEndIconPath(String activity) {
        if (activity == null) {
            return null;
        }
        return END_ICONS.get(activity.toLowerCase(Locale.ENGLISH));
    }

    public String getIconSource() {
        return iconSource;
    }

    public void setIconSource(String iconSource) {
        this.iconSource = iconSource;
    }
}
/*
 * Copyright (c) 2019, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.wso2.carbon.identity.oauth2.device.dao;

import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.identity.application.authentication.framework.model.AuthenticatedUser;
import org.wso2.carbon.identity.core.util.IdentityDatabaseUtil;
import org.wso2.carbon.identity.oauth2.IdentityOAuth2Exception;
import org.wso2.carbon.identity.oauth2.device.codegenerator.GenerateKeys;
import org.wso2.carbon.identity.oauth2.device.constants.Constants;
import org.wso2.carbon.identity.oauth2.device.model.DeviceFlowDO;
import org.wso2.carbon.identity.oauth2.device.util.DeviceFlowUtil;
import org.wso2.carbon.identity.oauth2.util.OAuth2Util;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.SQLIntegrityConstraintViolationException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.TimeZone;
import java.util.UUID;

/**
 * This class contains override methods of DeviceFlowDAO.
 *
 * <p>All statement/result-set handling uses try-with-resources. The previous
 * implementation leaked {@code PreparedStatement}s in
 * {@code storeIntoDeviceFlow} (its finally block required a never-assigned
 * {@code ResultSet} to be non-null before closing) and in
 * {@code isUserCodeAndQuantifierExists} (no close at all); both are fixed
 * here without changing any SQL, parameter order, log or error message.
 */
public class DeviceFlowDAOImpl implements DeviceFlowDAO {

    private static final Log log = LogFactory.getLog(DeviceFlowDAOImpl.class);

    /**
     * Persists the device flow parameters and associated scopes in one
     * transaction, retrying with a fresh user_code on collision.
     *
     * @return the (possibly regenerated) unique user_code actually stored
     * @throws IdentityOAuth2Exception on persistence failure
     */
    @Override
    public String insertDeviceFlowParametersWithQuantifier(String deviceCode, String userCode, long quantifier,
                                                           String consumerKey, String scopes)
            throws IdentityOAuth2Exception {

        if (log.isDebugEnabled()) {
            log.debug("Persisting device_code: " + deviceCode + " for client: " + consumerKey);
        }
        try (Connection connection = IdentityDatabaseUtil.getDBConnection(true)) {
            String codeId = UUID.randomUUID().toString();
            String uniqueUserCode = storeIntoDeviceFlow(codeId, deviceCode, userCode, quantifier, consumerKey,
                    connection, 0);
            storeIntoScopes(codeId, deviceCode, scopes, connection);
            IdentityDatabaseUtil.commitTransaction(connection);
            return uniqueUserCode;
        } catch (SQLException e) {
            throw new IdentityOAuth2Exception("Error when storing the device flow parameters for consumer_key: " +
                    consumerKey, e);
        }
    }

    /**
     * @deprecated {@code expiresIn} and {@code interval} are ignored; use
     * {@link #insertDeviceFlowParametersWithQuantifier(String, String, long, String, String)}.
     */
    @Override
    @Deprecated
    public void insertDeviceFlowParameters(String deviceCode, String userCode, String consumerKey, Long expiresIn,
                                           int interval, String scopes) throws IdentityOAuth2Exception {

        insertDeviceFlowParametersWithQuantifier(deviceCode, userCode, GenerateKeys.getCurrentQuantifier(),
                consumerKey, scopes);
    }

    /**
     * Looks up the consumer key registered against the given user_code.
     *
     * @return the client id, or {@code null} when no row matches
     */
    @Override
    public String getClientIdByUserCode(String userCode) throws IdentityOAuth2Exception {

        if (log.isDebugEnabled()) {
            log.debug("Getting client_id for user_code: " + userCode);
        }
        String clientId = null;
        try (Connection connection = IdentityDatabaseUtil.getDBConnection(false);
             PreparedStatement prepStmt = connection
                     .prepareStatement(SQLQueries.DeviceFlowDAOSQLQueries.GET_CONSUMER_KEY_FOR_USER_CODE)) {
            prepStmt.setString(1, userCode);
            try (ResultSet resultSet = prepStmt.executeQuery()) {
                while (resultSet.next()) {
                    clientId = resultSet.getString(1);
                }
            }
        } catch (SQLException e) {
            throw new IdentityOAuth2Exception("Error when getting client id for user_code: " + userCode, e);
        }
        return clientId;
    }

    /**
     * Marks the given user_code as {@link Constants#USED}.
     */
    @Override
    public void setAuthenticationStatus(String userCode) throws IdentityOAuth2Exception {

        if (log.isDebugEnabled()) {
            log.debug("Set authentication status: " + Constants.USED + " for user_code: " + userCode);
        }
        try (Connection connection = IdentityDatabaseUtil.getDBConnection(true)) {
            try (PreparedStatement prepStmt =
                         connection.prepareStatement(SQLQueries.DeviceFlowDAOSQLQueries.SET_AUTHENTICATION_STATUS)) {
                prepStmt.setString(1, Constants.USED);
                prepStmt.setString(2, userCode);
                prepStmt.execute();
                IdentityDatabaseUtil.commitTransaction(connection);
            } catch (SQLException e) {
                IdentityDatabaseUtil.rollbackTransaction(connection);
                throw new IdentityOAuth2Exception("Error when setting the authentication status for the user_code: " +
                        DigestUtils.sha256Hex(userCode), e);
            }
        } catch (SQLException e) {
            throw new IdentityOAuth2Exception("Error when setting the authentication status for the user_code: " +
                    DigestUtils.sha256Hex(userCode), e);
        }
    }

    /**
     * @deprecated use {@link #setAuthenticationStatus(String)}.
     */
    @Override
    @Deprecated
    public void setAuthenticationStatus(String userCode, String status) throws IdentityOAuth2Exception {

        if (log.isDebugEnabled()) {
            log.debug("Set authentication status: " + status + " for user_code: " + userCode);
        }
        try (Connection connection = IdentityDatabaseUtil.getDBConnection(true)) {
            try (PreparedStatement prepStmt =
                         connection.prepareStatement(SQLQueries.DeviceFlowDAOSQLQueries.SET_AUTHENTICATION_STATUS)) {
                prepStmt.setString(1, status);
                prepStmt.setString(2, userCode);
                prepStmt.execute();
                IdentityDatabaseUtil.commitTransaction(connection);
            } catch (SQLException e) {
                IdentityDatabaseUtil.rollbackTransaction(connection);
                throw new IdentityOAuth2Exception("Error when setting the authentication status for the user_code: " +
                        userCode, e);
            }
        } catch (SQLException e) {
            throw new IdentityOAuth2Exception("Error when setting the authentication status for the user_code: " +
                    userCode, e);
        }
    }

    /**
     * Returns the authentication state for a device_code/client pair. When no
     * matching row exists, the returned DO has status {@link Constants#NOT_EXIST}.
     */
    @Override
    public DeviceFlowDO getAuthenticationDetails(String deviceCode, String clientId) throws IdentityOAuth2Exception {

        if (log.isDebugEnabled()) {
            log.debug("Getting authentication details for device_code: " + deviceCode);
        }
        AuthenticatedUser user;
        int tenantId = 0;
        String userName = null;
        boolean isMatchingDeviceCodeAndClientId = false; // Check for matching deviceCode and clientId.
        String userDomain = null;
        String authenticatedIDP = null;
        DeviceFlowDO deviceFlowDO = new DeviceFlowDO();
        try (Connection connection = IdentityDatabaseUtil.getDBConnection(false);
             PreparedStatement prepStmt =
                     connection.prepareStatement(SQLQueries.DeviceFlowDAOSQLQueries.GET_AUTHENTICATION_STATUS)) {
            prepStmt.setString(1, deviceCode);
            prepStmt.setString(2, clientId);
            try (ResultSet resultSet = prepStmt.executeQuery()) {
                while (resultSet.next()) {
                    deviceFlowDO.setStatus(resultSet.getString(1));
                    deviceFlowDO.setLastPollTime(resultSet.getTimestamp(2,
                            Calendar.getInstance(TimeZone.getTimeZone(Constants.UTC))));
                    deviceFlowDO.setPollTime(resultSet.getLong(3));
                    deviceFlowDO.setExpiryTime(resultSet.getTimestamp(4,
                            Calendar.getInstance(TimeZone.getTimeZone(Constants.UTC))));
                    userName = resultSet.getString(5);
                    tenantId = resultSet.getInt(6);
                    userDomain = resultSet.getString(7);
                    authenticatedIDP = resultSet.getString(8);
                    isMatchingDeviceCodeAndClientId = true;
                }
                if (isMatchingDeviceCodeAndClientId) {
                    // Only attach a user when the row carries complete user information.
                    if (StringUtils.isNotBlank(userName) && tenantId != 0 && StringUtils.isNotBlank(userDomain)) {
                        String tenantDomain = OAuth2Util.getTenantDomain(tenantId);
                        user = OAuth2Util.createAuthenticatedUser(userName, userDomain, tenantDomain,
                                authenticatedIDP);
                        deviceFlowDO.setAuthorizedUser(user);
                    }
                } else {
                    deviceFlowDO.setStatus(Constants.NOT_EXIST);
                }
                return deviceFlowDO;
            }
        } catch (SQLException e) {
            throw new IdentityOAuth2Exception("Error when getting authentication status for device_code: " +
                    deviceCode, e);
        }
    }

    /**
     * @deprecated use {@link #getAuthenticationDetails(String, String)}.
     */
    @Override
    @Deprecated
    public DeviceFlowDO getAuthenticationDetails(String deviceCode) throws IdentityOAuth2Exception {

        if (log.isDebugEnabled()) {
            log.debug("Getting authentication details for device_code: " + deviceCode);
        }
        try (Connection connection = IdentityDatabaseUtil.getDBConnection(false)) {
            AuthenticatedUser user;
            int tenantId = 0;
            String userName = null;
            boolean isMatchingDeviceCodeAndClientId = false; // Check for matching deviceCode and clientId.
            String userDomain = null;
            String authenticatedIDP = null;
            DeviceFlowDO deviceFlowDO = new DeviceFlowDO();
            try (PreparedStatement prepStmt =
                         connection.prepareStatement(SQLQueries.DeviceFlowDAOSQLQueries.GET_AUTHENTICATION_STATUS)) {
                prepStmt.setString(1, deviceCode);
                try (ResultSet resultSet = prepStmt.executeQuery()) {
                    while (resultSet.next()) {
                        deviceFlowDO.setStatus(resultSet.getString(1));
                        deviceFlowDO.setLastPollTime(resultSet.getTimestamp(2,
                                Calendar.getInstance(TimeZone.getTimeZone(Constants.UTC))));
                        deviceFlowDO.setPollTime(resultSet.getLong(3));
                        deviceFlowDO.setExpiryTime(resultSet.getTimestamp(4,
                                Calendar.getInstance(TimeZone.getTimeZone(Constants.UTC))));
                        userName = resultSet.getString(5);
                        tenantId = resultSet.getInt(6);
                        userDomain = resultSet.getString(7);
                        authenticatedIDP = resultSet.getString(8);
                        isMatchingDeviceCodeAndClientId = true;
                    }
                    if (isMatchingDeviceCodeAndClientId) {
                        if (userName != null && tenantId != 0 && userDomain != null) {
                            String tenantDomain = OAuth2Util.getTenantDomain(tenantId);
                            user = OAuth2Util.createAuthenticatedUser(userName, userDomain, tenantDomain,
                                    authenticatedIDP);
                            deviceFlowDO.setAuthorizedUser(user);
                        }
                        return deviceFlowDO;
                    } else {
                        deviceFlowDO.setStatus(Constants.NOT_EXIST);
                        return deviceFlowDO;
                    }
                }
            }
        } catch (SQLException e) {
            throw new IdentityOAuth2Exception("Error when getting authentication status for device_code: " +
                    deviceCode, e);
        }
    }

    /**
     * Returns {@code true} when a device flow row exists for the client id.
     */
    @Override
    public boolean checkClientIdExist(String clientId) throws IdentityOAuth2Exception {

        if (log.isDebugEnabled()) {
            log.debug("Checking existence of client_id: " + clientId);
        }
        try (Connection connection = IdentityDatabaseUtil.getDBConnection(false);
             PreparedStatement prepStmt =
                     connection.prepareStatement(SQLQueries.DeviceFlowDAOSQLQueries.CHECK_CLIENT_ID_EXISTS)) {
            prepStmt.setString(1, clientId);
            try (ResultSet resultSet = prepStmt.executeQuery()) {
                while (resultSet.next()) {
                    String status = resultSet.getString(1);
                    if (status != null) {
                        return true;
                    }
                }
            }
        } catch (SQLException e) {
            throw new IdentityOAuth2Exception("Error when check the existence of client_id: " + clientId, e);
        }
        return false;
    }

    /**
     * Returns the status stored against the user_code, or {@code null} when absent.
     */
    @Override
    public String getStatusForUserCode(String userCode) throws IdentityOAuth2Exception {

        if (log.isDebugEnabled()) {
            log.debug("Getting status for user_code: " + userCode);
        }
        String status = null;
        try (Connection connection = IdentityDatabaseUtil.getDBConnection(false);
             PreparedStatement prepStmt =
                     connection.prepareStatement(SQLQueries.DeviceFlowDAOSQLQueries.GET_USER_CODE_STATUS)) {
            prepStmt.setString(1, userCode);
            try (ResultSet resultSet = prepStmt.executeQuery()) {
                while (resultSet.next()) {
                    status = resultSet.getString(1);
                }
            }
        } catch (SQLException e) {
            throw new IdentityOAuth2Exception("Error when getting status for user_code: " + userCode, e);
        }
        return status;
    }

    /**
     * Records the time of the device's latest token poll (stored in UTC).
     */
    @Override
    public void setLastPollTime(String deviceCode, Timestamp newPollTime) throws IdentityOAuth2Exception {

        if (log.isDebugEnabled()) {
            log.debug("Setting last_poll_time: " + newPollTime + " for device_code: " + deviceCode);
        }
        try (Connection connection = IdentityDatabaseUtil.getDBConnection(true)) {
            try (PreparedStatement prepStmt =
                         connection.prepareStatement(SQLQueries.DeviceFlowDAOSQLQueries.SET_LAST_POLL_TIME)) {
                prepStmt.setTimestamp(1, newPollTime, Calendar.getInstance(TimeZone.getTimeZone(Constants.UTC)));
                prepStmt.setString(2, deviceCode);
                prepStmt.execute();
                IdentityDatabaseUtil.commitTransaction(connection);
            } catch (SQLException e) {
                IdentityDatabaseUtil.rollbackTransaction(connection);
                throw new IdentityOAuth2Exception("Error when setting last poll time for device_code: " +
                        deviceCode, e);
            }
        } catch (SQLException e) {
            throw new IdentityOAuth2Exception("Error when setting last poll time for device_code: " + deviceCode, e);
        }
    }

    /**
     * Stores the authenticated user, status, tenant and IDP details against the user_code.
     */
    @Override
    public void setAuthzUserAndStatus(String userCode, String status, AuthenticatedUser authenticatedUser)
            throws IdentityOAuth2Exception {

        if (log.isDebugEnabled()) {
            log.debug("Setting authorize user: " + authenticatedUser.getLoggableUserId() + " and status: " + status +
                    " for user_code: " + userCode);
        }
        try (Connection connection = IdentityDatabaseUtil.getDBConnection(true)) {
            try (PreparedStatement prepStmt =
                         connection.prepareStatement(SQLQueries.DeviceFlowDAOSQLQueries.SET_AUTHZ_USER_AND_STATUS)) {
                String authenticatedIDP = OAuth2Util.getAuthenticatedIDP(authenticatedUser);
                int tenantId = OAuth2Util.getTenantId(authenticatedUser.getTenantDomain());
                prepStmt.setString(1, authenticatedUser.getUserName());
                prepStmt.setString(2, status);
                prepStmt.setInt(3, tenantId);
                prepStmt.setString(4, OAuth2Util.getUserStoreDomain(authenticatedUser));
                prepStmt.setString(5, authenticatedIDP);
                prepStmt.setInt(6, tenantId);
                prepStmt.setString(7, userCode);
                prepStmt.execute();
                IdentityDatabaseUtil.commitTransaction(connection);
            } catch (SQLException e) {
                IdentityDatabaseUtil.rollbackTransaction(connection);
                throw new IdentityOAuth2Exception("Error when setting authenticated user for user_code: " +
                        userCode, e);
            }
        } catch (SQLException e) {
            throw new IdentityOAuth2Exception("Error when setting authenticated user for user_code: " + userCode, e);
        }
    }

    /**
     * Sets the given (expired) status against the device_code.
     */
    @Override
    public void setDeviceCodeExpired(String deviceCode, String status) throws IdentityOAuth2Exception {

        if (log.isDebugEnabled()) {
            log.debug("Setting status as EXPIRED for device_code: " + deviceCode);
        }
        try (Connection connection = IdentityDatabaseUtil.getDBConnection(true)) {
            try (PreparedStatement prepStmt =
                         connection.prepareStatement(SQLQueries.DeviceFlowDAOSQLQueries.SET_DEVICE_CODE_EXPIRED)) {
                prepStmt.setString(1, status);
                prepStmt.setString(2, deviceCode);
                prepStmt.execute();
                IdentityDatabaseUtil.commitTransaction(connection);
            } catch (SQLException e) {
                IdentityDatabaseUtil.rollbackTransaction(connection);
                throw new IdentityOAuth2Exception("Error when setting expired status for device_code: " +
                        deviceCode, e);
            }
        } catch (SQLException e) {
            throw new IdentityOAuth2Exception("Error when setting expired status for device_code: " + deviceCode, e);
        }
    }

    /**
     * Persists the callback URI for the client.
     */
    @Override
    public void setCallbackURI(String clientId, String callbackUri) throws IdentityOAuth2Exception {

        if (log.isDebugEnabled()) {
            log.debug("Setting callback_uri: " + callbackUri + " for client_id: " + clientId);
        }
        try (Connection connection = IdentityDatabaseUtil.getDBConnection(true)) {
            try (PreparedStatement prepStmt =
                         connection.prepareStatement(SQLQueries.DeviceFlowDAOSQLQueries.SET_CALLBACK_URL)) {
                prepStmt.setString(1, callbackUri);
                prepStmt.setString(2, clientId);
                prepStmt.execute();
                IdentityDatabaseUtil.commitTransaction(connection);
            } catch (SQLException e) {
                IdentityDatabaseUtil.rollbackTransaction(connection);
                throw new IdentityOAuth2Exception("Error when setting expired callBackUri for consumer_key: " +
                        clientId, e);
            }
        } catch (SQLException e) {
            throw new IdentityOAuth2Exception("Error when setting expired callBackUri for consumer_key: " +
                    clientId, e);
        }
    }

    /**
     * Returns the scopes stored against the user_code (empty array when none).
     */
    @Override
    public String[] getScopesForUserCode(String userCode) throws IdentityOAuth2Exception {

        if (log.isDebugEnabled()) {
            log.debug("Getting scopes for user_code: " + userCode);
        }
        List<String> scopeSet = new ArrayList<>();
        try (Connection connection = IdentityDatabaseUtil.getDBConnection(false);
             PreparedStatement prepStmt =
                     connection.prepareStatement(SQLQueries.DeviceFlowDAOSQLQueries.GET_SCOPES_FOR_USER_CODE)) {
            prepStmt.setString(1, userCode);
            try (ResultSet resultSet = prepStmt.executeQuery()) {
                while (resultSet.next()) {
                    scopeSet.add(resultSet.getString(1));
                }
            }
        } catch (SQLException e) {
            throw new IdentityOAuth2Exception("Error when getting scopes for user_code: " + userCode, e);
        }
        return scopeSet.toArray(new String[0]);
    }

    /**
     * Returns the scopes stored against the device_code (empty array when none).
     */
    @Override
    public String[] getScopesForDeviceCode(String deviceCode) throws IdentityOAuth2Exception {

        if (log.isDebugEnabled()) {
            log.debug("Getting scopes for device_code: " + deviceCode);
        }
        List<String> scopeSet = new ArrayList<>();
        try (Connection connection = IdentityDatabaseUtil.getDBConnection(false);
             PreparedStatement prepStmt =
                     connection.prepareStatement(SQLQueries.DeviceFlowDAOSQLQueries.GET_SCOPES_FOR_DEVICE_CODE)) {
            prepStmt.setString(1, deviceCode);
            try (ResultSet resultSet = prepStmt.executeQuery()) {
                while (resultSet.next()) {
                    scopeSet.add(resultSet.getString(1));
                }
            }
        } catch (SQLException e) {
            throw new IdentityOAuth2Exception("Error when getting scopes for device_code: " + deviceCode, e);
        }
        return scopeSet.toArray(new String[0]);
    }

    /**
     * Store into device flow database.
     *
     * <p>Fixed here: the previous finally block only closed the
     * {@code PreparedStatement} when a never-assigned {@code ResultSet} was
     * non-null, so the statement leaked on every call. try-with-resources now
     * guarantees closure on every path (including the retry recursion).
     *
     * @param codeId       Internal mapping UUID.
     * @param deviceCode   Code that is used to identify the device.
     * @param userCode     Code that is used to correlate user and device.
     * @param quantifier   Quantized time period user_code belongs.
     * @param consumerKey  Consumer key of the client application.
     * @param connection   Database connection (transaction managed by caller's connection).
     * @param retryAttempt No. of times user_code uniqueness checks.
     * @return Unique user_code.
     * @throws IdentityOAuth2Exception Error while storing parameters.
     */
    private String storeIntoDeviceFlow(String codeId, String deviceCode, String userCode, long quantifier,
                                       String consumerKey, Connection connection, int retryAttempt)
            throws IdentityOAuth2Exception, SQLException {

        if (retryAttempt >= Constants.DEFAULT_DEVICE_TOKEN_PERSIST_RETRY_COUNT) {
            throw new IdentityOAuth2Exception("user_code for consumer_key: " + consumerKey + " already exists.");
        }
        if (isUserCodeAndQuantifierExists(userCode, quantifier, connection)) {
            // Collision: retry with a freshly generated user_code and quantifier.
            return storeIntoDeviceFlow(codeId, deviceCode, GenerateKeys.getKey(Constants.KEY_LENGTH),
                    GenerateKeys.getCurrentQuantifier(), consumerKey, connection, ++retryAttempt);
        }
        try (PreparedStatement prepStmt = connection.prepareStatement(
                SQLQueries.DeviceFlowDAOSQLQueries.STORE_DEVICE_CODE_WITH_QUANTIFIER)) {
            Date date = new Date();
            Timestamp timeCreated = new Timestamp(date.getTime());
            long timeExpired = timeCreated.getTime() + (DeviceFlowUtil.getConfiguredExpiryTime() * 1000);
            Timestamp expiredTime = new Timestamp(timeExpired);
            prepStmt.setString(1, codeId);
            prepStmt.setString(2, deviceCode);
            prepStmt.setString(3, userCode);
            prepStmt.setTimestamp(4, timeCreated, Calendar.getInstance(TimeZone.getTimeZone(Constants.UTC)));
            prepStmt.setTimestamp(5, timeCreated, Calendar.getInstance(TimeZone.getTimeZone(Constants.UTC)));
            prepStmt.setTimestamp(6, expiredTime, Calendar.getInstance(TimeZone.getTimeZone(Constants.UTC)));
            prepStmt.setLong(7, Constants.INTERVAL_MILLISECONDS);
            prepStmt.setString(8, Constants.PENDING);
            prepStmt.setLong(9, quantifier);
            prepStmt.setString(10, consumerKey);
            prepStmt.execute();
            IdentityDatabaseUtil.commitTransaction(connection);
        } catch (SQLException e) {
            IdentityDatabaseUtil.rollbackTransaction(connection);
            // Handle constrain violation issue in JDBC drivers which does not throw
            // SQLIntegrityConstraintViolationException.
            if (e instanceof SQLIntegrityConstraintViolationException ||
                    StringUtils.containsIgnoreCase(e.getMessage(), Constants.USERCODE_QUANTIFIER_CONSTRAINT)) {
                return storeIntoDeviceFlow(codeId, deviceCode, GenerateKeys.getKey(Constants.KEY_LENGTH),
                        GenerateKeys.getCurrentQuantifier(), consumerKey, connection, ++retryAttempt);
            }
            throw new IdentityOAuth2Exception("Error when storing the device flow parameters for consumer_key: " +
                    consumerKey, e);
        }
        return userCode;
    }

    /**
     * Check the existence of userCode and quantifier.
     *
     * <p>Fixed here: the statement and result set were previously never
     * closed; both are now in try-with-resources.
     *
     * @param userCode   Code that is used to correlate user and device.
     * @param quantifier Quantized time period user_code belongs.
     * @param connection Database connection.
     * @throws IdentityOAuth2Exception Error while comparing parameters.
     */
    private boolean isUserCodeAndQuantifierExists(String userCode, long quantifier, Connection connection)
            throws IdentityOAuth2Exception {

        try (PreparedStatement prepStmt = connection.prepareStatement(
                SQLQueries.DeviceFlowDAOSQLQueries.CHECK_UNIQUE_USER_CODE_AND_QUANTIFIER)) {
            prepStmt.setString(1, userCode);
            prepStmt.setLong(2, quantifier);
            try (ResultSet resultSet = prepStmt.executeQuery()) {
                if (resultSet.next()) {
                    return resultSet.getBoolean(1);
                }
                return false;
            }
        } catch (SQLException e) {
            throw new IdentityOAuth2Exception("Error when checking the existence for user_code: " +
                    DigestUtils.sha256Hex(userCode) + " and quantifier: " + quantifier, e);
        }
    }

    /**
     * Store into device flow scopes database.
     *
     * @param codeId     Internal mapping UUID.
     * @param deviceCode Code that is used to identify the device.
     * @param scope      Scopes to be stored.
     * @param connection Database connection.
     * @throws IdentityOAuth2Exception Error while storing scopes.
     */
    private void storeIntoScopes(String codeId, String deviceCode, String scope, Connection connection)
            throws IdentityOAuth2Exception {

        String[] scopeSet = OAuth2Util.buildScopeArray(scope);
        try (PreparedStatement prepStmt =
                     connection.prepareStatement(SQLQueries.DeviceFlowDAOSQLQueries.STORE_DEVICE_FLOW_SCOPES)) {
            for (String scopes : scopeSet) {
                prepStmt.setString(1, codeId);
                prepStmt.setString(2, scopes);
                prepStmt.execute();
            }
        } catch (SQLException e) {
            IdentityDatabaseUtil.rollbackTransaction(connection);
            throw new IdentityOAuth2Exception("Error when storing scopes for device_code: " + deviceCode, e);
        }
    }

    /**
     * Returns status, expiry time and device_code for the given user_code, or
     * {@code null} when no row matches.
     */
    @Override
    public DeviceFlowDO getDetailsForUserCode(String userCode) throws IdentityOAuth2Exception {

        if (log.isDebugEnabled()) {
            log.debug("Getting authentication details for user_code: " + userCode);
        }
        DeviceFlowDO deviceFlowDO = null;
        try (Connection connection = IdentityDatabaseUtil.getDBConnection(false);
             PreparedStatement prepStmt =
                     connection.prepareStatement(SQLQueries.DeviceFlowDAOSQLQueries.GET_AUTHENTICATION_DETAILS)) {
            prepStmt.setString(1, userCode);
            try (ResultSet resultSet = prepStmt.executeQuery()) {
                while (resultSet.next()) {
                    deviceFlowDO = new DeviceFlowDO();
                    deviceFlowDO.setStatus(resultSet.getString(1));
                    deviceFlowDO.setExpiryTime(resultSet.getTimestamp(2,
                            Calendar.getInstance(TimeZone.getTimeZone(Constants.UTC))));
                    deviceFlowDO.setDeviceCode(resultSet.getString(3));
                }
            }
            return deviceFlowDO;
        } catch (SQLException e) {
            throw new IdentityOAuth2Exception("Error when getting authentication details for user_code(hashed): " +
                    DigestUtils.sha256Hex(userCode), e);
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.pulsar.broker.auth; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.spy; import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.ThreadFactoryBuilder; import java.lang.reflect.Field; import java.net.URI; import java.net.URL; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.function.Predicate; import java.util.function.Supplier; import org.apache.bookkeeper.client.BookKeeper; import org.apache.bookkeeper.client.EnsemblePlacementPolicy; import org.apache.bookkeeper.client.PulsarMockBookKeeper; import org.apache.bookkeeper.test.PortManager; import org.apache.bookkeeper.util.ZkUtils; import org.apache.pulsar.broker.BookKeeperClientFactory; import org.apache.pulsar.broker.NoOpShutdownService; import org.apache.pulsar.broker.PulsarService; import org.apache.pulsar.broker.ServiceConfiguration; import org.apache.pulsar.broker.namespace.NamespaceService; import 
org.apache.pulsar.client.admin.PulsarAdmin; import org.apache.pulsar.client.api.PulsarClient; import org.apache.pulsar.client.api.PulsarClientException; import org.apache.pulsar.compaction.Compactor; import org.apache.pulsar.zookeeper.ZooKeeperClientFactory; import org.apache.pulsar.zookeeper.ZookeeperClientFactoryImpl; import org.apache.zookeeper.CreateMode; import org.apache.zookeeper.MockZooKeeper; import org.apache.zookeeper.ZooKeeper; import org.apache.zookeeper.data.ACL; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Base class for all tests that need a Pulsar instance without a ZK and BK cluster */ public abstract class MockedPulsarServiceBaseTest { protected ServiceConfiguration conf; protected PulsarService pulsar; protected PulsarAdmin admin; protected PulsarClient pulsarClient; protected URL brokerUrl; protected URL brokerUrlTls; protected URI lookupUrl; protected int BROKER_WEBSERVICE_PORT; protected int BROKER_WEBSERVICE_PORT_TLS; protected int BROKER_PORT; protected int BROKER_PORT_TLS; protected MockZooKeeper mockZookKeeper; protected NonClosableMockBookKeeper mockBookKeeper; protected boolean isTcpLookup = false; protected final String configClusterName = "test"; private SameThreadOrderedSafeExecutor sameThreadOrderedSafeExecutor; private ExecutorService bkExecutor; public MockedPulsarServiceBaseTest() { resetConfig(); } protected void resetConfig() { this.conf = new ServiceConfiguration(); this.conf.setAdvertisedAddress("localhost"); this.conf.setAdvertisedAddress("localhost"); this.conf.setClusterName(configClusterName); this.conf.setAdvertisedAddress("localhost"); // there are TLS tests in here, they need to use localhost because of the certificate this.conf.setManagedLedgerCacheSizeMB(8); this.conf.setActiveConsumerFailoverDelayTimeMillis(0); this.conf.setDefaultNumberOfNamespaceBundles(1); this.conf.setZookeeperServers("localhost:2181"); this.conf.setConfigurationStoreServers("localhost:3181"); 
this.conf.setAllowAutoTopicCreationType("non-persistent"); } protected final void internalSetup() throws Exception { init(); lookupUrl = new URI(brokerUrl.toString()); if (isTcpLookup) { lookupUrl = new URI("pulsar://localhost:" + BROKER_PORT); } pulsarClient = newPulsarClient(lookupUrl.toString(), 0); } protected PulsarClient newPulsarClient(String url, int intervalInSecs) throws PulsarClientException { return PulsarClient.builder().serviceUrl(url).statsInterval(intervalInSecs, TimeUnit.SECONDS).build(); } protected final void internalSetupForStatsTest() throws Exception { init(); String lookupUrl = brokerUrl.toString(); if (isTcpLookup) { lookupUrl = new URI("pulsar://localhost:" + BROKER_PORT).toString(); } pulsarClient = newPulsarClient(lookupUrl, 1); } protected final void init() throws Exception { BROKER_WEBSERVICE_PORT = PortManager.nextFreePort(); BROKER_WEBSERVICE_PORT_TLS = PortManager.nextFreePort(); BROKER_PORT = PortManager.nextFreePort(); BROKER_PORT_TLS = PortManager.nextFreePort(); this.conf.setBrokerServicePort(Optional.of(BROKER_PORT)); this.conf.setBrokerServicePortTls(Optional.of(BROKER_PORT_TLS)); this.conf.setAdvertisedAddress("localhost"); this.conf.setWebServicePort(Optional.of(BROKER_WEBSERVICE_PORT)); this.conf.setWebServicePortTls(Optional.of(BROKER_WEBSERVICE_PORT_TLS)); sameThreadOrderedSafeExecutor = new SameThreadOrderedSafeExecutor(); bkExecutor = Executors.newSingleThreadExecutor( new ThreadFactoryBuilder().setNameFormat("mock-pulsar-bk") .setUncaughtExceptionHandler((thread, ex) -> log.info("Uncaught exception", ex)) .build()); mockZookKeeper = createMockZooKeeper(); mockBookKeeper = createMockBookKeeper(mockZookKeeper, bkExecutor); startBroker(); brokerUrl = new URL("http://" + pulsar.getAdvertisedAddress() + ":" + BROKER_WEBSERVICE_PORT); brokerUrlTls = new URL("https://" + pulsar.getAdvertisedAddress() + ":" + BROKER_WEBSERVICE_PORT_TLS); admin = spy(PulsarAdmin.builder().serviceHttpUrl(brokerUrl.toString()).build()); } 
    /**
     * Tears down everything created by {@link #init()}. Null checks guard
     * against a partially failed init, and any cleanup failure is only logged
     * so it does not mask the test's real error.
     */
    protected final void internalCleanup() throws Exception {
        try {
            // if init fails, some of these could be null, and if so would throw
            // an NPE in shutdown, obscuring the real error
            if (admin != null) {
                admin.close();
                admin = null;
            }
            if (pulsarClient != null) {
                pulsarClient.close();
                pulsarClient = null;
            }
            if (pulsar != null) {
                pulsar.close();
            }
            if (mockBookKeeper != null) {
                // reallyShutdown() because close()/shutdown() are no-ops on the mock.
                mockBookKeeper.reallyShutdown();
            }
            if (mockZookKeeper != null) {
                mockZookKeeper.shutdown();
            }
            if (sameThreadOrderedSafeExecutor != null) {
                sameThreadOrderedSafeExecutor.shutdown();
            }
            if (bkExecutor != null) {
                bkExecutor.shutdown();
            }
        } catch (Exception e) {
            // Best-effort cleanup: log and continue rather than fail the test here.
            log.warn("Failed to clean up mocked pulsar service:", e);
        }
    }

    // Per-test hooks implemented by concrete test classes.
    protected abstract void setup() throws Exception;

    protected abstract void cleanup() throws Exception;

    /** Stops and then restarts the broker; mock ZK/BK state survives the restart. */
    protected void restartBroker() throws Exception {
        stopBroker();
        startBroker();
    }

    protected void stopBroker() throws Exception {
        pulsar.close();
        // Simulate cleanup of ephemeral nodes
        //mockZooKeeper.delete("/loadbalance/brokers/localhost:" + pulsar.getConfiguration().getWebServicePort(), -1);
    }

    protected void startBroker() throws Exception {
        this.pulsar = startBroker(conf);
    }

    /**
     * Starts a (spied) broker with mocked ZK/BK providers. Authorization is
     * temporarily forced on during start so the authorization service is
     * initialized, then restored to its configured value.
     */
    protected PulsarService startBroker(ServiceConfiguration conf) throws Exception {
        PulsarService pulsar = spy(new PulsarService(conf));
        pulsar.setShutdownService(new NoOpShutdownService());
        setupBrokerMocks(pulsar);
        boolean isAuthorizationEnabled = conf.isAuthorizationEnabled();
        // enable authorization to initialize authorization service which is used by grant-permission
        conf.setAuthorizationEnabled(true);
        pulsar.start();
        conf.setAuthorizationEnabled(isAuthorizationEnabled);
        // Replace the compactor with a spy so tests can stub/verify compaction.
        Compactor spiedCompactor = spy(pulsar.getCompactor());
        doReturn(spiedCompactor).when(pulsar).getCompactor();
        return pulsar;
    }

    /** Wires the mock ZK/BK factories and a spied NamespaceService into the broker spy. */
    protected void setupBrokerMocks(PulsarService pulsar) throws Exception {
        // Override default providers with mocked ones
        doReturn(mockZooKeeperClientFactory).when(pulsar).getZooKeeperClientFactory();
        doReturn(mockBookKeeperClientFactory).when(pulsar).newBookKeeperClientFactory();
        Supplier<NamespaceService> namespaceServiceSupplier = () -> spy(new NamespaceService(pulsar));
        doReturn(namespaceServiceSupplier).when(pulsar).getNamespaceServiceProvider();
        doReturn(sameThreadOrderedSafeExecutor).when(pulsar).getOrderedExecutor();
    }

    /**
     * Creates a MockZooKeeper pre-populated with the minimal ledger layout
     * nodes BookKeeper expects (one available bookie and a flat LAYOUT node).
     */
    public static MockZooKeeper createMockZooKeeper() throws Exception {
        MockZooKeeper zk = MockZooKeeper.newInstance(MoreExecutors.newDirectExecutorService());
        List<ACL> dummyAclList = new ArrayList<>(0);
        ZkUtils.createFullPathOptimistic(zk, "/ledgers/available/192.168.1.1:" + 5000,
                "".getBytes(ZookeeperClientFactoryImpl.ENCODING_SCHEME), dummyAclList, CreateMode.PERSISTENT);
        zk.create("/ledgers/LAYOUT", "1\nflat:1".getBytes(ZookeeperClientFactoryImpl.ENCODING_SCHEME),
                dummyAclList, CreateMode.PERSISTENT);
        return zk;
    }

    public static NonClosableMockBookKeeper createMockBookKeeper(ZooKeeper zookeeper, ExecutorService executor)
            throws Exception {
        return spy(new NonClosableMockBookKeeper(zookeeper, executor));
    }

    // Prevent the MockBookKeeper instance from being closed when the broker is restarted within a test
    public static class NonClosableMockBookKeeper extends PulsarMockBookKeeper {

        public NonClosableMockBookKeeper(ZooKeeper zk, ExecutorService executor) throws Exception {
            super(zk, executor);
        }

        @Override
        public void close() {
            // no-op
        }

        @Override
        public void shutdown() {
            // no-op
        }

        // Called from internalCleanup() when the mock really has to go away.
        public void reallyShutdown() {
            super.shutdown();
        }
    }

    protected ZooKeeperClientFactory mockZooKeeperClientFactory = new ZooKeeperClientFactory() {

        @Override
        public CompletableFuture<ZooKeeper> create(String serverList, SessionType sessionType,
                int zkSessionTimeoutMillis) {
            // Always return the same instance (so that we don't lose the mock ZK content on broker restart)
            return CompletableFuture.completedFuture(mockZookKeeper);
        }
    };

    private BookKeeperClientFactory mockBookKeeperClientFactory = new BookKeeperClientFactory() {

        @Override
        public BookKeeper create(ServiceConfiguration conf, ZooKeeper zkClient,
                Optional<Class<? extends EnsemblePlacementPolicy>> ensemblePlacementPolicyClass,
                Map<String, Object> properties) {
            // Always return the same instance (so that we don't lose the mock BK content on broker restart)
            return mockBookKeeper;
        }

        @Override
        public void close() {
            // no-op
        }
    };

    /**
     * Retries the predicate up to retryCount times with linearly growing sleeps.
     * Returns true on success OR on the final attempt regardless of its result;
     * false is only reachable when retryCount <= 0.
     */
    public static boolean retryStrategically(Predicate<Void> predicate, int retryCount, long intSleepTimeInMillis)
            throws Exception {
        for (int i = 0; i < retryCount; i++) {
            if (predicate.test(null) || i == (retryCount - 1)) {
                return true;
            }
            Thread.sleep(intSleepTimeInMillis + (intSleepTimeInMillis * i));
        }
        return false;
    }

    /** Reflectively sets a (possibly private) field declared on the given class. */
    public static void setFieldValue(Class clazz, Object classObj, String fieldName, Object fieldValue)
            throws Exception {
        Field field = clazz.getDeclaredField(fieldName);
        field.setAccessible(true);
        field.set(classObj, fieldValue);
    }

    private static final Logger log = LoggerFactory.getLogger(MockedPulsarServiceBaseTest.class);
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
// NOTE: generated code — regenerate rather than hand-edit.

package com.azure.resourcemanager.network.implementation;

import com.azure.core.annotation.ExpectedResponses;
import com.azure.core.annotation.Get;
import com.azure.core.annotation.HeaderParam;
import com.azure.core.annotation.Headers;
import com.azure.core.annotation.Host;
import com.azure.core.annotation.HostParam;
import com.azure.core.annotation.PathParam;
import com.azure.core.annotation.QueryParam;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceInterface;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.annotation.UnexpectedResponseExceptionType;
import com.azure.core.http.rest.PagedFlux;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.PagedResponse;
import com.azure.core.http.rest.PagedResponseBase;
import com.azure.core.http.rest.Response;
import com.azure.core.http.rest.RestProxy;
import com.azure.core.management.exception.ManagementException;
import com.azure.core.util.Context;
import com.azure.core.util.FluxUtil;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.network.fluent.OperationsClient;
import com.azure.resourcemanager.network.fluent.models.OperationInner;
import com.azure.resourcemanager.network.models.OperationListResult;
import reactor.core.publisher.Mono;

/** An instance of this class provides access to all the operations defined in OperationsClient. */
public final class OperationsClientImpl implements OperationsClient {
    private final ClientLogger logger = new ClientLogger(OperationsClientImpl.class);

    /** The proxy service used to perform REST calls. */
    private final OperationsService service;

    /** The service client containing this operation class. */
    private final NetworkManagementClientImpl client;

    /**
     * Initializes an instance of OperationsClientImpl.
     *
     * @param client the instance of the service client containing this operation class.
     */
    OperationsClientImpl(NetworkManagementClientImpl client) {
        this.service =
            RestProxy.create(OperationsService.class, client.getHttpPipeline(), client.getSerializerAdapter());
        this.client = client;
    }

    /**
     * The interface defining all the services for NetworkManagementClientOperations to be used by the proxy service to
     * perform REST calls.
     */
    @Host("{$host}")
    @ServiceInterface(name = "NetworkManagementCli")
    private interface OperationsService {
        @Headers({"Content-Type: application/json"})
        @Get("/providers/Microsoft.Network/operations")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<OperationListResult>> list(
            @HostParam("$host") String endpoint,
            @QueryParam("api-version") String apiVersion,
            @HeaderParam("Accept") String accept,
            Context context);

        @Headers({"Content-Type: application/json"})
        @Get("{nextLink}")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<OperationListResult>> listNext(
            @PathParam(value = "nextLink", encoded = true) String nextLink,
            @HostParam("$host") String endpoint,
            @HeaderParam("Accept") String accept,
            Context context);
    }

    /**
     * Lists all of the available Network Rest API operations.
     *
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return result of the request to list Network operations along with {@link PagedResponse} on successful
     *     completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<OperationInner>> listSinglePageAsync() {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        final String apiVersion = "2021-05-01";
        final String accept = "application/json";
        // Capture the reactor context so client-wide context propagates into the call.
        return FluxUtil
            .withContext(context -> service.list(this.client.getEndpoint(), apiVersion, accept, context))
            .<PagedResponse<OperationInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null))
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }

    /**
     * Lists all of the available Network Rest API operations.
     *
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return result of the request to list Network operations along with {@link PagedResponse} on successful
     *     completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<OperationInner>> listSinglePageAsync(Context context) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        final String apiVersion = "2021-05-01";
        final String accept = "application/json";
        // Caller-supplied context is merged with the client's default context.
        context = this.client.mergeContext(context);
        return service
            .list(this.client.getEndpoint(), apiVersion, accept, context)
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null));
    }

    /**
     * Lists all of the available Network Rest API operations.
     *
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return result of the request to list Network operations as paginated response with {@link PagedFlux}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedFlux<OperationInner> listAsync() {
        return new PagedFlux<>(() -> listSinglePageAsync(), nextLink -> listNextSinglePageAsync(nextLink));
    }

    /**
     * Lists all of the available Network Rest API operations.
     *
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return result of the request to list Network operations as paginated response with {@link PagedFlux}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    private PagedFlux<OperationInner> listAsync(Context context) {
        return new PagedFlux<>(
            () -> listSinglePageAsync(context), nextLink -> listNextSinglePageAsync(nextLink, context));
    }

    /**
     * Lists all of the available Network Rest API operations.
     *
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return result of the request to list Network operations as paginated response with {@link PagedIterable}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<OperationInner> list() {
        return new PagedIterable<>(listAsync());
    }

    /**
     * Lists all of the available Network Rest API operations.
     *
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return result of the request to list Network operations as paginated response with {@link PagedIterable}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<OperationInner> list(Context context) {
        return new PagedIterable<>(listAsync(context));
    }

    /**
     * Get the next page of items.
     *
     * @param nextLink The nextLink parameter.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return result of the request to list Network operations along with {@link PagedResponse} on successful
     *     completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<OperationInner>> listNextSinglePageAsync(String nextLink) {
        if (nextLink == null) {
            return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
        }
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        final String accept = "application/json";
        return FluxUtil
            .withContext(context -> service.listNext(nextLink, this.client.getEndpoint(), accept, context))
            .<PagedResponse<OperationInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null))
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }

    /**
     * Get the next page of items.
     *
     * @param nextLink The nextLink parameter.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return result of the request to list Network operations along with {@link PagedResponse} on successful
     *     completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<OperationInner>> listNextSinglePageAsync(String nextLink, Context context) {
        if (nextLink == null) {
            return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
        }
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        final String accept = "application/json";
        context = this.client.mergeContext(context);
        return service
            .listNext(nextLink, this.client.getEndpoint(), accept, context)
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null));
    }
}
package io.dropwizard.server;

import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.jetty9.InstrumentedQueuedThreadPool;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeName;
import io.dropwizard.jetty.ConnectorFactory;
import io.dropwizard.jetty.HttpConnectorFactory;
import io.dropwizard.jetty.RoutingHandler;
import io.dropwizard.setup.Environment;
import org.eclipse.jetty.server.Connector;
import org.eclipse.jetty.server.Handler;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.util.component.ContainerLifeCycle;
import org.eclipse.jetty.util.thread.QueuedThreadPool;
import org.eclipse.jetty.util.thread.ThreadPool;
import org.hibernate.validator.constraints.NotEmpty;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.validation.Valid;
import javax.validation.constraints.Min;
import javax.validation.constraints.NotNull;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

/**
 * The default implementation of {@link ServerFactory}, which allows for multiple sets of
 * application and admin connectors, all running on separate ports. Admin connectors use a separate
 * thread pool to keep the control and data planes separate(ish).
 * <p/>
 * <b>Configuration Parameters:</b>
 * <table>
 *     <tr>
 *         <td>Name</td>
 *         <td>Default</td>
 *         <td>Description</td>
 *     </tr>
 *     <tr>
 *         <td>{@code applicationConnectors}</td>
 *         <td>An {@link HttpConnectorFactory HTTP connector} listening on port 8080.</td>
 *         <td>A set of {@link ConnectorFactory connectors} which will handle application requests.</td>
 *     </tr>
 *     <tr>
 *         <td>{@code adminConnectors}</td>
 *         <td>An {@link HttpConnectorFactory HTTP connector} listening on port 8081.</td>
 *         <td>A set of {@link ConnectorFactory connectors} which will handle admin requests.</td>
 *     </tr>
 *     <tr>
 *         <td>{@code adminMaxThreads}</td>
 *         <td>64</td>
 *         <td>The maximum number of threads to use for admin requests.</td>
 *     </tr>
 *     <tr>
 *         <td>{@code adminMinThreads}</td>
 *         <td>1</td>
 *         <td>The minimum number of threads to use for admin requests.</td>
 *     </tr>
 * </table>
 * <p/>
 * For more configuration parameters, see {@link AbstractServerFactory}.
 *
 * @see ServerFactory
 * @see AbstractServerFactory
 */
@JsonTypeName("default")
public class DefaultServerFactory extends AbstractServerFactory {
    private static final Logger LOGGER = LoggerFactory.getLogger(DefaultServerFactory.class);

    @Valid
    @NotNull
    private List<ConnectorFactory> applicationConnectors =
            Collections.singletonList(HttpConnectorFactory.application());

    @Valid
    @NotNull
    private List<ConnectorFactory> adminConnectors =
            Collections.singletonList(HttpConnectorFactory.admin());

    @Min(2)
    private int adminMaxThreads = 64;

    @Min(1)
    private int adminMinThreads = 1;

    @NotEmpty
    private String applicationContextPath = "/";

    @NotEmpty
    private String adminContextPath = "/";

    @JsonProperty
    public List<ConnectorFactory> getApplicationConnectors() {
        return applicationConnectors;
    }

    @JsonProperty
    public void setApplicationConnectors(List<ConnectorFactory> connectors) {
        this.applicationConnectors = connectors;
    }

    @JsonProperty
    public List<ConnectorFactory> getAdminConnectors() {
        return adminConnectors;
    }

    @JsonProperty
    public void setAdminConnectors(List<ConnectorFactory> connectors) {
        this.adminConnectors = connectors;
    }

    @JsonProperty
    public int getAdminMaxThreads() {
        return adminMaxThreads;
    }

    @JsonProperty
    public void setAdminMaxThreads(int adminMaxThreads) {
        this.adminMaxThreads = adminMaxThreads;
    }

    @JsonProperty
    public int getAdminMinThreads() {
        return adminMinThreads;
    }

    @JsonProperty
    public void setAdminMinThreads(int adminMinThreads) {
        this.adminMinThreads = adminMinThreads;
    }

    @JsonProperty
    public String getApplicationContextPath() {
        return applicationContextPath;
    }

    @JsonProperty
    public void setApplicationContextPath(final String applicationContextPath) {
        this.applicationContextPath = applicationContextPath;
    }

    @JsonProperty
    public String getAdminContextPath() {
        return adminContextPath;
    }

    @JsonProperty
    public void setAdminContextPath(final String adminContextPath) {
        this.adminContextPath = adminContextPath;
    }

    /**
     * Builds the Jetty {@link Server}: creates the shared thread pool, the
     * application and admin servlet handlers, routes each connector to its
     * handler, and wraps everything in gzip/request-log/stats handlers.
     */
    @Override
    public Server build(Environment environment) {
        printBanner(environment.getName());
        final ThreadPool threadPool = createThreadPool(environment.metrics());
        final Server server = buildServer(environment.lifecycle(), threadPool);

        final Handler applicationHandler = createAppServlet(server,
                                                            environment.jersey(),
                                                            environment.getObjectMapper(),
                                                            environment.getValidator(),
                                                            environment.getApplicationContext(),
                                                            environment.getJerseyServletContainer(),
                                                            environment.metrics());

        final Handler adminHandler = createAdminServlet(server,
                                                        environment.getAdminContext(),
                                                        environment.metrics(),
                                                        environment.healthChecks());

        final RoutingHandler routingHandler = buildRoutingHandler(environment.metrics(),
                                                                  server,
                                                                  applicationHandler,
                                                                  adminHandler);
        final Handler gzipHandler = buildGzipHandler(routingHandler);
        server.setHandler(addStatsHandler(addRequestLog(server, gzipHandler, environment.getName())));
        return server;
    }

    /** Applies the configured context paths to the application and admin contexts. */
    @Override
    public void configure(Environment environment) {
        LOGGER.info("Registering jersey handler with root path prefix: {}", applicationContextPath);
        environment.getApplicationContext().setContextPath(applicationContextPath);

        LOGGER.info("Registering admin handler with root path prefix: {}", adminContextPath);
        environment.getAdminContext().setContextPath(adminContextPath);
    }

    /**
     * Registers every application and admin connector on the server and maps
     * each one to the corresponding handler.
     */
    private RoutingHandler buildRoutingHandler(MetricRegistry metricRegistry,
                                               Server server,
                                               Handler applicationHandler,
                                               Handler adminHandler) {
        final List<Connector> appConnectors = buildAppConnectors(metricRegistry, server);

        final List<Connector> adConnectors = buildAdminConnectors(metricRegistry, server);

        // LinkedHashMap keeps connector registration order deterministic.
        final Map<Connector, Handler> handlers = new LinkedHashMap<>();

        for (Connector connector : appConnectors) {
            server.addConnector(connector);
            handlers.put(connector, applicationHandler);
        }
        for (Connector connector : adConnectors) {
            server.addConnector(connector);
            handlers.put(connector, adminHandler);
        }

        return new RoutingHandler(handlers);
    }

    private List<Connector> buildAdminConnectors(MetricRegistry metricRegistry, Server server) {
        // threadpool is shared between all the connectors, so it should be managed by the server instead of the
        // individual connectors
        final QueuedThreadPool threadPool =
                new InstrumentedQueuedThreadPool(metricRegistry, adminMaxThreads, adminMinThreads);
        threadPool.setName("dw-admin");
        server.addBean(threadPool);

        final List<Connector> connectors = new ArrayList<>();
        for (ConnectorFactory factory : adminConnectors) {
            final Connector connector = factory.build(server, metricRegistry, "admin", threadPool);
            if (connector instanceof ContainerLifeCycle) {
                // The server owns the shared pool; the connector must not stop it.
                ((ContainerLifeCycle) connector).unmanage(threadPool);
            }
            connectors.add(connector);
        }
        return connectors;
    }

    private List<Connector> buildAppConnectors(MetricRegistry metricRegistry, Server server) {
        // Application connectors use the server's own thread pool (null here).
        final List<Connector> connectors = new ArrayList<>();
        for (ConnectorFactory factory : applicationConnectors) {
            connectors.add(factory.build(server, metricRegistry, "application", null));
        }
        return connectors;
    }

    @Override
    public String toString() {
        return "DefaultServerFactory{" +
                "applicationConnectors=" + applicationConnectors +
                ", adminConnectors=" + adminConnectors +
                ", adminMaxThreads=" + adminMaxThreads +
                ", adminMinThreads=" + adminMinThreads +
                ", applicationContextPath='" + applicationContextPath + '\'' +
                ", adminContextPath='" + adminContextPath + '\'' +
                '}';
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.airlift.compress.zstd;

import static io.airlift.compress.zstd.Constants.COMPRESSED_BLOCK;
import static io.airlift.compress.zstd.Constants.COMPRESSED_LITERALS_BLOCK;
import static io.airlift.compress.zstd.Constants.MAGIC_NUMBER;
import static io.airlift.compress.zstd.Constants.MAX_BLOCK_SIZE;
import static io.airlift.compress.zstd.Constants.MIN_BLOCK_SIZE;
import static io.airlift.compress.zstd.Constants.MIN_WINDOW_LOG;
import static io.airlift.compress.zstd.Constants.RAW_BLOCK;
import static io.airlift.compress.zstd.Constants.RAW_LITERALS_BLOCK;
import static io.airlift.compress.zstd.Constants.RLE_LITERALS_BLOCK;
import static io.airlift.compress.zstd.Constants.SIZE_OF_BLOCK_HEADER;
import static io.airlift.compress.zstd.Constants.SIZE_OF_INT;
import static io.airlift.compress.zstd.Constants.SIZE_OF_SHORT;
import static io.airlift.compress.zstd.Constants.TREELESS_LITERALS_BLOCK;
import static io.airlift.compress.zstd.Huffman.MAX_SYMBOL;
import static io.airlift.compress.zstd.Huffman.MAX_SYMBOL_COUNT;
import static io.airlift.compress.zstd.Util.checkArgument;
import static io.airlift.compress.zstd.Util.put24BitLittleEndian;

import io.airlift.compress.UnsafeUtils;

// Writes a Zstandard frame: magic number, frame header, then a sequence of
// compressed or raw blocks. Byte layout follows RFC 8478.
class ZstdFrameCompressor
{
    static final int MAX_FRAME_HEADER_SIZE = 14;

    //private static final int CHECKSUM_FLAG = 0x4;
    private static final int SINGLE_SEGMENT_FLAG = 0x20;

    // Literal runs at or below this size are stored raw rather than compressed.
    private static final int MINIMUM_LITERALS_SIZE = 63;

    // the maximum table log allowed for literal encoding per RFC 8478, section 4.2.1
    private static final int MAX_HUFFMAN_TABLE_LOG = 11;

    // Static utility class; not instantiable.
    private ZstdFrameCompressor()
    {
    }

    // visible for testing
    // Writes the 4-byte zstd magic number; returns the number of bytes written.
    static int writeMagic(final byte[] outputBase, final long outputAddress, final long outputLimit)
    {
        checkArgument(outputLimit - outputAddress >= SIZE_OF_INT, "Output buffer too small");

        UnsafeUtils.putInt(outputBase, outputAddress, MAGIC_NUMBER);
        return SIZE_OF_INT;
    }

    // visible for testing
    // Writes the variable-length frame header (descriptor byte, optional window
    // descriptor, optional content size field); returns bytes written.
    static int writeFrameHeader(final byte[] outputBase, final long outputAddress, final long outputLimit, int inputSize, int windowSize)
    {
        checkArgument(outputLimit - outputAddress >= MAX_FRAME_HEADER_SIZE, "Output buffer too small");

        long output = outputAddress;

        // 0, 1 or 2 — selects how many bytes encode the content size below.
        int contentSizeDescriptor = (inputSize >= 256 ? 1 : 0) + (inputSize >= 65536 + 256 ? 1 : 0);
        //int frameHeaderDescriptor = (contentSizeDescriptor << 6) | CHECKSUM_FLAG; // dictionary ID missing
        int frameHeaderDescriptor = (contentSizeDescriptor << 6); // dictionary ID missing

        // Single-segment: the whole input fits in the window, so the window
        // descriptor byte is omitted.
        boolean singleSegment = windowSize >= inputSize;
        if (singleSegment) {
            frameHeaderDescriptor |= SINGLE_SEGMENT_FLAG;
        }

        UnsafeUtils.putByte(outputBase, output, (byte) frameHeaderDescriptor);
        output++;

        if (!singleSegment) {
            // Encode windowSize as exponent (power of two) plus 3-bit mantissa.
            int base = Integer.highestOneBit(windowSize);

            int exponent = 32 - Integer.numberOfLeadingZeros(base) - 1;
            if (exponent < MIN_WINDOW_LOG) {
                throw new IllegalArgumentException("Minimum window size is " + (1 << MIN_WINDOW_LOG));
            }

            int remainder = windowSize - base;
            if (remainder % (base / 8) != 0) {
                throw new IllegalArgumentException("Window size of magnitude 2^" + exponent + " must be multiple of " + (base / 8));
            }

            // mantissa is guaranteed to be between 0-7
            int mantissa = remainder / (base / 8);
            int encoded = ((exponent - MIN_WINDOW_LOG) << 3) | mantissa;

            UnsafeUtils.putByte(outputBase, output, (byte) encoded);
            output++;
        }

        switch (contentSizeDescriptor) {
            case 0:
                if (singleSegment) {
                    UnsafeUtils.putByte(outputBase, output++, (byte) inputSize);
                }
                break;
            case 1:
                // Sizes in [256, 65536+255] are stored biased by 256 in 2 bytes.
                UnsafeUtils.putShort(outputBase, output, (inputSize - 256));
                output += SIZE_OF_SHORT;
                break;
            case 2:
                UnsafeUtils.putInt(outputBase, output, inputSize);
                output += SIZE_OF_INT;
                break;
            default:
                throw new AssertionError();
        }

        return (int) (output - outputAddress);
    }

    // visible for testing
    // Writes the 4-byte xxHash64-derived content checksum; returns bytes written.
    static int writeChecksum(byte[] outputBase, long outputAddress, long outputLimit, byte[] inputBase, long inputAddress, long inputLimit)
    {
        checkArgument(outputLimit - outputAddress >= SIZE_OF_INT, "Output buffer too small");

        int inputSize = (int) (inputLimit - inputAddress);

        long hash = XxHash64.hash(0, inputBase, inputAddress, inputSize);

        UnsafeUtils.putInt(outputBase, outputAddress, (int) hash);

        return SIZE_OF_INT;
    }

    // Compresses [inputAddress, inputLimit) into the output buffer as a full
    // zstd frame; returns the total number of bytes written.
    public static int compress(byte[] inputBase, long inputAddress, long inputLimit, byte[] outputBase, long outputAddress, long outputLimit, int compressionLevel)
    {
        int inputSize = (int) (inputLimit - inputAddress);

        CompressionParameters parameters = CompressionParameters.compute(compressionLevel, inputSize);

        long output = outputAddress;

        output += writeMagic(outputBase, output, outputLimit);
        output += writeFrameHeader(outputBase, output, outputLimit, inputSize, 1 << parameters.getWindowLog());
        output += compressFrame(inputBase, inputAddress, inputLimit, outputBase, output, outputLimit, parameters);
        // Checksum emission is disabled (matches the disabled CHECKSUM_FLAG above).
        //output += writeChecksum(outputBase, output, outputLimit, inputBase, inputAddress, inputLimit);

        return (int) (output - outputAddress);
    }

    // Splits the input into blocks and emits each one either compressed or raw
    // (raw when the block did not shrink); returns bytes written.
    private static int compressFrame(byte[] inputBase, long inputAddress, long inputLimit, byte[] outputBase, long outputAddress, long outputLimit, CompressionParameters parameters)
    {
        int windowSize = 1 << parameters.getWindowLog(); // TODO: store window size in parameters directly?
        int blockSize = Math.min(MAX_BLOCK_SIZE, windowSize);

        int outputSize = (int) (outputLimit - outputAddress);
        int remaining = (int) (inputLimit - inputAddress);

        long output = outputAddress;
        long input = inputAddress;

        CompressionContext context = new CompressionContext(parameters, inputAddress, remaining);

        do {
            checkArgument(outputSize >= SIZE_OF_BLOCK_HEADER + MIN_BLOCK_SIZE, "Output buffer too small");

            // Last-block flag is set when this block consumes the rest of the input.
            int lastBlockFlag = blockSize >= remaining ? 1 : 0;
            blockSize = Math.min(blockSize, remaining);

            int compressedSize = 0;
            if (remaining > 0) {
                compressedSize = compressBlock(inputBase, input, blockSize, outputBase, output + SIZE_OF_BLOCK_HEADER, outputSize - SIZE_OF_BLOCK_HEADER, context, parameters);
            }

            if (compressedSize == 0) { // block is not compressible
                checkArgument(blockSize + SIZE_OF_BLOCK_HEADER <= outputSize, "Output size too small");

                // Raw block: 3-byte header (flag | type | size) then the input bytes verbatim.
                int blockHeader = lastBlockFlag | (RAW_BLOCK << 1) | (blockSize << 3);
                put24BitLittleEndian(outputBase, output, blockHeader);

                // block is uncompressible, so compressed size is the original size
                UnsafeUtils.copyMemory(inputBase, input, outputBase, output + SIZE_OF_BLOCK_HEADER, blockSize);
                compressedSize = SIZE_OF_BLOCK_HEADER + blockSize;
            }
            else {
                int blockHeader = lastBlockFlag | (COMPRESSED_BLOCK << 1) | (compressedSize << 3);
                put24BitLittleEndian(outputBase, output, blockHeader);
                compressedSize += SIZE_OF_BLOCK_HEADER;
            }

            input += blockSize;
            remaining -= blockSize;
            output += compressedSize;
            outputSize -= compressedSize;
        }
        while (remaining > 0);

        return (int) (output - outputAddress);
    }

    // Compresses a single block (literals + sequences); returns 0 when the
    // block is not compressible, otherwise the compressed size.
    private static int compressBlock(byte[] inputBase, long inputAddress, int inputSize, byte[] outputBase, long outputAddress, int outputSize, CompressionContext context, CompressionParameters parameters)
    {
        if (inputSize < MIN_BLOCK_SIZE + SIZE_OF_BLOCK_HEADER + 1) {
            //  don't even attempt compression below a certain input size
            return 0;
        }

        context.blockCompressionState.enforceMaxDistance(inputAddress + inputSize, 1 << parameters.getWindowLog());
        context.sequenceStore.reset();

        int lastLiteralsSize = parameters.getStrategy()
                .getCompressor()
                .compressBlock(inputBase, inputAddress, inputSize, context.sequenceStore, context.blockCompressionState, context.offsets, parameters);

        long lastLiteralsAddress = inputAddress + inputSize - lastLiteralsSize;

        // append [lastLiteralsAddress .. lastLiteralsSize] to sequenceStore literals buffer
        context.sequenceStore.appendLiterals(inputBase, lastLiteralsAddress, lastLiteralsSize);

        // convert length/offsets into codes
        context.sequenceStore.generateCodes();

        long outputLimit = outputAddress + outputSize;
        long output = outputAddress;

        int compressedLiteralsSize = encodeLiterals(
                context.huffmanContext,
                parameters,
                outputBase,
                output,
                (int) (outputLimit - output),
                context.sequenceStore.literalsBuffer,
                context.sequenceStore.literalsLength);
        output += compressedLiteralsSize;

        int compressedSequencesSize = SequenceEncoder.compressSequences(outputBase, output, (int) (outputLimit - output), context.sequenceStore, parameters.getStrategy(), context.sequenceEncodingContext);

        int compressedSize = compressedLiteralsSize + compressedSequencesSize;
        if (compressedSize == 0) {
            // not compressible
            return compressedSize;
        }

        // Check compressibility
        int maxCompressedSize = inputSize - calculateMinimumGain(inputSize, parameters.getStrategy());
        if (compressedSize > maxCompressedSize) {
            return 0; // not compressed
        }

        // confirm repeated offsets and entropy tables
        context.commit();

        return compressedSize;
    }

    // Encodes the literals section; small or bypassed runs are stored raw.
    private static int encodeLiterals(
            HuffmanCompressionContext context,
            CompressionParameters parameters,
            byte[] outputBase,
            long outputAddress,
            int outputSize,
            byte[] literals,
            int literalsSize)
    {
        // TODO: move this to Strategy
        boolean bypassCompression = (parameters.getStrategy() == CompressionParameters.Strategy.FAST) && (parameters.getTargetLength() > 0);
        if (bypassCompression || literalsSize <= MINIMUM_LITERALS_SIZE) {
            return rawLiterals(outputBase, outputAddress, outputSize, literals, 0, literalsSize);
        }

        int headerSize = 3 + (literalsSize >= 1024 ? 
1 : 0) + (literalsSize >= 16384 ? 1 : 0); checkArgument(headerSize + 1 <= outputSize, "Output buffer too small"); int[] counts = new int[MAX_SYMBOL_COUNT]; // TODO: preallocate Histogram.count(literals, literalsSize, counts); int maxSymbol = Histogram.findMaxSymbol(counts, MAX_SYMBOL); int largestCount = Histogram.findLargestCount(counts, maxSymbol); long literalsAddress = 0; if (largestCount == literalsSize) { // all bytes in input are equal return rleLiterals(outputBase, outputAddress, outputSize, literals, 0, literalsSize); } else if (largestCount <= (literalsSize >>> 7) + 4) { // heuristic: probably not compressible enough return rawLiterals(outputBase, outputAddress, outputSize, literals, 0, literalsSize); } HuffmanCompressionTable previousTable = context.getPreviousTable(); HuffmanCompressionTable table; int serializedTableSize; boolean reuseTable; boolean canReuse = previousTable.isValid(counts, maxSymbol); // heuristic: use existing table for small inputs if valid // TODO: move to Strategy boolean preferReuse = parameters.getStrategy().ordinal() < CompressionParameters.Strategy.LAZY.ordinal() && literalsSize <= 1024; if (preferReuse && canReuse) { table = previousTable; reuseTable = true; serializedTableSize = 0; } else { HuffmanCompressionTable newTable = context.borrowTemporaryTable(); newTable.initialize( counts, maxSymbol, HuffmanCompressionTable.optimalNumberOfBits(MAX_HUFFMAN_TABLE_LOG, literalsSize, maxSymbol), context.getCompressionTableWorkspace()); serializedTableSize = newTable.write(outputBase, outputAddress + headerSize, outputSize - headerSize, context.getTableWriterWorkspace()); // Check if using previous huffman table is beneficial if (canReuse && previousTable.estimateCompressedSize(counts, maxSymbol) <= serializedTableSize + newTable.estimateCompressedSize(counts, maxSymbol)) { table = previousTable; reuseTable = true; serializedTableSize = 0; context.discardTemporaryTable(); } else { table = newTable; reuseTable = false; } } int 
compressedSize; boolean singleStream = literalsSize < 256; if (singleStream) { compressedSize = HuffmanCompressor.compressSingleStream(outputBase, outputAddress + headerSize + serializedTableSize, outputSize - headerSize - serializedTableSize, literals, literalsAddress, literalsSize, table); } else { compressedSize = HuffmanCompressor.compress4streams(outputBase, outputAddress + headerSize + serializedTableSize, outputSize - headerSize - serializedTableSize, literals, literalsAddress, literalsSize, table); } int totalSize = serializedTableSize + compressedSize; int minimumGain = calculateMinimumGain(literalsSize, parameters.getStrategy()); if (compressedSize == 0 || totalSize >= literalsSize - minimumGain) { // incompressible or no savings // discard any temporary table we might have borrowed above context.discardTemporaryTable(); return rawLiterals(outputBase, outputAddress, outputSize, literals, 0, literalsSize); } int encodingType = reuseTable ? TREELESS_LITERALS_BLOCK : COMPRESSED_LITERALS_BLOCK; // Build header switch (headerSize) { case 3: { // 2 - 2 - 10 - 10 int header = encodingType | ((singleStream ? 0 : 1) << 2) | (literalsSize << 4) | (totalSize << 14); put24BitLittleEndian(outputBase, outputAddress, header); break; } case 4: { // 2 - 2 - 14 - 14 int header = encodingType | (2 << 2) | (literalsSize << 4) | (totalSize << 18); UnsafeUtils.putInt(outputBase, outputAddress, header); break; } case 5: { // 2 - 2 - 18 - 18 int header = encodingType | (3 << 2) | (literalsSize << 4) | (totalSize << 22); UnsafeUtils.putInt(outputBase, outputAddress, header); UnsafeUtils.putByte(outputBase, outputAddress + SIZE_OF_INT, (byte) (totalSize >>> 10)); break; } default: // not possible : headerSize is {3,4,5} throw new IllegalStateException(); } return headerSize + totalSize; } private static int rleLiterals(byte[] outputBase, long outputAddress, int outputSize, byte[] inputBase, long inputAddress, int inputSize) { int headerSize = 1 + (inputSize > 31 ? 
1 : 0) + (inputSize > 4095 ? 1 : 0); switch (headerSize) { case 1: // 2 - 1 - 5 UnsafeUtils.putByte(outputBase, outputAddress, (byte) (RLE_LITERALS_BLOCK | (inputSize << 3))); break; case 2: // 2 - 2 - 12 UnsafeUtils.putShort(outputBase, outputAddress, (RLE_LITERALS_BLOCK | (1 << 2) | (inputSize << 4))); break; case 3: // 2 - 2 - 20 UnsafeUtils.putInt(outputBase, outputAddress, RLE_LITERALS_BLOCK | 3 << 2 | inputSize << 4); break; default: // impossible. headerSize is {1,2,3} throw new IllegalStateException(); } UnsafeUtils.putByte(outputBase, outputAddress + headerSize, UnsafeUtils.getByte(inputBase, inputAddress)); return headerSize + 1; } private static int calculateMinimumGain(int inputSize, CompressionParameters.Strategy strategy) { // TODO: move this to Strategy to avoid hardcoding a specific strategy here int minLog = strategy == CompressionParameters.Strategy.BTULTRA ? 7 : 6; return (inputSize >>> minLog) + 2; } private static int rawLiterals(byte[] outputBase, long outputAddress, int outputSize, byte[] inputBase, long inputAddress, int inputSize) { int headerSize = 1; if (inputSize >= 32) { headerSize++; } if (inputSize >= 4096) { headerSize++; } checkArgument(inputSize + headerSize <= outputSize, "Output buffer too small"); switch (headerSize) { case 1: UnsafeUtils.putByte(outputBase, outputAddress, (byte) (RAW_LITERALS_BLOCK | (inputSize << 3))); break; case 2: UnsafeUtils.putShort(outputBase, outputAddress, (RAW_LITERALS_BLOCK | (1 << 2) | (inputSize << 4))); break; case 3: put24BitLittleEndian(outputBase, outputAddress, RAW_LITERALS_BLOCK | (3 << 2) | (inputSize << 4)); break; default: throw new AssertionError(); } // TODO: ensure this test is correct checkArgument(inputSize + 1 <= outputSize, "Output buffer too small"); UnsafeUtils.copyMemory(inputBase, inputAddress, outputBase, outputAddress + headerSize, inputSize); return headerSize + inputSize; } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.model;

import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledExecutorService;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;

import org.apache.camel.Processor;
import org.apache.camel.processor.Throttler;
import org.apache.camel.spi.RouteContext;
import org.apache.camel.util.concurrent.ExecutorServiceHelper;

/**
 * Represents an XML &lt;throttle/&gt; element
 *
 * @version $Revision$
 */
@XmlRootElement(name = "throttle")
@XmlAccessorType(XmlAccessType.FIELD)
public class ThrottleDefinition extends OutputDefinition<ThrottleDefinition> implements ExecutorServiceAwareDefinition<ThrottleDefinition> {
    @XmlTransient
    private ExecutorService executorService;
    @XmlAttribute
    private String executorServiceRef;
    @XmlAttribute
    private Long maximumRequestsPerPeriod;
    @XmlAttribute
    private Long timePeriodMillis;
    @XmlAttribute
    private Boolean asyncDelayed;
    @XmlAttribute
    private Boolean callerRunsWhenRejected;

    public ThrottleDefinition() {
    }

    public ThrottleDefinition(long maximumRequestsPerPeriod) {
        this.maximumRequestsPerPeriod = maximumRequestsPerPeriod;
    }

    @Override
    public String toString() {
        return "Throttle[" + getMaximumRequestsPerPeriod() + " request per " + getTimePeriodMillis() + " millis -> " + getOutputs() + "]";
    }

    @Override
    public String getShortName() {
        return "throttle";
    }

    @Override
    public String getLabel() {
        return "" + getMaximumRequestsPerPeriod() + " per " + getTimePeriodMillis() + " (ms)";
    }

    /**
     * Creates the {@link Throttler} processor for this definition.
     * <p/>
     * A scheduled thread pool is only looked up/created when async delay is enabled.
     * Defaults applied here: time period of 1000 millis, and caller-runs-when-rejected
     * enabled when not explicitly configured.
     */
    @Override
    public Processor createProcessor(RouteContext routeContext) throws Exception {
        Processor childProcessor = this.createChildProcessor(routeContext, true);

        ScheduledExecutorService scheduled = null;
        if (getAsyncDelayed() != null && getAsyncDelayed()) {
            scheduled = ExecutorServiceHelper.getConfiguredScheduledExecutorService(routeContext, "Throttle", this);
            if (scheduled == null) {
                scheduled = routeContext.getCamelContext().getExecutorServiceStrategy().newScheduledThreadPool(this, "Throttle");
            }
        }

        // should be default 1000 millis
        long period = getTimePeriodMillis() != null ? getTimePeriodMillis() : 1000L;

        Throttler answer = new Throttler(childProcessor, getMaximumRequestsPerPeriod(), period, scheduled);
        if (getAsyncDelayed() != null) {
            answer.setAsyncDelayed(getAsyncDelayed());
        }
        if (getCallerRunsWhenRejected() == null) {
            // should be true by default
            answer.setCallerRunsWhenRejected(true);
        } else {
            answer.setCallerRunsWhenRejected(getCallerRunsWhenRejected());
        }
        return answer;
    }

    // Fluent API
    // -------------------------------------------------------------------------
    /**
     * Sets the time period during which the maximum request count is valid
     *
     * @param timePeriodMillis  period in millis
     * @return the builder
     */
    public ThrottleDefinition timePeriodMillis(long timePeriodMillis) {
        setTimePeriodMillis(timePeriodMillis);
        return this;
    }

    /**
     * Sets the maximum number of requests allowed per time period
     *
     * @param maximumRequestsPerPeriod  the maximum request count number per time period
     * @return the builder
     */
    public ThrottleDefinition maximumRequestsPerPeriod(Long maximumRequestsPerPeriod) {
        setMaximumRequestsPerPeriod(maximumRequestsPerPeriod);
        return this;
    }

    /**
     * Whether or not the caller should run the task when it was rejected by the thread pool.
     * <p/>
     * Is by default <tt>true</tt>
     *
     * @param callerRunsWhenRejected whether or not the caller should run
     * @return the builder
     */
    public ThrottleDefinition callerRunsWhenRejected(boolean callerRunsWhenRejected) {
        setCallerRunsWhenRejected(callerRunsWhenRejected);
        return this;
    }

    /**
     * Enables asynchronous delay which means the thread will <b>not</b> block while delaying.
     *
     * @return the builder
     */
    public ThrottleDefinition asyncDelayed() {
        setAsyncDelayed(true);
        return this;
    }

    public ThrottleDefinition executorService(ExecutorService executorService) {
        setExecutorService(executorService);
        return this;
    }

    public ThrottleDefinition executorServiceRef(String executorServiceRef) {
        setExecutorServiceRef(executorServiceRef);
        return this;
    }

    // Properties
    // -------------------------------------------------------------------------
    public Long getMaximumRequestsPerPeriod() {
        return maximumRequestsPerPeriod;
    }

    public void setMaximumRequestsPerPeriod(Long maximumRequestsPerPeriod) {
        this.maximumRequestsPerPeriod = maximumRequestsPerPeriod;
    }

    public Long getTimePeriodMillis() {
        return timePeriodMillis;
    }

    public void setTimePeriodMillis(Long timePeriodMillis) {
        this.timePeriodMillis = timePeriodMillis;
    }

    public Boolean getAsyncDelayed() {
        return asyncDelayed;
    }

    public void setAsyncDelayed(Boolean asyncDelayed) {
        this.asyncDelayed = asyncDelayed;
    }

    public Boolean getCallerRunsWhenRejected() {
        return callerRunsWhenRejected;
    }

    public void setCallerRunsWhenRejected(Boolean callerRunsWhenRejected) {
        this.callerRunsWhenRejected = callerRunsWhenRejected;
    }

    public ExecutorService getExecutorService() {
        return executorService;
    }

    public void setExecutorService(ExecutorService executorService) {
        this.executorService = executorService;
    }

    public String getExecutorServiceRef() {
        return executorServiceRef;
    }

    public void setExecutorServiceRef(String executorServiceRef) {
        this.executorServiceRef = executorServiceRef;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.nifi.controller.reporting;

import org.apache.nifi.annotation.configuration.DefaultSchedule;
import org.apache.nifi.bundle.BundleCoordinate;
import org.apache.nifi.components.ConfigurableComponent;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.components.validation.ValidationStatus;
import org.apache.nifi.components.validation.ValidationTrigger;
import org.apache.nifi.controller.AbstractComponentNode;
import org.apache.nifi.controller.ConfigurationContext;
import org.apache.nifi.controller.ControllerServiceLookup;
import org.apache.nifi.controller.LoggableComponent;
import org.apache.nifi.controller.ProcessScheduler;
import org.apache.nifi.controller.ReloadComponent;
import org.apache.nifi.controller.ReportingTaskNode;
import org.apache.nifi.controller.ScheduledState;
import org.apache.nifi.controller.TerminationAwareLogger;
import org.apache.nifi.controller.ValidationContextFactory;
import org.apache.nifi.controller.service.ControllerServiceNode;
import org.apache.nifi.controller.service.ControllerServiceProvider;
import org.apache.nifi.controller.service.StandardConfigurationContext;
import org.apache.nifi.nar.ExtensionManager;
import org.apache.nifi.registry.ComponentVariableRegistry;
import org.apache.nifi.reporting.ReportingTask;
import org.apache.nifi.scheduling.SchedulingStrategy;
import org.apache.nifi.util.CharacterFilterUtils;
import org.apache.nifi.util.FormatUtils;
import org.apache.nifi.util.file.classloader.ClassLoaderUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.annotation.AnnotationUtils;

import java.net.URL;
import java.util.Collection;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;

/**
 * Base implementation of {@link ReportingTaskNode}: wraps a {@link ReportingTask} with
 * scheduling configuration, lifecycle state, and the verify-can-* guards that prevent
 * configuration changes while the task is running.
 */
public abstract class AbstractReportingTaskNode extends AbstractComponentNode implements ReportingTaskNode {

    private static final Logger LOG = LoggerFactory.getLogger(AbstractReportingTaskNode.class);

    // Holds the task plus its bundle/logger details; swapped atomically on reload (see setReportingTask)
    private final AtomicReference<ReportingTaskDetails> reportingTaskRef;
    private final ProcessScheduler processScheduler;
    private final ControllerServiceLookup serviceLookup;

    // Scheduling defaults: timer-driven every 5 minutes, unless overridden
    // (possibly via the @DefaultSchedule annotation handled in the constructor)
    private final AtomicReference<SchedulingStrategy> schedulingStrategy = new AtomicReference<>(SchedulingStrategy.TIMER_DRIVEN);
    private final AtomicReference<String> schedulingPeriod = new AtomicReference<>("5 mins");

    private volatile String comment;
    private volatile ScheduledState scheduledState = ScheduledState.STOPPED;

    public AbstractReportingTaskNode(final LoggableComponent<ReportingTask> reportingTask, final String id,
                                     final ControllerServiceProvider controllerServiceProvider, final ProcessScheduler processScheduler,
                                     final ValidationContextFactory validationContextFactory, final ComponentVariableRegistry variableRegistry,
                                     final ReloadComponent reloadComponent, final ExtensionManager extensionManager, final ValidationTrigger validationTrigger) {

        // Delegates with component type/class derived from the task's own class
        this(reportingTask, id, controllerServiceProvider, processScheduler, validationContextFactory,
                reportingTask.getComponent().getClass().getSimpleName(), reportingTask.getComponent().getClass().getCanonicalName(), variableRegistry,
                reloadComponent, extensionManager, validationTrigger, false);
    }

    public AbstractReportingTaskNode(final LoggableComponent<ReportingTask> reportingTask, final String id, final ControllerServiceProvider controllerServiceProvider,
                                     final ProcessScheduler processScheduler, final ValidationContextFactory validationContextFactory,
                                     final String componentType, final String componentCanonicalClass, final ComponentVariableRegistry variableRegistry,
                                     final ReloadComponent reloadComponent, final ExtensionManager extensionManager, final ValidationTrigger validationTrigger,
                                     final boolean isExtensionMissing) {

        super(id, validationContextFactory, controllerServiceProvider, componentType, componentCanonicalClass, variableRegistry, reloadComponent,
                extensionManager, validationTrigger, isExtensionMissing);
        this.reportingTaskRef = new AtomicReference<>(new ReportingTaskDetails(reportingTask));
        this.processScheduler = processScheduler;
        this.serviceLookup = controllerServiceProvider;

        // Apply the task's @DefaultSchedule annotation, if present. Each setter is applied
        // best-effort: a failure is logged and, if the period cannot be applied, the
        // strategy is reset to the TIMER_DRIVEN default so the two stay consistent.
        final Class<?> reportingClass = reportingTask.getComponent().getClass();

        DefaultSchedule dsc = AnnotationUtils.findAnnotation(reportingClass, DefaultSchedule.class);
        if(dsc != null) {
            try {
                this.setSchedulingStrategy(dsc.strategy());
            } catch (Throwable ex) {
                LOG.error(String.format("Error while setting scheduling strategy from DefaultSchedule annotation: %s", ex.getMessage()), ex);
            }
            try {
                this.setSchedulingPeriod(dsc.period());
            } catch (Throwable ex) {
                this.setSchedulingStrategy(SchedulingStrategy.TIMER_DRIVEN);
                LOG.error(String.format("Error while setting scheduling period from DefaultSchedule annotation: %s", ex.getMessage()), ex);
            }
        }
    }

    @Override
    public ConfigurableComponent getComponent() {
        return reportingTaskRef.get().getReportingTask();
    }

    @Override
    public BundleCoordinate getBundleCoordinate() {
        return reportingTaskRef.get().getBundleCoordinate();
    }

    @Override
    public TerminationAwareLogger getLogger() {
        return reportingTaskRef.get().getComponentLog();
    }

    @Override
    public void setSchedulingStrategy(final SchedulingStrategy schedulingStrategy) {
        this.schedulingStrategy.set(schedulingStrategy);
    }

    @Override
    public SchedulingStrategy getSchedulingStrategy() {
        return schedulingStrategy.get();
    }

    @Override
    public String getSchedulingPeriod() {
        return schedulingPeriod.get();
    }

    // Converts the textual period (e.g. "5 mins") to the requested unit
    @Override
    public long getSchedulingPeriod(final TimeUnit timeUnit) {
        return FormatUtils.getTimeDuration(schedulingPeriod.get(), timeUnit);
    }

    @Override
    public void setSchedulingPeriod(final String schedulingPeriod) {
        this.schedulingPeriod.set(schedulingPeriod);
    }

    @Override
    public ReportingTask getReportingTask() {
        return reportingTaskRef.get().getReportingTask();
    }

    @Override
    public void setReportingTask(final LoggableComponent<ReportingTask> reportingTask) {
        // Guard: the wrapped task may only be replaced while stopped
        if (isRunning()) {
            throw new IllegalStateException("Cannot modify Reporting Task configuration while Reporting Task is running");
        }
        this.reportingTaskRef.set(new ReportingTaskDetails(reportingTask));
    }

    @Override
    public void reload(final Set<URL> additionalUrls) throws ReportingTaskInstantiationException {
        if (isRunning()) {
            throw new IllegalStateException("Cannot reload Reporting Task while Reporting Task is running");
        }
        // Fingerprint the extra classpath URLs so changes can be detected later
        String additionalResourcesFingerprint = ClassLoaderUtils.generateAdditionalUrlsFingerprint(additionalUrls);
        setAdditionalResourcesFingerprint(additionalResourcesFingerprint);
        getReloadComponent().reload(this, getCanonicalClassName(), getBundleCoordinate(), additionalUrls);
    }

    // Running = scheduled by the scheduler OR still has active threads (e.g. draining after stop)
    @Override
    public boolean isRunning() {
        return processScheduler.isScheduled(this) || processScheduler.getActiveThreadCount(this) > 0;
    }

    // Re-validate when not scheduled, or when the last known status is not VALID
    @Override
    public boolean isValidationNecessary() {
        return !processScheduler.isScheduled(this) || getValidationStatus() != ValidationStatus.VALID;
    }

    @Override
    public int getActiveThreadCount() {
        return processScheduler.getActiveThreadCount(this);
    }

    @Override
    public ConfigurationContext getConfigurationContext() {
        return new StandardConfigurationContext(this, serviceLookup, getSchedulingPeriod(), getVariableRegistry());
    }

    @Override
    public void verifyModifiable() throws IllegalStateException {
        if (isRunning()) {
            throw new IllegalStateException("Cannot modify Reporting Task while the Reporting Task is running");
        }
    }

    @Override
    public ScheduledState getScheduledState() {
        return scheduledState;
    }

    @Override
    public void setScheduledState(final ScheduledState state) {
        this.scheduledState = state;
    }

    public boolean isDisabled() {
        return scheduledState == ScheduledState.DISABLED;
    }

    @Override
    public String getComments() {
        return comment;
    }

    @Override
    public void setComments(final String comment) {
        // Strip characters that are not legal in XML before storing
        this.comment = CharacterFilterUtils.filterInvalidXmlCharacters(comment);
    }

    @Override
    public void verifyCanDelete() {
        if (isRunning()) {
            throw new IllegalStateException("Cannot delete " + getReportingTask().getIdentifier() + " because it is currently running");
        }
    }

    @Override
    public void verifyCanDisable() {
        if (isRunning()) {
            throw new IllegalStateException("Cannot disable " + getReportingTask().getIdentifier() + " because it is currently running");
        }

        if (isDisabled()) {
            throw new IllegalStateException("Cannot disable " + getReportingTask().getIdentifier() + " because it is already disabled");
        }
    }

    @Override
    public void verifyCanEnable() {
        if (!isDisabled()) {
            throw new IllegalStateException("Cannot enable " + getReportingTask().getIdentifier() + " because it is not disabled");
        }
    }

    @Override
    public void verifyCanStart() {
        if (isDisabled()) {
            throw new IllegalStateException("Cannot start " + getReportingTask().getIdentifier() + " because it is currently disabled");
        }

        if (isRunning()) {
            throw new IllegalStateException("Cannot start " + getReportingTask().getIdentifier() + " because it is already running");
        }
    }

    @Override
    public void verifyCanStop() {
        if (!isRunning()) {
            throw new IllegalStateException("Cannot stop " + getReportingTask().getIdentifier() + " because it is not running");
        }
    }

    @Override
    public void verifyCanUpdate() {
        if (isRunning()) {
            throw new IllegalStateException("Cannot update " + getReportingTask().getIdentifier() + " because it is currently running");
        }
    }

    @Override
    public void verifyCanClearState() {
        verifyCanUpdate();
    }

    // Stricter start check: also requires no active threads and no validation errors
    @Override
    public void verifyCanStart(final Set<ControllerServiceNode> ignoredReferences) {
        switch (getScheduledState()) {
            case DISABLED:
                throw new IllegalStateException(this.getIdentifier() + " cannot be started because it is disabled");
            case RUNNING:
                throw new IllegalStateException(this.getIdentifier() + " cannot be started because it is already running");
            case STOPPED:
                break;
        }
        final int activeThreadCount = getActiveThreadCount();
        if (activeThreadCount > 0) {
            throw new IllegalStateException(this.getIdentifier() + " cannot be started because it has " + activeThreadCount + " active threads already");
        }

        final Collection<ValidationResult> validationResults = getValidationErrors(ignoredReferences);
        if (!validationResults.isEmpty()) {
            throw new IllegalStateException(this.getIdentifier() + " cannot be started because it is not currently valid");
        }
    }

    @Override
    public String toString() {
        return "ReportingTask[id=" + getIdentifier() + "]";
    }

    // Reporting tasks are controller-level components; they belong to no process group
    @Override
    public String getProcessGroupIdentifier() {
        return null;
    }
}
package com.mopub.mobileads.util.vast;

import android.content.Context;
import android.view.Display;
import android.view.WindowManager;
import com.mopub.common.CacheService;
import com.mopub.common.util.AsyncTasks;
import com.mopub.mobileads.VastVideoDownloadTask;
import java.util.*;
import static com.mopub.mobileads.VastVideoDownloadTask.VastVideoDownloadTaskListener;
import static com.mopub.mobileads.util.vast.VastXmlManagerAggregator.VastXmlManagerAggregatorListener;

/**
 * Orchestrates turning raw VAST XML into a playable {@link VastVideoConfiguration}:
 * aggregates the XML wrappers asynchronously, picks the best-fitting media file and
 * companion ad for the device screen, and ensures the media file is on disk
 * (downloading it if the disk cache misses) before notifying the listener.
 */
public class VastManager implements VastXmlManagerAggregatorListener {

    /** Callback fired once preparation finishes; receives null on any failure. */
    public interface VastManagerListener {
        public void onVastVideoConfigurationPrepared(final VastVideoConfiguration vastVideoConfiguration);
    }

    // Relative weights of the two terms in calculateFitness(); lower fitness is better.
    private static final double ASPECT_RATIO_WEIGHT = 40;
    private static final double AREA_WEIGHT = 60;

    // Only these MIME types are considered playable / displayable by this manager.
    private static final List<String> VIDEO_MIME_TYPES = Arrays.asList("video/mp4", "video/3gpp");
    private static final List<String> COMPANION_IMAGE_MIME_TYPES =
            Arrays.asList("image/jpeg", "image/png", "image/bmp", "image/gif");

    private VastManagerListener mVastManagerListener;
    // Non-null only while an aggregation is in flight; doubles as an "in progress" flag.
    private VastXmlManagerAggregator mVastXmlManagerAggregator;
    private double mScreenAspectRatio;
    private int mScreenArea;

    public VastManager(final Context context) {
        initializeScreenDimensions(context);
    }

    /**
     * Kicks off asynchronous parsing of the given VAST XML. A call is silently ignored
     * if a previous preparation is still running (mVastXmlManagerAggregator non-null).
     */
    public void prepareVastVideoConfiguration(final String vastXml,
            final VastManagerListener vastManagerListener) {
        if (mVastXmlManagerAggregator == null) {
            mVastManagerListener = vastManagerListener;
            mVastXmlManagerAggregator = new VastXmlManagerAggregator(this);
            AsyncTasks.safeExecuteOnExecutor(mVastXmlManagerAggregator, vastXml);
        }
    }

    /** Cancels any in-flight aggregation and resets the in-progress flag. */
    public void cancel() {
        if (mVastXmlManagerAggregator != null) {
            mVastXmlManagerAggregator.cancel(true);
            mVastXmlManagerAggregator = null;
        }
    }

    /**
     * Aggregator callback. null managers means aggregation failed; otherwise builds a
     * configuration, and either reports it immediately (media already cached on disk)
     * or downloads the media file first and reports on download completion.
     */
    @Override
    public void onAggregationComplete(final List<VastXmlManager> vastXmlManagers) {
        mVastXmlManagerAggregator = null;
        if (vastXmlManagers == null) {
            if (mVastManagerListener != null) {
                mVastManagerListener.onVastVideoConfigurationPrepared(null);
            }
            return;
        }
        final VastVideoConfiguration vastVideoConfiguration =
                createVastVideoConfigurationFromXml(vastXmlManagers);
        // Fast path: media already in the disk cache.
        if (updateDiskMediaFileUrl(vastVideoConfiguration)) {
            if (mVastManagerListener != null) {
                mVastManagerListener.onVastVideoConfigurationPrepared(vastVideoConfiguration);
            }
            return;
        }
        // Slow path: download, then re-check the cache before reporting success.
        final VastVideoDownloadTask vastVideoDownloadTask = new VastVideoDownloadTask(
                new VastVideoDownloadTaskListener() {
                    @Override
                    public void onComplete(boolean success) {
                        if (success && updateDiskMediaFileUrl(vastVideoConfiguration)) {
                            if (mVastManagerListener != null) {
                                mVastManagerListener.onVastVideoConfigurationPrepared(vastVideoConfiguration);
                            }
                        } else {
                            if (mVastManagerListener != null) {
                                mVastManagerListener.onVastVideoConfigurationPrepared(null);
                            }
                        }
                    }
                }
        );
        AsyncTasks.safeExecuteOnExecutor(
                vastVideoDownloadTask,
                vastVideoConfiguration.getNetworkMediaFileUrl()
        );
    }

    /**
     * If the network media URL is present in the disk cache, stores the cached file
     * path on the configuration and returns true; otherwise returns false.
     */
    private boolean updateDiskMediaFileUrl(final VastVideoConfiguration vastVideoConfiguration) {
        final String networkMediaFileUrl = vastVideoConfiguration.getNetworkMediaFileUrl();
        if (CacheService.containsKeyDiskCache(networkMediaFileUrl)) {
            final String filePathDiskCache = CacheService.getFilePathDiskCache(networkMediaFileUrl);
            vastVideoConfiguration.setDiskMediaFileUrl(filePathDiskCache);
            return true;
        }
        return false;
    }

    private void initializeScreenDimensions(final Context context) {
        // This currently assumes that all vast videos will be played in landscape
        final Display display =
                ((WindowManager) context.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
        int x = display.getWidth();
        int y = display.getHeight();
        // For landscape, width is always greater than height
        int screenWidth = Math.max(x, y);
        int screenHeight = Math.min(x, y);
        mScreenAspectRatio = (double) screenWidth / screenHeight;
        mScreenArea = screenWidth * screenHeight;
    }

    /**
     * Merges trackers from every XML manager into one configuration. The click-through
     * URL is first-wins across managers; media and companion candidates are pooled and
     * the best of each is selected.
     */
    private VastVideoConfiguration createVastVideoConfigurationFromXml(final List<VastXmlManager> xmlManagers) {
        final VastVideoConfiguration vastVideoConfiguration = new VastVideoConfiguration();
        final List<VastXmlManager.MediaXmlManager> mediaXmlManagers =
                new ArrayList<VastXmlManager.MediaXmlManager>();
        final List<VastXmlManager.ImageCompanionAdXmlManager> companionXmlManagers =
                new ArrayList<VastXmlManager.ImageCompanionAdXmlManager>();
        for (VastXmlManager xmlManager : xmlManagers) {
            vastVideoConfiguration.addImpressionTrackers(xmlManager.getImpressionTrackers());
            vastVideoConfiguration.addStartTrackers(xmlManager.getVideoStartTrackers());
            vastVideoConfiguration.addFirstQuartileTrackers(xmlManager.getVideoFirstQuartileTrackers());
            vastVideoConfiguration.addMidpointTrackers(xmlManager.getVideoMidpointTrackers());
            vastVideoConfiguration.addThirdQuartileTrackers(xmlManager.getVideoThirdQuartileTrackers());
            vastVideoConfiguration.addCompleteTrackers(xmlManager.getVideoCompleteTrackers());
            vastVideoConfiguration.addClickTrackers(xmlManager.getClickTrackers());
            // Keep the first non-null click-through URL encountered.
            if (vastVideoConfiguration.getClickThroughUrl() == null) {
                vastVideoConfiguration.setClickThroughUrl(xmlManager.getClickThroughUrl());
            }
            mediaXmlManagers.addAll(xmlManager.getMediaXmlManagers());
            companionXmlManagers.addAll(xmlManager.getCompanionAdXmlManagers());
        }
        vastVideoConfiguration.setNetworkMediaFileUrl(getBestMediaFileUrl(mediaXmlManagers));
        vastVideoConfiguration.setVastCompanionAd(getBestCompanionAd(companionXmlManagers));
        return vastVideoConfiguration;
    }

    /**
     * Picks the media URL whose dimensions best fit the screen (lowest fitness).
     * Candidates with an unsupported MIME type or null URL are removed; candidates
     * with missing/non-positive dimensions are kept but not scored. If nothing was
     * scored, falls back to the first surviving candidate's URL. Package-private for
     * testing.
     */
    String getBestMediaFileUrl(final List<VastXmlManager.MediaXmlManager> managers) {
        // Work on a copy so the caller's list is never mutated.
        final List<VastXmlManager.MediaXmlManager> mediaXmlManagers =
                new ArrayList<VastXmlManager.MediaXmlManager>(managers);
        double bestMediaFitness = Double.POSITIVE_INFINITY;
        String bestMediaFileUrl = null;
        final Iterator<VastXmlManager.MediaXmlManager> xmlManagerIterator = mediaXmlManagers.iterator();
        while (xmlManagerIterator.hasNext()) {
            final VastXmlManager.MediaXmlManager mediaXmlManager = xmlManagerIterator.next();
            final String mediaType = mediaXmlManager.getType();
            final String mediaUrl = mediaXmlManager.getMediaUrl();
            if (!VIDEO_MIME_TYPES.contains(mediaType) || mediaUrl == null) {
                xmlManagerIterator.remove();
                continue;
            }
            final Integer mediaWidth = mediaXmlManager.getWidth();
            final Integer mediaHeight = mediaXmlManager.getHeight();
            if (mediaWidth == null || mediaWidth <= 0 || mediaHeight == null || mediaHeight <= 0) {
                continue;
            }
            final double mediaFitness = calculateFitness(mediaWidth, mediaHeight);
            if (mediaFitness < bestMediaFitness) {
                bestMediaFitness = mediaFitness;
                bestMediaFileUrl = mediaUrl;
            }
        }
        // Fallback: any playable candidate is better than none.
        if (bestMediaFileUrl == null && !mediaXmlManagers.isEmpty()) {
            bestMediaFileUrl = mediaXmlManagers.get(0).getMediaUrl();
        }
        return bestMediaFileUrl;
    }

    /**
     * Same selection strategy as {@link #getBestMediaFileUrl} but for image companion
     * ads; returns a {@link VastCompanionAd} built from the winner, or null when no
     * usable companion exists. Package-private for testing.
     */
    VastCompanionAd getBestCompanionAd(final List<VastXmlManager.ImageCompanionAdXmlManager> managers) {
        final List<VastXmlManager.ImageCompanionAdXmlManager> companionXmlManagers =
                new ArrayList<VastXmlManager.ImageCompanionAdXmlManager>(managers);
        double bestCompanionFitness = Double.POSITIVE_INFINITY;
        VastXmlManager.ImageCompanionAdXmlManager bestCompanionXmlManager = null;
        final Iterator<VastXmlManager.ImageCompanionAdXmlManager> xmlManagerIterator =
                companionXmlManagers.iterator();
        while (xmlManagerIterator.hasNext()) {
            final VastXmlManager.ImageCompanionAdXmlManager companionXmlManager = xmlManagerIterator.next();
            final String imageType = companionXmlManager.getType();
            final String imageUrl = companionXmlManager.getImageUrl();
            if (!COMPANION_IMAGE_MIME_TYPES.contains(imageType) || imageUrl == null) {
                xmlManagerIterator.remove();
                continue;
            }
            final Integer imageWidth = companionXmlManager.getWidth();
            final Integer imageHeight = companionXmlManager.getHeight();
            if (imageWidth == null || imageWidth <= 0 || imageHeight == null || imageHeight <= 0) {
                continue;
            }
            final double companionFitness = calculateFitness(imageWidth, imageHeight);
            if (companionFitness < bestCompanionFitness) {
                bestCompanionFitness = companionFitness;
                bestCompanionXmlManager = companionXmlManager;
            }
        }
        if (bestCompanionXmlManager == null && !companionXmlManagers.isEmpty()) {
            bestCompanionXmlManager = companionXmlManagers.get(0);
        }
        if (bestCompanionXmlManager != null) {
            return new VastCompanionAd(
                    bestCompanionXmlManager.getWidth(),
                    bestCompanionXmlManager.getHeight(),
                    bestCompanionXmlManager.getImageUrl(),
                    bestCompanionXmlManager.getClickThroughUrl(),
                    new ArrayList<String>(bestCompanionXmlManager.getClickTrackers())
            );
        }
        return null;
    }

    /**
     * Fitness of a candidate against the screen: weighted sum of |log| ratios of
     * aspect ratio and area. 0 is a perfect fit; lower is better.
     */
    private double calculateFitness(final int width, final int height) {
        final double mediaAspectRatio = (double) width / height;
        final int mediaArea = width * height;
        final double aspectRatioRatio = mediaAspectRatio / mScreenAspectRatio;
        final double areaRatio = (double) mediaArea / mScreenArea;
        return ASPECT_RATIO_WEIGHT * Math.abs(Math.log(aspectRatioRatio))
                + AREA_WEIGHT * Math.abs(Math.log(areaRatio));
    }

    @Deprecated // for testing
    int getScreenArea() {
        return mScreenArea;
    }

    @Deprecated // for testing
    double getScreenAspectRatio() {
        return mScreenAspectRatio;
    }
}
/*
 * Created on 21.12.2004
 *
 * TODO To change the template for this generated file go to
 * Window - Preferences - Java - Code Style - Code Templates
 */
package org.dcm4chex.archive.web.maverick.mcmc.model;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import javax.servlet.http.HttpServletRequest;

import org.dcm4chex.archive.ejb.interfaces.MediaComposer;
import org.dcm4chex.archive.ejb.interfaces.MediaComposerHome;
import org.dcm4chex.archive.ejb.interfaces.MediaDTO;
import org.dcm4chex.archive.util.EJBHomeFactory;
import org.dcm4chex.archive.web.maverick.mcmc.MCMConsoleCtrl;

/**
 * @author franz.willer
 *
 * The Model for Media Creation Managment WEB interface.
 * <p>
 * One instance is kept per HTTP session (see {@link #getModel}); it holds the
 * current search filter, paging state and the media list shown in the view.
 */
public class MCMModel {

    /** The session attribute name to store the model in http session. */
    public static final String MCMMODEL_ATTR_NAME = "mcmModel";

    /**
     * Status for action 'queue'
     * <p>
     * This value is used in xsl to create queue action button for a media with this status.
     */
    public static final String STATI_FOR_QUEUE = String.valueOf( MediaDTO.OPEN );

    /** Errorcode: no error */
    public static final String NO_ERROR = "OK";
    /** Errorcode: unsupported action */
    public static final String ERROR_UNSUPPORTED_ACTION = "UNSUPPORTED_ACTION";
    /** Errorcode: media deletion failed */
    public static String ERROR_MEDIA_DELETE = "MEDIA_DELETE_FAILED";

    /** holds current error code. */
    private String errorCode = NO_ERROR;
    /** Popup message */
    private String popupMsg = null;

    /** Holds the current offset for paging */
    private int offset = 0;
    /** Holds the limit for paging */
    private int limit = 10;
    /** Holds the total number of results of last search. */
    private int total = 0;

    /** Holds the current list of media for the view. */
    private MediaList mediaList;
    /** Holds the filter for media search. */
    private MCMFilter filter;
    /** Holds availability status of MCM SCP */
    private boolean mcmNotAvail = false;

    /**
     * Holds the 'checked' flag for 'checkMCM' checkbox.
     * <p>
     * checkMCM request parameter is only present if mcmNotAvail is true.
     * <p>
     * The availability check of mcm is done either before 'queue' action or
     * if checkMCM request parameter is true.
     */
    private boolean checkAvail = false;

    private final boolean admin;
    private boolean mcmUser = false;

    /**
     * Creates the model.
     * <p>
     * Perform an initial media search with the default filter. <br>
     * (search for all media with status COLLECTING)
     * <p>
     * performs an initial availability check for MCM_SCP service.
     */
    private MCMModel(boolean admin) {
        this.admin = admin;
        getFilter(); // lazily creates the default filter
        filterMediaList( true );
        mcmNotAvail = ! MCMConsoleCtrl.getMcmScuDelegate().checkMcmScpAvail();
    }

    /**
     * Get the model for an http request.
     * <p>
     * Look in the session for an associated model via <code>MCMMODEL_ATTR_NAME</code><br>
     * If there is no model stored in session (first request) a new model is created and stored in session.
     *
     * @param request A http request.
     *
     * @return The model for given request.
     */
    public static final MCMModel getModel( HttpServletRequest request ) {
        MCMModel model = (MCMModel) request.getSession().getAttribute(MCMMODEL_ATTR_NAME);
        if (model == null) {
            model = new MCMModel(request.isUserInRole("WebAdmin"));
            model.mcmUser = request.isUserInRole("McmUser");
            request.getSession().setAttribute(MCMMODEL_ATTR_NAME, model);
            model.setErrorCode( NO_ERROR ); //reset error code
        }
        return model;
    }

    /**
     * @return Returns true if the user have WebAdmin role.
     */
    public boolean isAdmin() {
        return admin;
    }

    /**
     * @return Returns the mcmUser.
     */
    public boolean isMcmUser() {
        return mcmUser;
    }

    /**
     * Set the error code of this model.
     *
     * @param errorCode The error code
     */
    public void setErrorCode(String errorCode) {
        this.errorCode = errorCode;
    }

    /**
     * Get current error code of this model.
     *
     * @return error code.
     */
    public String getErrorCode() {
        return errorCode;
    }

    /**
     * @return Returns the popupMsg.
     */
    public String getPopupMsg() {
        return popupMsg;
    }

    /**
     * @param popupMsg The popupMsg to set.
     */
    public void setPopupMsg(String popupMsg) {
        this.popupMsg = popupMsg;
    }

    /**
     * Returns the status for 'queue' action.
     * <p>
     * This value is used in the view to create a 'queue' action button/link
     * for media with this status.
     *
     * @return The status for queue action
     */
    public String getStatiForQueue() {
        return STATI_FOR_QUEUE;
    }

    /**
     * Returns current list of media.
     *
     * @return List of media.
     */
    public List getMediaList() {
        return mediaList;
    }

    /**
     * Returns the Filter that is used to search media (lazily created).
     *
     * @return current filter.
     */
    public MCMFilter getFilter() {
        if ( filter == null ) filter = new MCMFilter();
        return filter;
    }

    /**
     * Perform a media search with current filter settings.
     * <p>
     * If <code>newSearch</code> is true the <code>offset</code> is set to <code>0</code> (get first result page).
     * <p>
     * The result of the search is stored in <code>mediaList</code> and <code>total</code> is updated
     * with the total number of results for this search.
     *
     * @param newSearch
     */
    public void filterMediaList( boolean newSearch ) {
        if ( newSearch ) offset = 0;
        try {
            Collection col = new ArrayList();
            Long start = null;
            Long end = null;
            int[] stati = null;
            if ( filter.selectedStati() != null ) { //not all
                stati = filter.selectedStati();
            }
            if ( ! mcmNotAvail && ( stati == null
                    || filter.getSelectedStatiAsString().indexOf( String.valueOf( MediaDTO.BURNING ) ) != -1 ) ) {
                //perform get media creation status if filter contains PROCESSING media status.
                MCMConsoleCtrl.getMcmScuDelegate().updateMediaStatus();
            }
            if ( MCMFilter.DATE_FILTER_ALL.equals( filter.getCreateOrUpdateDate() ) ) {
                // No date restriction: start/end remain null.
                total = lookupMediaComposer().findByCreatedTime( col, start, end, stati,
                        new Integer( offset ), new Integer( limit ), filter.isDescent() );
            } else {
                start = filter.startDateAsLong();
                end = filter.endDateAsLong();
                if ( MCMFilter.CREATED_FILTER.equals( filter.getCreateOrUpdateDate() ) ) {
                    total = lookupMediaComposer().findByCreatedTime( col, start, end, stati,
                            new Integer( offset ), new Integer( limit ), filter.isDescent() );
                } else if ( MCMFilter.UPDATED_FILTER.equals( filter.getCreateOrUpdateDate() ) ) {
                    total = lookupMediaComposer().findByUpdatedTime( col, start, end, stati,
                            new Integer( offset ), new Integer( limit ), filter.isDescent() );
                }
            }
            mediaList = new MediaList( col );
            col.clear();
        } catch ( Exception x ) {
            //TODO proper error handling; currently any failure yields an empty list.
            x.printStackTrace();
            mediaList = new MediaList();
        }
    }

    /**
     * Returns the MediaComposer bean.
     *
     * @return The MediaComposer bean.
     * @throws Exception
     */
    protected MediaComposer lookupMediaComposer() throws Exception {
        MediaComposerHome home = (MediaComposerHome) EJBHomeFactory.getFactory().lookup(
                MediaComposerHome.class, MediaComposerHome.JNDI_NAME);
        return home.create();
    }

    /**
     * Returns current page limit.
     *
     * @return Returns the limit.
     */
    public int getLimit() {
        return limit;
    }

    /**
     * Set current page limit.
     *
     * @param limit The limit to set.
     */
    public void setLimit(int limit) {
        this.limit = limit;
    }

    /**
     * Return current offset (page number; starts with 0).
     *
     * @return Returns the offset.
     */
    public int getOffset() {
        return offset;
    }

    /**
     * Set current page offset
     * @param offset The offset to set.
     */
    public void setOffset(int offset) {
        this.offset = offset;
    }

    /**
     * Return the total number of results of the last search.
     *
     * @return Returns the total.
     */
    public int getTotal() {
        return total;
    }

    /**
     * @return Returns the mcmNotAvail.
     */
    public boolean isMcmNotAvail() {
        return mcmNotAvail;
    }

    /**
     * @param mcmNotAvail The mcmNotAvail to set.
     */
    public void setMcmNotAvail(boolean mcmNotAvail) {
        this.mcmNotAvail = mcmNotAvail;
    }

    /**
     * @return Returns the checkAvail.
     */
    public boolean isCheckAvail() {
        return checkAvail;
    }

    /**
     * @param checkAvail The checkAvail to set.
     */
    public void setCheckAvail(boolean checkAvail) {
        this.checkAvail = checkAvail;
    }

    /**
     * Goto previous page (no-op when already on the first page).
     */
    public void performPrevious() {
        if ( offset - limit >= 0 ) {
            offset -= limit;
            filterMediaList( false );
        }
    }

    /**
     * Goto next page (no-op when already on the last page).
     */
    public void performNext() {
        if ( offset + limit < total ) {
            offset += limit;
            filterMediaList( false );
        }
    }

    /**
     * Update the mediaStatus of media with given pk.
     * <p>
     * This method updates the status with information in current model and in MediaLocal bean.
     *
     * @param mediaPk The pk of the media to change.
     * @param status The new media status.
     * @param statusInfo Info text for the new status.
     */
    public void updateMediaStatus(int mediaPk, int status, String statusInfo) {
        MediaData md = mediaDataFromList( mediaPk );
        if ( md != null ) {
            md.setMediaStatus( status );
            md.setMediaStatusInfo( statusInfo );
            try {
                this.lookupMediaComposer().setMediaStatus( mediaPk, status, statusInfo );
            } catch (Exception e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        }
    }

    /**
     * Returns the MediaData object for given media pk.
     *
     * @param mediaPk PK of a media.
     *
     * @return The MediaData object, or null if the pk is not in the current list.
     */
    public MediaData mediaDataFromList( int mediaPk ) {
        int pos = getMediaList().indexOf( new MediaData( mediaPk) );
        if ( pos != -1 ) {
            return (MediaData) this.mediaList.get( pos );
        }
        return null;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.sentry.tests.e2e.hive;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import java.io.File;
import java.io.FileOutputStream;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Map;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
import org.apache.sentry.binding.hive.v2.HiveAuthzBindingSessionHookV2;
import org.apache.sentry.binding.hive.v2.SentryAuthorizerFactory;
import org.apache.sentry.provider.file.PolicyFile;
import org.apache.sentry.tests.e2e.hive.hiveserver.HiveServerFactory;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

import com.google.common.base.Charsets;
import com.google.common.collect.Maps;

/**
 * End-to-end tests validating that the Sentry/HiveServer2 configuration is
 * enforced: insecure server settings are rejected, policy-file problems deny
 * access, and the session hook pins the required Hive config values.
 */
public class TestServerConfiguration extends AbstractTestWithHiveServer {

    // Shared server context for tests that don't need a custom configuration.
    private static Context context;
    private static Map<String, String> properties;
    private PolicyFile policyFile;

    @BeforeClass
    public static void setup() throws Exception {
        properties = Maps.newHashMap();
        context = createContext(properties);
    }

    @AfterClass
    public static void tearDown() throws Exception {
        if(context != null) {
            context.close();
        }
    }

    @Before
    public void setupPolicyFile() throws Exception {
        policyFile = PolicyFile.setAdminOnServer1(ADMINGROUP);
    }

    /**
     * hive.server2.enable.impersonation must be disabled
     */
    @Test
    public void testImpersonationIsDisabled() throws Exception {
        Map<String, String> properties = Maps.newHashMap();
        properties.put(HiveServerFactory.ACCESS_TESTING_MODE, "false");
        properties.put("hive.server2.enable.impersonation", "true");
        verifyInvalidConfigurationException(properties);
    }

    /**
     * hive.server2.authentication must be set to LDAP or KERBEROS
     */
    @Test
    public void testAuthenticationIsStrong() throws Exception {
        Map<String, String> properties = Maps.newHashMap();
        properties.put(HiveServerFactory.ACCESS_TESTING_MODE, "false");
        properties.put("hive.server2.authentication", "NONE");
        verifyInvalidConfigurationException(properties);
    }

    /**
     * Spins up a server with the given (invalid) properties and asserts that a
     * DDL statement fails with the expected invalid-configuration SQLException.
     * NOTE(review): the local Connection/Statement are not closed -- only the
     * per-test Context is; confirm whether Context.close() covers them.
     */
    private void verifyInvalidConfigurationException(Map<String, String> properties) throws Exception{
        Context context = createContext(properties); // shadows the class-level context on purpose
        policyFile
                .setUserGroupMapping(StaticUserGroup.getStaticMapping())
                .write(context.getPolicyFile());
        Connection connection = context.createConnection(ADMIN1);
        Statement statement = context.createStatement(connection);
        try {
            statement.execute("create table test (a string)");
            Assert.fail("Expected SQLException");
        } catch (SQLException e) {
            context.verifyInvalidConfigurationException(e);
        } finally {
            if (context != null) {
                context.close();
            }
        }
    }

    /**
     * Test removal of policy file
     */
    @Test
    public void testRemovalOfPolicyFile() throws Exception {
        // No policy file is written for this test, so authorization must fail.
        Connection connection = context.createConnection(ADMIN1);
        Statement statement = context.createStatement(connection);
        try {
            statement.execute("DROP TABLE IF EXISTS test CASCADE");
            statement.execute("create table test (a string)");
            Assert.fail("Expected SQLException");
        } catch (SQLException e) {
            context.verifyAuthzException(e);
        }
    }

    /**
     * Test corruption of policy file
     */
    @Test
    public void testCorruptionOfPolicyFile() throws Exception {
        // Overwrite the policy file with unparseable content; access must be denied.
        File policyFile = context.getPolicyFile();
        FileOutputStream out = new FileOutputStream(policyFile);
        out.write("this is not valid".getBytes(Charsets.UTF_8));
        out.close();
        Connection connection = context.createConnection(ADMIN1);
        Statement statement = context.createStatement(connection);
        try {
            statement.execute("DROP TABLE IF EXISTS test CASCADE");
            statement.execute("create table test (a string)");
            Assert.fail("Expected SQLException");
        } catch (SQLException e) {
            context.verifyAuthzException(e);
        }
    }

    @Test
    public void testAddDeleteDFSRestriction() throws Exception {
        policyFile
                .addRolesToGroup(USERGROUP1, "all_db1")
                .addRolesToGroup(USERGROUP2, "select_tb1")
                .addPermissionsToRole("select_tb1", "server=server1->db=db_1->table=tbl_1->action=select")
                .addPermissionsToRole("all_db1", "server=server1->db=db_1")
                .setUserGroupMapping(StaticUserGroup.getStaticMapping())
                .write(context.getPolicyFile());
        Connection connection = context.createConnection(USER1_1);
        Statement statement = context.createStatement(connection);
        // disallow external executables. The external.exec is set to false by session hooks
        context.assertAuthzException(statement, "ADD JAR /usr/lib/hive/lib/hbase.jar");
        context.assertAuthzException(statement, "ADD FILE /tmp/tt.py");
        context.assertAuthzException(statement, "DFS -ls");
        context.assertAuthzException(statement, "DELETE JAR /usr/lib/hive/lib/hbase.jar");
        context.assertAuthzException(statement, "DELETE FILE /tmp/tt.py");
        statement.close();
        connection.close();
    }

    /**
     * Test that the required access configs are set by session hook
     */
    @Test
    public void testAccessConfigRestrictions() throws Exception {
        policyFile
                .setUserGroupMapping(StaticUserGroup.getStaticMapping())
                .write(context.getPolicyFile());
        String testUser = USER1_1;
        // verify the config is set correctly by session hook
        verifyConfig(testUser, ConfVars.HIVE_AUTHORIZATION_ENABLED.varname, "true");
        verifyConfig(testUser, ConfVars.HIVE_SERVER2_ENABLE_DOAS.varname, "false");
        verifyConfig(testUser, ConfVars.HIVE_AUTHENTICATOR_MANAGER.varname,
                "org.apache.hadoop.hive.ql.security.SessionStateUserAuthenticator");
        verifyConfig(testUser, ConfVars.HIVE_AUTHORIZATION_MANAGER.varname,
                SentryAuthorizerFactory.class.getName());
        verifyConfig(testUser, ConfVars.HIVE_CAPTURE_TRANSFORM_ENTITY.varname, "true");
        verifyConfig(testUser, ConfVars.HIVE_SECURITY_COMMAND_WHITELIST.varname, "set");
        verifyConfig(testUser, ConfVars.SCRATCHDIRPERMISSION.varname,
                HiveAuthzBindingSessionHookV2.SCRATCH_DIR_PERMISSIONS);
        verifyConfig(testUser, HiveConf.ConfVars.HIVE_CONF_RESTRICTED_LIST.varname,
                HiveAuthzBindingSessionHookV2.ACCESS_RESTRICT_LIST);
        verifyConfig(testUser, HiveAuthzConf.HIVE_ACCESS_SUBJECT_NAME, testUser);
    }

    /**
     * Runs "set <confVar>" as the given user and asserts that every comma-separated
     * token of expectedValue appears (case-insensitively) in the returned value.
     */
    private void verifyConfig(String userName, String confVar, String expectedValue) throws Exception {
        Connection connection = context.createConnection(userName);
        Statement statement = context.createStatement(connection);
        statement.execute("set " + confVar);
        ResultSet res = statement.getResultSet();
        assertTrue(res.next());
        String configValue = res.getString(1);
        assertNotNull(configValue);
        // "set" output has the form key=value; take the value part.
        String restrictListValues = (configValue.split("="))[1];
        assertFalse(restrictListValues.isEmpty());
        for (String restrictConfig: expectedValue.split(",")) {
            assertTrue(restrictListValues.toLowerCase().contains(restrictConfig.toLowerCase()));
        }
    }

    /**
     * Test access to default DB with explicit privilege requirement
     * Admin should be able to run use default with server level access
     * User with db level access should be able to run use default
     * User with table level access should be able to run use default
     * User with no access to default db objects, should NOT be able run use default
     * @throws Exception
     */
    @Test
    public void testDefaultDbRestrictivePrivilege() throws Exception {
        Map<String, String> properties = Maps.newHashMap();
        properties.put(HiveAuthzConf.AuthzConfVars.AUTHZ_RESTRICT_DEFAULT_DB.getVar(), "true");
        Context context = createContext(properties); // dedicated server with restrictive default-db
        policyFile
                .addRolesToGroup(USERGROUP1, "all_default")
                .addRolesToGroup(USERGROUP2, "select_default")
                .addRolesToGroup(USERGROUP3, "all_db1")
                .addPermissionsToRole("all_default", "server=server1->db=default")
                .addPermissionsToRole("select_default", "server=server1->db=default->table=tab_2->action=select")
                .addPermissionsToRole("all_db1", "server=server1->db=DB_1")
                .setUserGroupMapping(StaticUserGroup.getStaticMapping())
                .write(context.getPolicyFile());
        Connection connection = context.createConnection(ADMIN1);
        Statement statement = context.createStatement(connection);
        statement.execute("use default");
        connection = context.createConnection(USER1_1);
        statement = context.createStatement(connection);
        statement.execute("use default");
        connection = context.createConnection(USER2_1);
        statement = context.createStatement(connection);
        statement.execute("use default");
        connection = context.createConnection(USER3_1);
        statement = context.createStatement(connection);
        try {
            // user3 doesn't have any implicit permission for default
            statement.execute("use default");
            assertFalse("user3 shouldn't be able switch to default", true);
        } catch (SQLException e) {
            context.verifyAuthzException(e);
        }
        context.close();
    }
}
/**
 * Copyright (c) 2005-2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.wso2.esb.integration.common.utils.clients;

import org.apache.http.HttpEntityEnclosingRequest;
import org.apache.http.HttpHeaders;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpRequestRetryHandler;
import org.apache.http.client.methods.*;
import org.apache.http.entity.ContentProducer;
import org.apache.http.entity.EntityTemplate;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.impl.conn.tsccm.ThreadSafeClientConnManager;
import org.apache.http.params.HttpConnectionParams;
import org.apache.http.params.HttpParams;
import org.apache.http.protocol.HttpContext;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Map;
import java.util.zip.GZIPOutputStream;

/**
 * Simple HTTP client implementation that can be used to send test HTTP requests.
*/
public class SimpleHttpClient {

    /** Underlying Apache HttpClient (thread-safe connection manager, retries disabled). */
    private DefaultHttpClient client;

    /**
     * Creates a client with 30-second connection and socket timeouts.
     * Failed requests are never retried, so callers observe the first failure.
     */
    public SimpleHttpClient() {
        this.client = new DefaultHttpClient(new ThreadSafeClientConnManager());
        HttpParams params = client.getParams();
        HttpConnectionParams.setConnectionTimeout(params, 30000);
        HttpConnectionParams.setSoTimeout(params, 30000);
        // Disable automatic retries entirely.
        client.setHttpRequestRetryHandler(new HttpRequestRetryHandler() {
            public boolean retryRequest(IOException e, int i, HttpContext httpContext) {
                return false;
            }
        });
    }

    /**
     * Send a HTTP GET request to the specified URL
     *
     * @param url     Target endpoint URL
     * @param headers Any HTTP headers that should be added to the request
     * @return Returned HTTP response
     * @throws IOException If an error occurs while making the invocation
     */
    public HttpResponse doGet(String url, Map<String, String> headers) throws IOException {
        HttpUriRequest request = new HttpGet(url);
        setHeaders(headers, request);
        return client.execute(request);
    }

    /**
     * Send a HTTP POST request to the specified URL
     *
     * @param url         Target endpoint URL
     * @param headers     Any HTTP headers that should be added to the request
     * @param payload     Content payload that should be sent
     * @param contentType Content-type of the request
     * @return Returned HTTP response
     * @throws IOException If an error occurs while making the invocation
     */
    public HttpResponse doPost(String url, final Map<String, String> headers,
                               final String payload, String contentType) throws IOException {
        HttpUriRequest request = new HttpPost(url);
        setHeaders(headers, request);
        HttpEntityEnclosingRequest entityEncReq = (HttpEntityEnclosingRequest) request;
        entityEncReq.setEntity(buildEntity(headers, payload, contentType));
        return client.execute(request);
    }

    /**
     * Extracts the payload from a HTTP response. For a given HttpResponse object, this
     * method can be called only once, since it consumes the entity's content stream.
     *
     * @param response HttpResponse instance to be extracted
     * @return Content payload, or null if the response carries no entity
     * @throws IOException If an error occurs while reading from the response
     */
    public String getResponsePayload(HttpResponse response) throws IOException {
        if (response.getEntity() == null) {
            return null;
        }
        InputStream in = response.getEntity().getContent();
        try {
            // Accumulate raw bytes first, then decode once: decoding each 2048-byte
            // chunk separately (as the previous implementation did) corrupts multi-byte
            // characters that straddle a chunk boundary.
            java.io.ByteArrayOutputStream buffer = new java.io.ByteArrayOutputStream();
            byte[] tmp = new byte[2048];
            int length;
            while ((length = in.read(tmp)) != -1) {
                buffer.write(tmp, 0, length);
            }
            // NOTE(review): decodes with the platform default charset, matching prior
            // behavior; ideally the charset from the Content-Type header should be used.
            return buffer.toString();
        } finally {
            in.close();
        }
    }

    /**
     * Send a HTTP PATCH request to the specified URL
     *
     * @param url         Target endpoint URL
     * @param headers     Any HTTP headers that should be added to the request
     * @param payload     Content payload that should be sent
     * @param contentType Content-type of the request
     * @return Returned HTTP response
     * @throws IOException If an error occurs while making the invocation
     */
    public HttpResponse doPatch(String url, final Map<String, String> headers,
                                final String payload, String contentType) throws IOException {
        HttpUriRequest request = new HttpPatch(url);
        setHeaders(headers, request);
        HttpEntityEnclosingRequest entityEncReq = (HttpEntityEnclosingRequest) request;
        entityEncReq.setEntity(buildEntity(headers, payload, contentType));
        return client.execute(request);
    }

    /**
     * Send a HTTP OPTIONS request to the specified URL
     *
     * @param url         Target endpoint URL
     * @param headers     Any HTTP headers that should be added to the request
     * @param payload     Content payload that should be sent (may be null for a bodiless request)
     * @param contentType Content-type of the request
     * @return Returned HTTP response
     * @throws IOException If an error occurs while making the invocation
     */
    public HttpResponse doOptions(String url, final Map<String, String> headers,
                                  final String payload, String contentType) throws IOException {
        HttpUriRequest request = new HttpOptions(url);
        setHeaders(headers, request);
        if (payload != null) {
            HttpEntityEnclosingRequest entityEncReq = (HttpEntityEnclosingRequest) request;
            entityEncReq.setEntity(buildEntity(headers, payload, contentType));
        }
        return client.execute(request);
    }

    /**
     * Send a HTTP Head request to the specified URL
     *
     * @param url     Target endpoint URL
     * @param headers Any HTTP headers that should be added to the request
     * @return Returned HTTP response
     * @throws IOException If an error occurs while making the invocation
     */
    public HttpResponse doHead(String url, final Map<String, String> headers) throws IOException {
        HttpUriRequest request = new HttpHead(url);
        setHeaders(headers, request);
        return client.execute(request);
    }

    /**
     * Send a HTTP DELETE request to the specified URL
     *
     * @param url     Target endpoint URL
     * @param headers Any HTTP headers that should be added to the request
     * @return Returned HTTP response
     * @throws IOException If an error occurs while making the invocation
     */
    public HttpResponse doDelete(String url, final Map<String, String> headers) throws IOException {
        HttpUriRequest request = new HttpDelete(url);
        setHeaders(headers, request);
        return client.execute(request);
    }

    /**
     * Send a HTTP PUT request to the specified URL
     *
     * @param url         Target endpoint URL
     * @param headers     Any HTTP headers that should be added to the request
     * @param payload     Content payload that should be sent
     * @param contentType Content-type of the request
     * @return Returned HTTP response
     * @throws IOException If an error occurs while making the invocation
     */
    public HttpResponse doPut(String url, final Map<String, String> headers,
                              final String payload, String contentType) throws IOException {
        HttpUriRequest request = new HttpPut(url);
        setHeaders(headers, request);
        HttpEntityEnclosingRequest entityEncReq = (HttpEntityEnclosingRequest) request;
        entityEncReq.setEntity(buildEntity(headers, payload, contentType));
        return client.execute(request);
    }

    /**
     * Builds the request entity shared by POST/PATCH/PUT/OPTIONS. If the caller set
     * a {@code Content-Encoding: gzip} header, the payload is gzip-compressed on the
     * fly and the entity's content encoding is marked accordingly.
     * (Previously this logic was duplicated verbatim in four methods.)
     *
     * @param headers     request headers (consulted for Content-Encoding; may be null)
     * @param payload     body text to send
     * @param contentType Content-type of the request
     * @return the prepared entity
     */
    private EntityTemplate buildEntity(final Map<String, String> headers,
                                       final String payload, String contentType) {
        final boolean zip = headers != null &&
                "gzip".equals(headers.get(HttpHeaders.CONTENT_ENCODING));
        EntityTemplate ent = new EntityTemplate(new ContentProducer() {
            public void writeTo(OutputStream outputStream) throws IOException {
                OutputStream out = outputStream;
                if (zip) {
                    out = new GZIPOutputStream(outputStream);
                }
                // NOTE(review): encodes with the platform default charset, matching
                // prior behavior; an explicit UTF-8 would be safer but changes bytes
                // on non-UTF-8 platforms.
                out.write(payload.getBytes());
                out.flush();
                // Closing here finalizes the gzip trailer when compression is active.
                out.close();
            }
        });
        ent.setContentType(contentType);
        if (zip) {
            ent.setContentEncoding("gzip");
        }
        return ent;
    }

    /** Copies the given header map onto the request; no-op for null/empty maps. */
    private void setHeaders(Map<String, String> headers, HttpUriRequest request) {
        if (headers != null && headers.size() > 0) {
            for (Map.Entry<String, String> header : headers.entrySet()) {
                request.setHeader(header.getKey(), header.getValue());
            }
        }
    }
}
/* BloomFilter * * $Id$ * * Created on Jun 21, 2005 * * Copyright (C) 2005 Internet Archive; a slight adaptation of * LGPL work (C) Sebastiano Vigna * * This file is part of the Heritrix web crawler (crawler.archive.org). * * Heritrix is free software; you can redistribute it and/or modify * it under the terms of the GNU Lesser Public License as published by * the Free Software Foundation; either version 2.1 of the License, or * any later version. * * Heritrix is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser Public License for more details. * * You should have received a copy of the GNU Lesser Public License * along with Heritrix; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package org.archive.util; import java.io.Serializable; import java.security.SecureRandom; import java.util.Random; /** A Bloom filter. * * ADAPTED/IMPROVED VERSION OF MG4J it.unimi.dsi.mg4j.util.BloomFilter * * <p>KEY CHANGES: * * <ul> * <li>NUMBER_OF_WEIGHTS is 2083, to better avoid collisions between * similar strings (common in the domain of URIs)</li> * * <li>Removed dependence on cern.colt MersenneTwister (replaced with * SecureRandom) and QuickBitVector (replaced with local methods).</li> * * <li>Adapted to allow long bit indices</li> * * <li>Stores bitfield in an array of up to 2^22 arrays of 2^26 longs. Thus, * bitfield may grow to 2^48 longs in size -- 2PiB, 2*54 bitfield indexes. * (I expect this will outstrip available RAM for the next few years.)</li> * </ul> * * <hr> * * <P>Instances of this class represent a set of character sequences (with * false positives) using a Bloom filter. Because of the way Bloom filters work, * you cannot remove elements. 
 *
 * <P>Bloom filters have an expected error rate, depending on the number
 * of hash functions used, on the filter size and on the number of elements in
 * the filter. This implementation uses a variable optimal number of hash
 * functions, depending on the expected number of elements. More precisely, a
 * Bloom filter for <var>n</var> character sequences with <var>d</var> hash
 * functions will use ln 2 <var>d</var><var>n</var> &#8776;
 * 1.44 <var>d</var><var>n</var> bits; false positives will happen with
 * probability 2<sup>-<var>d</var></sup>.
 *
 * <P>Hash functions are generated at creation time using universal hashing.
 * Each hash function uses {@link #NUMBER_OF_WEIGHTS} random integers, which
 * are cyclically multiplied by the character codes in a character sequence.
 * The resulting integers are XOR-ed together.
 *
 * <P>This class exports access methods that are very similar to those of
 * {@link java.util.Set}, but it does not implement that interface, as too
 * many non-optional methods would be unimplementable (e.g., iterators).
 *
 * <p>NOTE: this class is not thread-safe for concurrent writes; no
 * synchronization is visible anywhere in the implementation.
 *
 * @author Sebastiano Vigna
 * @author Gordon Mohr (adaptation)
 */
public class BloomFilter64bit implements Serializable, BloomFilter {
    private static final long serialVersionUID = 2L;

    /** The number of weights used to create hash functions (prime, raised from
     * MG4J's 16 to better separate similar strings such as URIs). */
    final static int NUMBER_OF_WEIGHTS = 2083; // CHANGED FROM 16
    /** The number of bits in this filter. */
    final protected long m;
    /** If the bitfield is an exact power of 2 in length, this is that power;
     * -1 otherwise (selects the cheaper shift-based indexing in hash()). */
    protected int power = -1;
    /** The expected number of inserts; determines the calculated size. */
    final protected long expectedInserts;
    /** The number of hash functions used by this filter. */
    final protected int d;
    /** The underlying bit vector, as an array of long[] subarrays (see
     * SUBARRAY_POWER_OF_TWO) to allow more than 2^31 bits. */
    final protected long[][] bits;
    /** The random integers used to generate the hash functions:
     * weight[k] holds NUMBER_OF_WEIGHTS longs for hash function k. */
    final protected long[][] weight;

    /** The number of elements currently in the filter. It may be smaller than
     * the actual number of additions of distinct character sequences because
     * of false positives. */
    int size;

    /** The natural logarithm of 2, used in the computation of the number of bits. */
    final static double NATURAL_LOG_OF_2 = Math.log( 2 );

    /** Power-of-two to use as maximum size of bitfield subarrays. */
    protected final static int SUBARRAY_POWER_OF_TWO = 26; // 512MiB of longs
    /** Number of longs in one subarray. */
    protected final static int SUBARRAY_LENGTH_IN_LONGS = 1 << SUBARRAY_POWER_OF_TWO;
    /** Mask for the lowest SUBARRAY_POWER_OF_TWO bits of a long index. */
    protected final static int SUBARRAY_MASK = SUBARRAY_LENGTH_IN_LONGS - 1; //0x0FFFFFFF

    final static boolean DEBUG = false;

    /** Creates a new Bloom filter with given number of hash functions and
     * expected number of elements, using a SecureRandom for weights and no
     * power-of-two rounding.
     *
     * @param n the expected number of elements.
     * @param d the number of hash functions; if the filter add not more
     * than <code>n</code> elements, false positives will happen with
     * probability 2<sup>-<var>d</var></sup>.
     */
    public BloomFilter64bit( final long n, final int d) {
        this(n,d, new SecureRandom(), false);
    }

    /** As {@link #BloomFilter64bit(long, int)}, optionally rounding the bit
     * size up to the next power of two (enables shift-based indexing). */
    public BloomFilter64bit( final long n, final int d, boolean roundUp) {
        this(n,d, new SecureRandom(), roundUp);
    }

    /** Creates a new Bloom filter with given number of hash functions and
     * expected number of elements.
     *
     * @param n the expected number of elements.
     * @param d the number of hash functions; if the filter add not more
     * than <code>n</code> elements, false positives will happen with
     * probability 2<sup>-<var>d</var></sup>.
     * @param weightsGenerator may provide a seeded Random for reproducible
     * internal universal hash function weighting
     * @param roundUp if true, round bit size up to next-nearest-power-of-2
     */
    public BloomFilter64bit( final long n, final int d, Random weightsGenerator, boolean roundUp ) {
        this.expectedInserts = n;
        this.d = d;
        // n*d/ln2 bits, expressed as a count of 64-bit longs (rounded up).
        long lenInLongs = (long)Math.ceil( ( (long)n * (long)d / NATURAL_LOG_OF_2 ) / 64L );
        if ( lenInLongs > (1L<<48) ) {
            throw new IllegalArgumentException(
                    "This filter would require " + lenInLongs + " longs, " +
                    "greater than this classes maximum of 2^48 longs (2PiB)." );
        }
        long lenInBits = lenInLongs * 64L;
        if(roundUp) {
            // Find the smallest power of two >= lenInBits.
            int pow = 0;
            while((1L<<pow) < lenInBits) {
                pow++;
            }
            this.power = pow;
            this.m = 1L<<pow;
            lenInLongs = m/64L;
        } else {
            this.m = lenInBits;
        }
        int arrayOfArraysLength = (int)((lenInLongs+SUBARRAY_LENGTH_IN_LONGS-1)/SUBARRAY_LENGTH_IN_LONGS);
        bits = new long[ (int)(arrayOfArraysLength) ][];
        // Ensure the last subarray is no longer than necessary.
        long lenInLongsRemaining = lenInLongs;
        for(int i = 0; i < bits.length; i++) {
            bits[i] = new long[(int)Math.min(lenInLongsRemaining,SUBARRAY_LENGTH_IN_LONGS)];
            lenInLongsRemaining -= bits[i].length;
        }
        if ( DEBUG ) System.err.println( "Number of bits: " + m );
        // Draw the universal-hashing weights for each of the d hash functions.
        weight = new long[ d ][];
        for( int i = 0; i < d; i++ ) {
            weight[ i ] = new long[ NUMBER_OF_WEIGHTS ];
            for( int j = 0; j < NUMBER_OF_WEIGHTS; j++ )
                weight[ i ][ j ] = weightsGenerator.nextLong();
        }
    }

    /** The number of character sequences in the filter.
     *
     * @return the number of character sequences in the filter (but
     * see {@link #contains(CharSequence)}).
     */
    public int size() {
        return size;
    }

    /** Hashes the given sequence with the given hash function.
     *
     * Each character is multiplied by a weight chosen cyclically by its
     * position, and the products are XOR-ed together; the result is then
     * reduced to a bit index either by shifting (power-of-two sized filter)
     * or by a modulo on the non-negative part of the hash.
     *
     * @param s a character sequence.
     * @param l the length of <code>s</code>.
     * @param k a hash function index (smaller than {@link #d}).
     * @return the position in the filter corresponding to <code>s</code> for the hash function <code>k</code>.
     */
    protected long hash( final CharSequence s, final int l, final int k ) {
        final long[] w = weight[ k ];
        long h = 0;
        int i = l;
        while( i-- != 0 ) h ^= s.charAt( i ) * w[ i % NUMBER_OF_WEIGHTS ];
        long retVal;
        if(power>0) {
            // Power-of-two filter: take the top 'power' bits of the hash.
            retVal = h >>> (64-power);
        } else {
            // Otherwise: clear the sign bit and reduce modulo the bit count.
            retVal = ( h & 0x7FFFFFFFFFFFFFFFL ) % m;
        }
        return retVal;
    }

    /** Returns the d bit indexes this sequence maps to (one per hash function). */
    public long[] bitIndexesFor(CharSequence s) {
        long[] ret = new long[d];
        for(int i = 0; i < d; i++) {
            ret[i] = hash(s,s.length(),i);
        }
        return ret;
    }

    /** Checks whether the given character sequence is in this filter.
     *
     * <P>Note that this method may return true on a character sequence that is has
     * not been added to the filter. This will happen with probability 2<sub>-<var>d</var></sub>,
     * where <var>d</var> is the number of hash functions specified at creation time, if
     * the number of the elements in the filter is less than <var>n</var>, the number
     * of expected elements specified at creation time.
     *
     * @param s a character sequence.
     * @return true if the sequence is in the filter (or if a sequence with the
     * same hash sequence is in the filter).
     */
    public boolean contains( final CharSequence s ) {
        int i = d, l = s.length();
        while( i-- != 0 ) if ( ! getBit( hash( s, l, i ) ) ) return false;
        return true;
    }

    /** Adds a character sequence to the filter.
     *
     * @param s a character sequence.
     * @return true if the character sequence was not in the filter (but see {@link #contains(CharSequence)}).
     */
    public boolean add( final CharSequence s ) {
        boolean result = false;
        int i = d, l = s.length();
        long h;
        while( i-- != 0 ) {
            h = hash( s, l, i );
            // setGetBit returns the PREVIOUS value; any previously-clear bit
            // means this exact hash sequence was not present before.
            if ( ! setGetBit( h ) ) {
                result = true;
            }
        }
        if ( result ) size++;
        return result;
    }

    /** log2 of bits per long (64 = 2^6); shift amount from bit index to long index. */
    protected final static long ADDRESS_BITS_PER_UNIT = 6; // 64=2^6
    /** Mask selecting the bit position within one long. */
    protected final static long BIT_INDEX_MASK = (1<<6)-1; // = 63 = 2^BITS_PER_UNIT - 1;

    /**
     * Returns from the local bitvector the value of the bit with
     * the specified index. The value is <tt>true</tt> if the bit
     * with the index <tt>bitIndex</tt> is currently set; otherwise,
     * returns <tt>false</tt>.
     *
     * (adapted from cern.colt.bitvector.QuickBitVector)
     *
     * @param bitIndex the bit index.
     * @return the value of the bit with the specified index.
     */
    public boolean getBit(long bitIndex) {
        long longIndex = bitIndex >>> ADDRESS_BITS_PER_UNIT;
        // Split the long index into (subarray, offset-within-subarray).
        int arrayIndex = (int) (longIndex >>> SUBARRAY_POWER_OF_TWO);
        int subarrayIndex = (int) (longIndex & SUBARRAY_MASK);
        return ((bits[arrayIndex][subarrayIndex] & (1L << (bitIndex & BIT_INDEX_MASK))) != 0);
    }

    /**
     * Sets (to 1) the bit with index <tt>bitIndex</tt> in the local bitvector.
     *
     * (adapted from cern.colt.bitvector.QuickBitVector)
     *
     * @param bitIndex the index of the bit to be set.
     */
    protected void setBit( long bitIndex) {
        long longIndex = bitIndex >>> ADDRESS_BITS_PER_UNIT;
        int arrayIndex = (int) (longIndex >>> SUBARRAY_POWER_OF_TWO);
        int subarrayIndex = (int) (longIndex & SUBARRAY_MASK);
        bits[arrayIndex][subarrayIndex] |= (1L << (bitIndex & BIT_INDEX_MASK));
    }

    /**
     * Sets the bit with index <tt>bitIndex</tt> in the local bitvector --
     * returning the old value.
     *
     * (adapted from cern.colt.bitvector.QuickBitVector)
     *
     * @param bitIndex the index of the bit to be set.
     * @return the previous value of the bit.
     */
    protected boolean setGetBit( long bitIndex) {
        long longIndex = bitIndex >>> ADDRESS_BITS_PER_UNIT;
        int arrayIndex = (int) (longIndex >>> SUBARRAY_POWER_OF_TWO);
        int subarrayIndex = (int) (longIndex & SUBARRAY_MASK);
        long mask = 1L << (bitIndex & BIT_INDEX_MASK);
        boolean ret = (bits[arrayIndex][subarrayIndex] & mask)!=0;
        bits[arrayIndex][subarrayIndex] |= mask;
        return ret;
    }

    /* (non-Javadoc)
     * @see org.archive.util.BloomFilter#getSizeBytes()
     */
    public long getSizeBytes() {
        // Account for the ragged-sized last subarray.
        return 8*(((bits.length-1)*bits[0].length)+bits[bits.length-1].length);
    }

    @Override
    public long getExpectedInserts() {
        return expectedInserts;
    }

    @Override
    public long getHashCount() {
        return d;
    }
}
package sh.komet.fx.stage; import static sh.isaac.api.logic.LogicalExpressionBuilder.And; import static sh.isaac.api.logic.LogicalExpressionBuilder.ConceptAssertion; import static sh.isaac.api.logic.LogicalExpressionBuilder.NecessarySet; import java.io.IOException; import java.util.NoSuchElementException; import java.util.UUID; import java.util.concurrent.ExecutionException; import javafx.application.Platform; import javafx.concurrent.Task; import javafx.fxml.FXMLLoader; import javafx.scene.Scene; import javafx.scene.image.Image; import javafx.scene.layout.BorderPane; import javafx.stage.Stage; import javafx.stage.StageStyle; import sh.isaac.MetaData; import sh.isaac.api.ConceptProxy; //import org.scenicview.ScenicView; import sh.isaac.api.ConfigurationService; import sh.isaac.api.Get; import sh.isaac.api.LookupService; import sh.isaac.api.bootstrap.TermAux; import sh.isaac.api.classifier.ClassifierResults; import sh.isaac.api.classifier.ClassifierService; import sh.isaac.api.component.concept.ConceptBuilder; import sh.isaac.api.constants.DatabaseInitialization; import sh.isaac.api.constants.MemoryConfiguration; import sh.isaac.api.constants.SystemPropertyConstants; import sh.isaac.api.coordinate.Coordinates; import sh.isaac.api.coordinate.WriteCoordinate; import sh.isaac.api.coordinate.WriteCoordinateImpl; import sh.isaac.api.logic.LogicalExpression; import sh.isaac.api.logic.LogicalExpressionBuilder; import sh.isaac.api.task.TimedTaskWithProgressTracker; import sh.isaac.api.transaction.Transaction; import sh.isaac.api.util.UuidT5Generator; import sh.isaac.komet.iconography.Iconography; import sh.isaac.komet.iconography.IconographyHelper; import sh.isaac.komet.preferences.UserPreferencesPanel; import sh.isaac.model.builder.ConceptBuilderImpl; import sh.komet.gui.contract.MenuProvider; import sh.komet.gui.contract.preferences.KometPreferences; import sh.komet.gui.contract.preferences.PreferenceGroup; import sh.komet.gui.contract.preferences.WindowPreferences; 
import sh.komet.gui.util.FxConfiguration;
import sh.komet.gui.util.FxGet;

/**
 * Background task that brings up the Komet UI after the user has selected a
 * data source: shows a splash screen, starts the preference and Isaac/Solor
 * services, seeds default user concepts, optionally re-imports metadata, and
 * finally opens the saved application windows on the JavaFX thread.
 */
public class StartupAfterSelection extends TimedTaskWithProgressTracker<Void> {

    // Owning application; used to swap the primary stage as windows come up.
    private final MainApp mainApp;
    // Loaded lazily on the FX thread inside OpenWindows.call().
    private KometPreferences kometPreferences;
    // When true, kicks off a background metadata re-import after startup.
    private final boolean reimportMetadata;

    /**
     * Registers this task as active immediately so the UI can track progress.
     *
     * @param mainApp          the application whose primary stage is managed here
     * @param reimportMetadata whether to schedule a metadata re-import after startup
     */
    public StartupAfterSelection(MainApp mainApp, boolean reimportMetadata) {
        this.mainApp = mainApp;
        this.reimportMetadata = reimportMetadata;
        this.updateTitle("Setting up user interface");
        Get.activeTasks().add(this);
    }

    /**
     * Runs the startup sequence. Ordering matters: the preference provider must
     * start before configuration is read, and LookupService.startupIsaac() must
     * complete before users are added or windows opened.
     *
     * @return always null (Void task)
     * @throws Exception never propagated in practice; failures are printed and swallowed
     */
    @Override
    protected Void call() throws Exception {
        try {
            // Show the splash/startup screen on the FX application thread.
            Platform.runLater(() -> {
                try {
                    FXMLLoader sourceLoader = new FXMLLoader(getClass().getResource("/fxml/StartupScreen.fxml"));
                    BorderPane sourceRoot = sourceLoader.load();
                    StartupScreenController startupSceneController = sourceLoader.getController();
                    Stage stage = new Stage(StageStyle.UTILITY);
                    stage.setResizable(false);
                    Scene sourceScene = new Scene(sourceRoot, 965, 495);
                    stage.setScene(sourceScene);
                    stage.getScene()
                            .getStylesheets()
                            .add(MainApp.class.getResource("/user.css").toString());
                    stage.getScene()
                            .getStylesheets()
                            .add(Iconography.class.getResource("/sh/isaac/komet/iconography/Iconography.css").toString());
                    stage.show();
                    mainApp.replacePrimaryStage(stage);
                } catch (IOException e) {
                    // NOTE(review): failure to show the splash screen is only printed,
                    // startup continues regardless.
                    e.printStackTrace();
                }
            });
            this.updateMessage("Starting preference service");
            LookupService.startupPreferenceProvider();
            mainApp.configurationPreferences = FxGet.kometConfigurationRootNode();
            // A persisted INITIALIZED flag means this configuration has run before.
            if (mainApp.configurationPreferences.getBoolean(PreferenceGroup.Keys.INITIALIZED, false)) {
                mainApp.firstRun = false;
            }
            Get.configurationService().setSingleUserMode(true); //TODO eventually, this needs to be replaced with a proper user identifier
            Get.configurationService().setDatabaseInitializationMode(DatabaseInitialization.LOAD_METADATA);
            Get.configurationService().getGlobalDatastoreConfiguration().setMemoryConfiguration(MemoryConfiguration.ALL_CHRONICLES_IN_MEMORY);
            this.updateMessage("Starting Solor services");
            System.setProperty(SystemPropertyConstants.FAIL_ON_DATABASE_IDENTITY_MISMATCH, Boolean.FALSE.toString());
            System.setProperty(FxConfiguration.SHOW_BETA_PROPERTY, Boolean.TRUE.toString());
            LookupService.startupIsaac();
            addUsers();
            UserPreferencesPanel.login();
            if (FxGet.fxConfiguration().isShowBetaFeaturesEnabled()) {
                LOG.info("Beta features enabled");
            }
            if (reimportMetadata) {
                // Fire-and-forget metadata refresh; tracked via the active-tasks list.
                TimedTaskWithProgressTracker<Void> tt = new TimedTaskWithProgressTracker<Void>() {
                    {
                        this.updateTitle("Updating metadata");
                    }
                    @Override
                    protected Void call() throws Exception {
                        Get.activeTasks().add(this);
                        try {
                            Get.metadataService().reimportMetadata();
                            return null;
                        } finally {
                            Get.activeTasks().remove(this);
                        }
                    }
                };
                Get.executor().submit(tt);
            }
            // Open the saved windows (or one default window) on the FX thread.
            Platform.runLater(new OpenWindows());
        } catch (Exception e) {
            // NOTE(review): broad catch-and-print; startup errors are not surfaced
            // to the user beyond the console.
            e.printStackTrace();
        } finally {
            Get.activeTasks().remove(this);
        }
        return null;
    }

    /**
     * Add users for Komet GUI.
     * Creates a concept (with a minimal EL++ definition under USER____SOLOR) for
     * each hard-coded author name that does not already exist, inside a single
     * transaction that is committed only if at least one concept was created.
     * TODO replace with some external mechanism to provide user info? Or a login / self register system in the GUI
     * TODO these "terminology" authors should not be here at all, they should each be created by the terminology loader itself...
     */
    private void addUsers() {
        String[] users = new String[] {"Keith Campbell", "Deloitte User", "Bootstrap administrator", "Clinvar author",
                "UMLS author", "LOINC author", "LIVD author", "CVX author", "SNOMED author", "RxNorm author",
                "HL7 author", "CDC author", "NLM author", "NCI author", "VA author", "DOD author", "FEHRM author",
                "Logica author", "Susan Castillo", "Penni Hernandez", "Ioana Singureanu"};
        Transaction t = Get.commitService().newTransaction("create users");
        WriteCoordinate writeCoordinate = new WriteCoordinateImpl(MetaData.USER____SOLOR.getNid(),
                MetaData.USERS_MODULE____SOLOR.getNid(),
                MetaData.PRIMORDIAL_PATH____SOLOR.getNid(), t);
        try {
            int created = 0;
            for (String user : users) {
                ConceptBuilder cb = new ConceptBuilderImpl(user, ConceptProxy.METADATA_SEMANTIC_TAG, null,
                        TermAux.ENGLISH_LANGUAGE, TermAux.US_DIALECT_ASSEMBLAGE, Coordinates.Logic.ElPlusPlus(),
                        TermAux.SOLOR_CONCEPT_ASSEMBLAGE.getNid());
                // Deterministic (T5/name-based) UUID so re-runs find the same concept.
                cb.setT5UuidNested(UuidT5Generator.PATH_ID_FROM_FS_DESC);
                UUID conceptId = cb.getPrimordialUuid();
                if (!Get.identifierService().hasUuid(conceptId)) {
                    LogicalExpressionBuilder defBuilder = Get.logicalExpressionBuilderService().getLogicalExpressionBuilder();
                    NecessarySet(And(ConceptAssertion(MetaData.USER____SOLOR.getNid(), defBuilder)));
                    LogicalExpression logicalExpression = defBuilder.build();
                    cb.addLogicalExpression(logicalExpression);
                    cb.buildAndWrite(writeCoordinate).get();
                    created++;
                }
            }
            if (created > 0) {
                t.commit().get();
            } else {
                t.cancel().get();
            }
            LOG.info("Created {} users", created);
        } catch (NoSuchElementException | IllegalArgumentException | IllegalStateException | InterruptedException
                | ExecutionException e) {
            // NOTE(review): InterruptedException is logged but the interrupt flag is
            // not restored here -- confirm whether that matters for callers.
            LOG.error("Unexpected problem adding missing users!", e);
        }
    }

    /**
     * FX-thread task that loads preferences, optionally classifies freshly
     * imported metadata, and opens one stage per saved window preference.
     */
    private class OpenWindows extends TimedTaskWithProgressTracker<Void> {

        public OpenWindows() {
            super();
            this.updateTitle("Opening windows");
            Get.activeTasks().add(this);
        }

        @Override
        protected Void call() throws Exception {
            try {
                FxGet.load();
                kometPreferences = FxGet.kometPreferences();
                kometPreferences.loadPreferences();
                if (Get.metadataService()
                        .wasMetadataImported()) {
                    // Run the classifier synchronously after a metadata import.
                    // NOTE(review): classifierResults is computed but never used --
                    // presumably classify() is invoked for its side effects; confirm.
                    final ClassifierService logicService = Get.logicService()
                            .getClassifierService(Coordinates.Manifold.DevelopmentInferredRegularNameSort());
                    final Task<ClassifierResults> classifyTask = logicService.classify();
                    final ClassifierResults classifierResults = classifyTask.get();
                }
                // To update metadata if new metadata is available after database was built.
                kometPreferences.reloadPreferences();
                boolean replacePrimaryStage = true;
                for (WindowPreferences windowPreference : kometPreferences.getWindowPreferenceItems()) {
                    LOG.info("Opening " + windowPreference.getWindowName().get());
                    this.updateMessage("Opening " + windowPreference.getWindowName().get());
                    try {
                        UUID stageUuid = windowPreference.getWindowUuid();
                        FXMLLoader loader = new FXMLLoader(getClass().getResource("/fxml/KometStageScene.fxml"));
                        BorderPane root = loader.load();
                        KometStageController controller = loader.getController();
                        root.setId(stageUuid.toString());
                        Stage stage = new Stage(StageStyle.UNIFIED);
                        stage.getProperties().put(FxGet.PROPERTY_KEYS.WINDOW_PREFERENCES, windowPreference);
                        Scene scene = new Scene(mainApp.setupStageMenus(stage, root, windowPreference));
                        stage.setScene(scene);
                        // Restore the persisted geometry for this window.
                        stage.setX(windowPreference.xLocationProperty().doubleValue());
                        stage.setY(windowPreference.yLocationProperty().doubleValue());
                        stage.setWidth(windowPreference.widthProperty().doubleValue());
                        stage.setHeight(windowPreference.heightProperty().doubleValue());
                        stage.setTitle(FxGet.configurationName());
                        controller.setWindowPreferenceItem(windowPreference, stage);
                        stage.getIcons().add(new Image(MainApp.class.getResourceAsStream("/icons/KOMET.ico")));
                        stage.getIcons().add(new Image(MainApp.class.getResourceAsStream("/icons/KOMET.png")));
                        // NOTE(review): setTitle is called twice; this second call wins.
                        stage.setTitle(windowPreference.getWindowName().getValue() + ": " +
                                Get.dataStore().getDataStorePath().toFile().getName());
                        // GraphController.setSceneForControllers(scene);
                        scene.getStylesheets()
                                .add(FxGet.fxConfiguration().getUserCSSURL().toString());
                        scene.getStylesheets()
                                .add(IconographyHelper.getStyleSheetStringUrl());
                        FxGet.statusMessageService()
                                .addScene(scene, controller::reportStatus);
                        stage.setOnCloseRequest(MenuProvider::handleCloseRequest);
                        // The first opened window becomes the new primary stage
                        // (replacing the splash screen).
                        if (replacePrimaryStage) {
                            replacePrimaryStage = false;
                            mainApp.replacePrimaryStage(stage);
                        }
                        stage.show();
                        //ScenicView.show(stage.getScene());
                        MenuProvider.WINDOW_COUNT.incrementAndGet();
                        // Persist preferences after each successfully opened window.
                        mainApp.configurationPreferences.sync();
                        // ScenicView.show(scene);
                    } catch (Exception e) {
                        // Per-window failures are printed and the loop continues with
                        // the remaining windows.
                        e.printStackTrace();
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
            } finally {
                Get.activeTasks().remove(this);
            }
            return null;
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.arrow.algorithm.dictionary; import static junit.framework.TestCase.assertTrue; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.Random; import org.apache.arrow.memory.BufferAllocator; import org.apache.arrow.memory.RootAllocator; import org.apache.arrow.vector.IntVector; import org.apache.arrow.vector.VarBinaryVector; import org.apache.arrow.vector.VarCharVector; import org.apache.arrow.vector.dictionary.Dictionary; import org.apache.arrow.vector.dictionary.DictionaryEncoder; import org.apache.arrow.vector.types.pojo.DictionaryEncoding; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; /** * Test cases for {@link LinearDictionaryEncoder}. 
*/ public class TestLinearDictionaryEncoder { private final int VECTOR_LENGTH = 50; private final int DICTIONARY_LENGTH = 10; private BufferAllocator allocator; byte[] zero = "000".getBytes(StandardCharsets.UTF_8); byte[] one = "111".getBytes(StandardCharsets.UTF_8); byte[] two = "222".getBytes(StandardCharsets.UTF_8); byte[][] data = new byte[][]{zero, one, two}; @Before public void prepare() { allocator = new RootAllocator(1024 * 1024); } @After public void shutdown() { allocator.close(); } @Test public void testEncodeAndDecode() { Random random = new Random(); try (VarCharVector rawVector = new VarCharVector("original vector", allocator); IntVector encodedVector = new IntVector("encoded vector", allocator); VarCharVector dictionary = new VarCharVector("dictionary", allocator)) { // set up dictionary dictionary.allocateNew(); for (int i = 0; i < DICTIONARY_LENGTH; i++) { // encode "i" as i dictionary.setSafe(i, String.valueOf(i).getBytes()); } dictionary.setValueCount(DICTIONARY_LENGTH); // set up raw vector rawVector.allocateNew(10 * VECTOR_LENGTH, VECTOR_LENGTH); for (int i = 0; i < VECTOR_LENGTH; i++) { int val = (random.nextInt() & Integer.MAX_VALUE) % DICTIONARY_LENGTH; rawVector.set(i, String.valueOf(val).getBytes()); } rawVector.setValueCount(VECTOR_LENGTH); LinearDictionaryEncoder<IntVector, VarCharVector> encoder = new LinearDictionaryEncoder<>(dictionary, false); // perform encoding encodedVector.allocateNew(); encoder.encode(rawVector, encodedVector); // verify encoding results assertEquals(rawVector.getValueCount(), encodedVector.getValueCount()); for (int i = 0; i < VECTOR_LENGTH; i++) { assertArrayEquals(rawVector.get(i), String.valueOf(encodedVector.get(i)).getBytes()); } // perform decoding Dictionary dict = new Dictionary(dictionary, new DictionaryEncoding(1L, false, null)); try (VarCharVector decodedVector = (VarCharVector) DictionaryEncoder.decode(encodedVector, dict)) { // verify decoding results assertEquals(encodedVector.getValueCount(), 
decodedVector.getValueCount()); for (int i = 0; i < VECTOR_LENGTH; i++) { assertArrayEquals(String.valueOf(encodedVector.get(i)).getBytes(), decodedVector.get(i)); } } } } @Test public void testEncodeAndDecodeWithNull() { Random random = new Random(); try (VarCharVector rawVector = new VarCharVector("original vector", allocator); IntVector encodedVector = new IntVector("encoded vector", allocator); VarCharVector dictionary = new VarCharVector("dictionary", allocator)) { // set up dictionary dictionary.allocateNew(); dictionary.setNull(0); for (int i = 1; i < DICTIONARY_LENGTH; i++) { // encode "i" as i dictionary.setSafe(i, String.valueOf(i).getBytes()); } dictionary.setValueCount(DICTIONARY_LENGTH); // set up raw vector rawVector.allocateNew(10 * VECTOR_LENGTH, VECTOR_LENGTH); for (int i = 0; i < VECTOR_LENGTH; i++) { if (i % 10 == 0) { rawVector.setNull(i); } else { int val = (random.nextInt() & Integer.MAX_VALUE) % (DICTIONARY_LENGTH - 1) + 1; rawVector.set(i, String.valueOf(val).getBytes()); } } rawVector.setValueCount(VECTOR_LENGTH); LinearDictionaryEncoder<IntVector, VarCharVector> encoder = new LinearDictionaryEncoder<>(dictionary, true); // perform encoding encodedVector.allocateNew(); encoder.encode(rawVector, encodedVector); // verify encoding results assertEquals(rawVector.getValueCount(), encodedVector.getValueCount()); for (int i = 0; i < VECTOR_LENGTH; i++) { if (i % 10 == 0) { assertEquals(0, encodedVector.get(i)); } else { assertArrayEquals(rawVector.get(i), String.valueOf(encodedVector.get(i)).getBytes()); } } // perform decoding Dictionary dict = new Dictionary(dictionary, new DictionaryEncoding(1L, false, null)); try (VarCharVector decodedVector = (VarCharVector) DictionaryEncoder.decode(encodedVector, dict)) { // verify decoding results assertEquals(encodedVector.getValueCount(), decodedVector.getValueCount()); for (int i = 0; i < VECTOR_LENGTH; i++) { if (i % 10 == 0) { assertTrue(decodedVector.isNull(i)); } else { 
assertArrayEquals(String.valueOf(encodedVector.get(i)).getBytes(), decodedVector.get(i)); } } } } } @Test public void testEncodeNullWithoutNullInDictionary() { try (VarCharVector rawVector = new VarCharVector("original vector", allocator); IntVector encodedVector = new IntVector("encoded vector", allocator); VarCharVector dictionary = new VarCharVector("dictionary", allocator)) { // set up dictionary, with no null in it. dictionary.allocateNew(); for (int i = 0; i < DICTIONARY_LENGTH; i++) { // encode "i" as i dictionary.setSafe(i, String.valueOf(i).getBytes()); } dictionary.setValueCount(DICTIONARY_LENGTH); // the vector to encode has a null inside. rawVector.allocateNew(1); rawVector.setNull(0); rawVector.setValueCount(1); encodedVector.allocateNew(); LinearDictionaryEncoder<IntVector, VarCharVector> encoder = new LinearDictionaryEncoder<>(dictionary, true); // the encoder should encode null, but no null in the dictionary, // so an exception should be thrown. assertThrows(IllegalArgumentException.class, () -> { encoder.encode(rawVector, encodedVector); }); } } @Test public void testEncodeStrings() { // Create a new value vector try (final VarCharVector vector = new VarCharVector("foo", allocator); final IntVector encoded = new IntVector("encoded", allocator); final VarCharVector dictionaryVector = new VarCharVector("dict", allocator)) { vector.allocateNew(512, 5); encoded.allocateNew(); // set some values vector.setSafe(0, zero, 0, zero.length); vector.setSafe(1, one, 0, one.length); vector.setSafe(2, one, 0, one.length); vector.setSafe(3, two, 0, two.length); vector.setSafe(4, zero, 0, zero.length); vector.setValueCount(5); // set some dictionary values dictionaryVector.allocateNew(512, 3); dictionaryVector.setSafe(0, zero, 0, one.length); dictionaryVector.setSafe(1, one, 0, two.length); dictionaryVector.setSafe(2, two, 0, zero.length); dictionaryVector.setValueCount(3); LinearDictionaryEncoder<IntVector, VarCharVector> encoder = new 
LinearDictionaryEncoder<>(dictionaryVector); encoder.encode(vector, encoded); // verify indices assertEquals(5, encoded.getValueCount()); assertEquals(0, encoded.get(0)); assertEquals(1, encoded.get(1)); assertEquals(1, encoded.get(2)); assertEquals(2, encoded.get(3)); assertEquals(0, encoded.get(4)); // now run through the decoder and verify we get the original back Dictionary dict = new Dictionary(dictionaryVector, new DictionaryEncoding(1L, false, null)); try (VarCharVector decoded = (VarCharVector) DictionaryEncoder.decode(encoded, dict)) { assertEquals(vector.getValueCount(), decoded.getValueCount()); for (int i = 0; i < 5; i++) { assertEquals(vector.getObject(i), decoded.getObject(i)); } } } } @Test public void testEncodeLargeVector() { // Create a new value vector try (final VarCharVector vector = new VarCharVector("foo", allocator); final IntVector encoded = new IntVector("encoded", allocator); final VarCharVector dictionaryVector = new VarCharVector("dict", allocator)) { vector.allocateNew(); encoded.allocateNew(); int count = 10000; for (int i = 0; i < 10000; ++i) { vector.setSafe(i, data[i % 3], 0, data[i % 3].length); } vector.setValueCount(count); dictionaryVector.allocateNew(512, 3); dictionaryVector.setSafe(0, zero, 0, one.length); dictionaryVector.setSafe(1, one, 0, two.length); dictionaryVector.setSafe(2, two, 0, zero.length); dictionaryVector.setValueCount(3); LinearDictionaryEncoder<IntVector, VarCharVector> encoder = new LinearDictionaryEncoder<>(dictionaryVector); encoder.encode(vector, encoded); assertEquals(count, encoded.getValueCount()); for (int i = 0; i < count; ++i) { assertEquals(i % 3, encoded.get(i)); } // now run through the decoder and verify we get the original back Dictionary dict = new Dictionary(dictionaryVector, new DictionaryEncoding(1L, false, null)); try (VarCharVector decoded = (VarCharVector) DictionaryEncoder.decode(encoded, dict)) { assertEquals(vector.getClass(), decoded.getClass()); assertEquals(vector.getValueCount(), 
decoded.getValueCount()); for (int i = 0; i < count; ++i) { assertEquals(vector.getObject(i), decoded.getObject(i)); } } } } @Test public void testEncodeBinaryVector() { // Create a new value vector try (final VarBinaryVector vector = new VarBinaryVector("foo", allocator); final VarBinaryVector dictionaryVector = new VarBinaryVector("dict", allocator); final IntVector encoded = new IntVector("encoded", allocator)) { vector.allocateNew(512, 5); vector.allocateNew(); encoded.allocateNew(); // set some values vector.setSafe(0, zero, 0, zero.length); vector.setSafe(1, one, 0, one.length); vector.setSafe(2, one, 0, one.length); vector.setSafe(3, two, 0, two.length); vector.setSafe(4, zero, 0, zero.length); vector.setValueCount(5); // set some dictionary values dictionaryVector.allocateNew(512, 3); dictionaryVector.setSafe(0, zero, 0, one.length); dictionaryVector.setSafe(1, one, 0, two.length); dictionaryVector.setSafe(2, two, 0, zero.length); dictionaryVector.setValueCount(3); LinearDictionaryEncoder<IntVector, VarBinaryVector> encoder = new LinearDictionaryEncoder<>(dictionaryVector); encoder.encode(vector, encoded); assertEquals(5, encoded.getValueCount()); assertEquals(0, encoded.get(0)); assertEquals(1, encoded.get(1)); assertEquals(1, encoded.get(2)); assertEquals(2, encoded.get(3)); assertEquals(0, encoded.get(4)); // now run through the decoder and verify we get the original back Dictionary dict = new Dictionary(dictionaryVector, new DictionaryEncoding(1L, false, null)); try (VarBinaryVector decoded = (VarBinaryVector) DictionaryEncoder.decode(encoded, dict)) { assertEquals(vector.getClass(), decoded.getClass()); assertEquals(vector.getValueCount(), decoded.getValueCount()); for (int i = 0; i < 5; i++) { Assert.assertTrue(Arrays.equals(vector.getObject(i), decoded.getObject(i))); } } } } }
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
 * Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.ec2.model;

import java.io.Serializable;
import java.util.Objects;

/**
 * <p>
 * Describes an Elastic IP address.
 * </p>
 * <p>
 * Plain mutable data holder following the SDK model conventions: for each
 * property there is a {@code get}/{@code set} pair plus a fluent
 * {@code with} variant returning {@code this} for call chaining.
 * </p>
 */
public class Address implements Serializable, Cloneable {

    /** The ID of the instance that the address is associated with (if any). */
    private String instanceId;

    /** The Elastic IP address. */
    private String publicIp;

    /** The ID representing the allocation of the address for use with EC2-VPC. */
    private String allocationId;

    /** The ID representing the association of the address with an instance in a VPC. */
    private String associationId;

    /**
     * Indicates whether this Elastic IP address is for use with instances in
     * EC2-Classic (<code>standard</code>) or instances in a VPC (<code>vpc</code>).
     *
     * @see DomainType
     */
    private String domain;

    /** The ID of the network interface. */
    private String networkInterfaceId;

    /** The ID of the AWS account that owns the network interface. */
    private String networkInterfaceOwnerId;

    /** The private IP address associated with the Elastic IP address. */
    private String privateIpAddress;

    /**
     * Sets the ID of the instance that the address is associated with (if any).
     *
     * @param instanceId the instance ID
     */
    public void setInstanceId(String instanceId) {
        this.instanceId = instanceId;
    }

    /**
     * Returns the ID of the instance that the address is associated with (if any).
     *
     * @return the instance ID, or {@code null} if not associated
     */
    public String getInstanceId() {
        return this.instanceId;
    }

    /**
     * Fluent setter for the instance ID.
     *
     * @param instanceId the instance ID
     * @return this object, for method chaining
     */
    public Address withInstanceId(String instanceId) {
        setInstanceId(instanceId);
        return this;
    }

    /**
     * Sets the Elastic IP address.
     *
     * @param publicIp the Elastic IP address
     */
    public void setPublicIp(String publicIp) {
        this.publicIp = publicIp;
    }

    /**
     * Returns the Elastic IP address.
     *
     * @return the Elastic IP address
     */
    public String getPublicIp() {
        return this.publicIp;
    }

    /**
     * Fluent setter for the Elastic IP address.
     *
     * @param publicIp the Elastic IP address
     * @return this object, for method chaining
     */
    public Address withPublicIp(String publicIp) {
        setPublicIp(publicIp);
        return this;
    }

    /**
     * Sets the ID representing the allocation of the address for use with EC2-VPC.
     *
     * @param allocationId the allocation ID
     */
    public void setAllocationId(String allocationId) {
        this.allocationId = allocationId;
    }

    /**
     * Returns the ID representing the allocation of the address for use with EC2-VPC.
     *
     * @return the allocation ID
     */
    public String getAllocationId() {
        return this.allocationId;
    }

    /**
     * Fluent setter for the allocation ID.
     *
     * @param allocationId the allocation ID
     * @return this object, for method chaining
     */
    public Address withAllocationId(String allocationId) {
        setAllocationId(allocationId);
        return this;
    }

    /**
     * Sets the ID representing the association of the address with an instance in a VPC.
     *
     * @param associationId the association ID
     */
    public void setAssociationId(String associationId) {
        this.associationId = associationId;
    }

    /**
     * Returns the ID representing the association of the address with an instance in a VPC.
     *
     * @return the association ID
     */
    public String getAssociationId() {
        return this.associationId;
    }

    /**
     * Fluent setter for the association ID.
     *
     * @param associationId the association ID
     * @return this object, for method chaining
     */
    public Address withAssociationId(String associationId) {
        setAssociationId(associationId);
        return this;
    }

    /**
     * Sets whether this Elastic IP address is for use with instances in
     * EC2-Classic (<code>standard</code>) or instances in a VPC (<code>vpc</code>).
     *
     * @param domain the domain, <code>standard</code> or <code>vpc</code>
     * @see DomainType
     */
    public void setDomain(String domain) {
        this.domain = domain;
    }

    /**
     * Returns whether this Elastic IP address is for use with instances in
     * EC2-Classic (<code>standard</code>) or instances in a VPC (<code>vpc</code>).
     *
     * @return the domain, <code>standard</code> or <code>vpc</code>
     * @see DomainType
     */
    public String getDomain() {
        return this.domain;
    }

    /**
     * Fluent setter for the domain.
     *
     * @param domain the domain, <code>standard</code> or <code>vpc</code>
     * @return this object, for method chaining
     * @see DomainType
     */
    public Address withDomain(String domain) {
        setDomain(domain);
        return this;
    }

    /**
     * Enum overload of {@link #setDomain(String)}; stores
     * {@code domain.toString()}.
     *
     * @param domain the domain type
     * @see DomainType
     */
    public void setDomain(DomainType domain) {
        this.domain = domain.toString();
    }

    /**
     * Fluent enum overload of {@link #withDomain(String)}.
     *
     * @param domain the domain type
     * @return this object, for method chaining
     * @see DomainType
     */
    public Address withDomain(DomainType domain) {
        setDomain(domain);
        return this;
    }

    /**
     * Sets the ID of the network interface.
     *
     * @param networkInterfaceId the network interface ID
     */
    public void setNetworkInterfaceId(String networkInterfaceId) {
        this.networkInterfaceId = networkInterfaceId;
    }

    /**
     * Returns the ID of the network interface.
     *
     * @return the network interface ID
     */
    public String getNetworkInterfaceId() {
        return this.networkInterfaceId;
    }

    /**
     * Fluent setter for the network interface ID.
     *
     * @param networkInterfaceId the network interface ID
     * @return this object, for method chaining
     */
    public Address withNetworkInterfaceId(String networkInterfaceId) {
        setNetworkInterfaceId(networkInterfaceId);
        return this;
    }

    /**
     * Sets the ID of the AWS account that owns the network interface.
     *
     * @param networkInterfaceOwnerId the owning account ID
     */
    public void setNetworkInterfaceOwnerId(String networkInterfaceOwnerId) {
        this.networkInterfaceOwnerId = networkInterfaceOwnerId;
    }

    /**
     * Returns the ID of the AWS account that owns the network interface.
     *
     * @return the owning account ID
     */
    public String getNetworkInterfaceOwnerId() {
        return this.networkInterfaceOwnerId;
    }

    /**
     * Fluent setter for the owning account ID.
     *
     * @param networkInterfaceOwnerId the owning account ID
     * @return this object, for method chaining
     */
    public Address withNetworkInterfaceOwnerId(String networkInterfaceOwnerId) {
        setNetworkInterfaceOwnerId(networkInterfaceOwnerId);
        return this;
    }

    /**
     * Sets the private IP address associated with the Elastic IP address.
     *
     * @param privateIpAddress the private IP address
     */
    public void setPrivateIpAddress(String privateIpAddress) {
        this.privateIpAddress = privateIpAddress;
    }

    /**
     * Returns the private IP address associated with the Elastic IP address.
     *
     * @return the private IP address
     */
    public String getPrivateIpAddress() {
        return this.privateIpAddress;
    }

    /**
     * Fluent setter for the private IP address.
     *
     * @param privateIpAddress the private IP address
     * @return this object, for method chaining
     */
    public Address withPrivateIpAddress(String privateIpAddress) {
        setPrivateIpAddress(privateIpAddress);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging. Only non-null properties are included.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getInstanceId() != null)
            sb.append("InstanceId: " + getInstanceId() + ",");
        if (getPublicIp() != null)
            sb.append("PublicIp: " + getPublicIp() + ",");
        if (getAllocationId() != null)
            sb.append("AllocationId: " + getAllocationId() + ",");
        if (getAssociationId() != null)
            sb.append("AssociationId: " + getAssociationId() + ",");
        if (getDomain() != null)
            sb.append("Domain: " + getDomain() + ",");
        if (getNetworkInterfaceId() != null)
            sb.append("NetworkInterfaceId: " + getNetworkInterfaceId() + ",");
        if (getNetworkInterfaceOwnerId() != null)
            sb.append("NetworkInterfaceOwnerId: " + getNetworkInterfaceOwnerId() + ",");
        if (getPrivateIpAddress() != null)
            sb.append("PrivateIpAddress: " + getPrivateIpAddress());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof Address))
            return false;
        Address other = (Address) obj;
        // Objects.equals replaces the previous hand-rolled null-xor checks
        // with identical null-safe semantics.
        return Objects.equals(getInstanceId(), other.getInstanceId())
                && Objects.equals(getPublicIp(), other.getPublicIp())
                && Objects.equals(getAllocationId(), other.getAllocationId())
                && Objects.equals(getAssociationId(), other.getAssociationId())
                && Objects.equals(getDomain(), other.getDomain())
                && Objects.equals(getNetworkInterfaceId(), other.getNetworkInterfaceId())
                && Objects.equals(getNetworkInterfaceOwnerId(),
                        other.getNetworkInterfaceOwnerId())
                && Objects.equals(getPrivateIpAddress(), other.getPrivateIpAddress());
    }

    @Override
    public int hashCode() {
        // Objects.hash folds with 31 * h + (e == null ? 0 : e.hashCode())
        // starting from 1, so it yields exactly the same value as the previous
        // hand-rolled implementation for the same field order.
        return Objects.hash(getInstanceId(), getPublicIp(), getAllocationId(),
                getAssociationId(), getDomain(), getNetworkInterfaceId(),
                getNetworkInterfaceOwnerId(), getPrivateIpAddress());
    }

    @Override
    public Address clone() {
        try {
            // Shallow copy is sufficient: all fields are immutable Strings.
            return (Address) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
package jp.wasabeef.recyclerview.animators;

/*
 * Copyright (C) 2015 Wasabeef
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

import android.support.v4.view.ViewCompat;
import android.support.v4.view.ViewPropertyAnimatorCompat;
import android.support.v4.view.ViewPropertyAnimatorListener;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.RecyclerView.ViewHolder;
import android.view.View;

import java.util.ArrayList;
import java.util.List;

import jp.wasabeef.recyclerview.animators.internal.ViewHelper;

/**
 * Base {@link RecyclerView.ItemAnimator} that queues add/remove/move/change
 * animations and runs them in batches: removals first, then moves and changes
 * in parallel, then additions (delayed until the earlier phases finish).
 * Subclasses supply the concrete add/remove animations via
 * {@link #animateAddImpl} and {@link #animateRemoveImpl}.
 *
 * NOTE(review): this mirrors the support library's DefaultItemAnimator
 * bookkeeping; the pending/running list handling is order-sensitive, so the
 * code below is left unchanged.
 */
public abstract class BaseItemAnimator extends RecyclerView.ItemAnimator {

  // When true, stale entries in the *Animations lists after a cancel throw
  // instead of being silently removed.
  private static final boolean DEBUG = false;

  // Animations queued by animate*() but not yet started.
  private ArrayList<ViewHolder> mPendingRemovals = new ArrayList<>();
  private ArrayList<ViewHolder> mPendingAdditions = new ArrayList<>();
  private ArrayList<MoveInfo> mPendingMoves = new ArrayList<>();
  private ArrayList<ChangeInfo> mPendingChanges = new ArrayList<>();

  // Batches that have been scheduled (possibly delayed) but whose individual
  // animations may not have started yet.
  private ArrayList<ArrayList<ViewHolder>> mAdditionsList = new ArrayList<>();
  private ArrayList<ArrayList<MoveInfo>> mMovesList = new ArrayList<>();
  private ArrayList<ArrayList<ChangeInfo>> mChangesList = new ArrayList<>();

  // Currently running animations. Add/remove lists are protected so that
  // subclass listeners can unregister holders when their animations end.
  protected ArrayList<ViewHolder> mAddAnimations = new ArrayList<>();
  private ArrayList<ViewHolder> mMoveAnimations = new ArrayList<>();
  protected ArrayList<ViewHolder> mRemoveAnimations = new ArrayList<>();
  private ArrayList<ViewHolder> mChangeAnimations = new ArrayList<>();

  /** Snapshot of a pending move: the holder plus its from/to coordinates. */
  private static class MoveInfo {
    public ViewHolder holder;
    public int fromX, fromY, toX, toY;

    private MoveInfo(ViewHolder holder, int fromX, int fromY, int toX, int toY) {
      this.holder = holder;
      this.fromX = fromX;
      this.fromY = fromY;
      this.toX = toX;
      this.toY = toY;
    }
  }

  /**
   * Snapshot of a pending change: the old and new holders (either may be
   * nulled out once its half of the animation is finished/cancelled) plus
   * the from/to coordinates.
   */
  private static class ChangeInfo {
    public ViewHolder oldHolder, newHolder;
    public int fromX, fromY, toX, toY;

    private ChangeInfo(ViewHolder oldHolder, ViewHolder newHolder) {
      this.oldHolder = oldHolder;
      this.newHolder = newHolder;
    }

    private ChangeInfo(ViewHolder oldHolder, ViewHolder newHolder, int fromX, int fromY, int toX,
        int toY) {
      this(oldHolder, newHolder);
      this.fromX = fromX;
      this.fromY = fromY;
      this.toX = toX;
      this.toY = toY;
    }

    @Override public String toString() {
      return "ChangeInfo{" +
          "oldHolder=" + oldHolder +
          ", newHolder=" + newHolder +
          ", fromX=" + fromX +
          ", fromY=" + fromY +
          ", toX=" + toX +
          ", toY=" + toY +
          '}';
    }
  }

  /**
   * Drains the pending lists: runs removals immediately, then schedules
   * moves/changes (delayed past removals) and additions (delayed past
   * everything else). Batches are moved into the *List containers so they
   * can still be cancelled before their delayed runnables fire.
   */
  @Override public void runPendingAnimations() {
    boolean removalsPending = !mPendingRemovals.isEmpty();
    boolean movesPending = !mPendingMoves.isEmpty();
    boolean changesPending = !mPendingChanges.isEmpty();
    boolean additionsPending = !mPendingAdditions.isEmpty();
    if (!removalsPending && !movesPending && !additionsPending && !changesPending) {
      // nothing to animate
      return;
    }
    // First, remove stuff
    for (ViewHolder holder : mPendingRemovals) {
      animateRemoveImpl(holder);
    }
    mPendingRemovals.clear();
    // Next, move stuff
    if (movesPending) {
      final ArrayList<MoveInfo> moves = new ArrayList<MoveInfo>();
      moves.addAll(mPendingMoves);
      mMovesList.add(moves);
      mPendingMoves.clear();
      Runnable mover = new Runnable() {
        @Override public void run() {
          for (MoveInfo moveInfo : moves) {
            animateMoveImpl(moveInfo.holder, moveInfo.fromX, moveInfo.fromY, moveInfo.toX,
                moveInfo.toY);
          }
          moves.clear();
          mMovesList.remove(moves);
        }
      };
      if (removalsPending) {
        View view = moves.get(0).holder.itemView;
        ViewCompat.postOnAnimationDelayed(view, mover, getRemoveDuration());
      } else {
        mover.run();
      }
    }
    // Next, change stuff, to run in parallel with move animations
    if (changesPending) {
      final ArrayList<ChangeInfo> changes = new ArrayList<ChangeInfo>();
      changes.addAll(mPendingChanges);
      mChangesList.add(changes);
      mPendingChanges.clear();
      Runnable changer = new Runnable() {
        @Override public void run() {
          for (ChangeInfo change : changes) {
            animateChangeImpl(change);
          }
          changes.clear();
          mChangesList.remove(changes);
        }
      };
      if (removalsPending) {
        ViewHolder holder = changes.get(0).oldHolder;
        ViewCompat.postOnAnimationDelayed(holder.itemView, changer, getRemoveDuration());
      } else {
        changer.run();
      }
    }
    // Next, add stuff
    if (additionsPending) {
      final ArrayList<ViewHolder> additions = new ArrayList<ViewHolder>();
      additions.addAll(mPendingAdditions);
      mAdditionsList.add(additions);
      mPendingAdditions.clear();
      Runnable adder = new Runnable() {
        public void run() {
          for (ViewHolder holder : additions) {
            animateAddImpl(holder);
          }
          additions.clear();
          mAdditionsList.remove(additions);
        }
      };
      if (removalsPending || movesPending || changesPending) {
        // Additions start after removals plus whichever of moves/changes
        // takes longer, since moves and changes run in parallel.
        long removeDuration = removalsPending ? getRemoveDuration() : 0;
        long moveDuration = movesPending ? getMoveDuration() : 0;
        long changeDuration = changesPending ? getChangeDuration() : 0;
        long totalDelay = removeDuration + Math.max(moveDuration, changeDuration);
        View view = additions.get(0).itemView;
        ViewCompat.postOnAnimationDelayed(view, adder, totalDelay);
      } else {
        adder.run();
      }
    }
  }

  /** Subclass hook: actually animate the removal of this holder's view. */
  protected abstract void animateRemoveImpl(final RecyclerView.ViewHolder holder);

  /** Subclass hook: actually animate the addition of this holder's view. */
  protected abstract void animateAddImpl(final RecyclerView.ViewHolder holder);

  /** Resets view transformation state before queuing a remove animation. */
  protected void preAnimateRemove(final RecyclerView.ViewHolder holder) {
    ViewHelper.clear(holder.itemView);
  }

  /** Resets view transformation state before queuing an add animation. */
  protected void preAnimateAdd(final RecyclerView.ViewHolder holder) {
    ViewHelper.clear(holder.itemView);
  }

  @Override public boolean animateRemove(final ViewHolder holder) {
    endAnimation(holder);
    preAnimateRemove(holder);
    mPendingRemovals.add(holder);
    return true;
  }

  @Override public boolean animateAdd(final ViewHolder holder) {
    endAnimation(holder);
    preAnimateAdd(holder);
    mPendingAdditions.add(holder);
    return true;
  }

  @Override public boolean animateMove(final ViewHolder holder, int fromX, int fromY, int toX,
      int toY) {
    final View view = holder.itemView;
    // Fold any in-flight translation into the starting position before
    // cancelling it via endAnimation().
    fromX += ViewCompat.getTranslationX(holder.itemView);
    fromY += ViewCompat.getTranslationY(holder.itemView);
    endAnimation(holder);
    int deltaX = toX - fromX;
    int deltaY = toY - fromY;
    if (deltaX == 0 && deltaY == 0) {
      dispatchMoveFinished(holder);
      return false;
    }
    // Pre-position the view at its old location; the animation translates
    // back to 0 to produce the move.
    if (deltaX != 0) {
      ViewCompat.setTranslationX(view, -deltaX);
    }
    if (deltaY != 0) {
      ViewCompat.setTranslationY(view, -deltaY);
    }
    mPendingMoves.add(new MoveInfo(holder, fromX, fromY, toX, toY));
    return true;
  }

  private void animateMoveImpl(final ViewHolder holder, int fromX, int fromY, int toX, int toY) {
    final View view = holder.itemView;
    final int deltaX = toX - fromX;
    final int deltaY = toY - fromY;
    if (deltaX != 0) {
      ViewCompat.animate(view).translationX(0);
    }
    if (deltaY != 0) {
      ViewCompat.animate(view).translationY(0);
    }
    // TODO: make EndActions end listeners instead, since end actions aren't called when
    // vpas are canceled (and can't end them. why?)
    // need listener functionality in VPACompat for this. Ick.
    mMoveAnimations.add(holder);
    final ViewPropertyAnimatorCompat animation = ViewCompat.animate(view);
    animation.setDuration(getMoveDuration()).setListener(new VpaListenerAdapter() {
      @Override public void onAnimationStart(View view) {
        dispatchMoveStarting(holder);
      }

      @Override public void onAnimationCancel(View view) {
        // Snap back to the resting position if the animation is cancelled.
        if (deltaX != 0) {
          ViewCompat.setTranslationX(view, 0);
        }
        if (deltaY != 0) {
          ViewCompat.setTranslationY(view, 0);
        }
      }

      @Override public void onAnimationEnd(View view) {
        animation.setListener(null);
        dispatchMoveFinished(holder);
        mMoveAnimations.remove(holder);
        dispatchFinishedWhenDone();
      }
    }).start();
  }

  @Override public boolean animateChange(ViewHolder oldHolder, ViewHolder newHolder, int fromX,
      int fromY, int toX, int toY) {
    final float prevTranslationX = ViewCompat.getTranslationX(oldHolder.itemView);
    final float prevTranslationY = ViewCompat.getTranslationY(oldHolder.itemView);
    final float prevAlpha = ViewCompat.getAlpha(oldHolder.itemView);
    endAnimation(oldHolder);
    int deltaX = (int) (toX - fromX - prevTranslationX);
    int deltaY = (int) (toY - fromY - prevTranslationY);
    // recover prev translation state after ending animation
    ViewCompat.setTranslationX(oldHolder.itemView, prevTranslationX);
    ViewCompat.setTranslationY(oldHolder.itemView, prevTranslationY);
    ViewCompat.setAlpha(oldHolder.itemView, prevAlpha);
    if (newHolder != null && newHolder.itemView != null) {
      // carry over translation values
      endAnimation(newHolder);
      // Start the incoming view offset and transparent; animateChangeImpl
      // slides/fades it into place.
      ViewCompat.setTranslationX(newHolder.itemView, -deltaX);
      ViewCompat.setTranslationY(newHolder.itemView, -deltaY);
      ViewCompat.setAlpha(newHolder.itemView, 0);
    }
    mPendingChanges.add(new ChangeInfo(oldHolder, newHolder, fromX, fromY, toX, toY));
    return true;
  }

  private void animateChangeImpl(final ChangeInfo changeInfo) {
    final ViewHolder holder = changeInfo.oldHolder;
    final View view = holder == null ? null : holder.itemView;
    final ViewHolder newHolder = changeInfo.newHolder;
    final View newView = newHolder != null ? newHolder.itemView : null;
    if (view != null) {
      // Old view slides toward the new position while fading out.
      mChangeAnimations.add(changeInfo.oldHolder);
      final ViewPropertyAnimatorCompat oldViewAnim = ViewCompat.animate(view).setDuration(
          getChangeDuration());
      oldViewAnim.translationX(changeInfo.toX - changeInfo.fromX);
      oldViewAnim.translationY(changeInfo.toY - changeInfo.fromY);
      oldViewAnim.alpha(0).setListener(new VpaListenerAdapter() {
        @Override public void onAnimationStart(View view) {
          dispatchChangeStarting(changeInfo.oldHolder, true);
        }

        @Override public void onAnimationEnd(View view) {
          oldViewAnim.setListener(null);
          ViewCompat.setAlpha(view, 1);
          ViewCompat.setTranslationX(view, 0);
          ViewCompat.setTranslationY(view, 0);
          dispatchChangeFinished(changeInfo.oldHolder, true);
          mChangeAnimations.remove(changeInfo.oldHolder);
          dispatchFinishedWhenDone();
        }
      }).start();
    }
    if (newView != null) {
      // New view slides into place while fading in.
      mChangeAnimations.add(changeInfo.newHolder);
      final ViewPropertyAnimatorCompat newViewAnimation = ViewCompat.animate(newView);
      newViewAnimation.translationX(0).translationY(0).setDuration(getChangeDuration()).
          alpha(1).setListener(new VpaListenerAdapter() {
        @Override public void onAnimationStart(View view) {
          dispatchChangeStarting(changeInfo.newHolder, false);
        }

        @Override public void onAnimationEnd(View view) {
          newViewAnimation.setListener(null);
          ViewCompat.setAlpha(newView, 1);
          ViewCompat.setTranslationX(newView, 0);
          ViewCompat.setTranslationY(newView, 0);
          dispatchChangeFinished(changeInfo.newHolder, false);
          mChangeAnimations.remove(changeInfo.newHolder);
          dispatchFinishedWhenDone();
        }
      }).start();
    }
  }

  /**
   * Ends the half of each change animation involving {@code item}; drops the
   * ChangeInfo entirely once both of its holders have been cleared.
   */
  private void endChangeAnimation(List<ChangeInfo> infoList, ViewHolder item) {
    for (int i = infoList.size() - 1; i >= 0; i--) {
      ChangeInfo changeInfo = infoList.get(i);
      if (endChangeAnimationIfNecessary(changeInfo, item)) {
        if (changeInfo.oldHolder == null && changeInfo.newHolder == null) {
          infoList.remove(changeInfo);
        }
      }
    }
  }

  /** Ends both halves of a change animation unconditionally. */
  private void endChangeAnimationIfNecessary(ChangeInfo changeInfo) {
    if (changeInfo.oldHolder != null) {
      endChangeAnimationIfNecessary(changeInfo, changeInfo.oldHolder);
    }
    if (changeInfo.newHolder != null) {
      endChangeAnimationIfNecessary(changeInfo, changeInfo.newHolder);
    }
  }

  /**
   * If {@code item} participates in {@code changeInfo}, resets its view,
   * dispatches the finish callback, and nulls it out of the ChangeInfo.
   *
   * @return true if the item was part of this change, false otherwise
   */
  private boolean endChangeAnimationIfNecessary(ChangeInfo changeInfo, ViewHolder item) {
    boolean oldItem = false;
    if (changeInfo.newHolder == item) {
      changeInfo.newHolder = null;
    } else if (changeInfo.oldHolder == item) {
      changeInfo.oldHolder = null;
      oldItem = true;
    } else {
      return false;
    }
    ViewCompat.setAlpha(item.itemView, 1);
    ViewCompat.setTranslationX(item.itemView, 0);
    ViewCompat.setTranslationY(item.itemView, 0);
    dispatchChangeFinished(item, oldItem);
    return true;
  }

  /**
   * Cancels/finishes every pending or running animation for a single item,
   * dispatching the appropriate *Finished callbacks and cleaning the item
   * out of all bookkeeping lists.
   */
  @Override public void endAnimation(ViewHolder item) {
    final View view = item.itemView;
    // this will trigger end callback which should set properties to their target values.
    ViewCompat.animate(view).cancel();
    // TODO if some other animations are chained to end, how do we cancel them as well?
    for (int i = mPendingMoves.size() - 1; i >= 0; i--) {
      MoveInfo moveInfo = mPendingMoves.get(i);
      if (moveInfo.holder == item) {
        ViewCompat.setTranslationY(view, 0);
        ViewCompat.setTranslationX(view, 0);
        dispatchMoveFinished(item);
        mPendingMoves.remove(i);
      }
    }
    endChangeAnimation(mPendingChanges, item);
    if (mPendingRemovals.remove(item)) {
      ViewHelper.clear(item.itemView);
      dispatchRemoveFinished(item);
    }
    if (mPendingAdditions.remove(item)) {
      ViewHelper.clear(item.itemView);
      dispatchAddFinished(item);
    }

    // Also clean the item out of batches that were scheduled but have not
    // run yet (see runPendingAnimations).
    for (int i = mChangesList.size() - 1; i >= 0; i--) {
      ArrayList<ChangeInfo> changes = mChangesList.get(i);
      endChangeAnimation(changes, item);
      if (changes.isEmpty()) {
        mChangesList.remove(i);
      }
    }
    for (int i = mMovesList.size() - 1; i >= 0; i--) {
      ArrayList<MoveInfo> moves = mMovesList.get(i);
      for (int j = moves.size() - 1; j >= 0; j--) {
        MoveInfo moveInfo = moves.get(j);
        if (moveInfo.holder == item) {
          ViewCompat.setTranslationY(view, 0);
          ViewCompat.setTranslationX(view, 0);
          dispatchMoveFinished(item);
          moves.remove(j);
          if (moves.isEmpty()) {
            mMovesList.remove(i);
          }
          break;
        }
      }
    }
    for (int i = mAdditionsList.size() - 1; i >= 0; i--) {
      ArrayList<ViewHolder> additions = mAdditionsList.get(i);
      if (additions.remove(item)) {
        ViewHelper.clear(item.itemView);
        dispatchAddFinished(item);
        if (additions.isEmpty()) {
          mAdditionsList.remove(i);
        }
      }
    }

    // animations should be ended by the cancel above.
    if (mRemoveAnimations.remove(item) && DEBUG) {
      throw new IllegalStateException(
          "after animation is cancelled, item should not be in " + "mRemoveAnimations list");
    }

    if (mAddAnimations.remove(item) && DEBUG) {
      throw new IllegalStateException(
          "after animation is cancelled, item should not be in " + "mAddAnimations list");
    }

    if (mChangeAnimations.remove(item) && DEBUG) {
      throw new IllegalStateException(
          "after animation is cancelled, item should not be in " + "mChangeAnimations list");
    }

    if (mMoveAnimations.remove(item) && DEBUG) {
      throw new IllegalStateException(
          "after animation is cancelled, item should not be in " + "mMoveAnimations list");
    }
    dispatchFinishedWhenDone();
  }

  @Override public boolean isRunning() {
    // Running means: anything pending, anything batched, or anything
    // currently animating.
    return (!mPendingAdditions.isEmpty() ||
        !mPendingChanges.isEmpty() ||
        !mPendingMoves.isEmpty() ||
        !mPendingRemovals.isEmpty() ||
        !mMoveAnimations.isEmpty() ||
        !mRemoveAnimations.isEmpty() ||
        !mAddAnimations.isEmpty() ||
        !mChangeAnimations.isEmpty() ||
        !mMovesList.isEmpty() ||
        !mAdditionsList.isEmpty() ||
        !mChangesList.isEmpty());
  }

  /**
   * Check the state of currently pending and running animations. If there are none
   * pending/running, call #dispatchAnimationsFinished() to notify any
   * listeners.
   */
  private void dispatchFinishedWhenDone() {
    if (!isRunning()) {
      dispatchAnimationsFinished();
    }
  }

  /**
   * Immediately finishes every pending, batched, and running animation,
   * resetting views and dispatching all finish callbacks.
   */
  @Override public void endAnimations() {
    int count = mPendingMoves.size();
    for (int i = count - 1; i >= 0; i--) {
      MoveInfo item = mPendingMoves.get(i);
      View view = item.holder.itemView;
      ViewCompat.setTranslationY(view, 0);
      ViewCompat.setTranslationX(view, 0);
      dispatchMoveFinished(item.holder);
      mPendingMoves.remove(i);
    }
    count = mPendingRemovals.size();
    for (int i = count - 1; i >= 0; i--) {
      ViewHolder item = mPendingRemovals.get(i);
      dispatchRemoveFinished(item);
      mPendingRemovals.remove(i);
    }
    count = mPendingAdditions.size();
    for (int i = count - 1; i >= 0; i--) {
      ViewHolder item = mPendingAdditions.get(i);
      ViewHelper.clear(item.itemView);
      dispatchAddFinished(item);
      mPendingAdditions.remove(i);
    }
    count = mPendingChanges.size();
    for (int i = count - 1; i >= 0; i--) {
      endChangeAnimationIfNecessary(mPendingChanges.get(i));
    }
    mPendingChanges.clear();
    if (!isRunning()) {
      return;
    }
    int listCount = mMovesList.size();
    for (int i = listCount - 1; i >= 0; i--) {
      ArrayList<MoveInfo> moves = mMovesList.get(i);
      count = moves.size();
      for (int j = count - 1; j >= 0; j--) {
        MoveInfo moveInfo = moves.get(j);
        ViewHolder item = moveInfo.holder;
        View view = item.itemView;
        ViewCompat.setTranslationY(view, 0);
        ViewCompat.setTranslationX(view, 0);
        dispatchMoveFinished(moveInfo.holder);
        moves.remove(j);
        if (moves.isEmpty()) {
          mMovesList.remove(moves);
        }
      }
    }
    listCount = mAdditionsList.size();
    for (int i = listCount - 1; i >= 0; i--) {
      ArrayList<ViewHolder> additions = mAdditionsList.get(i);
      count = additions.size();
      for (int j = count - 1; j >= 0; j--) {
        ViewHolder item = additions.get(j);
        View view = item.itemView;
        ViewCompat.setAlpha(view, 1);
        dispatchAddFinished(item);
        additions.remove(j);
        if (additions.isEmpty()) {
          mAdditionsList.remove(additions);
        }
      }
    }
    listCount = mChangesList.size();
    for (int i = listCount - 1; i >= 0; i--) {
      ArrayList<ChangeInfo> changes = mChangesList.get(i);
      count = changes.size();
      for (int j = count - 1; j >= 0; j--) {
        endChangeAnimationIfNecessary(changes.get(j));
        if (changes.isEmpty()) {
          mChangesList.remove(changes);
        }
      }
    }

    cancelAll(mRemoveAnimations);
    cancelAll(mMoveAnimations);
    cancelAll(mAddAnimations);
    cancelAll(mChangeAnimations);

    dispatchAnimationsFinished();
  }

  /** Cancels the running ViewPropertyAnimator on each holder's view. */
  void cancelAll(List<ViewHolder> viewHolders) {
    for (int i = viewHolders.size() - 1; i >= 0; i--) {
      ViewCompat.animate(viewHolders.get(i).itemView).cancel();
    }
  }

  /** No-op adapter so listeners can override only the callbacks they need. */
  private static class VpaListenerAdapter implements ViewPropertyAnimatorListener {
    @Override public void onAnimationStart(View view) {
    }

    @Override public void onAnimationEnd(View view) {
    }

    @Override public void onAnimationCancel(View view) {
    }
  }

  /**
   * Default listener for add animations: clears view state on cancel/end and
   * maintains the {@link #mAddAnimations} bookkeeping.
   */
  protected class DefaultAddVpaListener extends VpaListenerAdapter {

    RecyclerView.ViewHolder mViewHolder;

    public DefaultAddVpaListener(final RecyclerView.ViewHolder holder) {
      mViewHolder = holder;
    }

    @Override public void onAnimationStart(View view) {
      dispatchAddStarting(mViewHolder);
    }

    @Override public void onAnimationCancel(View view) {
      ViewHelper.clear(view);
    }

    @Override public void onAnimationEnd(View view) {
      ViewHelper.clear(view);
      dispatchAddFinished(mViewHolder);
      mAddAnimations.remove(mViewHolder);
      dispatchFinishedWhenDone();
    }
  }

  /**
   * Default listener for remove animations: clears view state on cancel/end
   * and maintains the {@link #mRemoveAnimations} bookkeeping.
   */
  protected class DefaultRemoveVpaListener extends VpaListenerAdapter {

    RecyclerView.ViewHolder mViewHolder;

    public DefaultRemoveVpaListener(final RecyclerView.ViewHolder holder) {
      mViewHolder = holder;
    }

    @Override public void onAnimationStart(View view) {
      dispatchRemoveStarting(mViewHolder);
    }

    @Override public void onAnimationCancel(View view) {
      ViewHelper.clear(view);
    }

    @Override public void onAnimationEnd(View view) {
      ViewHelper.clear(view);
      dispatchRemoveFinished(mViewHolder);
      mRemoveAnimations.remove(mViewHolder);
      dispatchFinishedWhenDone();
    }
  }
}
/* * Copyright 2014 MovingBlocks * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.terasology.engine.paths; import com.google.common.collect.ImmutableList; import org.lwjgl.LWJGLUtil; import org.terasology.engine.paths.windows.SavedGamesPathFinder; import javax.swing.*; import java.io.File; import java.io.IOException; import java.net.URISyntaxException; import java.net.URL; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.List; /** * @author Immortius */ public final class PathManager { public static final String TERASOLOGY_FOLDER_NAME = "Terasology"; public static final Path LINUX_HOME_SUBPATH = Paths.get(".local", "share", "terasology"); private static final String SAVED_GAMES_DIR = "saves"; private static final String LOG_DIR = "logs"; private static final String SHADER_LOG_DIR = "shaders"; private static final String MOD_DIR = "modules"; private static final String SCREENSHOT_DIR = "screenshots"; private static final String NATIVES_DIR = "natives"; private static PathManager instance; private Path installPath; private Path homePath; private Path savesPath; private Path logPath; private Path shaderLogPath; private Path currentWorldPath; private ImmutableList<Path> modPaths = ImmutableList.of(); private Path screenshotPath; private Path nativesPath; private PathManager() { // By default, the path should be the code location (where terasology.jar is) try { URL urlToSource = 
PathManager.class.getProtectionDomain().getCodeSource().getLocation(); Path codeLocation = Paths.get(urlToSource.toURI()); System.out.println("codeLocation: " + codeLocation); if (Files.isRegularFile(codeLocation)) { installPath = findNativesHome(codeLocation.getParent(), 5); if (installPath == null) { System.out.println("Failed to find the natives dir - unable to launch!"); throw new RuntimeException("Failed to find natives from .jar launch"); } } } catch (URISyntaxException e) { // Can't use logger, because logger not set up when PathManager is used. System.out.println("Failed to convert code location to uri"); } // We might be running from an IDE which can cause the installPath to be null. Try current working directory. if (installPath == null) { installPath = Paths.get("").toAbsolutePath(); System.out.println("installPath was null, running from IDE or headless server? Setting to: " + installPath); installPath = findNativesHome(installPath, 5); if (installPath == null) { System.out.println("Failed to find the natives dir - unable to launch!"); throw new RuntimeException("Failed to find natives from likely IDE launch"); } } homePath = installPath; } /** * Searches for a parent directory containing the natives directory * * @param startPath path to start from * @param maxDepth max directory levels to search * @return the adjusted path containing the natives directory or null if not found */ private Path findNativesHome(Path startPath, int maxDepth) { int levelsToSearch = maxDepth; Path checkedPath = startPath; while (levelsToSearch > 0) { File dirToTest = new File(checkedPath.toFile(), NATIVES_DIR); if (dirToTest.exists()) { System.out.println("Found the natives dir: " + dirToTest); return checkedPath; } checkedPath = checkedPath.getParent(); if (checkedPath.equals(startPath.getRoot())) { System.out.println("Uh oh, reached the root path, giving up"); return null; } levelsToSearch--; } System.out.println("Failed to find the natives dir within " + maxDepth + " 
levels of " + startPath); return null; } public static PathManager getInstance() { if (instance == null) { instance = new PathManager(); } return instance; } public void useOverrideHomePath(Path rootPath) throws IOException { this.homePath = rootPath; updateDirs(); } public void useDefaultHomePath() throws IOException { switch (LWJGLUtil.getPlatform()) { case LWJGLUtil.PLATFORM_LINUX: homePath = Paths.get(System.getProperty("user.home")).resolve(LINUX_HOME_SUBPATH); break; case LWJGLUtil.PLATFORM_MACOSX: homePath = Paths.get(System.getProperty("user.home"), "Library", "Application Support", TERASOLOGY_FOLDER_NAME); break; case LWJGLUtil.PLATFORM_WINDOWS: String savedGamesPath = SavedGamesPathFinder.findSavedGamesPath(); if (savedGamesPath == null) { savedGamesPath = SavedGamesPathFinder.findDocumentsPath(); } Path rawPath; if (savedGamesPath != null) { rawPath = Paths.get(savedGamesPath); } else { rawPath = new JFileChooser().getFileSystemView().getDefaultDirectory().toPath(); } homePath = rawPath.resolve(TERASOLOGY_FOLDER_NAME); break; default: homePath = Paths.get(System.getProperty("user.home")).resolve(LINUX_HOME_SUBPATH); break; } updateDirs(); } public Path getHomePath() { return homePath; } public Path getInstallPath() { return installPath; } public Path getSavesPath() { return savesPath; } public Path getLogPath() { return logPath; } public Path getShaderLogPath() { return shaderLogPath; } public List<Path> getModulePaths() { return modPaths; } public Path getScreenshotPath() { return screenshotPath; } public Path getNativesPath() { return nativesPath; } private void updateDirs() throws IOException { Files.createDirectories(homePath); savesPath = homePath.resolve(SAVED_GAMES_DIR); Files.createDirectories(savesPath); logPath = homePath.resolve(LOG_DIR); Files.createDirectories(logPath); shaderLogPath = logPath.resolve(SHADER_LOG_DIR); Files.createDirectories(shaderLogPath); Path homeModPath = homePath.resolve(MOD_DIR); Files.createDirectories(homeModPath); 
Path installModPath = installPath.resolve(MOD_DIR); Files.createDirectories(installModPath); if (Files.isSameFile(homeModPath, installModPath)) { modPaths = ImmutableList.of(homeModPath); } else { modPaths = ImmutableList.of(installModPath, homeModPath); } screenshotPath = homePath.resolve(SCREENSHOT_DIR); Files.createDirectories(screenshotPath); nativesPath = installPath.resolve(NATIVES_DIR); if (currentWorldPath == null) { currentWorldPath = homePath; } } public Path getHomeModPath() { return modPaths.get(0); } public Path getSavePath(String title) { return savesPath.resolve(title.replaceAll("[^A-Za-z0-9-_ ]", "")); } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.prestosql.plugin.hive.coercions; import io.airlift.slice.Slice; import io.prestosql.spi.block.Block; import io.prestosql.spi.block.BlockBuilder; import io.prestosql.spi.type.DecimalType; import io.prestosql.spi.type.DoubleType; import io.prestosql.spi.type.RealType; import java.util.function.Function; import static io.prestosql.spi.type.DecimalConversions.doubleToLongDecimal; import static io.prestosql.spi.type.DecimalConversions.doubleToShortDecimal; import static io.prestosql.spi.type.DecimalConversions.longDecimalToDouble; import static io.prestosql.spi.type.DecimalConversions.longDecimalToReal; import static io.prestosql.spi.type.DecimalConversions.longToLongCast; import static io.prestosql.spi.type.DecimalConversions.longToShortCast; import static io.prestosql.spi.type.DecimalConversions.realToLongDecimal; import static io.prestosql.spi.type.DecimalConversions.realToShortDecimal; import static io.prestosql.spi.type.DecimalConversions.shortDecimalToDouble; import static io.prestosql.spi.type.DecimalConversions.shortDecimalToReal; import static io.prestosql.spi.type.DecimalConversions.shortToLongCast; import static io.prestosql.spi.type.DecimalConversions.shortToShortCast; import static io.prestosql.spi.type.Decimals.longTenToNth; import static io.prestosql.spi.type.DoubleType.DOUBLE; import static io.prestosql.spi.type.RealType.REAL; public final class DecimalCoercers { private DecimalCoercers() {} public static 
Function<Block, Block> createDecimalToDecimalCoercer(DecimalType fromType, DecimalType toType) { if (fromType.isShort()) { if (toType.isShort()) { return new ShortDecimalToShortDecimalCoercer(fromType, toType); } else { return new ShortDecimalToLongDecimalCoercer(fromType, toType); } } else { if (toType.isShort()) { return new LongDecimalToShortDecimalCoercer(fromType, toType); } else { return new LongDecimalToLongDecimalCoercer(fromType, toType); } } } private static class ShortDecimalToShortDecimalCoercer extends TypeCoercer<DecimalType, DecimalType> { private final long rescale; public ShortDecimalToShortDecimalCoercer(DecimalType fromType, DecimalType toType) { super(fromType, toType); rescale = longTenToNth(Math.abs(toType.getScale() - fromType.getScale())); } @Override protected void applyCoercedValue(BlockBuilder blockBuilder, Block block, int position) { long returnValue = shortToShortCast(fromType.getLong(block, position), fromType.getPrecision(), fromType.getScale(), toType.getPrecision(), toType.getScale(), rescale, rescale / 2); toType.writeLong(blockBuilder, returnValue); } } private static class ShortDecimalToLongDecimalCoercer extends TypeCoercer<DecimalType, DecimalType> { public ShortDecimalToLongDecimalCoercer(DecimalType fromType, DecimalType toType) { super(fromType, toType); } @Override protected void applyCoercedValue(BlockBuilder blockBuilder, Block block, int position) { Slice coercedValue = shortToLongCast(fromType.getLong(block, position), fromType.getPrecision(), fromType.getScale(), toType.getPrecision(), toType.getScale()); toType.writeSlice(blockBuilder, coercedValue); } } private static class LongDecimalToShortDecimalCoercer extends TypeCoercer<DecimalType, DecimalType> { public LongDecimalToShortDecimalCoercer(DecimalType fromType, DecimalType toType) { super(fromType, toType); } @Override protected void applyCoercedValue(BlockBuilder blockBuilder, Block block, int position) { long returnValue = 
longToShortCast(fromType.getSlice(block, position), fromType.getPrecision(), fromType.getScale(), toType.getPrecision(), toType.getScale()); toType.writeLong(blockBuilder, returnValue); } } private static class LongDecimalToLongDecimalCoercer extends TypeCoercer<DecimalType, DecimalType> { public LongDecimalToLongDecimalCoercer(DecimalType fromType, DecimalType toType) { super(fromType, toType); } @Override protected void applyCoercedValue(BlockBuilder blockBuilder, Block block, int position) { Slice coercedValue = longToLongCast(fromType.getSlice(block, position), fromType.getPrecision(), fromType.getScale(), toType.getPrecision(), toType.getScale()); toType.writeSlice(blockBuilder, coercedValue); } } public static Function<Block, Block> createDecimalToDoubleCoercer(DecimalType fromType) { if (fromType.isShort()) { return new ShortDecimalToDoubleCoercer(fromType); } else { return new LongDecimalToDoubleCoercer(fromType); } } private static class ShortDecimalToDoubleCoercer extends TypeCoercer<DecimalType, DoubleType> { private final long rescale; public ShortDecimalToDoubleCoercer(DecimalType fromType) { super(fromType, DOUBLE); rescale = longTenToNth(fromType.getScale()); } @Override protected void applyCoercedValue(BlockBuilder blockBuilder, Block block, int position) { toType.writeDouble(blockBuilder, shortDecimalToDouble(fromType.getLong(block, position), rescale)); } } private static class LongDecimalToDoubleCoercer extends TypeCoercer<DecimalType, DoubleType> { public LongDecimalToDoubleCoercer(DecimalType fromType) { super(fromType, DOUBLE); } @Override protected void applyCoercedValue(BlockBuilder blockBuilder, Block block, int position) { toType.writeDouble(blockBuilder, longDecimalToDouble(fromType.getSlice(block, position), fromType.getScale())); } } public static Function<Block, Block> createDecimalToRealCoercer(DecimalType fromType) { if (fromType.isShort()) { return new ShortDecimalToRealCoercer(fromType); } else { return new 
LongDecimalToRealCoercer(fromType); } } private static class ShortDecimalToRealCoercer extends TypeCoercer<DecimalType, RealType> { private final long rescale; public ShortDecimalToRealCoercer(DecimalType fromType) { super(fromType, REAL); rescale = longTenToNth(fromType.getScale()); } @Override protected void applyCoercedValue(BlockBuilder blockBuilder, Block block, int position) { toType.writeLong(blockBuilder, shortDecimalToReal(fromType.getLong(block, position), rescale)); } } private static class LongDecimalToRealCoercer extends TypeCoercer<DecimalType, RealType> { public LongDecimalToRealCoercer(DecimalType fromType) { super(fromType, REAL); } @Override protected void applyCoercedValue(BlockBuilder blockBuilder, Block block, int position) { toType.writeLong(blockBuilder, longDecimalToReal(fromType.getSlice(block, position), fromType.getScale())); } } public static Function<Block, Block> createDoubleToDecimalCoercer(DecimalType toType) { if (toType.isShort()) { return new DoubleToShortDecimalCoercer(toType); } else { return new DoubleToLongDecimalCoercer(toType); } } private static class DoubleToShortDecimalCoercer extends TypeCoercer<DoubleType, DecimalType> { public DoubleToShortDecimalCoercer(DecimalType toType) { super(DOUBLE, toType); } @Override protected void applyCoercedValue(BlockBuilder blockBuilder, Block block, int position) { toType.writeLong(blockBuilder, doubleToShortDecimal(fromType.getDouble(block, position), toType.getPrecision(), toType.getScale())); } } private static class DoubleToLongDecimalCoercer extends TypeCoercer<DoubleType, DecimalType> { public DoubleToLongDecimalCoercer(DecimalType toType) { super(DOUBLE, toType); } @Override protected void applyCoercedValue(BlockBuilder blockBuilder, Block block, int position) { toType.writeSlice(blockBuilder, doubleToLongDecimal(fromType.getDouble(block, position), toType.getPrecision(), toType.getScale())); } } public static Function<Block, Block> createRealToDecimalCoercer(DecimalType toType) 
{ if (toType.isShort()) { return new RealToShortDecimalCoercer(toType); } else { return new RealToLongDecimalCoercer(toType); } } private static class RealToShortDecimalCoercer extends TypeCoercer<RealType, DecimalType> { public RealToShortDecimalCoercer(DecimalType toType) { super(REAL, toType); } @Override protected void applyCoercedValue(BlockBuilder blockBuilder, Block block, int position) { toType.writeLong(blockBuilder, realToShortDecimal(fromType.getLong(block, position), toType.getPrecision(), toType.getScale())); } } private static class RealToLongDecimalCoercer extends TypeCoercer<RealType, DecimalType> { public RealToLongDecimalCoercer(DecimalType toType) { super(REAL, toType); } @Override protected void applyCoercedValue(BlockBuilder blockBuilder, Block block, int position) { toType.writeSlice(blockBuilder, realToLongDecimal(fromType.getLong(block, position), toType.getPrecision(), toType.getScale())); } } }
package com.wordpress.tipsforjava.swing.table; import java.awt.*; import java.awt.event.*; import java.util.List; import java.util.*; import javax.swing.*; import javax.swing.table.*; import javax.swing.text.*; /** * The RXTable provides some extensions to the default JTable * * 1) Select All editing - when a text related cell is placed in editing mode * the text is selected. Controlled by invoking a "setSelectAll..." method. * * 2) reorderColumns - static convenience method for reodering table columns * * @author Rob Camick * @author Darryl Burke */ public class RXTable extends JTable { private boolean isSelectAllForMouseEvent = false; private boolean isSelectAllForActionEvent = false; private boolean isSelectAllForKeyEvent = false; // // Constructors // /** * Constructs a default <code>RXTable</code> that is initialized with a default * data model, a default column model, and a default selection * model. */ public RXTable() { this(null, null, null); } /** * Constructs a <code>RXTable</code> that is initialized with * <code>dm</code> as the data model, a default column model, * and a default selection model. * * @param dm the data model for the table */ public RXTable(TableModel dm) { this(dm, null, null); } /** * Constructs a <code>RXTable</code> that is initialized with * <code>dm</code> as the data model, <code>cm</code> * as the column model, and a default selection model. * * @param dm the data model for the table * @param cm the column model for the table */ public RXTable(TableModel dm, TableColumnModel cm) { this(dm, cm, null); } /** * Constructs a <code>RXTable</code> that is initialized with * <code>dm</code> as the data model, <code>cm</code> as the * column model, and <code>sm</code> as the selection model. * If any of the parameters are <code>null</code> this method * will initialize the table with the corresponding default model. 
* The <code>autoCreateColumnsFromModel</code> flag is set to false * if <code>cm</code> is non-null, otherwise it is set to true * and the column model is populated with suitable * <code>TableColumns</code> for the columns in <code>dm</code>. * * @param dm the data model for the table * @param cm the column model for the table * @param sm the row selection model for the table */ public RXTable(TableModel dm, TableColumnModel cm, ListSelectionModel sm) { super(dm, cm, sm); } /** * Constructs a <code>RXTable</code> with <code>numRows</code> * and <code>numColumns</code> of empty cells using * <code>DefaultTableModel</code>. The columns will have * names of the form "A", "B", "C", etc. * * @param numRows the number of rows the table holds * @param numColumns the number of columns the table holds */ public RXTable(int numRows, int numColumns) { this(new DefaultTableModel(numRows, numColumns)); } /** * Constructs a <code>RXTable</code> to display the values in the * <code>Vector</code> of <code>Vectors</code>, <code>rowData</code>, * with column names, <code>columnNames</code>. The * <code>Vectors</code> contained in <code>rowData</code> * should contain the values for that row. In other words, * the value of the cell at row 1, column 5 can be obtained * with the following code: * <p> * <pre>((Vector)rowData.elementAt(1)).elementAt(5);</pre> * <p> * @param rowData the data for the new table * @param columnNames names of each column */ public RXTable(Vector rowData, Vector columnNames) { this(new DefaultTableModel(rowData, columnNames)); } /** * Constructs a <code>RXTable</code> to display the values in the two dimensional array, * <code>rowData</code>, with column names, <code>columnNames</code>. * <code>rowData</code> is an array of rows, so the value of the cell at row 1, * column 5 can be obtained with the following code: * <p> * <pre> rowData[1][5]; </pre> * <p> * All rows must be of the same length as <code>columnNames</code>. 
* <p> * @param rowData the data for the new table * @param columnNames names of each column */ public RXTable(final Object[][] rowData, final Object[] columnNames) { super(rowData, columnNames); } // // Overridden methods // /* * Override to provide Select All editing functionality */ public boolean editCellAt(int row, int column, EventObject e) { boolean result = super.editCellAt(row, column, e); if (isSelectAllForMouseEvent || isSelectAllForActionEvent || isSelectAllForKeyEvent) { selectAll(e); } return result; } /* * Select the text when editing on a text related cell is started */ private void selectAll(EventObject e) { final Component editor = getEditorComponent(); if (editor == null || ! (editor instanceof JTextComponent)) return; if (e == null) { ((JTextComponent)editor).selectAll(); return; } // Typing in the cell was used to activate the editor if (e instanceof KeyEvent && isSelectAllForKeyEvent) { ((JTextComponent)editor).selectAll(); return; } // F2 was used to activate the editor if (e instanceof ActionEvent && isSelectAllForActionEvent) { ((JTextComponent)editor).selectAll(); return; } // A mouse click was used to activate the editor. 
// Generally this is a double click and the second mouse click is // passed to the editor which would remove the text selection unless // we use the invokeLater() if (e instanceof MouseEvent && isSelectAllForMouseEvent) { SwingUtilities.invokeLater(new Runnable() { public void run() { ((JTextComponent)editor).selectAll(); } }); } } // // Newly added methods // /* * Sets the Select All property for for all event types */ public void setSelectAllForEdit(boolean isSelectAllForEdit) { setSelectAllForMouseEvent( isSelectAllForEdit ); setSelectAllForActionEvent( isSelectAllForEdit ); setSelectAllForKeyEvent( isSelectAllForEdit ); } /* * Set the Select All property when editing is invoked by the mouse */ public void setSelectAllForMouseEvent(boolean isSelectAllForMouseEvent) { this.isSelectAllForMouseEvent = isSelectAllForMouseEvent; } /* * Set the Select All property when editing is invoked by the "F2" key */ public void setSelectAllForActionEvent(boolean isSelectAllForActionEvent) { this.isSelectAllForActionEvent = isSelectAllForActionEvent; } /* * Set the Select All property when editing is invoked by * typing directly into the cell */ public void setSelectAllForKeyEvent(boolean isSelectAllForKeyEvent) { this.isSelectAllForKeyEvent = isSelectAllForKeyEvent; } // // Static, convenience methods // /** * Convenience method to order the table columns of a table. The columns * are ordered based on the column names specified in the array. If the * column name is not found then no column is moved. This means you can * specify a null value to preserve the current order of a given column. * * @param table the table containing the columns to be sorted * @param columnNames an array containing the column names in the * order they should be displayed */ public static void reorderColumns(JTable table, Object... 
columnNames) { TableColumnModel model = table.getColumnModel(); for (int newIndex = 0; newIndex < columnNames.length; newIndex++) { try { Object columnName = columnNames[newIndex]; int index = model.getColumnIndex(columnName); model.moveColumn(index, newIndex); } catch(IllegalArgumentException e) {} } } } // End of Class RXTable
package com.gentics.mesh.core.rest; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.Stream; import com.gentics.mesh.Mesh; import com.gentics.mesh.core.rest.event.MeshEventModel; import com.gentics.mesh.core.rest.event.branch.BranchMeshEventModel; import com.gentics.mesh.core.rest.event.branch.BranchMicroschemaAssignModel; import com.gentics.mesh.core.rest.event.branch.BranchSchemaAssignEventModel; import com.gentics.mesh.core.rest.event.branch.BranchTaggedEventModel; import com.gentics.mesh.core.rest.event.group.GroupRoleAssignModel; import com.gentics.mesh.core.rest.event.group.GroupUserAssignModel; import com.gentics.mesh.core.rest.event.impl.MeshElementEventModelImpl; import com.gentics.mesh.core.rest.event.job.JobEventModel; import com.gentics.mesh.core.rest.event.job.ProjectVersionPurgeEventModel; import com.gentics.mesh.core.rest.event.migration.BranchMigrationMeshEventModel; import com.gentics.mesh.core.rest.event.migration.MicroschemaMigrationMeshEventModel; import com.gentics.mesh.core.rest.event.migration.SchemaMigrationMeshEventModel; import com.gentics.mesh.core.rest.event.node.NodeMeshEventModel; import com.gentics.mesh.core.rest.event.node.NodeMovedEventModel; import com.gentics.mesh.core.rest.event.node.NodeTaggedEventModel; import com.gentics.mesh.core.rest.event.project.ProjectBranchEventModel; import com.gentics.mesh.core.rest.event.project.ProjectMicroschemaEventModel; import com.gentics.mesh.core.rest.event.project.ProjectSchemaEventModel; import com.gentics.mesh.core.rest.event.role.PermissionChangedEventModel; import com.gentics.mesh.core.rest.event.s3binary.S3BinaryEventModel; import com.gentics.mesh.core.rest.event.search.SearchIndexSyncEventModel; import com.gentics.mesh.core.rest.event.tag.TagMeshEventModel; import 
com.gentics.mesh.core.rest.event.tagfamily.TagFamilyMeshEventModel; import com.gentics.mesh.etc.config.MeshOptions; import io.reactivex.Completable; import io.reactivex.functions.Action; import io.vertx.core.Vertx; import io.vertx.core.eventbus.EventBus; import io.vertx.core.eventbus.MessageConsumer; /** * Central list of used eventbus addresses. */ public enum MeshEvent { PROJECT_VERSION_PURGE_START("mesh.project.version_purge.start", ProjectVersionPurgeEventModel.class, "Emitted once a version purge job starts", Examples::versionPurgeEvent), PROJECT_VERSION_PURGE_FINISHED("mesh.project.version_purge.finished", ProjectVersionPurgeEventModel.class, "Emitted once a version purge job finishes successully or failed", Examples::versionPurgeEvent), /** * Schema migration start event. */ SCHEMA_MIGRATION_START("mesh.schema.migration.start", SchemaMigrationMeshEventModel.class, "Emitted once a schema migration starts.", Examples::schemaMigrationEvent), /** * Schema migration finished event (contains status information) */ SCHEMA_MIGRATION_FINISHED("mesh.schema.migration.finished", SchemaMigrationMeshEventModel.class, "Emitted once the migration finishes successful or failed.", Examples::schemaMigrationEvent), /** * Event which is send once the schema gets assigned to a branch. */ SCHEMA_BRANCH_ASSIGN("mesh.schema-branch.assign", BranchSchemaAssignEventModel.class, "Emitted once a schema has been assigned to a branch.", Examples::schemaBranchAssignEvent), /** * Event which is send once the schema gets unassigned from a branch. */ SCHEMA_BRANCH_UNASSIGN("mesh.schema-branch.unassign", BranchSchemaAssignEventModel.class, "Emitted once a schema has been unassigned from a branch.", Examples::schemaBranchAssignEvent), /** * Event which is send once the microschema gets assigned to a branch. 
*/ MICROSCHEMA_BRANCH_ASSIGN("mesh.microschema-branch.assign", BranchMicroschemaAssignModel.class, "Emitted once a microschema gets assigned to a branch.", Examples::microschemaBranchAssignEvent), /** * Event which is send once the microschema gets unassigned from a branch. */ MICROSCHEMA_BRANCH_UNASSIGN("mesh.microschema-branch.unassign", BranchMicroschemaAssignModel.class, "Emitted once a microschema gets unassigned from a branch.", Examples::microschemaBranchAssignEvent), /** * Microschema migration start event. */ MICROSCHEMA_MIGRATION_START("mesh.microschema.migration.start", MicroschemaMigrationMeshEventModel.class, "Emitted when a microschema migration starts.", Examples::microschemaMigrationEvent), /** * Microschema migration finished event. */ MICROSCHEMA_MIGRATION_FINISHED("mesh.microschema.migration.finished", MicroschemaMigrationMeshEventModel.class, "Emitted when a microschema migration finishes.", Examples::microschemaMigrationEvent), /** * Branch migration start event. */ BRANCH_MIGRATION_START("mesh.branch.migration.start", BranchMigrationMeshEventModel.class, "Emitted when a branch migration job starts.", Examples::branchMigrationEvent), /** * Branch migration finished event. */ BRANCH_MIGRATION_FINISHED("mesh.branch.migration.finished", BranchMigrationMeshEventModel.class, "Emitted when a branch migration job finishes.", Examples::branchMigrationEvent), /** * Event which is send once the mesh instance is fully started and ready to accept requests. */ STARTUP("mesh.startup-complete", null, "Emitted once the Gentics Mesh instance is fully started and ready to accept requests."), /** * Address for handler which will process registered job. */ JOB_WORKER_ADDRESS("job.worker", null, "Event which will trigger job processing."), /** * Event which is send once a new node is joining the cluster. 
*/ CLUSTER_NODE_JOINING("mesh.cluster.node.joining", null, "Emitted when a node joins the cluster."), /** * Event which is send once a node finished joining the cluster. */ CLUSTER_NODE_JOINED("mesh.cluster.node.joined", null, "Emitted when a node joined the cluster."), /** * Event which is send once a node is about to leave the cluster. */ CLUSTER_NODE_LEAVING("mesh.cluster.node.leaving", null, "Emitted when a node is leaving the cluster."), /** * Event which is send once a node left the cluster. */ CLUSTER_NODE_LEFT("mesh.cluster.node.left", null, "Emitted when a cluster node left the cluster."), /** * Event which is send once the database status (offline, online, not_available, backup, synchronizing) changes. */ CLUSTER_DATABASE_CHANGE_STATUS("mesh.cluster.db.status", null, "Emitted when the database status changes. (e.g. offline, online, backup, syncing)"), /** * Event which is send to update the permission stores. */ CLEAR_PERMISSION_STORE("mesh.clear-permission-store", null, "Event which will clear the permission stores."), /** * Event which is send to update the webroot path stores. 
*/ CLEAR_PATH_STORE("mesh.clear-path-store", null, "Event which will clear the path stores."), /* User */ USER_CREATED("mesh.user.created", MeshElementEventModelImpl.class, "Emitted when a user was created.", Examples::userEvent), USER_UPDATED("mesh.user.updated", MeshElementEventModelImpl.class, "Emitted when a user was updated.", Examples::userEvent), USER_DELETED("mesh.user.deleted", MeshElementEventModelImpl.class, "Emitted when a user was deleted.", Examples::userEvent), /* Group */ GROUP_CREATED("mesh.group.created", MeshElementEventModelImpl.class, "Emitted when a group was created.", Examples::groupEvent), GROUP_UPDATED("mesh.group.updated", MeshElementEventModelImpl.class, "Emitted when a group was updated.", Examples::groupEvent), GROUP_DELETED("mesh.group.deleted", MeshElementEventModelImpl.class, "Emitted when a group was deleted.", Examples::groupEvent), GROUP_USER_ASSIGNED("mesh.group-user.assigned", GroupUserAssignModel.class, "Emitted when a user was assigned to a group.", Examples::groupUserAssignEvent), GROUP_USER_UNASSIGNED("mesh.group-user.unassigned", GroupUserAssignModel.class, "Emitted when a user was unassigned from a group.", Examples::groupUserAssignEvent), GROUP_ROLE_ASSIGNED("mesh.group-role.assigned", GroupRoleAssignModel.class, "Emitted when a role was assigned to a group.", Examples::groupRoleAssignEvent), GROUP_ROLE_UNASSIGNED("mesh.group-role.unassigned", GroupRoleAssignModel.class, "Emitted when a role was unassigned from a group.", Examples::groupRoleAssignEvent), /* Role */ ROLE_CREATED("mesh.role.created", MeshElementEventModelImpl.class, "Emitted when a role was created.", Examples::roleEvent), ROLE_UPDATED("mesh.role.updated", MeshElementEventModelImpl.class, "Emitted when a role was updated.", Examples::roleEvent), ROLE_DELETED("mesh.role.deleted", MeshElementEventModelImpl.class, "Emitted when a role was deleted.", Examples::roleEvent), ROLE_PERMISSIONS_CHANGED("mesh.role.permissions.changed", 
PermissionChangedEventModel.class, "Emitted when the role permissions were changed.", Examples::rolePermissionChangedEvent), /* Tag */ TAG_CREATED("mesh.tag.created", TagMeshEventModel.class, "Emitted when a tag was created.", Examples::tagEvent), TAG_UPDATED("mesh.tag.updated", TagMeshEventModel.class, "Emitted when a tag was updated.", Examples::tagEvent), TAG_DELETED("mesh.tag.deleted", TagMeshEventModel.class, "Emitted when a tag was deleted.", Examples::tagEvent), /* Tag Family */ TAG_FAMILY_CREATED("mesh.tagfamily.created", TagFamilyMeshEventModel.class, "Emitted when a tag family was created.", Examples::tagFamilyEvent), TAG_FAMILY_UPDATED("mesh.tagfamily.updated", TagFamilyMeshEventModel.class, "Emitted when a tag family was updated.", Examples::tagFamilyEvent), TAG_FAMILY_DELETED("mesh.tagfamily.deleted", TagFamilyMeshEventModel.class, "Emitted when a tag family was deleted.", Examples::tagFamilyEvent), /* Project */ PROJECT_CREATED("mesh.project.created", MeshElementEventModelImpl.class, "Emitted when a project was created.", Examples::projectEvent), PROJECT_UPDATED("mesh.project.updated", MeshElementEventModelImpl.class, "Emitted when a project was updated.", Examples::projectEvent), PROJECT_DELETED("mesh.project.deleted", MeshElementEventModelImpl.class, "Emitted when a project was deleted.", Examples::projectEvent), PROJECT_SCHEMA_ASSIGNED("mesh.project-schema.assigned", ProjectSchemaEventModel.class, "Emitted when a schema was assigned to a project."), PROJECT_SCHEMA_UNASSIGNED("mesh.project-schema.unassigned", ProjectSchemaEventModel.class, "Emitted when a schema was unassigned from a project."), PROJECT_MICROSCHEMA_ASSIGNED("mesh.project-microschema.assigned", ProjectMicroschemaEventModel.class, "Emitted when a microschema was assigned to a projec.t"), PROJECT_MICROSCHEMA_UNASSIGNED("mesh.project-microschema.unassigned", ProjectMicroschemaEventModel.class, "Emitted when a microschema was unassigned from a project."), 
PROJECT_LATEST_BRANCH_UPDATED("mesh.project-latest-branch.updated", ProjectBranchEventModel.class, "Emitted when the latest branch reference of a project was updated."), /* Node */ NODE_CREATED("mesh.node.created", NodeMeshEventModel.class, "Emitted when a node was created.", Examples::nodeEvent), NODE_UPDATED("mesh.node.updated", NodeMeshEventModel.class, "Emitted when a node was updated.", Examples::nodeEvent), NODE_DELETED("mesh.node.deleted", NodeMeshEventModel.class, "Emitted when a node was deleted.", Examples::nodeEvent), NODE_TAGGED("mesh.node.tagged", NodeTaggedEventModel.class, "Emitted when a node was tagged.", Examples::nodeTaggedEvent), NODE_UNTAGGED("mesh.node.untagged", NodeTaggedEventModel.class, "Emitted when a node was untagged.", Examples::nodeTaggedEvent), NODE_PUBLISHED("mesh.node.published", NodeMeshEventModel.class, "Emitted whena a node or node content was published.", Examples::nodeContentEvent), NODE_UNPUBLISHED("mesh.node.unpublished", NodeMeshEventModel.class, "Emitted when a node or node content was unpublished.", Examples::nodeContentEvent), NODE_MOVED("mesh.node.moved", NodeMovedEventModel.class, "Emitted when a node was moved.", Examples::nodeMovedEvent), NODE_CONTENT_DELETED("mesh.node-content.deleted", NodeMeshEventModel.class, "Emitted when a content of a node was deleted. (e.g. English language was deleted)", Examples::nodeContentEvent), NODE_CONTENT_CREATED("mesh.node-content.created", NodeMeshEventModel.class, "Emitted when a content of a node was created. (e.g. English translation was added)", Examples::nodeContentEvent), NODE_REFERENCE_UPDATED("mesh.node-reference.updated", NodeMeshEventModel.class, "Emitted when a referencing node gets indirectly updated. (e.g. 
via deleting a node in the node list of the referenced node.)", Examples::nodeEvent), /* Schema */ SCHEMA_CREATED("mesh.schema.created", MeshElementEventModelImpl.class, "Emitted when a schema was created.", Examples::schemaEvent), SCHEMA_UPDATED("mesh.schema.updated", MeshElementEventModelImpl.class, "Emitted when a schema was updated.", Examples::schemaEvent), SCHEMA_DELETED("mesh.schema.deleted", MeshElementEventModelImpl.class, "Emitted when a schema was deleted", Examples::schemaEvent), /* Microschema */ MICROSCHEMA_CREATED("mesh.microschema.created", MeshElementEventModelImpl.class, "Emitted when a microschema was created.", Examples::microschemaEvent), MICROSCHEMA_UPDATED("mesh.microschema.updated", MeshElementEventModelImpl.class, "Emitted when a microschema was updated.", Examples::microschemaEvent), MICROSCHEMA_DELETED("mesh.microschema.deleted", MeshElementEventModelImpl.class, "Emitted when a microschema was deleted.", Examples::microschemaEvent), /* Branch */ BRANCH_CREATED("mesh.branch.created", BranchMeshEventModel.class, "Emitted when a branch was created.", Examples::branchEvent), BRANCH_UPDATED("mesh.branch.updated", BranchMeshEventModel.class, "Emitted when a branch was updated.", Examples::branchEvent), BRANCH_DELETED("mesh.branch.deleted", BranchMeshEventModel.class, "Emitted when a branch was deleted.", Examples::branchEvent), BRANCH_TAGGED("mesh.branch.tagged", BranchTaggedEventModel.class, "Emitted when a branch was tagged.", Examples::branchTaggingEvent), BRANCH_UNTAGGED("mesh.branch.untagged", BranchTaggedEventModel.class, "Emitted when a branch was untagged.", Examples::branchTaggingEvent), /* Job */ JOB_CREATED("mesh.jobn.created", JobEventModel.class, "Emitted when a job was created.", Examples::jobEvent), JOB_UPDATED("mesh.job.updated", JobEventModel.class, "Emitted when a job was updated.", Examples::jobEvent), JOB_DELETED("mesh.job.deleted", JobEventModel.class, "Emitted when a job was deleted.", Examples::jobEvent), /* Search index 
related (SYNC) */ /** * Address for the handler which will process index sync requests. */ INDEX_SYNC_REQUEST("mesh.search.index.sync.request", SearchIndexSyncEventModel.class, "Event address which can be used to trigger the sync process."), /** * Emitted when an index sync process starts. */ INDEX_SYNC_START("mesh.search.index.sync.start", null, "Emitted when the index sync process starts."), /** * Address to which index sync results will be published (failed, succeeded) */ INDEX_SYNC_FINISHED("mesh.search.index.sync.finished", null, "Emitted when the index sync process finishes."), /* Search index related (CLEAR) */ /** * Event which will trigger the index clear process. */ INDEX_CLEAR_REQUEST("mesh.search.index.clear.request", null, "Event address which will trigger a index clear."), /** * Emitted when an index clear is starting. */ INDEX_CLEAR_START("mesh.search.index.clear.start", null, "Emitted when the index clear process starts."), /** * Emitted when an index clear has finished. */ INDEX_CLEAR_FINISHED("mesh.search.index.clear.finished", null, "Emitted when the index clear process finishes."), /** * Event address which will trigger an index check. */ INDEX_CHECK_REQUEST("mesh.search.index.check.request", null, "Event address which will trigger an index check."), /** * Emitted when an index check process starts. */ INDEX_CHECK_START("mesh.search.index.check.start", null, "Emitted when the index check process starts."), /** * Address to which index check results will be published (failed, succeeded) */ INDEX_CHECK_FINISHED("mesh.search.index.check.finished", null, "Emitted when the index check process finishes."), /** * Event that is emitted when the search verticle has been working and is now idle. 
*/ SEARCH_IDLE("mesh.search.process.idle", null, "Emitted when the search interation process has been working and is now in idle."), IS_SEARCH_IDLE("mesh.search.process.isidle", null, "When emitted, this event will be answered with the current idle status."), /** * Event that will cause all pending Elasticsearch requests to be sent. */ SEARCH_FLUSH_REQUEST("mesh.search.flush.request", null, "Event which will cause all pending Elasticsearch requests to be sent."), /** * Event that will cause all pending Elasticsearch requests to be sent. */ SEARCH_REFRESH_REQUEST("mesh.search.refresh.request", null, "Event which will cause all search indices to be refreshed, so that changes can be queried."), // Backup & Restore Events GRAPH_BACKUP_START("mesh.graph.backup.start", null, "Emitted once the backup process starts."), GRAPH_BACKUP_FINISHED("mesh.graph.backup.finished", null, "Emitted once the backup process finishes."), GRAPH_RESTORE_START("mesh.graph.restore.start", null, "Emitted once the restore process starts."), GRAPH_RESTORE_FINISHED("mesh.graph.restore.finished", null, "Emitted once the restore process finishes."), GRAPH_EXPORT_START("mesh.graph.export.start", null, "Emitted once the graph database export process starts."), GRAPH_EXPORT_FINISHED("mesh.graph.export.finished", null, "Emitted once the graph database export process finishes"), GRAPH_IMPORT_START("mesh.graph.import.start", null, "Emitted once the graph database import process starts."), GRAPH_IMPORT_FINISHED("mesh.graph.import.finished", null, "Emitted once the graph database import process finishes."), REPAIR_START("mesh.graph.repair.start", null, "Emitted once the repair operation is started."), REPAIR_FINISHED("mesh.graph.repair.finished", null, "Emitted once the repair operation finishes."), // Plugin Events PLUGIN_DEPLOYING("mesh.plugin.deploying", null, "Emitted once a plugin is being deployed."), PLUGIN_PRE_REGISTERED("mesh.plugin.pre-registered", null, "Emitted once a plugin has been 
pre-registered."), PLUGIN_REGISTERED("mesh.plugin.registered", null, "Emitted once a plugin has been registered."), PLUGIN_DEPLOYED("mesh.plugin.deployed", null, "Emitted once a plugin has been deployed."), PLUGIN_DEPLOY_FAILED("mesh.plugin.deploy.failed", null, "Emitted when a plugin deployment fails."), PLUGIN_UNDEPLOYING("mesh.plugin.undeploying", null, "Emitted once a plugin is being undeployed."), PLUGIN_UNDEPLOYED("mesh.plugin.undeployed", null, "Emitted once a plugin has been undeployed."), /* S3 Binary */ S3BINARY_CREATED("mesh.s3binary.created", S3BinaryEventModel.class, "Emitted when a S3 binary field was created."), S3BINARY_DELETED("mesh.s3binary.deleted", S3BinaryEventModel.class, "Emitted when a S3 binary field gets deleted."), S3BINARY_METADATA_EXTRACTED("mesh.s3binary.metadata.extracted", S3BinaryEventModel.class, "Emitted when the metadata of a S3 binary field is extracted."); public final String address; public final Class<? extends MeshEventModel> bodyModel; public final String description; private final Supplier<? extends MeshEventModel> exampleGenerator; private static final Map<String, MeshEvent> events = createEventMap(); /** * Gets the event with the given address. Returns an empty optional if the address is invalid. * * @param address * @return */ public static Optional<MeshEvent> fromAddress(String address) { return Optional.ofNullable(events.get(address)); } private static Map<String, MeshEvent> createEventMap() { return Stream.of(values()) .collect(Collectors.toMap( MeshEvent::getAddress, Function.identity())); } MeshEvent(String address, Class<? 
extends MeshEventModel> bodyModel, String description) { this(address, bodyModel, description, () -> null); } <R extends MeshEventModel> MeshEvent(String address, Class<R> bodyModel, String description, Supplier<R> exampleGenerator) { this.address = address; this.bodyModel = bodyModel; this.description = description; this.exampleGenerator = exampleGenerator; } /** * Invoke the given runnable and wait for the event. * * @param mesh * @param event * @param runnable * @return */ public static Completable doAndWaitForEvent(Mesh mesh, MeshEvent event, Action runnable) { return doAndWaitForEvent(mesh.getVertx(), event, runnable); } /** * Invoke the given runnable and wait for the event. * * @param vertx * @param event * @param runnable * @return */ public static Completable doAndWaitForEvent(Vertx vertx, MeshEvent event, Action runnable) { return Completable.create(sub -> { EventBus eventbus = vertx.eventBus(); MessageConsumer<Object> consumer = eventbus.consumer(event.address) .handler(ev -> sub.onComplete()) .exceptionHandler(sub::onError); // The handler will be invoked once the event listener is registered consumer.completionHandler(ignore -> { try { runnable.run(); } catch (Exception e) { throw new RuntimeException(e); } }); sub.setCancellable(consumer::unregister); }); } /** * Async await for the given event. * * @param mesh * @param event * @return */ public static Completable waitForEvent(Mesh mesh, MeshEvent event) { return doAndWaitForEvent(mesh, event, () -> { }); } @Override public String toString() { return address; } /** * Trigger the job processing event via the mesh server API. This is only possible in embedded mode or within plugins. * * @param mesh */ public static void triggerJobWorker(Mesh mesh) { triggerJobWorker(mesh.getVertx().eventBus(), mesh.getOptions()); } /** * Trigger the job processing event via the Vert.x API. This is only possible in embedded mode or within plugins. 
* * @param eb event bus * @param options current Mesh options */ public static void triggerJobWorker(EventBus eb, MeshOptions options) { eb.publish(JOB_WORKER_ADDRESS + options.getNodeName(), null); } /** * Returns a list of all events which are publicly exposed via the eventbus websocket bridge. * * @return */ public static List<MeshEvent> publicEvents() { List<MeshEvent> events = new ArrayList<>(); events.addAll(Arrays.asList(MeshEvent.values())); return events; } public String getAddress() { return address; } public Class<? extends MeshEventModel> getBodyModel() { return bodyModel; } public String getDescription() { return description; } /** * Return the example model for the event. * * @return */ public MeshEventModel example() { return exampleGenerator.get(); } }
package org.web3j.contracts.eip721.generated;

import io.reactivex.Flowable;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.web3j.abi.EventEncoder;
import org.web3j.abi.TypeReference;
import org.web3j.abi.datatypes.Address;
import org.web3j.abi.datatypes.Bool;
import org.web3j.abi.datatypes.Event;
import org.web3j.abi.datatypes.Function;
import org.web3j.abi.datatypes.Type;
import org.web3j.abi.datatypes.generated.Uint256;
import org.web3j.crypto.Credentials;
import org.web3j.protocol.Web3j;
import org.web3j.protocol.core.DefaultBlockParameter;
import org.web3j.protocol.core.RemoteCall;
import org.web3j.protocol.core.methods.request.EthFilter;
import org.web3j.protocol.core.methods.response.Log;
import org.web3j.protocol.core.methods.response.TransactionReceipt;
import org.web3j.tx.Contract;
import org.web3j.tx.TransactionManager;
import org.web3j.tx.gas.ContractGasProvider;

/**
 * <p>Auto generated code.
 * <p><strong>Do not modify!</strong>
 * <p>Please use the <a href="https://docs.web3j.io/command_line.html">web3j command line tools</a>,
 * or the org.web3j.codegen.SolidityFunctionWrapperGenerator in the
 * <a href="https://github.com/web3j/web3j/tree/master/codegen">codegen module</a> to update.
 *
 * <p>Generated with web3j version 4.1.1.
 */
// NOTE(review): generated EIP-721 (NFT) contract wrapper. Regenerate via the
// web3j codegen rather than editing by hand; comments below are annotations only.
public class ERC721 extends Contract {
    // No bytecode available — this wrapper can only be load()-ed against an
    // already-deployed contract, not deployed itself.
    private static final String BINARY = "Bin file was not provided";

    // Solidity function name constants used when building ABI calls.
    public static final String FUNC_GETAPPROVED = "getApproved";

    public static final String FUNC_APPROVE = "approve";

    public static final String FUNC_TRANSFERFROM = "transferFrom";

    public static final String FUNC_SAFETRANSFERFROM = "safeTransferFrom";

    public static final String FUNC_OWNEROF = "ownerOf";

    public static final String FUNC_BALANCEOF = "balanceOf";

    public static final String FUNC_SETAPPROVALFORALL = "setApprovalForAll";

    public static final String FUNC_ISAPPROVEDFORALL = "isApprovedForAll";

    // Transfer(address indexed _from, address indexed _to, uint256 indexed _tokenId)
    public static final Event TRANSFER_EVENT = new Event("Transfer", 
            Arrays.<TypeReference<?>>asList(new TypeReference<Address>(true) {}, new TypeReference<Address>(true) {}, new TypeReference<Uint256>(true) {}));
    ;  // stray empty declaration emitted by the code generator

    // Approval(address indexed _owner, address indexed _approved, uint256 indexed _tokenId)
    public static final Event APPROVAL_EVENT = new Event("Approval", 
            Arrays.<TypeReference<?>>asList(new TypeReference<Address>(true) {}, new TypeReference<Address>(true) {}, new TypeReference<Uint256>(true) {}));
    ;  // stray empty declaration emitted by the code generator

    // ApprovalForAll(address indexed _owner, address indexed _operator, bool _approved)
    public static final Event APPROVALFORALL_EVENT = new Event("ApprovalForAll", 
            Arrays.<TypeReference<?>>asList(new TypeReference<Address>(true) {}, new TypeReference<Address>(true) {}, new TypeReference<Bool>() {}));
    ;  // stray empty declaration emitted by the code generator

    @Deprecated
    protected ERC721(String contractAddress, Web3j web3j, Credentials credentials, BigInteger gasPrice, BigInteger gasLimit) {
        super(BINARY, contractAddress, web3j, credentials, gasPrice, gasLimit);
    }

    protected ERC721(String contractAddress, Web3j web3j, Credentials credentials, ContractGasProvider contractGasProvider) {
        super(BINARY, contractAddress, web3j, credentials, contractGasProvider);
    }

    @Deprecated
    protected ERC721(String contractAddress, Web3j web3j, TransactionManager transactionManager, BigInteger gasPrice, BigInteger gasLimit) {
        super(BINARY, contractAddress, web3j, transactionManager, gasPrice, gasLimit);
    }

    protected ERC721(String contractAddress, Web3j web3j, TransactionManager transactionManager, ContractGasProvider contractGasProvider) {
        super(BINARY, contractAddress, web3j, transactionManager, contractGasProvider);
    }

    // Read-only call: address approved for the given token.
    public RemoteCall<String> getApproved(BigInteger _tokenId) {
        final Function function = new Function(FUNC_GETAPPROVED, 
                Arrays.<Type>asList(new org.web3j.abi.datatypes.generated.Uint256(_tokenId)), 
                Arrays.<TypeReference<?>>asList(new TypeReference<Address>() {}));
        return executeRemoteCallSingleValueReturn(function, String.class);
    }

    // State-changing transaction; weiValue is the ether amount sent with it.
    public RemoteCall<TransactionReceipt> approve(String _approved, BigInteger _tokenId, BigInteger weiValue) {
        final Function function = new Function(
                FUNC_APPROVE, 
                Arrays.<Type>asList(new org.web3j.abi.datatypes.Address(_approved), 
                new org.web3j.abi.datatypes.generated.Uint256(_tokenId)), 
                Collections.<TypeReference<?>>emptyList());
        return executeRemoteCallTransaction(function, weiValue);
    }

    public RemoteCall<TransactionReceipt> transferFrom(String _from, String _to, BigInteger _tokenId, BigInteger weiValue) {
        final Function function = new Function(
                FUNC_TRANSFERFROM, 
                Arrays.<Type>asList(new org.web3j.abi.datatypes.Address(_from), 
                new org.web3j.abi.datatypes.Address(_to), 
                new org.web3j.abi.datatypes.generated.Uint256(_tokenId)), 
                Collections.<TypeReference<?>>emptyList());
        return executeRemoteCallTransaction(function, weiValue);
    }

    public RemoteCall<TransactionReceipt> safeTransferFrom(String _from, String _to, BigInteger _tokenId, BigInteger weiValue) {
        final Function function = new Function(
                FUNC_SAFETRANSFERFROM, 
                Arrays.<Type>asList(new org.web3j.abi.datatypes.Address(_from), 
                new org.web3j.abi.datatypes.Address(_to), 
                new org.web3j.abi.datatypes.generated.Uint256(_tokenId)), 
                Collections.<TypeReference<?>>emptyList());
        return executeRemoteCallTransaction(function, weiValue);
    }

    public RemoteCall<String> ownerOf(BigInteger _tokenId) {
        final Function function = new Function(FUNC_OWNEROF, 
                Arrays.<Type>asList(new org.web3j.abi.datatypes.generated.Uint256(_tokenId)), 
                Arrays.<TypeReference<?>>asList(new TypeReference<Address>() {}));
        return executeRemoteCallSingleValueReturn(function, String.class);
    }

    public RemoteCall<BigInteger> balanceOf(String _owner) {
        final Function function = new Function(FUNC_BALANCEOF, 
                Arrays.<Type>asList(new org.web3j.abi.datatypes.Address(_owner)), 
                Arrays.<TypeReference<?>>asList(new TypeReference<Uint256>() {}));
        return executeRemoteCallSingleValueReturn(function, BigInteger.class);
    }

    // Unlike the other transactions this overload sends no wei (non-payable).
    public RemoteCall<TransactionReceipt> setApprovalForAll(String _operator, Boolean _approved) {
        final Function function = new Function(
                FUNC_SETAPPROVALFORALL, 
                Arrays.<Type>asList(new org.web3j.abi.datatypes.Address(_operator), 
                new org.web3j.abi.datatypes.Bool(_approved)), 
                Collections.<TypeReference<?>>emptyList());
        return executeRemoteCallTransaction(function);
    }

    // safeTransferFrom overload with an extra bytes payload forwarded to the receiver.
    public RemoteCall<TransactionReceipt> safeTransferFrom(String _from, String _to, BigInteger _tokenId, byte[] data, BigInteger weiValue) {
        final Function function = new Function(
                FUNC_SAFETRANSFERFROM, 
                Arrays.<Type>asList(new org.web3j.abi.datatypes.Address(_from), 
                new org.web3j.abi.datatypes.Address(_to), 
                new org.web3j.abi.datatypes.generated.Uint256(_tokenId), 
                new org.web3j.abi.datatypes.DynamicBytes(data)), 
                Collections.<TypeReference<?>>emptyList());
        return executeRemoteCallTransaction(function, weiValue);
    }

    public RemoteCall<Boolean> isApprovedForAll(String _owner, String _operator) {
        final Function function = new Function(FUNC_ISAPPROVEDFORALL, 
                Arrays.<Type>asList(new org.web3j.abi.datatypes.Address(_owner), 
                new org.web3j.abi.datatypes.Address(_operator)), 
                Arrays.<TypeReference<?>>asList(new TypeReference<Bool>() {}));
        return executeRemoteCallSingleValueReturn(function, Boolean.class);
    }

    // Extract all Transfer events from a mined transaction receipt.
    public List<TransferEventResponse> getTransferEvents(TransactionReceipt transactionReceipt) {
        List<Contract.EventValuesWithLog> valueList = extractEventParametersWithLog(TRANSFER_EVENT, transactionReceipt);
        ArrayList<TransferEventResponse> responses = new ArrayList<TransferEventResponse>(valueList.size());
        for (Contract.EventValuesWithLog eventValues : valueList) {
            TransferEventResponse typedResponse = new TransferEventResponse();
            typedResponse.log = eventValues.getLog();
            typedResponse._from = (String) eventValues.getIndexedValues().get(0).getValue();
            typedResponse._to = (String) eventValues.getIndexedValues().get(1).getValue();
            typedResponse._tokenId = (BigInteger) eventValues.getIndexedValues().get(2).getValue();
            responses.add(typedResponse);
        }
        return responses;
    }

    // Live stream of Transfer events matching the supplied filter.
    public Flowable<TransferEventResponse> transferEventFlowable(EthFilter filter) {
        return web3j.ethLogFlowable(filter).map(new io.reactivex.functions.Function<Log, TransferEventResponse>() {
            @Override
            public TransferEventResponse apply(Log log) {
                Contract.EventValuesWithLog eventValues = extractEventParametersWithLog(TRANSFER_EVENT, log);
                TransferEventResponse typedResponse = new TransferEventResponse();
                typedResponse.log = log;
                typedResponse._from = (String) eventValues.getIndexedValues().get(0).getValue();
                typedResponse._to = (String) eventValues.getIndexedValues().get(1).getValue();
                typedResponse._tokenId = (BigInteger) eventValues.getIndexedValues().get(2).getValue();
                return typedResponse;
            }
        });
    }

    public Flowable<TransferEventResponse> transferEventFlowable(DefaultBlockParameter startBlock, DefaultBlockParameter endBlock) {
        EthFilter filter = new EthFilter(startBlock, endBlock, getContractAddress());
        filter.addSingleTopic(EventEncoder.encode(TRANSFER_EVENT));
        return transferEventFlowable(filter);
    }

    public List<ApprovalEventResponse> getApprovalEvents(TransactionReceipt transactionReceipt) {
        List<Contract.EventValuesWithLog> valueList = extractEventParametersWithLog(APPROVAL_EVENT, transactionReceipt);
        ArrayList<ApprovalEventResponse> responses = new ArrayList<ApprovalEventResponse>(valueList.size());
        for (Contract.EventValuesWithLog eventValues : valueList) {
            ApprovalEventResponse typedResponse = new ApprovalEventResponse();
            typedResponse.log = eventValues.getLog();
            typedResponse._owner = (String) eventValues.getIndexedValues().get(0).getValue();
            typedResponse._approved = (String) eventValues.getIndexedValues().get(1).getValue();
            typedResponse._tokenId = (BigInteger) eventValues.getIndexedValues().get(2).getValue();
            responses.add(typedResponse);
        }
        return responses;
    }

    public Flowable<ApprovalEventResponse> approvalEventFlowable(EthFilter filter) {
        return web3j.ethLogFlowable(filter).map(new io.reactivex.functions.Function<Log, ApprovalEventResponse>() {
            @Override
            public ApprovalEventResponse apply(Log log) {
                Contract.EventValuesWithLog eventValues = extractEventParametersWithLog(APPROVAL_EVENT, log);
                ApprovalEventResponse typedResponse = new ApprovalEventResponse();
                typedResponse.log = log;
                typedResponse._owner = (String) eventValues.getIndexedValues().get(0).getValue();
                typedResponse._approved = (String) eventValues.getIndexedValues().get(1).getValue();
                typedResponse._tokenId = (BigInteger) eventValues.getIndexedValues().get(2).getValue();
                return typedResponse;
            }
        });
    }

    public Flowable<ApprovalEventResponse> approvalEventFlowable(DefaultBlockParameter startBlock, DefaultBlockParameter endBlock) {
        EthFilter filter = new EthFilter(startBlock, endBlock, getContractAddress());
        filter.addSingleTopic(EventEncoder.encode(APPROVAL_EVENT));
        return approvalEventFlowable(filter);
    }

    public List<ApprovalForAllEventResponse> getApprovalForAllEvents(TransactionReceipt transactionReceipt) {
        List<Contract.EventValuesWithLog> valueList = extractEventParametersWithLog(APPROVALFORALL_EVENT, transactionReceipt);
        ArrayList<ApprovalForAllEventResponse> responses = new ArrayList<ApprovalForAllEventResponse>(valueList.size());
        for (Contract.EventValuesWithLog eventValues : valueList) {
            ApprovalForAllEventResponse typedResponse = new ApprovalForAllEventResponse();
            typedResponse.log = eventValues.getLog();
            typedResponse._owner = (String) eventValues.getIndexedValues().get(0).getValue();
            typedResponse._operator = (String) eventValues.getIndexedValues().get(1).getValue();
            // _approved is the only non-indexed parameter of ApprovalForAll.
            typedResponse._approved = (Boolean) eventValues.getNonIndexedValues().get(0).getValue();
            responses.add(typedResponse);
        }
        return responses;
    }

    public Flowable<ApprovalForAllEventResponse> approvalForAllEventFlowable(EthFilter filter) {
        return web3j.ethLogFlowable(filter).map(new io.reactivex.functions.Function<Log, ApprovalForAllEventResponse>() {
            @Override
            public ApprovalForAllEventResponse apply(Log log) {
                Contract.EventValuesWithLog eventValues = extractEventParametersWithLog(APPROVALFORALL_EVENT, log);
                ApprovalForAllEventResponse typedResponse = new ApprovalForAllEventResponse();
                typedResponse.log = log;
                typedResponse._owner = (String) eventValues.getIndexedValues().get(0).getValue();
                typedResponse._operator = (String) eventValues.getIndexedValues().get(1).getValue();
                typedResponse._approved = (Boolean) eventValues.getNonIndexedValues().get(0).getValue();
                return typedResponse;
            }
        });
    }

    public Flowable<ApprovalForAllEventResponse> approvalForAllEventFlowable(DefaultBlockParameter startBlock, DefaultBlockParameter endBlock) {
        EthFilter filter = new EthFilter(startBlock, endBlock, getContractAddress());
        filter.addSingleTopic(EventEncoder.encode(APPROVALFORALL_EVENT));
        return approvalForAllEventFlowable(filter);
    }

    // Static factories binding the wrapper to an already-deployed contract.
    @Deprecated
    public static ERC721 load(String contractAddress, Web3j web3j, Credentials credentials, BigInteger gasPrice, BigInteger gasLimit) {
        return new ERC721(contractAddress, web3j, credentials, gasPrice, gasLimit);
    }

    @Deprecated
    public static ERC721 load(String contractAddress, Web3j web3j, TransactionManager transactionManager, BigInteger gasPrice, BigInteger gasLimit) {
        return new ERC721(contractAddress, web3j, transactionManager, gasPrice, gasLimit);
    }

    public static ERC721 load(String contractAddress, Web3j web3j, Credentials credentials, ContractGasProvider contractGasProvider) {
        return new ERC721(contractAddress, web3j, credentials, contractGasProvider);
    }

    public static ERC721 load(String contractAddress, Web3j web3j, TransactionManager transactionManager, ContractGasProvider contractGasProvider) {
        return new ERC721(contractAddress, web3j, transactionManager, contractGasProvider);
    }

    // Plain data holders for decoded event parameters (generated style:
    // public mutable fields, no accessors).
    public static class TransferEventResponse {
        public Log log;

        public String _from;

        public String _to;

        public BigInteger _tokenId;
    }

    public static class ApprovalEventResponse {
        public Log log;

        public String _owner;

        public String _approved;

        public BigInteger _tokenId;
    }

    public static class ApprovalForAllEventResponse {
        public Log log;

        public String _owner;

        public String _operator;

        public Boolean _approved;
    }
}
/* ###
 * IP: GHIDRA
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ghidra.app.plugin.core.analysis;

import java.math.BigInteger;
import java.util.*;

import ghidra.app.cmd.data.CreateDataCmd;
import ghidra.app.cmd.label.AddLabelCmd;
import ghidra.app.plugin.core.disassembler.AddressTable;
import ghidra.framework.options.Options;
import ghidra.program.model.address.*;
import ghidra.program.model.data.*;
import ghidra.program.model.lang.*;
import ghidra.program.model.listing.*;
import ghidra.program.model.pcode.Varnode;
import ghidra.program.model.scalar.Scalar;
import ghidra.program.model.symbol.*;
import ghidra.program.util.*;
import ghidra.util.exception.*;
import ghidra.util.task.TaskMonitor;

/**
 * Constant-propagation analyzer specialized for the Motorola 68000 family.
 * Adds data references for {@code pea}/{@code lea} instructions whose computed
 * operand resolves to an address inside program memory, and (optionally, off by
 * default) attempts to recover jump/switch tables behind computed {@code jmp}s.
 */
public class Motorola68KAnalyzer extends ConstantPropagationAnalyzer {

	private static final String SWITCH_OPTION_NAME = "Switch Table Recovery";
	private static final String SWITCH_OPTION_DESCRIPTION = "Turn on to recover switch tables";
	private static final boolean SWITCH_OPTION_DEFAULT_VALUE = false;

	// Analyzer option; refreshed from Options in optionsChanged().
	private boolean recoverSwitchTables = SWITCH_OPTION_DEFAULT_VALUE;

	private final static String PROCESSOR_NAME = "68000";

	public Motorola68KAnalyzer() {
		super(PROCESSOR_NAME);
	}

	/**
	 * Only programs whose language processor is exactly "68000" are analyzed.
	 */
	@Override
	public boolean canAnalyze(Program program) {
		boolean canAnalyze = program.getLanguage().getProcessor().equals(
			Processor.findOrPossiblyCreateProcessor(PROCESSOR_NAME));

		if (!canAnalyze) {
			return false;
		}

		return true;
	}

	/**
	 * Follows all flows from flowStart, building up register/stack context, and uses
	 * that context to add data references on pea/lea instructions. Returns the set
	 * of addresses visited by the constant propagation.
	 *
	 * @param program   program being analyzed
	 * @param flowStart address at which to begin the constant flow
	 * @param flowSet   restriction set for the flow
	 * @param symEval   symbolic propagator carrying the evolving context
	 * @param monitor   cancellation/progress monitor
	 * @throws CancelledException if the user cancels
	 */
	@Override
	public AddressSetView flowConstants(final Program program, Address flowStart, AddressSetView flowSet, final SymbolicPropogator symEval, final TaskMonitor monitor) throws CancelledException {

		// follow all flows building up context
		// use context to fill out addresses on certain instructions
		ConstantPropagationContextEvaluator eval = new ConstantPropagationContextEvaluator(trustWriteMemOption) {

			@Override
			public boolean evaluateContext(VarnodeContext context, Instruction instr) {
				String mnemonic = instr.getMnemonicString();
				if (mnemonic.equals("pea")) {
					// retrieve the value pushed onto the stack
					try {
						Varnode stackValue = context.getValue(context.getStackVarnode(), this);
						Varnode value = context.getValue(stackValue, this);
						if (value != null && value.isConstant()) {
							long lval = value.getOffset();
							Address refAddr = instr.getMinAddress().getNewAddress(lval);
							// Heuristic filter: skip small values, round multiples of 1K,
							// negatives, and common all-ones/mask constants that are very
							// unlikely to be real addresses.
							if (lval <= 4096 || ((lval % 1024) == 0) || lval < 0 || lval == 0xffff ||
								lval == 0xff00 || lval == 0xffffff || lval == 0xff0000 ||
								lval == 0xff00ff || lval == 0xffffffff || lval == 0xffffff00 ||
								lval == 0xffff0000 || lval == 0xff000000) {
								return false;
							}
							if (program.getMemory().contains(refAddr)) {
								// only add a reference if the operand has none yet
								if (instr.getOperandReferences(0).length == 0) {
									instr.addOperandReference(0, refAddr, RefType.DATA,
										SourceType.ANALYSIS);
								}
							}
						}
					}
					catch (NotFoundException e) {
						// value not found doesn't matter
					}
				}
				if (mnemonic.equals("lea")) {
					Register destReg = instr.getRegister(1);
					if (destReg == null) {
						return false;
					}
					RegisterValue value = context.getRegisterValue(destReg);
					if (value != null) {
						BigInteger rval = value.getUnsignedValue();
						long lval = rval.longValue();
						Address refAddr = instr.getMinAddress().getNewAddress(lval);
						// Reference plausible memory addresses, or any PC-relative lea.
						if ((lval > 4096 || lval < 0) && program.getMemory().contains(refAddr) ||
							Arrays.asList(instr.getOpObjects(0)).contains(
								program.getRegister("PC"))) {
							if (instr.getOperandReferences(0).length == 0) {
								instr.addOperandReference(0, refAddr, RefType.DATA,
									SourceType.ANALYSIS);
							}
						}
					}
				}
				return false;
			}

			@Override
			public boolean evaluateReference(VarnodeContext context, Instruction instr, int pcodeop, Address address, int size, RefType refType) {
				// jumps are handled by evaluateDestination below
				if (instr.getFlowType().isJump()) {
					return false;
				}
				// skip instructions with more than two operands
				if (instr.getNumOperands() > 2) {
					return false;
				}
				return super.evaluateReference(context, instr, pcodeop, address, size, refType);
			}

			@Override
			public boolean evaluateDestination(VarnodeContext context, Instruction instruction) {
				String mnemonic = instruction.getMnemonicString();
				if (!instruction.getFlowType().isJump()) {
					return false;
				}
				if (mnemonic.equals("jmp")) {
					// record the destination that is unknown
					// NOTE(review): threshold of >= 4 existing references looks like a
					// heuristic for "already resolved as a table" — confirm intent.
					int numRefs = instruction.getReferencesFrom().length;
					if (numRefs >= 4) {
						destSet.addRange(instruction.getMinAddress(),
							instruction.getMinAddress());
					}
				}
				return false;
			}
		};

		AddressSet resultSet = symEval.flowConstants(flowStart, flowSet, eval, true, monitor);

		//
		// Don't do switch analysis here, let Decomp do it.  But if it is already done, mark up the data references
		//
		// TODO: This most likely does not need to be done, or should be done in a general switch recovery algorithm.
		//       Leave here for now as off.
		if (recoverSwitchTables) {
			recoverSwitches(program, symEval, eval.getDestinationSet(), monitor);
		}

		return resultSet;
	}

	// Shared with the SwitchEvaluator below; updated while probing table bounds.
	int tableSizeMax;

	/**
	 * Attempts to recover switch tables for each unresolved computed-jump location
	 * in destSet by re-running symbolic flow while assuming successive index values
	 * (0 .. tableSizeMax) for unknown registers, collecting the resulting targets.
	 */
	private void recoverSwitches(final Program program, SymbolicPropogator symEval, AddressSet destSet, TaskMonitor monitor) throws CancelledException {

		final ArrayList<CreateDataCmd> dataCmdList = new ArrayList<CreateDataCmd>();
		final ArrayList<Address> targetList = new ArrayList<Address>();

		// now handle symbolic execution assuming values!
		class SwitchEvaluator implements ContextEvaluator {
			Long assumeValue;          // index value assumed for unknown registers this pass
			boolean hitTheGuard;       // set when a conditional (the range guard) is crossed
			Address targetSwitchAddr;  // the computed jump being analyzed

			public void setGuard(boolean hitGuard) {
				hitTheGuard = hitGuard;
			}

			public void setAssume(Long assume) {
				assumeValue = assume;
			}

			public void setTargetSwitchAddr(Address addr) {
				targetSwitchAddr = addr;
			}

			@Override
			public boolean evaluateContextBefore(VarnodeContext context, Instruction instr) {
				return false;
			}

			@Override
			public boolean evaluateContext(VarnodeContext context, Instruction instr) {
				// find the cmpli to set the size of the table
				//    tableSize = size
				String mnemonic = instr.getMnemonicString();
				if (mnemonic.startsWith("cmpi")) {
					int numOps = instr.getNumOperands();
					if (numOps > 1) {
						Register reg = instr.getRegister(numOps - 1);
						if ((reg != null)) {
							Scalar scalar = instr.getScalar(numOps - 2);
							if (scalar != null) {
								// bound the table by the compare immediate (+1, inclusive range)
								int svalue = (int) scalar.getSignedValue() + 1;
								if (svalue > 0 && svalue < 128) {
									tableSizeMax = svalue;
								}
								// NOTE(review): lval is computed but never used — dead code?
								RegisterValue rval = context.getRegisterValue(reg);
								if (rval != null) {
									long lval = rval.getSignedValue().longValue();
								}
							}
						}
					}
				}
				if (instr.getFlowType().isConditional()) {
					hitTheGuard = true;
				}
				return false;
			}

			@Override
			public Address evaluateConstant(VarnodeContext context, Instruction instr, int pcodeop, Address constant, int size, RefType refType) {
				return null;
			}

			@Override
			public boolean evaluateReference(VarnodeContext context, Instruction instr, int pcodeop, Address address, int size, RefType refType) {
				// already recorded this target on an earlier assumed index
				if (targetList.contains(address)) {
					return false;
				}
				// TODO: if ever loading from instructions in memory, must EXIT!
				if (!(instr.getFlowType().isComputed() &&
					program.getMemory().contains(address))) {
					// not a computed-flow target: treat as a table-data read and queue
					// an undefined-data creation sized by the move width
					Program program = instr.getProgram();
					if (!program.getListing().isUndefined(address, address)) {
						return false;
					}
					String mnemonic = instr.getMnemonicString();
					if (mnemonic.startsWith("move")) {
						CreateDataCmd cdata = null;
						char endCh = mnemonic.charAt(mnemonic.length() - 1);
						switch (endCh) {
							case 'w':
								cdata = new CreateDataCmd(address, false, false,
									Undefined2DataType.dataType);
								break;
							case 'l':
								cdata = new CreateDataCmd(address, false, false,
									Undefined4DataType.dataType);
								break;
							case 'b':
								cdata = new CreateDataCmd(address, false, false,
									Undefined1DataType.dataType);
								break;
						}
						// don't create data on top of an existing instruction
						CodeUnit u = instr.getProgram().getListing().getInstructionContaining(address);
						if (u != null) {
							return false;
						}
						u = instr.getProgram().getListing().getCodeUnitAt(address);
						// NOTE(review): u may be null here — getCodeUnitAt can return null,
						// and u.getMinAddress() would then NPE; confirm against callers.
						if (!targetList.isEmpty() &&
							instr.getProgram().getReferenceManager().hasReferencesTo(
								u.getMinAddress())) {
							// ran off the end of the table into referenced data:
							// shrink the assumed maximum table size accordingly
							int newTableSizeMax = assumeValue.intValue();
							if (newTableSizeMax > 0 && newTableSizeMax < 128) {
								tableSizeMax = newTableSizeMax;
							}
							return false;
						}
						dataCmdList.add(cdata);
					}
					return false;
				}
				// computed-flow target inside memory: accept nearby forward targets only
				long diff = address.subtract(instr.getMinAddress());
				if ((diff > 0 && diff < (8 * 1024)) && !context.readExecutableCode()) {
					targetList.add(address);
					return false; // just go ahead and mark up the instruction
				}
				if (context.readExecutableCode() && targetList.isEmpty()) {
					context.clearReadExecutableCode();
					return false;
				}
				return false;
			}

			@Override
			public boolean evaluateDestination(VarnodeContext context, Instruction instruction) {
				// stop the flow once the target switch jump itself is reached
				return instruction.getMinAddress().equals(targetSwitchAddr);
			}

			@Override
			public Long unknownValue(VarnodeContext context, Instruction instruction, Varnode node) {
				// NOTE(review): the register lookup below has an empty body — probably
				// leftover debugging scaffolding.
				if (node.isRegister()) {
					Register reg = program.getRegister(node.getAddress());
					if (reg != null) {
					}
				}
				// substitute the currently assumed switch index for any unknown value
				return assumeValue;
			}

			@Override
			public boolean followFalseConditionalBranches() {
				return false;
			}

			@Override
			public boolean evaluateSymbolicReference(VarnodeContext context, Instruction instr, Address address) {
				return false;
			}

			@Override
			public boolean allowAccess(VarnodeContext context, Address addr) {
				return false;
			}
		}

		SwitchEvaluator switchEvaluator = new SwitchEvaluator();

		// clear past constants.  This example doesn't seem to depend on them
		symEval = new SymbolicPropogator(program);

		// now flow with the simple block of this branch....

		// for each unknown branch destination,
		AddressIterator iter = destSet.getAddresses(true);
		while (iter.hasNext() && !monitor.isCancelled()) {
			Address loc = iter.next();
			Instruction instr = program.getListing().getInstructionAt(loc);
			Address maxAddress = instr.getMaxAddress();

			// back up two instructions via fall-from to find the start of the
			// table-index computation
			Address prev = instr.getFallFrom();
			if (prev == null) {
				continue;
			}
			instr = program.getListing().getInstructionAt(prev);
			Address minAddress = instr.getMinAddress();
			prev = instr.getFallFrom();
			if (prev == null) {
				continue;
			}
			instr = program.getListing().getInstructionAt(prev);
			// include a preceding "add Dx,Dx"-style doubling in the flow range
			if (instr.getMnemonicString().startsWith("add") &&
				instr.getRegister(0).equals(instr.getRegister(1))) {
				minAddress = instr.getMinAddress();
			}
			AddressSet branchSet = new AddressSet(minAddress, maxAddress);

			// probe assumed indices; tableSizeMax may shrink as the evaluator runs
			tableSizeMax = 64;
			for (long assume = 0; assume < tableSizeMax; assume++) {
				// NOTE(review): new Long(...) is deprecated boxing; Long.valueOf(assume)
				// would be preferred (left untouched here).
				switchEvaluator.setAssume(new Long(assume));
				switchEvaluator.setGuard(false);
				switchEvaluator.setTargetSwitchAddr(loc);

				symEval.flowConstants(minAddress, branchSet, switchEvaluator, false, monitor);

				if (symEval.readExecutable()) {
					break;
				}
				// if it didn't get it after try with 0
				if (assume > 0 && targetList.size() < 1) {
					break;
				}
			}
			// re-create the function body with the newly found code
			if (targetList.size() > 1) {
				AddressTable table;
				//table = new AddressTable(loc, targetList.toArray(new Address[0]), program.getDefaultPointerSize(), 0, 0);
				//table.fixupFunctionBody(program, program.getListing().getInstructionAt(loc), monitor);
				createData(program, dataCmdList);
				//labelTable(program, loc, targetList);
			}
		}
	}

	/** Applies every queued CreateDataCmd to the program. */
	private void createData(Program program, ArrayList<CreateDataCmd> dataCommands) {
		for (Iterator<CreateDataCmd> iterator = dataCommands.iterator(); iterator.hasNext();) {
			CreateDataCmd createDataCmd = iterator.next();
			createDataCmd.applyTo(program);
		}
	}

	/**
	 * Labels each recovered switch target "case_N" inside a "switch_<addr>"
	 * namespace. Currently only invoked from commented-out code above.
	 */
	private void labelTable(Program program, Address loc, ArrayList<Address> targets) {
		Namespace space = null;
		Instruction start_inst = program.getListing().getInstructionAt(loc);
		String spaceName = "switch_" + start_inst.getMinAddress();
		try {
			space = program.getSymbolTable().createNameSpace(space, spaceName,
				SourceType.ANALYSIS);
		}
		catch (DuplicateNameException e) {
			// namespace already exists; reuse it
			space = program.getSymbolTable().getNamespace(spaceName,
				program.getGlobalNamespace());
		}
		catch (InvalidInputException e) {
			// just go with default space
		}
		int tableNumber = 0;
		for (Iterator<Address> iterator = targets.iterator(); iterator.hasNext();) {
			Address addr = iterator.next();
			AddLabelCmd lcmd = new AddLabelCmd(addr, "case_" + Long.toHexString(tableNumber),
				space, SourceType.ANALYSIS);
			tableNumber++;
			lcmd.setNamespace(space);
			lcmd.applyTo(program);
		}
	}

	/**
	 * Registers and re-reads the "Switch Table Recovery" option.
	 */
	@Override
	public void optionsChanged(Options options, Program program) {
		super.optionsChanged(options, program);

		options.registerOption(SWITCH_OPTION_NAME, recoverSwitchTables, null,
			SWITCH_OPTION_DESCRIPTION);
		recoverSwitchTables = options.getBoolean(SWITCH_OPTION_NAME, recoverSwitchTables);
	}
}
/* * Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.jet.pipeline; import com.hazelcast.function.BiFunctionEx; import com.hazelcast.function.FunctionEx; import com.hazelcast.jet.accumulator.LongAccumulator; import com.hazelcast.jet.aggregate.AggregateOperation; import com.hazelcast.jet.aggregate.AggregateOperation1; import com.hazelcast.jet.aggregate.AggregateOperations; import com.hazelcast.jet.aggregate.CoAggregateOperationBuilder; import com.hazelcast.jet.datamodel.ItemsByTag; import com.hazelcast.jet.datamodel.Tag; import com.hazelcast.jet.datamodel.Tuple2; import com.hazelcast.jet.datamodel.Tuple3; import com.hazelcast.test.annotation.ParallelJVMTest; import com.hazelcast.test.annotation.QuickTest; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.stream.Collector; import static com.hazelcast.function.ComparatorEx.comparingInt; import static com.hazelcast.jet.Util.entry; import static com.hazelcast.jet.aggregate.AggregateOperations.aggregateOperation2; import static com.hazelcast.jet.aggregate.AggregateOperations.aggregateOperation3; import static com.hazelcast.jet.aggregate.AggregateOperations.coAggregateOperationBuilder; import static com.hazelcast.jet.aggregate.AggregateOperations.maxBy; import static 
com.hazelcast.jet.datamodel.ItemsByTag.itemsByTag;
import static com.hazelcast.jet.datamodel.Tuple2.tuple2;
import static com.hazelcast.jet.datamodel.Tuple3.tuple3;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static java.util.stream.Collectors.groupingBy;
import static java.util.stream.Collectors.summingLong;
import static org.junit.Assert.assertEquals;

/**
 * Pipeline tests for the batch aggregate / co-aggregate / group-aggregate API,
 * covering 1-, 2- and 3-way aggregation with separate aggregate operations,
 * composite AggregateOperationN variants and the builder forms.
 */
@Category({QuickTest.class, ParallelJVMTest.class})
public class BatchAggregateTest extends PipelineTestSupport {

    // Renders an entry as "(key: value)" with zero-padded 4-digit numbers.
    static final FunctionEx<Entry<Integer, Long>, String> FORMAT_FN =
            e -> String.format("(%04d: %04d)", e.getKey(), e.getValue());
    // Renders a key with a 2-tuple of aggregation results.
    static final BiFunctionEx<Integer, Tuple2<Long, Long>, String> FORMAT_FN_2 =
            (key, t2) -> String.format("(%04d: %04d, %04d)", key, t2.f0(), t2.f1());
    // Renders a key with a 3-tuple of aggregation results.
    static final BiFunctionEx<Integer, Tuple3<Long, Long, Long>, String> FORMAT_FN_3 =
            (key, t3) -> String.format("(%04d: %04d, %04d, %04d)", key, t3.f0(), t3.f1(), t3.f2());

    private static final AggregateOperation1<Integer, LongAccumulator, Long> SUMMING =
            AggregateOperations.summingLong(i -> i);

    private static final int FACTOR_1 = 1_000;
    private static final int FACTOR_2 = 1_000_000;

    // Test input: the sequence 0 .. itemCount-1, refreshed before every test.
    private List<Integer> input;

    @Before
    public void before() {
        input = sequence(itemCount);
    }

    @Test
    public void aggregate() {
        // When
        BatchStage<Long> aggregated = batchStageFromInput().aggregate(SUMMING);

        // Then
        aggregated.writeTo(sink);
        execute();
        assertEquals(
                singletonList(input.stream().mapToLong(i -> i).sum()),
                new ArrayList<>(sinkList)
        );
    }

    @Test
    public void when_aggregateZeroItems_then_producesOutput() {
        // When
        BatchStage<Long> aggregated = batchStageFromList(emptyList()).aggregate(SUMMING);

        // Then: summing an empty stage still emits its neutral element (0)
        aggregated.writeTo(sink);
        execute();
        assertEquals(
                singletonList(0L),
                new ArrayList<>(sinkList)
        );
    }

    @Test
    public void when_maxOfZeroItems_then_producesNoOutput() {
        // When
        BatchStage<Integer> aggregated =
                batchStageFromList(emptyList()).aggregate(maxBy(comparingInt(i -> i)));

        // Then: maxBy has no neutral element, so nothing is emitted
        aggregated.writeTo(sink);
        execute();
        assertEquals(
                emptyList(),
                new ArrayList<>(sinkList)
        );
    }

    @Test
    public void aggregate2_withSeparateAggrOps() {
        // Given
        BatchStage<Integer> stage = batchStageFromInput();

        // When
        BatchStage<Tuple2<Long, Long>> aggregated = batchStageFromInput()
                .aggregate2(SUMMING, stage, SUMMING);

        // Then
        aggregated.writeTo(sink);
        execute();
        long expectedSum = input.stream().mapToLong(i -> i).sum();
        assertEquals(
                singletonList(tuple2(expectedSum, expectedSum)),
                new ArrayList<>(sinkList)
        );
    }

    @Test
    public void aggregate2_withAggrOp2() {
        // When
        BatchStage<Tuple2<Long, Long>> aggregated = batchStageFromInput()
                .aggregate2(batchStageFromInput(), aggregateOperation2(SUMMING, SUMMING));

        // Then
        aggregated.writeTo(sink);
        execute();
        long expectedSum = input.stream().mapToLong(i -> i).sum();
        assertEquals(
                singletonList(tuple2(expectedSum, expectedSum)),
                new ArrayList<>(sinkList)
        );
    }

    @Test
    public void aggregate2_withAggrOp2_with_finishFn() {
        // Given
        BiFunctionEx<Long, Long, Long> outputFn = (a, b) -> 10_000 * a + b;

        // When
        BatchStage<Long> aggregated = batchStageFromInput().aggregate2(
                batchStageFromInput(),
                aggregateOperation2(SUMMING, SUMMING, outputFn));

        // Then
        aggregated.writeTo(sink);
        execute();
        long expectedSum = input.stream().mapToLong(i -> i).sum();
        assertEquals(
                singletonList(outputFn.apply(expectedSum, expectedSum)),
                new ArrayList<>(sinkList)
        );
    }

    @Test
    public void aggregate3_withSeparateAggrOps() {
        // NOTE(review): this test previously called aggregate2 with a single extra
        // stage despite its name; fixed to exercise the 3-way separate-aggrOps overload.
        // Given
        BatchStage<Integer> stage1 = batchStageFromInput();
        BatchStage<Integer> stage2 = batchStageFromInput();

        // When
        BatchStage<Tuple3<Long, Long, Long>> aggregated = batchStageFromInput()
                .aggregate3(SUMMING, stage1, SUMMING, stage2, SUMMING);

        // Then
        aggregated.writeTo(sink);
        execute();
        long expectedSum = input.stream().mapToLong(i -> i).sum();
        assertEquals(
                singletonList(tuple3(expectedSum, expectedSum, expectedSum)),
                new ArrayList<>(sinkList)
        );
    }

    @Test
    public void aggregate3_withAggrOp3() {
        // Given
        BatchStage<Integer> stage1 = batchStageFromInput();
        BatchStage<Integer> stage2 = batchStageFromInput();

        // When
        BatchStage<Tuple3<Long, Long, Long>> aggregated = batchStageFromInput().aggregate3(
                stage1, stage2, aggregateOperation3(SUMMING, SUMMING, SUMMING));

        // Then
        aggregated.writeTo(sink);
        execute();
        long expectedSum = input.stream().mapToLong(i -> i).sum();
        assertEquals(
                singletonList(tuple3(expectedSum, expectedSum, expectedSum)),
                new ArrayList<>(sinkList)
        );
    }

    @Test
    public void aggregate3_withAggrOp3_withOutputFn() {
        // When
        BatchStage<Long> aggregated = batchStageFromInput().aggregate3(
                batchStageFromInput(),
                batchStageFromInput(),
                aggregateOperation3(SUMMING, SUMMING, SUMMING, (r0, r1, r2) -> r0 + r1 + r2));

        // Then
        aggregated.writeTo(sink);
        execute();
        assertEquals(
                singletonList(3 * input.stream().mapToLong(i -> i).sum()),
                new ArrayList<>(sinkList));
    }

    /** Two extra stages whose items are scaled so each contribution is distinguishable. */
    private class AggregateBuilderFixture {
        FunctionEx<Integer, Integer> mapFn1 = i -> FACTOR_1 * i;
        FunctionEx<Integer, Integer> mapFn2 = i -> FACTOR_2 * i;
        BatchStage<Integer> stage1 = batchStageFromInput().map(mapFn1);
        BatchStage<Integer> stage2 = batchStageFromInput().map(mapFn2);
    }

    @Test
    public void aggregateBuilder_withSeparateAggrOps() {
        // Given
        AggregateBuilderFixture fx = new AggregateBuilderFixture();

        // When
        AggregateBuilder<Long> b = batchStageFromInput().aggregateBuilder(SUMMING);
        Tag<Long> tag0 = b.tag0();
        Tag<Long> tag1 = b.add(fx.stage1, SUMMING);
        Tag<Long> tag2 = b.add(fx.stage2, SUMMING);
        BatchStage<ItemsByTag> aggregated = b.build();

        // Then
        aggregated.writeTo(sink);
        execute();
        long sum0 = input.stream().mapToLong(i -> i).sum();
        assertEquals(
                singletonList(itemsByTag(tag0, sum0, tag1, FACTOR_1 * sum0, tag2, FACTOR_2 * sum0)),
                new ArrayList<>(sinkList)
        );
    }

    @Test
    public void aggregateBuilder_withComplexAggrOp() {
        // Given
        AggregateBuilderFixture fx = new AggregateBuilderFixture();

        // When
        AggregateBuilder1<Integer> b = batchStageFromInput().aggregateBuilder();
        Tag<Integer> tag0_in = b.tag0();
        Tag<Integer> tag1_in = b.add(fx.stage1);
        Tag<Integer> tag2_in = b.add(fx.stage2);

        CoAggregateOperationBuilder agb = coAggregateOperationBuilder();
        Tag<Long> tag0 = agb.add(tag0_in, SUMMING);
        Tag<Long> tag1 = agb.add(tag1_in, SUMMING);
        Tag<Long> tag2 = agb.add(tag2_in, SUMMING);
        AggregateOperation<Object[], ItemsByTag> aggrOp = agb.build();

        BatchStage<ItemsByTag> aggregated = b.build(aggrOp);

        // Then
        aggregated.writeTo(sink);
        execute();
        long sum0 = input.stream().mapToLong(i -> i).sum();
        assertEquals(
                singletonList(itemsByTag(tag0, sum0, tag1, FACTOR_1 * sum0, tag2, FACTOR_2 * sum0)),
                new ArrayList<>(sinkList)
        );
    }

    @Test
    @SuppressWarnings("ConstantConditions")
    public void aggregateBuilder_withSeparateAggrOps_withOutputFn() {
        // Given
        BatchStage<Integer> stage1 = batchStageFromInput();
        BatchStage<Integer> stage2 = batchStageFromInput();

        // When
        AggregateBuilder<Long> b = batchStageFromInput().aggregateBuilder(SUMMING);
        Tag<Long> tag0 = b.tag0();
        Tag<Long> tag1 = b.add(stage1, SUMMING);
        Tag<Long> tag2 = b.add(stage2, SUMMING);
        BatchStage<Long> aggregated = b.build(ibt -> ibt.get(tag0) + ibt.get(tag1) + ibt.get(tag2));

        // Then
        aggregated.writeTo(sink);
        execute();
        assertEquals(
                singletonList(3 * input.stream().mapToLong(i -> i).sum()),
                new ArrayList<>(sinkList)
        );
    }

    @Test
    @SuppressWarnings("ConstantConditions")
    public void aggregateBuilder_with_complexAggrOp_withOutputFn() {
        // Given
        BatchStage<Integer> stage1 = batchStageFromInput();
        BatchStage<Integer> stage2 = batchStageFromInput();

        // When
        AggregateBuilder1<Integer> b = batchStageFromInput().aggregateBuilder();
        Tag<Integer> tag0_in = b.tag0();
        Tag<Integer> tag1_in = b.add(stage1);
        Tag<Integer> tag2_in = b.add(stage2);

        CoAggregateOperationBuilder agb = coAggregateOperationBuilder();
        Tag<Long> tag0 = agb.add(tag0_in, SUMMING);
        Tag<Long> tag1 = agb.add(tag1_in, SUMMING);
        Tag<Long> tag2 = agb.add(tag2_in, SUMMING);
        AggregateOperation<Object[], Long> aggrOp =
                agb.build(ibt -> ibt.get(tag0) + ibt.get(tag1) + ibt.get(tag2));

        BatchStage<Long> aggregated = b.build(aggrOp);

        // Then
        aggregated.writeTo(sink);
        execute();
        assertEquals(
                singletonList(3 * input.stream().mapToLong(i -> i).sum()),
                new ArrayList<>(sinkList)
        );
    }

    @Test
    public void groupAggregate() {
        // Given
        FunctionEx<Integer, Integer> keyFn = i -> i % 5;

        // When
        BatchStage<Entry<Integer, Long>> aggregated = batchStageFromInput()
                .groupingKey(keyFn)
                .aggregate(SUMMING);

        // Then
        aggregated.writeTo(sink);
        execute();
        Map<Integer, Long> expected = input.stream().collect(groupingBy(keyFn, summingLong(i -> i)));
        assertEquals(
                streamToString(expected.entrySet().stream(), FORMAT_FN),
                streamToString(sinkStreamOfEntry(), FORMAT_FN));
    }

    /** Keyed variant of the fixture: offsets keep each stage's sums distinguishable. */
    private class GroupAggregateFixture {
        final FunctionEx<Integer, Integer> keyFn;
        final FunctionEx<Integer, Integer> mapFn1;
        final FunctionEx<Integer, Integer> mapFn2;
        final Collector<Integer, ?, Long> collectOp;
        final BatchStage<Integer> srcStage0;

        // Initialization in constructor to avoid lambda capture of `this`
        GroupAggregateFixture() {
            int offset = itemCount;
            keyFn = i -> i % 10;
            mapFn1 = i -> i + offset;
            mapFn2 = i -> i + 2 * offset;
            srcStage0 = batchStageFromInput();
            collectOp = summingLong(i -> i);
        }

        BatchStage<Integer> srcStage1() {
            return batchStageFromInput().map(mapFn1);
        }

        BatchStage<Integer> srcStage2() {
            return batchStageFromInput().map(mapFn2);
        }
    }

    @Test
    public void groupAggregate2_withSeparateAggrOps() {
        // Given
        GroupAggregateFixture fx = new GroupAggregateFixture();

        // When
        BatchStageWithKey<Integer, Integer> stage0 = fx.srcStage0.groupingKey(fx.keyFn);
        BatchStageWithKey<Integer, Integer> stage1 = fx.srcStage1().groupingKey(fx.keyFn);
        BatchStage<Entry<Integer, Tuple2<Long, Long>>> aggregated =
                stage0.aggregate2(SUMMING, stage1, SUMMING);

        // Then
        aggregated.writeTo(sink);
        execute();
        Map<Integer, Long> expectedMap0 = input.stream().collect(groupingBy(fx.keyFn, fx.collectOp));
        Map<Integer, Long> expectedMap1 =
                input.stream().map(fx.mapFn1).collect(groupingBy(fx.keyFn, fx.collectOp));
        assertEquals(
                streamToString(expectedMap0.entrySet().stream(),
                        e -> FORMAT_FN_2.apply(e.getKey(),
                                tuple2(e.getValue(), expectedMap1.get(e.getKey())))),
                streamToString(this.<Integer, Tuple2<Long, Long>>sinkStreamOfEntry(),
                        e -> FORMAT_FN_2.apply(e.getKey(), e.getValue()))
        );
    }

    @Test
    public void groupAggregate2_withAggrOp2() {
        // Given
        GroupAggregateFixture fx = new GroupAggregateFixture();

        // When
        BatchStageWithKey<Integer, Integer> stage0 = fx.srcStage0.groupingKey(fx.keyFn);
        BatchStageWithKey<Integer, Integer> stage1 = fx.srcStage1().groupingKey(fx.keyFn);
        BatchStage<Entry<Integer, Tuple2<Long, Long>>> aggregated =
                stage0.aggregate2(stage1, aggregateOperation2(SUMMING, SUMMING));

        // Then
        aggregated.writeTo(sink);
        execute();
        Map<Integer, Long> expectedMap0 = input.stream().collect(groupingBy(fx.keyFn, fx.collectOp));
        Map<Integer, Long> expectedMap1 =
                input.stream().map(fx.mapFn1).collect(groupingBy(fx.keyFn, fx.collectOp));
        assertEquals(
                streamToString(expectedMap0.entrySet().stream(),
                        e -> FORMAT_FN_2.apply(e.getKey(),
                                tuple2(e.getValue(), expectedMap1.get(e.getKey())))),
                streamToString(this.<Integer, Tuple2<Long, Long>>sinkStreamOfEntry(),
                        e -> FORMAT_FN_2.apply(e.getKey(), e.getValue()))
        );
    }

    @Test
    public void groupAggregate3_withSeparateAggrOps() {
        // Given
        GroupAggregateFixture fx = new GroupAggregateFixture();

        // When
        BatchStageWithKey<Integer, Integer> stage0 = fx.srcStage0.groupingKey(fx.keyFn);
        BatchStageWithKey<Integer, Integer> stage1 = fx.srcStage1().groupingKey(fx.keyFn);
        BatchStageWithKey<Integer, Integer> stage2 = fx.srcStage2().groupingKey(fx.keyFn);
        BatchStage<Entry<Integer, Tuple3<Long, Long, Long>>> aggregated =
                stage0.aggregate3(SUMMING, stage1, SUMMING, stage2, SUMMING);

        // Then
        aggregated.writeTo(sink);
        execute();
        Map<Integer, Long> expectedMap0 = input.stream().collect(groupingBy(fx.keyFn, fx.collectOp));
        Map<Integer, Long> expectedMap1 =
                input.stream().map(fx.mapFn1).collect(groupingBy(fx.keyFn, fx.collectOp));
        Map<Integer, Long> expectedMap2 =
                input.stream().map(fx.mapFn2).collect(groupingBy(fx.keyFn, fx.collectOp));
        assertEquals(
                streamToString(expectedMap0.entrySet().stream(),
                        e -> FORMAT_FN_3.apply(
                                e.getKey(),
                                tuple3(e.getValue(), expectedMap1.get(e.getKey()),
                                        expectedMap2.get(e.getKey())))),
                streamToString(this.<Integer, Tuple3<Long, Long, Long>>sinkStreamOfEntry(),
                        e -> FORMAT_FN_3.apply(
                                e.getKey(), e.getValue()))
        );
    }

    @Test
    public void groupAggregate3_withAggrOp3() {
        // Given
        GroupAggregateFixture fx = new GroupAggregateFixture();

        // When
        BatchStageWithKey<Integer, Integer> stage0 = fx.srcStage0.groupingKey(fx.keyFn);
        BatchStageWithKey<Integer, Integer> stage1 = fx.srcStage1().groupingKey(fx.keyFn);
        BatchStageWithKey<Integer, Integer> stage2 = fx.srcStage2().groupingKey(fx.keyFn);
        BatchStage<Entry<Integer, Tuple3<Long, Long, Long>>> aggregated =
                stage0.aggregate3(stage1, stage2, aggregateOperation3(SUMMING, SUMMING, SUMMING));

        // Then
        aggregated.writeTo(sink);
        execute();
        Map<Integer, Long> expectedMap0 = input.stream().collect(groupingBy(fx.keyFn, fx.collectOp));
        Map<Integer, Long> expectedMap1 =
                input.stream().map(fx.mapFn1).collect(groupingBy(fx.keyFn, fx.collectOp));
        Map<Integer, Long> expectedMap2 =
                input.stream().map(fx.mapFn2).collect(groupingBy(fx.keyFn, fx.collectOp));
        assertEquals(
                streamToString(expectedMap0.entrySet().stream(),
                        e -> FORMAT_FN_3.apply(
                                e.getKey(),
                                tuple3(e.getValue(), expectedMap1.get(e.getKey()),
                                        expectedMap2.get(e.getKey())))),
                streamToString(this.<Integer, Tuple3<Long, Long, Long>>sinkStreamOfEntry(),
                        e -> FORMAT_FN_3.apply(
                                e.getKey(), e.getValue()))
        );
    }

    @Test
    public void groupAggregateBuilder_withSeparateAggrOps() {
        // Given
        GroupAggregateFixture fx = new GroupAggregateFixture();
        BatchStageWithKey<Integer, Integer> stage0 = fx.srcStage0.groupingKey(fx.keyFn);
        BatchStageWithKey<Integer, Integer> stage1 = fx.srcStage1().groupingKey(fx.keyFn);
        BatchStageWithKey<Integer, Integer> stage2 = fx.srcStage2().groupingKey(fx.keyFn);

        // When
        GroupAggregateBuilder<Integer, Long> b = stage0.aggregateBuilder(SUMMING);
        Tag<Long> tag0 = b.tag0();
        Tag<Long> tag1 = b.add(stage1, SUMMING);
        Tag<Long> tag2 = b.add(stage2, SUMMING);
        BatchStage<Entry<Integer, ItemsByTag>> aggregated = b.build();

        // Then
        aggregated.writeTo(sink);
        execute();
        Map<Integer, Long> expectedMap0 = input.stream().collect(groupingBy(fx.keyFn, fx.collectOp));
        Map<Integer, Long> expectedMap1 =
                input.stream().map(fx.mapFn1).collect(groupingBy(fx.keyFn, fx.collectOp));
        Map<Integer, Long> expectedMap2 =
                input.stream().map(fx.mapFn2).collect(groupingBy(fx.keyFn, fx.collectOp));
        assertEquals(
                streamToString(expectedMap0.entrySet().stream(),
                        e -> FORMAT_FN_3.apply(
                                e.getKey(),
                                tuple3(e.getValue(), expectedMap1.get(e.getKey()),
                                        expectedMap2.get(e.getKey())))),
                streamToString(this.<Integer, ItemsByTag>sinkStreamOfEntry(),
                        e -> FORMAT_FN_3.apply(
                                e.getKey(),
                                tuple3(e.getValue().get(tag0), e.getValue().get(tag1),
                                        e.getValue().get(tag2))))
        );
    }

    @Test
    public void groupAggregateBuilder_withComplexAggrOp() {
        // Given
        GroupAggregateFixture fx = new GroupAggregateFixture();
        BatchStageWithKey<Integer, Integer> stage0 = fx.srcStage0.groupingKey(fx.keyFn);
        BatchStageWithKey<Integer, Integer> stage1 = fx.srcStage1().groupingKey(fx.keyFn);
        BatchStageWithKey<Integer, Integer> stage2 = fx.srcStage2().groupingKey(fx.keyFn);

        // When
        GroupAggregateBuilder1<Integer, Integer> b = stage0.aggregateBuilder();
        Tag<Integer> tag0_in = b.tag0();
        Tag<Integer> tag1_in = b.add(stage1);
        Tag<Integer> tag2_in = b.add(stage2);

        CoAggregateOperationBuilder agb = coAggregateOperationBuilder();
        Tag<Long> tag0 = agb.add(tag0_in, SUMMING);
        Tag<Long> tag1 = agb.add(tag1_in, SUMMING);
        Tag<Long> tag2 = agb.add(tag2_in, SUMMING);
        AggregateOperation<Object[], ItemsByTag> aggrOp = agb.build();

        BatchStage<Entry<Integer, ItemsByTag>> aggregated = b.build(aggrOp);

        // Then
        aggregated.writeTo(sink);
        execute();
        Map<Integer, Long> expectedMap0 = input.stream().collect(groupingBy(fx.keyFn, fx.collectOp));
        Map<Integer, Long> expectedMap1 =
                input.stream().map(fx.mapFn1).collect(groupingBy(fx.keyFn, fx.collectOp));
        Map<Integer, Long> expectedMap2 =
                input.stream().map(fx.mapFn2).collect(groupingBy(fx.keyFn, fx.collectOp));
        assertEquals(
                streamToString(expectedMap0.entrySet().stream(),
                        e -> FORMAT_FN_3.apply(
                                e.getKey(),
                                tuple3(e.getValue(), expectedMap1.get(e.getKey()),
                                        expectedMap2.get(e.getKey())))),
                streamToString(this.<Integer, ItemsByTag>sinkStreamOfEntry(),
                        e -> FORMAT_FN_3.apply(
                                e.getKey(),
                                tuple3(e.getValue().get(tag0), e.getValue().get(tag1),
                                        e.getValue().get(tag2))))
        );
    }

    @Test
    @SuppressWarnings("ConstantConditions")
    public void groupAggregateBuilder_withComplexAggrOp_withOutputFn() {
        // Given
        GroupAggregateFixture fx = new GroupAggregateFixture();
        BatchStageWithKey<Integer, Integer> stage0 = fx.srcStage0.groupingKey(fx.keyFn);
        BatchStageWithKey<Integer, Integer> stage1 = fx.srcStage1().groupingKey(fx.keyFn);
        BatchStageWithKey<Integer, Integer> stage2 = fx.srcStage2().groupingKey(fx.keyFn);

        // When
        GroupAggregateBuilder1<Integer, Integer> b = stage0.aggregateBuilder();
        Tag<Integer> tag0_in = b.tag0();
        Tag<Integer> tag1_in = b.add(stage1);
        Tag<Integer> tag2_in = b.add(stage2);

        CoAggregateOperationBuilder agb = coAggregateOperationBuilder();
        Tag<Long> tag0 = agb.add(tag0_in, SUMMING);
        Tag<Long> tag1 = agb.add(tag1_in, SUMMING);
        Tag<Long> tag2 = agb.add(tag2_in, SUMMING);
        AggregateOperation<Object[], ItemsByTag> aggrOp = agb.build();

        BatchStage<Entry<Integer, Long>> aggregated = b
                .build(aggrOp)
                .map(e -> {
                    ItemsByTag ibt = e.getValue();
                    return entry(e.getKey(), ibt.get(tag0) + ibt.get(tag1) + ibt.get(tag2));
                });

        // Then
        aggregated.writeTo(sink);
        execute();
        Map<Integer, Long> expectedMap0 = input.stream().collect(groupingBy(fx.keyFn, fx.collectOp));
        Map<Integer, Long> expectedMap1 =
                input.stream().map(fx.mapFn1).collect(groupingBy(fx.keyFn, fx.collectOp));
        Map<Integer, Long> expectedMap2 =
                input.stream().map(fx.mapFn2).collect(groupingBy(fx.keyFn, fx.collectOp));
        assertEquals(
                streamToString(expectedMap0.entrySet().stream(),
                        e -> FORMAT_FN.apply(entry(e.getKey(),
                                e.getValue() + expectedMap1.get(e.getKey())
                                        + expectedMap2.get(e.getKey())))),
                streamToString(sinkStreamOfEntry(), FORMAT_FN)
        );
    }

    /** A fresh batch stage over the standard test input. */
    private BatchStage<Integer> batchStageFromInput() {
        return batchStageFromList(input);
    }
}
/* * JasperReports - Free Java Reporting Library. * Copyright (C) 2001 - 2014 TIBCO Software Inc. All rights reserved. * http://www.jaspersoft.com * * Unless you have purchased a commercial license agreement from Jaspersoft, * the following license terms apply: * * This program is part of JasperReports. * * JasperReports is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * JasperReports is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with JasperReports. If not, see <http://www.gnu.org/licenses/>. */ package net.sf.jasperreports.charts.design; import java.awt.Color; import net.sf.jasperreports.charts.base.JRBaseLinePlot; import net.sf.jasperreports.charts.util.JRAxisFormat; import net.sf.jasperreports.engine.JRChart; import net.sf.jasperreports.engine.JRChartPlot; import net.sf.jasperreports.engine.JRConstants; import net.sf.jasperreports.engine.JRExpression; import net.sf.jasperreports.engine.JRFont; /** * @author Flavius Sana (flavius_sana@users.sourceforge.net) * @version $Id: JRDesignLinePlot.java 7199 2014-08-27 13:58:10Z teodord $ */ public class JRDesignLinePlot extends JRBaseLinePlot implements JRDesignCategoryPlot { private static final long serialVersionUID = JRConstants.SERIAL_VERSION_UID; public static final String PROPERTY_CATEGORY_AXIS_LABEL_COLOR = "categoryAxisLabelColor"; public static final String PROPERTY_CATEGORY_AXIS_LABEL_FONT = "categoryAxisLabelFont"; public static final String PROPERTY_CATEGORY_AXIS_LINE_COLOR = "categoryAxisLineColor"; public static final String 
PROPERTY_CATEGORY_AXIS_TICK_LABEL_COLOR = "categoryAxisTickLabelColor"; public static final String PROPERTY_CATEGORY_AXIS_TICK_LABEL_FONT = "categoryAxisTickLabelFont"; public static final String PROPERTY_CATEGORY_AXIS_TICK_LABEL_MASK = "categoryAxisTickLabelMask"; public static final String PROPERTY_CATEGORY_AXIS_VERTICAL_TICK_LABELS = "categoryAxisVerticalTickLabels"; public static final String PROPERTY_VALUE_AXIS_LABEL_COLOR = "valueAxisLabelColor"; public static final String PROPERTY_VALUE_AXIS_LABEL_FONT = "valueAxisLabelFont"; public static final String PROPERTY_VALUE_AXIS_LINE_COLOR = "valueAxisLineColor"; public static final String PROPERTY_VALUE_AXIS_TICK_LABEL_COLOR = "valueAxisTickLabelColor"; public static final String PROPERTY_VALUE_AXIS_TICK_LABEL_FONT = "valueAxisTickLabelFont"; public static final String PROPERTY_VALUE_AXIS_TICK_LABEL_MASK = "valueAxisTickLabelMask"; public static final String PROPERTY_VALUE_AXIS_VERTICAL_TICK_LABELS = "valueAxisVerticalTickLabels"; /** * */ public JRDesignLinePlot(JRChartPlot plot, JRChart chart) { super(plot, chart); } /** * */ public void setCategoryAxisLabelExpression(JRExpression categoryAxisLabelExpression) { Object old = this.categoryAxisLabelExpression; this.categoryAxisLabelExpression = categoryAxisLabelExpression; getEventSupport().firePropertyChange(PROPERTY_CATEGORY_AXIS_LABEL_EXPRESSION, old, this.categoryAxisLabelExpression); } /** * */ public void setCategoryAxisLabelFont(JRFont categoryAxisLabelFont) { Object old = this.categoryAxisLabelFont; this.categoryAxisLabelFont = categoryAxisLabelFont; getEventSupport().firePropertyChange(PROPERTY_CATEGORY_AXIS_LABEL_FONT, old, this.categoryAxisLabelFont); } /** * */ public void setCategoryAxisLabelColor(Color categoryAxisLabelColor) { Object old = this.categoryAxisLabelColor; this.categoryAxisLabelColor = categoryAxisLabelColor; getEventSupport().firePropertyChange(PROPERTY_CATEGORY_AXIS_LABEL_COLOR, old, this.categoryAxisLabelColor); } /** * */ public void 
setCategoryAxisTickLabelFont(JRFont categoryAxisTickLabelFont) { Object old = this.categoryAxisTickLabelFont; this.categoryAxisTickLabelFont = categoryAxisTickLabelFont; getEventSupport().firePropertyChange(PROPERTY_CATEGORY_AXIS_TICK_LABEL_FONT, old, this.categoryAxisTickLabelFont); } /** * */ public void setCategoryAxisTickLabelColor(Color categoryAxisTickLabelColor) { Object old = this.categoryAxisTickLabelColor; this.categoryAxisTickLabelColor = categoryAxisTickLabelColor; getEventSupport().firePropertyChange(PROPERTY_CATEGORY_AXIS_TICK_LABEL_COLOR, old, this.categoryAxisTickLabelColor); } /** * */ public void setCategoryAxisTickLabelMask(String categoryAxisTickLabelMask) { Object old = this.categoryAxisTickLabelMask; this.categoryAxisTickLabelMask = categoryAxisTickLabelMask; getEventSupport().firePropertyChange(PROPERTY_CATEGORY_AXIS_TICK_LABEL_MASK, old, this.categoryAxisTickLabelMask); } /** * */ public void setCategoryAxisVerticalTickLabels(Boolean categoryAxisVerticalTickLabels) { Object old = this.categoryAxisVerticalTickLabels; this.categoryAxisVerticalTickLabels = categoryAxisVerticalTickLabels; getEventSupport().firePropertyChange(PROPERTY_CATEGORY_AXIS_VERTICAL_TICK_LABELS, old, this.categoryAxisVerticalTickLabels); } /** * */ public void setCategoryAxisLineColor(Color categoryAxisLineColor) { Object old = this.categoryAxisLineColor; this.categoryAxisLineColor = categoryAxisLineColor; getEventSupport().firePropertyChange(PROPERTY_CATEGORY_AXIS_LINE_COLOR, old, this.categoryAxisLineColor); } /** * */ public void setValueAxisLabelExpression(JRExpression valueAxisLabelExpression) { Object old = this.valueAxisLabelExpression; this.valueAxisLabelExpression = valueAxisLabelExpression; getEventSupport().firePropertyChange(PROPERTY_VALUE_AXIS_LABEL_EXPRESSION, old, this.valueAxisLabelExpression); } /** * */ public void setDomainAxisMinValueExpression(JRExpression domainAxisMinValueExpression) { Object old = this.domainAxisMinValueExpression; 
this.domainAxisMinValueExpression = domainAxisMinValueExpression; getEventSupport().firePropertyChange(PROPERTY_DOMAIN_AXIS_MINVALUE_EXPRESSION, old, this.domainAxisMinValueExpression); } /** * */ public void setDomainAxisMaxValueExpression(JRExpression domainAxisMaxValueExpression) { Object old = this.domainAxisMaxValueExpression; this.domainAxisMaxValueExpression = domainAxisMaxValueExpression; getEventSupport().firePropertyChange(PROPERTY_DOMAIN_AXIS_MAXVALUE_EXPRESSION, old, this.domainAxisMaxValueExpression); } /** * */ public void setRangeAxisMinValueExpression(JRExpression rangeAxisMinValueExpression) { Object old = this.rangeAxisMinValueExpression; this.rangeAxisMinValueExpression = rangeAxisMinValueExpression; getEventSupport().firePropertyChange(PROPERTY_RANGE_AXIS_MINVALUE_EXPRESSION, old, this.rangeAxisMinValueExpression); } /** * */ public void setRangeAxisMaxValueExpression(JRExpression rangeAxisMaxValueExpression) { Object old = this.rangeAxisMaxValueExpression; this.rangeAxisMaxValueExpression = rangeAxisMaxValueExpression; getEventSupport().firePropertyChange(PROPERTY_RANGE_AXIS_MAXVALUE_EXPRESSION, old, this.rangeAxisMaxValueExpression); } /** * */ public void setValueAxisLabelFont(JRFont valueAxisLabelFont) { Object old = this.valueAxisLabelFont; this.valueAxisLabelFont = valueAxisLabelFont; getEventSupport().firePropertyChange(PROPERTY_VALUE_AXIS_LABEL_FONT, old, this.valueAxisLabelFont); } /** * */ public void setValueAxisLabelColor(Color valueAxisLabelColor) { Object old = this.valueAxisLabelColor; this.valueAxisLabelColor = valueAxisLabelColor; getEventSupport().firePropertyChange(PROPERTY_VALUE_AXIS_LABEL_COLOR, old, this.valueAxisLabelColor); } /** * */ public void setValueAxisTickLabelFont(JRFont valueAxisTickLabelFont) { Object old = this.valueAxisTickLabelFont; this.valueAxisTickLabelFont = valueAxisTickLabelFont; getEventSupport().firePropertyChange(PROPERTY_VALUE_AXIS_TICK_LABEL_FONT, old, this.valueAxisTickLabelFont); } /** * */ 
public void setValueAxisTickLabelColor(Color valueAxisTickLabelColor) { Object old = this.valueAxisTickLabelColor; this.valueAxisTickLabelColor = valueAxisTickLabelColor; getEventSupport().firePropertyChange(PROPERTY_VALUE_AXIS_TICK_LABEL_COLOR, old, this.valueAxisTickLabelColor); } /** * */ public void setValueAxisTickLabelMask(String valueAxisTickLabelMask) { Object old = this.valueAxisTickLabelMask; this.valueAxisTickLabelMask = valueAxisTickLabelMask; getEventSupport().firePropertyChange(PROPERTY_VALUE_AXIS_TICK_LABEL_MASK, old, this.valueAxisTickLabelMask); } /** * */ public void setValueAxisVerticalTickLabels(Boolean valueAxisVerticalTickLabels) { Object old = this.valueAxisVerticalTickLabels; this.valueAxisVerticalTickLabels = valueAxisVerticalTickLabels; getEventSupport().firePropertyChange(PROPERTY_VALUE_AXIS_VERTICAL_TICK_LABELS, old, this.valueAxisVerticalTickLabels); } /** * */ public void setValueAxisLineColor(Color valueAxisLineColor) { Object old = this.valueAxisLineColor; this.valueAxisLineColor = valueAxisLineColor; getEventSupport().firePropertyChange(PROPERTY_VALUE_AXIS_LINE_COLOR, old, this.valueAxisLineColor); } /** * */ public void setCategoryAxisFormat(JRAxisFormat axisFormat) { setCategoryAxisLabelFont(axisFormat.getLabelFont()); setCategoryAxisLabelColor(axisFormat.getLabelColor()); setCategoryAxisTickLabelFont(axisFormat.getTickLabelFont()); setCategoryAxisTickLabelColor(axisFormat.getTickLabelColor()); setCategoryAxisTickLabelMask(axisFormat.getTickLabelMask()); setCategoryAxisVerticalTickLabels(axisFormat.getVerticalTickLabels()); setCategoryAxisLineColor(axisFormat.getLineColor()); } /** * */ public void setValueAxisFormat(JRAxisFormat axisFormat) { setValueAxisLabelFont(axisFormat.getLabelFont()); setValueAxisLabelColor(axisFormat.getLabelColor()); setValueAxisTickLabelFont(axisFormat.getTickLabelFont()); setValueAxisTickLabelColor(axisFormat.getTickLabelColor()); setValueAxisTickLabelMask(axisFormat.getTickLabelMask()); 
setValueAxisVerticalTickLabels(axisFormat.getVerticalTickLabels()); setValueAxisLineColor(axisFormat.getLineColor()); } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.apollo.util;

import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URLDecoder;
import java.net.URLEncoder;
import java.util.*;

/**
 * Helpers for manipulating URIs: query-string parsing/creation and
 * "composite" URIs of the form {@code scheme:(uri1,uri2,...)[/path][?query][#fragment]}.
 */
public class URISupport {

    /** Parsed representation of a composite URI. */
    public static class CompositeData {
        private String host;
        private String scheme;
        private String path;
        private URI components[];
        private Map<String, String> parameters;
        private String fragment;

        public URI[] getComponents() {
            return components;
        }

        public String getFragment() {
            return fragment;
        }

        public Map<String, String> getParameters() {
            return parameters;
        }

        public String getScheme() {
            return scheme;
        }

        public String getPath() {
            return path;
        }

        public String getHost() {
            return host;
        }

        /**
         * Re-assembles this composite data into a URI. If no host is set, the
         * components are rendered as a parenthesized, comma-separated list.
         */
        public URI toURI() throws URISyntaxException {
            // StringBuilder is a drop-in, unsynchronized replacement for the
            // original StringBuffer (purely local, so no concurrency concern).
            StringBuilder sb = new StringBuilder();
            if (scheme != null) {
                sb.append(scheme);
                sb.append(':');
            }
            if (host != null && host.length() != 0) {
                sb.append(host);
            } else {
                sb.append('(');
                for (int i = 0; i < components.length; i++) {
                    if (i != 0) {
                        sb.append(',');
                    }
                    sb.append(components[i].toString());
                }
                sb.append(')');
            }
            if (path != null) {
                sb.append('/');
                sb.append(path);
            }
            // Null-guard added for robustness when CompositeData is built by hand;
            // parseComposite() always populates parameters.
            if (parameters != null && !parameters.isEmpty()) {
                sb.append("?");
                sb.append(createQueryString(parameters));
            }
            if (fragment != null) {
                sb.append("#");
                sb.append(fragment);
            }
            return new URI(sb.toString());
        }
    }

    /**
     * Parses a URL-encoded query string such as {@code "a=1&b=2&flag"} into a
     * mutable map. A parameter with no '=' is stored with a {@code null} value.
     *
     * @param uri the raw query string (may be null, yielding an empty map)
     * @throws URISyntaxException if the text is not valid UTF-8 form encoding
     */
    public static Map<String, String> parseQuery(String uri) throws URISyntaxException {
        try {
            Map<String, String> rc = new HashMap<String, String>();
            if (uri != null) {
                String[] parameters = uri.split("&");
                for (int i = 0; i < parameters.length; i++) {
                    int p = parameters[i].indexOf("=");
                    if (p >= 0) {
                        String name = URLDecoder.decode(parameters[i].substring(0, p), "UTF-8");
                        String value = URLDecoder.decode(parameters[i].substring(p + 1), "UTF-8");
                        rc.put(name, value);
                    } else {
                        // Valueless parameter, e.g. "?flag" -> key with null value.
                        rc.put(parameters[i], null);
                    }
                }
            }
            return rc;
        } catch (UnsupportedEncodingException e) {
            // UTF-8 is guaranteed by the JLS, so this path should be unreachable;
            // the cause is preserved either way.
            throw (URISyntaxException) new URISyntaxException(e.toString(), "Invalid encoding").initCause(e);
        }
    }

    /**
     * Parses the query part of the given URI into a parameter map.
     * The method name carries a historical typo and is kept for backward
     * compatibility; prefer {@link #parseParameters(URI)}.
     */
    public static Map<String, String> parseParamters(URI uri) throws URISyntaxException {
        return parseParameters(uri);
    }

    /**
     * Correctly-spelled replacement for {@link #parseParamters(URI)}.
     * Returns an immutable empty map when the URI has no query.
     */
    public static Map<String, String> parseParameters(URI uri) throws URISyntaxException {
        return uri.getQuery() == null ? emptyMap() : parseQuery(stripPrefix(uri.getQuery(), "?"));
    }

    /**
     * Shared immutable empty map (callers must not mutate the result).
     * Collections.emptyMap() returns the same singleton as the former
     * EMPTY_MAP cast, without the unchecked warning.
     */
    private static Map<String, String> emptyMap() {
        return Collections.emptyMap();
    }

    /**
     * Removes any URI query from the given uri
     */
    public static URI removeQuery(URI uri) throws URISyntaxException {
        return createURIWithQuery(uri, null);
    }

    /**
     * Creates a URI with the given query
     */
    public static URI createURIWithQuery(URI uri, String query) throws URISyntaxException {
        return new URI(uri.getScheme(), uri.getUserInfo(), uri.getHost(), uri.getPort(), uri.getPath(), query, uri.getFragment());
    }

    /**
     * Parses a composite URI ({@code scheme:(uri1,uri2,...)?params#fragment})
     * into its scheme, component URIs, path, parameters and fragment.
     */
    public static CompositeData parseComposite(URI uri) throws URISyntaxException {
        CompositeData rc = new CompositeData();
        rc.scheme = uri.getScheme();
        String ssp = stripPrefix(uri.getSchemeSpecificPart().trim(), "//").trim();
        parseComposite(uri, rc, rc.scheme == null ? ssp : ssp);
        rc.fragment = uri.getFragment();
        return rc;
    }

    /**
     * Fills {@code rc} from the scheme-specific part {@code ssp}.
     * The composite form is only recognized when '(' is the very first
     * character of ssp; anything else is treated as a single component.
     *
     * @throws URISyntaxException when the parentheses are unbalanced
     */
    private static void parseComposite(URI uri, CompositeData rc, String ssp) throws URISyntaxException {
        String componentString;
        String params;
        if (!checkParenthesis(ssp)) {
            throw new URISyntaxException(uri.toString(), "Not a matching number of '(' and ')' parenthesis");
        }
        int p;
        int initialParen = ssp.indexOf("(");
        if (initialParen == 0) {
            // Composite list form. The original code derived host/path from
            // ssp.substring(0, 0), which is always "" - that dead code is
            // removed; the empty host is kept so getHost() behaves the same.
            rc.host = "";
            p = ssp.lastIndexOf(")");
            componentString = ssp.substring(initialParen + 1, p);
            params = ssp.substring(p + 1).trim();
        } else {
            componentString = ssp;
            params = "";
        }
        String components[] = splitComponents(componentString);
        rc.components = new URI[components.length];
        for (int i = 0; i < components.length; i++) {
            rc.components[i] = new URI(components[i].trim());
        }
        p = params.indexOf("?");
        if (p >= 0) {
            if (p > 0) {
                rc.path = stripPrefix(params.substring(0, p), "/");
            }
            rc.parameters = parseQuery(params.substring(p + 1));
        } else {
            if (params.length() > 0) {
                rc.path = stripPrefix(params, "/");
            }
            rc.parameters = emptyMap();
        }
    }

    /**
     * Splits a comma-separated component list, ignoring commas that appear
     * inside nested parentheses.
     *
     * @param str the raw component list, e.g. {@code "tcp://a,failover:(x,y)"}
     * @return the top-level components, in order
     */
    private static String[] splitComponents(String str) {
        List<String> l = new ArrayList<String>();
        int last = 0;
        int depth = 0;
        char chars[] = str.toCharArray();
        for (int i = 0; i < chars.length; i++) {
            switch (chars[i]) {
            case '(':
                depth++;
                break;
            case ')':
                depth--;
                break;
            case ',':
                if (depth == 0) {
                    String s = str.substring(last, i);
                    l.add(s);
                    last = i + 1;
                }
                break;
            default:
            }
        }
        String s = str.substring(last);
        if (s.length() != 0) {
            l.add(s);
        }
        String rc[] = new String[l.size()];
        l.toArray(rc);
        return rc;
    }

    /** Returns value with the given prefix removed, if present. */
    public static String stripPrefix(String value, String prefix) {
        if (value.startsWith(prefix)) {
            return value.substring(prefix.length());
        }
        return value;
    }

    /** Returns the URI contained in the scheme-specific part of the given URI. */
    public static URI stripScheme(URI uri) throws URISyntaxException {
        return new URI(stripPrefix(uri.getSchemeSpecificPart().trim(), "//"));
    }

    /**
     * Builds a URL-encoded query string ("k1=v1&k2=v2") from the given map.
     * Fix: entries whose value is {@code null} (as produced by
     * {@link #parseQuery} for valueless parameters) are now emitted as a bare
     * key instead of throwing a NullPointerException from URLEncoder.
     *
     * @param options parameter map; raw type preserved for source compatibility
     */
    public static String createQueryString(Map options) throws URISyntaxException {
        try {
            if (options.size() > 0) {
                StringBuilder rc = new StringBuilder();
                boolean first = true;
                for (Iterator iter = options.keySet().iterator(); iter.hasNext();) {
                    if (first) {
                        first = false;
                    } else {
                        rc.append("&");
                    }
                    String key = (String) iter.next();
                    String value = (String) options.get(key);
                    rc.append(URLEncoder.encode(key, "UTF-8"));
                    if (value != null) {
                        rc.append("=");
                        rc.append(URLEncoder.encode(value, "UTF-8"));
                    }
                }
                return rc.toString();
            } else {
                return "";
            }
        } catch (UnsupportedEncodingException e) {
            throw (URISyntaxException) new URISyntaxException(e.toString(), "Invalid encoding").initCause(e);
        }
    }

    /**
     * Creates a URI from the original URI and the remaining paramaters
     *
     * @throws java.net.URISyntaxException
     */
    public static URI createRemainingURI(URI originalURI, Map params) throws URISyntaxException {
        String s = createQueryString(params);
        if (s.length() == 0) {
            s = null;
        }
        return createURIWithQuery(originalURI, s);
    }

    /** Returns a copy of bindAddr with its scheme replaced. */
    public static URI changeScheme(URI bindAddr, String scheme) throws URISyntaxException {
        return new URI(scheme, bindAddr.getUserInfo(), bindAddr.getHost(), bindAddr.getPort(), bindAddr
            .getPath(), bindAddr.getQuery(), bindAddr.getFragment());
    }

    /** Returns true when str contains equally many '(' and ')' (null counts as balanced). */
    public static boolean checkParenthesis(String str) {
        boolean result = true;
        if (str != null) {
            int open = 0;
            int closed = 0;
            int i = 0;
            while ((i = str.indexOf('(', i)) >= 0) {
                i++;
                open++;
            }
            i = 0;
            while ((i = str.indexOf(')', i)) >= 0) {
                i++;
                closed++;
            }
            result = open == closed;
        }
        return result;
    }

    /**
     * Returns the index of the ')' matching the first '(' in the given
     * string, or -1 when there is no '(' or it is never closed.
     * Fix: the previous implementation was an unfinished stub that always
     * returned -1 regardless of input.
     */
    public int indexOfParenthesisMatch(String str) {
        int result = -1;
        if (str != null) {
            int first = str.indexOf('(');
            if (first >= 0) {
                int depth = 1;
                for (int i = first + 1; i < str.length(); i++) {
                    char c = str.charAt(i);
                    if (c == '(') {
                        depth++;
                    } else if (c == ')') {
                        depth--;
                        if (depth == 0) {
                            return i;
                        }
                    }
                }
            }
        }
        return result;
    }

}
package micromod;

/*
	Java ProTracker Replay (c)2018 mumart@gmail.com

	Renders a 4+ channel Amiga MOD Module to 16-bit stereo audio.
	Internally the mixer runs at twice the output rate and is 2:1
	downsampled with a small anti-alias filter before output.
*/
public class Micromod {
	public static final String VERSION = "20180204 (c)2018 mumart@gmail.com";

	private Module module;
	// Holds the tail of the previous tick's audio for click-free cross-fading (see volumeRamp).
	private int[] rampBuf;
	// Scratch Note reused for every pattern read to avoid per-row allocation.
	private Note note;
	private Channel[] channels;
	private int sampleRate;
	// Sequencer state: current/pending sequence position, row indices, and tick countdown.
	private int seqPos, breakSeqPos, row, nextRow, tick;
	// speed = ticks per row, tempo = BPM-style tick-length divisor,
	// plCount/plChannel track the active E6x pattern-loop.
	private int speed, tempo, plCount, plChannel;
	private boolean interpolation;
	// playCount[seqPos][row] counts how often each row has been reached; used for song-end detection.
	private byte[][] playCount;

	/* Play the specified Module at the specified sampling rate. */
	public Micromod( Module module, int samplingRate ) {
		this.module = module;
		setSampleRate( samplingRate );
		rampBuf = new int[ 128 ];
		note = new Note();
		playCount = new byte[ module.getSequenceLength() ][];
		channels = new Channel[ module.getNumChannels() ];
		setSequencePos( 0 );
	}

	/* Return the sampling rate of playback. */
	public int getSampleRate() {
		return sampleRate;
	}

	/* Set the sampling rate of playback. */
	public void setSampleRate( int rate ) {
		// Use with Module.c2Rate to adjust the tempo of playback.
		// To play at half speed, multiply both the samplingRate and Module.c2Rate by 2.
		if( rate < 8000 || rate > 128000 ) {
			throw new IllegalArgumentException( "Unsupported sampling rate!" );
		}
		sampleRate = rate;
	}

	/* Enable or disable the linear interpolation filter. */
	public void setInterpolation( boolean interpolation ) {
		this.interpolation = interpolation;
	}

	/* Return the length of the buffer required by getAudio(). */
	public int getMixBufferLength() {
		// Worst case: slowest tempo (32) at the highest rate (128000), plus
		// 65 guard samples, times 4 ints (stereo at the 2x internal rate).
		return ( calculateTickLen( 32, 128000 ) + 65 ) * 4;
	}

	/* Get the current row position. */
	public int getRow() {
		return row;
	}

	/* Get the current pattern position in the sequence. */
	public int getSequencePos() {
		return seqPos;
	}

	/* Set the pattern in the sequence to play. The tempo is reset to the default. */
	public void setSequencePos( int pos ) {
		if( pos >= module.getSequenceLength() ) pos = 0;
		breakSeqPos = pos;
		nextRow = 0;
		tick = 1;
		// ProTracker defaults: 6 ticks per row at 125 BPM.
		speed = 6;
		tempo = 125;
		plCount = plChannel = -1;
		// Reset the per-row play counters and rebuild all channel state.
		for( int idx = 0; idx < playCount.length; idx++ )
			playCount[ idx ] = new byte[ Pattern.NUM_ROWS ];
		for( int idx = 0; idx < channels.length; idx++ )
			channels[ idx ] = new Channel( module, idx );
		for( int idx = 0; idx < 128; idx++ )
			rampBuf[ idx ] = 0;
		// Consume the single pending tick so playback starts at the requested row.
		tick();
	}

	/* Returns the song duration in samples at the current sampling rate. */
	public int calculateSongDuration() {
		int duration = 0;
		setSequencePos( 0 );
		boolean songEnd = false;
		while( !songEnd ) {
			duration += calculateTickLen( tempo, sampleRate );
			// tick() reports true once a row is reached for the second time (song has looped).
			songEnd = tick();
		}
		setSequencePos( 0 );
		return duration;
	}

	/* Seek to approximately the specified sample position.
	   The actual sample position reached is returned. */
	public int seek( int samplePos ) {
		setSequencePos( 0 );
		int currentPos = 0;
		int tickLen = calculateTickLen( tempo, sampleRate );
		while( ( samplePos - currentPos ) >= tickLen ) {
			// Advance sample pointers without rendering audio (note the 2x internal rate).
			for( int idx = 0; idx < channels.length; idx++ )
				channels[ idx ].updateSampleIdx( tickLen * 2, sampleRate * 2 );
			currentPos += tickLen;
			tick();
			// Tick length may change when an Fxx command alters the tempo.
			tickLen = calculateTickLen( tempo, sampleRate );
		}
		return currentPos;
	}

	/* Seek to the specified position and row in the sequence. */
	public void seekSequencePos( int sequencePos, int sequenceRow ) {
		setSequencePos( 0 );
		if( sequencePos < 0 || sequencePos >= module.getSequenceLength() ) sequencePos = 0;
		if( sequenceRow >= 64 ) sequenceRow = 0;
		while( seqPos < sequencePos || row < sequenceRow ) {
			int tickLen = calculateTickLen( tempo, sampleRate );
			for( int idx = 0; idx < channels.length; idx++ )
				channels[ idx ].updateSampleIdx( tickLen * 2, sampleRate * 2 );
			if( tick() ) {
				// Song end reached.
				setSequencePos( sequencePos );
				return;
			}
		}
	}

	/* Generate audio.
	   The number of samples placed into outputBuf is returned.
	   The output buffer length must be at least that returned by getMixBufferLength().
	   A "sample" is a pair of 16-bit integer amplitudes, one for each of the stereo channels. */
	public int getAudio( int[] outputBuf ) {
		int tickLen = calculateTickLen( tempo, sampleRate );
		// Clear output buffer.
		for( int idx = 0, end = ( tickLen + 65 ) * 4; idx < end; idx++ )
			outputBuf[ idx ] = 0;
		// Resample.
		for( int chanIdx = 0; chanIdx < channels.length; chanIdx++ ) {
			Channel chan = channels[ chanIdx ];
			// Mix at twice the output rate; the extra 65 samples feed the downsampler and ramp.
			chan.resample( outputBuf, 0, ( tickLen + 65 ) * 2, sampleRate * 2, interpolation );
			chan.updateSampleIdx( tickLen * 2, sampleRate * 2 );
		}
		downsample( outputBuf, tickLen + 64 );
		volumeRamp( outputBuf, tickLen );
		tick();
		return tickLen;
	}

	// Number of output samples in one sequencer tick: rate * 5 / (tempo * 2)
	// (standard Amiga tempo formula: 2.5 / tempo seconds per tick).
	private int calculateTickLen( int tempo, int samplingRate ) {
		return ( samplingRate * 5 ) / ( tempo * 2 );
	}

	// Cross-fades the first part of this tick with the saved tail of the
	// previous tick (rampBuf) over 256 gain steps, then stores this tick's
	// tail for the next call. Prevents clicks at tick boundaries.
	private void volumeRamp( int[] mixBuf, int tickLen ) {
		int rampRate = 256 * 2048 / sampleRate;
		for( int idx = 0, a1 = 0; a1 < 256; idx += 2, a1 += rampRate ) {
			int a2 = 256 - a1;
			mixBuf[ idx ] = ( mixBuf[ idx ] * a1 + rampBuf[ idx ] * a2 ) >> 8;
			mixBuf[ idx + 1 ] = ( mixBuf[ idx + 1 ] * a1 + rampBuf[ idx + 1 ] * a2 ) >> 8;
		}
		System.arraycopy( mixBuf, tickLen * 2, rampBuf, 0, 128 );
	}

	private void downsample( int[] buf, int count ) {
		// 2:1 downsampling with simple but effective anti-aliasing. Buf must contain count * 2 + 1 stereo samples.
		// Each output sample is a weighted (1/4, 1/2, 1/4) average of three input samples, per stereo channel.
		int outLen = count * 2;
		for( int inIdx = 0, outIdx = 0; outIdx < outLen; inIdx += 4, outIdx += 2 ) {
			buf[ outIdx ] = ( buf[ inIdx ] >> 2 ) + ( buf[ inIdx + 2 ] >> 1 ) + ( buf[ inIdx + 4 ] >> 2 );
			buf[ outIdx + 1 ] = ( buf[ inIdx + 1 ] >> 2 ) + ( buf[ inIdx + 3 ] >> 1 ) + ( buf[ inIdx + 5 ] >> 2 );
		}
	}

	// Advances the sequencer by one tick. On the first tick of a row the row
	// is processed; on the remaining ticks each channel updates its effects.
	// Returns true once the current row has been played more than once,
	// which is used as the song-end / loop signal.
	private boolean tick() {
		if( --tick <= 0 ) {
			tick = speed;
			row();
		} else {
			for( int idx = 0; idx < channels.length; idx++ ) channels[ idx ].tick();
		}
		return playCount[ seqPos ][ row ] > 1;
	}

	// Processes one pattern row: resolves any pending pattern break/jump,
	// bumps the play counter, triggers notes, and interprets the global
	// sequencer effects (Bxx jump, Dxx break, Fxx speed, E6x loop, EEx delay).
	private void row() {
		if( nextRow < 0 ) {
			// End of pattern reached: fall through to the next sequence position.
			breakSeqPos = seqPos + 1;
			nextRow = 0;
		}
		if( breakSeqPos >= 0 ) {
			// A jump/break is pending: move there and clear all loop markers.
			if( breakSeqPos >= module.getSequenceLength() ) breakSeqPos = nextRow = 0;
			seqPos = breakSeqPos;
			for( int idx = 0; idx < channels.length; idx++ ) channels[ idx ].plRow = 0;
			breakSeqPos = -1;
		}
		row = nextRow;
		// Count this visit (saturating at 127) unless a pattern loop is replaying rows.
		int count = playCount[ seqPos ][ row ];
		if( plCount < 0 && count < 127 ) {
			playCount[ seqPos ][ row ] = ( byte ) ( count + 1 );
		}
		nextRow = row + 1;
		if( nextRow >= Pattern.NUM_ROWS ) {
			nextRow = -1;
		}
		for( int chanIdx = 0; chanIdx < channels.length; chanIdx++ ) {
			Channel channel = channels[ chanIdx ];
			module.getPattern( module.getSequenceEntry( seqPos ) ).getNote( row, chanIdx, note );
			int effect = note.effect & 0xFF;
			int param = note.parameter & 0xFF;
			if( effect == 0xE ) {
				// Remap extended commands Exy to 0x1y so they get distinct case labels.
				effect = 0x10 | ( param >> 4 );
				param &= 0xF;
			}
			// Effect 0 with a non-zero parameter is arpeggio; remap to 0xE
			// (freed above) so it is distinguishable from "no effect".
			if( effect == 0 && param > 0 ) effect = 0xE;
			channel.row( note.key, note.instrument, effect, param );
			switch( effect ) {
				case 0xB: /* Pattern Jump.*/
					if( plCount < 0 ) {
						breakSeqPos = param;
						nextRow = 0;
					}
					break;
				case 0xD: /* Pattern Break.*/
					if( plCount < 0 ) {
						if( breakSeqPos < 0 ) breakSeqPos = seqPos + 1;
						// Parameter is BCD: high nibble is tens, low nibble is units.
						nextRow = ( param >> 4 ) * 10 + ( param & 0xF );
						if( nextRow >= 64 ) nextRow = 0;
					}
					break;
				case 0xF: /* Set Speed.*/
					if( param > 0 ) {
						// Values 1-31 set ticks-per-row; 32+ set the tempo.
						if( param < 32 ) tick = speed = param;
						else tempo = param;
					}
					break;
				case 0x16: /* Pattern Loop.*/
					if( param == 0 ) /* Set loop marker on this channel. */
						channel.plRow = row;
					if( channel.plRow < row && breakSeqPos < 0 ) { /* Marker valid. */
						if( plCount < 0 ) { /* Not already looping, begin. */
							plCount = param;
							plChannel = chanIdx;
						}
						if( plChannel == chanIdx ) { /* Next Loop.*/
							if( plCount == 0 ) { /* Loop finished. */
								/* Invalidate current marker. */
								channel.plRow = row + 1;
							} else { /* Loop. */
								nextRow = channel.plRow;
							}
							plCount--;
						}
					}
					break;
				case 0x1E: /* Pattern Delay.*/
					// Stretch the current row by param extra row-lengths.
					tick = speed + speed * param;
					break;
			}
		}
	}
}
package org.fxmisc.flowless; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.function.Function; import javafx.beans.property.ObjectProperty; import javafx.collections.ObservableList; import javafx.css.CssMetaData; import javafx.css.StyleConverter; import javafx.css.Styleable; import javafx.css.StyleableObjectProperty; import javafx.css.StyleableProperty; import javafx.geometry.Bounds; import javafx.geometry.Orientation; import javafx.geometry.Point2D; import javafx.scene.input.ScrollEvent; import javafx.scene.layout.Region; import javafx.scene.shape.Rectangle; import org.reactfx.collection.MemoizationList; import org.reactfx.util.Lists; import org.reactfx.value.Val; import org.reactfx.value.Var; /** * A VirtualFlow is a memory-efficient viewport that only renders enough of its content to completely fill up the * viewport through its {@link Navigator}. Based on the viewport's {@link Gravity}, it sequentially lays out the * {@link javafx.scene.Node}s of the {@link Cell}s until the viewport is completely filled up or it has no additional * cell's nodes to render. * * <p> * Since this viewport does not fully render all of its content, the scroll values are estimates based on the nodes * that are currently displayed in the viewport. If every node that could be rendered is the same width or same * height, then the corresponding scroll values (e.g., scrollX or totalX) are accurate. * <em>Note:</em> the VirtualFlow does not have scroll bars by default. These can be added by wrapping this object * in a {@link VirtualizedScrollPane}. * </p> * * <p> * Since the viewport can be used to lay out its content horizontally or vertically, it uses two * orientation-agnostic terms to refer to its width and height: "breadth" and "length," respectively. 
The viewport
 * always lays out its {@link Cell cell}'s {@link javafx.scene.Node}s from "top-to-bottom" or from "bottom-to-top"
 * (these terms should be understood in reference to the viewport's {@link OrientationHelper orientation} and
 * {@link Gravity}). Thus, its length ("height") is independent as the viewport's bounds are dependent upon
 * its parent's bounds whereas its breadth ("width") is dependent upon its length.
 * </p>
 *
 * @param <T> the model content that the {@link Cell#getNode() cell's node} renders
 * @param <C> the {@link Cell} that can render the model with a {@link javafx.scene.Node}.
 */
public class VirtualFlow<T, C extends Cell<T, ?>> extends Region implements Virtualized {

    /**
     * Determines how the cells in the viewport should be laid out and where any extra unused space should exist
     * if there are not enough cells to completely fill up the viewport
     */
    public static enum Gravity {
        /**
         * If using a {@link VerticalHelper vertical viewport}, lays out the content from top-to-bottom. The first
         * visible item will appear at the top and the last visible item (or unused space) towards the bottom.
         * <p>
         * If using a {@link HorizontalHelper horizontal viewport}, lays out the content from left-to-right. The first
         * visible item will appear at the left and the last visible item (or unused space) towards the right.
         * </p>
         */
        FRONT,
        /**
         * If using a {@link VerticalHelper vertical viewport}, lays out the content from bottom-to-top. The first
         * visible item will appear at the bottom and the last visible item (or unused space) towards the top.
         * <p>
         * If using a {@link HorizontalHelper horizontal viewport}, lays out the content from right-to-left. The first
         * visible item will appear at the right and the last visible item (or unused space) towards the left.
         * </p>
         */
        REAR
    }

    /**
     * Creates a viewport that lays out content horizontally from left to right
     */
    public static <T, C extends Cell<T, ?>> VirtualFlow<T, C> createHorizontal(
            ObservableList<T> items,
            Function<? super T, ? extends C> cellFactory) {
        return createHorizontal(items, cellFactory, Gravity.FRONT);
    }

    /**
     * Creates a viewport that lays out content horizontally
     */
    public static <T, C extends Cell<T, ?>> VirtualFlow<T, C> createHorizontal(
            ObservableList<T> items,
            Function<? super T, ? extends C> cellFactory,
            Gravity gravity) {
        return new VirtualFlow<>(items, cellFactory, new HorizontalHelper(), gravity);
    }

    /**
     * Creates a viewport that lays out content vertically from top to bottom
     */
    public static <T, C extends Cell<T, ?>> VirtualFlow<T, C> createVertical(
            ObservableList<T> items,
            Function<? super T, ? extends C> cellFactory) {
        return createVertical(items, cellFactory, Gravity.FRONT);
    }

    /**
     * Creates a viewport that lays out content vertically from top to bottom
     */
    public static <T, C extends Cell<T, ?>> VirtualFlow<T, C> createVertical(
            ObservableList<T> items,
            Function<? super T, ? extends C> cellFactory,
            Gravity gravity) {
        return new VirtualFlow<>(items, cellFactory, new VerticalHelper(), gravity);
    }

    // The model items rendered by this viewport (shared, not copied).
    private final ObservableList<T> items;
    // Strategy object that maps "length"/"breadth" onto width/height for the chosen orientation.
    private final OrientationHelper orientation;
    // Lazily creates/destroys cells for items as they enter/leave the viewport.
    private final CellListManager<T, C> cellListManager;
    // Tracks size estimates (average cell length, max breadth, viewport size).
    private final SizeTracker sizeTracker;
    // Positions individual cells within the viewport along the length axis.
    private final CellPositioner<T, C> cellPositioner;
    // Child Region that actually hosts the cell nodes and performs fill/scroll navigation.
    private final Navigator<T, C> navigator;

    // CSS-styleable gravity property; see gravityProperty() for the user-facing documentation.
    private final StyleableObjectProperty<Gravity> gravity = new StyleableObjectProperty<Gravity>() {
        @Override
        public Object getBean() {
            return VirtualFlow.this;
        }

        @Override
        public String getName() {
            return "gravity";
        }

        @Override
        public CssMetaData<? extends Styleable, Gravity> getCssMetaData() {
            return GRAVITY;
        }
    };

    // non-negative
    // Raw breadth scroll offset; writes to the public breadthOffset var are routed
    // through setBreadthOffset so the value is clamped before being stored here.
    private final Var<Double> breadthOffset0 = Var.newSimpleVar(0.0);
    private final Var<Double> breadthOffset = breadthOffset0.asVar(this::setBreadthOffset);

    /** Scroll offset along the breadth axis (horizontal for a vertical flow, and vice versa). */
    public Var<Double> breadthOffsetProperty() {
        return breadthOffset;
    }

    /** Estimated total breadth of the content, i.e. the maximum cell breadth seen so far. */
    public Val<Double> totalBreadthEstimateProperty() {
        return sizeTracker.maxCellBreadthProperty();
    }

    // Estimated scroll offset along the length axis; setting it scrolls the viewport.
    private final Var<Double> lengthOffsetEstimate;

    /** Estimated scroll offset along the length axis (vertical for a vertical flow). */
    public Var<Double> lengthOffsetEstimateProperty() {
        return lengthOffsetEstimate;
    }

    /**
     * Wires together the cell manager, size tracker, positioner and navigator,
     * installs the clip, and hooks up mouse-scroll handling.
     */
    private VirtualFlow(
            ObservableList<T> items,
            Function<? super T, ? extends C> cellFactory,
            OrientationHelper orientation,
            Gravity gravity) {
        this.getStyleClass().add("virtual-flow");
        this.items = items;
        this.orientation = orientation;
        this.cellListManager = new CellListManager<>(items, cellFactory);
        this.gravity.set(gravity);
        MemoizationList<C> cells = cellListManager.getLazyCellList();
        this.sizeTracker = new SizeTracker(orientation, layoutBoundsProperty(), cells);
        this.cellPositioner = new CellPositioner<>(cellListManager, orientation, sizeTracker);
        this.navigator = new Navigator<>(cellListManager, cellPositioner, orientation, this.gravity, sizeTracker);

        getChildren().add(navigator);
        // Clip to our own bounds so off-screen cells do not paint outside the viewport.
        clipProperty().bind(Val.map(
                layoutBoundsProperty(),
                b -> new Rectangle(b.getWidth(), b.getHeight())));

        lengthOffsetEstimate = sizeTracker.lengthOffsetEstimateProperty().asVar(this::setLengthOffset);

        // scroll content by mouse scroll
        this.addEventHandler(ScrollEvent.SCROLL, se -> {
            scrollXBy(-se.getDeltaX());
            scrollYBy(-se.getDeltaY());
            se.consume();
        });
    }

    /** Releases listeners held by the navigator, size tracker and cell manager. */
    public void dispose() {
        navigator.dispose();
        sizeTracker.dispose();
        cellListManager.dispose();
    }

    /**
     * If the item is out of view, instantiates a new cell for the item.
     * The returned cell will be properly sized, but not properly positioned
     * relative to the cells in the viewport, unless it is itself in the
     * viewport.
     *
     * @return Cell for the given item. The cell will be valid only until the
     * next layout pass. It should therefore not be stored. It is intended to
     * be used for measurement purposes only.
     */
    public C getCell(int itemIndex) {
        Lists.checkIndex(itemIndex, items.size());
        return cellPositioner.getSizedCell(itemIndex);
    }

    /**
     * This method calls {@link #layout()} as a side-effect to insure
     * that the VirtualFlow is up-to-date in light of any changes
     */
    public Optional<C> getCellIfVisible(int itemIndex) {
        // insure cells are up-to-date in light of any changes
        layout();
        return cellPositioner.getCellIfVisible(itemIndex);
    }

    /**
     * This method calls {@link #layout()} as a side-effect to insure
     * that the VirtualFlow is up-to-date in light of any changes
     */
    public ObservableList<C> visibleCells() {
        // insure cells are up-to-date in light of any changes
        layout();
        return cellListManager.getLazyCellList().memoizedItems();
    }

    /** Estimated total length of the content (sum of all cell lengths). */
    public Val<Double> totalLengthEstimateProperty() {
        return sizeTracker.totalLengthEstimateProperty();
    }

    /** Converts bounds from a cell's coordinate system to viewport coordinates. */
    public Bounds cellToViewport(C cell, Bounds bounds) {
        return cell.getNode().localToParent(bounds);
    }

    /** Converts a point from a cell's coordinate system to viewport coordinates. */
    public Point2D cellToViewport(C cell, Point2D point) {
        return cell.getNode().localToParent(point);
    }

    /** Converts (x, y) from a cell's coordinate system to viewport coordinates. */
    public Point2D cellToViewport(C cell, double x, double y) {
        return cell.getNode().localToParent(x, y);
    }

    @Override
    protected void layoutChildren() {

        // navigate to the target position and fill viewport
        // Laying out cells may change the required layout breadth (e.g. a wider cell
        // appears), which in turn may change cell wrapping; iterate until stable.
        while(true) {
            double oldLayoutBreadth = sizeTracker.getCellLayoutBreadth();
            orientation.resize(navigator, oldLayoutBreadth, sizeTracker.getViewportLength());
            navigator.layout();
            if(oldLayoutBreadth == sizeTracker.getCellLayoutBreadth()) {
                break;
            }
        }

        double viewBreadth = orientation.breadth(this);
        double navigatorBreadth = orientation.breadth(navigator);
        double totalBreadth = breadthOffset0.getValue();
        double breadthDifference = navigatorBreadth - totalBreadth;
        if (breadthDifference < viewBreadth) {
            // viewport is scrolled all the way to the end of its breadth.
            // but now viewport size (breadth) has increased
            // Pull the content back so no empty gap is shown at the end.
            double adjustment = viewBreadth - breadthDifference;
            orientation.relocate(navigator, -(totalBreadth - adjustment), 0);
            breadthOffset0.setValue(totalBreadth - adjustment);
        } else {
            orientation.relocate(navigator, -breadthOffset0.getValue(), 0);
        }
    }

    @Override
    protected final double computePrefWidth(double height) {
        switch(getContentBias()) {
            case HORIZONTAL: // vertical flow
                return computePrefBreadth();
            case VERTICAL: // horizontal flow
                return computePrefLength(height);
            default:
                throw new AssertionError("Unreachable code");
        }
    }

    @Override
    protected final double computePrefHeight(double width) {
        switch(getContentBias()) {
            case HORIZONTAL: // vertical flow
                return computePrefLength(width);
            case VERTICAL: // horizontal flow
                return computePrefBreadth();
            default:
                throw new AssertionError("Unreachable code");
        }
    }

    // Fixed fallback preferred size; the virtual flow cannot cheaply measure all content.
    private double computePrefBreadth() {
        return 100;
    }

    private double computePrefLength(double breadth) {
        return 100;
    }

    @Override
    public final Orientation getContentBias() {
        return orientation.getContentBias();
    }

    /** Scrolls along the length axis by the given delta (clamped by setLengthOffset). */
    void scrollLength(double deltaLength) {
        setLengthOffset(lengthOffsetEstimate.getValue() + deltaLength);
    }

    /** Scrolls along the breadth axis by the given delta (clamped by setBreadthOffset). */
    void scrollBreadth(double deltaBreadth) {
        setBreadthOffset(breadthOffset0.getValue() + deltaBreadth);
    }

    /**
     * Scroll the content horizontally by the given amount.
     *
     * @param deltaX positive value scrolls right, negative value scrolls left
     */
    @Override
    public void scrollXBy(double deltaX) {
        orientation.scrollHorizontallyBy(this, deltaX);
    }

    /**
     * Scroll the content vertically by the given amount.
     *
     * @param deltaY positive value scrolls down, negative value scrolls up
     */
    @Override
    public void scrollYBy(double deltaY) {
        orientation.scrollVerticallyBy(this, deltaY);
    }

    /**
     * Scroll the content horizontally to the pixel
     *
     * @param pixel - the pixel position to which to scroll
     */
    @Override
    public void scrollXToPixel(double pixel) {
        orientation.scrollHorizontallyToPixel(this, pixel);
    }

    /**
     * Scroll the content vertically to the pixel
     *
     * @param pixel - the pixel position to which to scroll
     */
    @Override
    public void scrollYToPixel(double pixel) {
        orientation.scrollVerticallyToPixel(this, pixel);
    }

    @Override
    public Val<Double> totalWidthEstimateProperty() {
        return orientation.widthEstimateProperty(this);
    }

    @Override
    public Val<Double> totalHeightEstimateProperty() {
        return orientation.heightEstimateProperty(this);
    }

    @Override
    public Var<Double> estimatedScrollXProperty() {
        return orientation.estimatedScrollXProperty(this);
    }

    @Override
    public Var<Double> estimatedScrollYProperty() {
        return orientation.estimatedScrollYProperty(this);
    }

    /**
     * Hits this virtual flow at the given coordinates.
     *
     * @param x x offset from the left edge of the viewport
     * @param y y offset from the top edge of the viewport
     * @return hit info containing the cell that was hit and coordinates
     * relative to the cell. If the hit was before the cells (i.e. above a
     * vertical flow content or left of a horizontal flow content), returns
     * a <em>hit before cells</em> containing offset from the top left corner
     * of the content. If the hit was after the cells (i.e. below a vertical
     * flow content or right of a horizontal flow content), returns a
     * <em>hit after cells</em> containing offset from the top right corner of
     * the content of a horizontal flow or bottom left corner of the content of
     * a vertical flow.
     */
    public VirtualFlowHit<C> hit(double x, double y) {
        // Translate the (x, y) screen-like coordinates into breadth/length offsets.
        double bOff = orientation.getX(x, y);
        double lOff = orientation.getY(x, y);

        bOff += breadthOffset0.getValue();

        if(items.isEmpty()) {
            return orientation.hitAfterCells(bOff, lOff);
        }

        layout();

        // Extend the rendered range so the hit offset is covered by realized cells.
        int firstVisible = cellPositioner.getFirstVisibleIndex().getAsInt();
        firstVisible = navigator.fillBackwardFrom0(firstVisible, lOff);
        C firstCell = cellPositioner.getVisibleCell(firstVisible);

        int lastVisible = cellPositioner.getLastVisibleIndex().getAsInt();
        lastVisible = navigator.fillForwardFrom0(lastVisible, lOff);
        C lastCell = cellPositioner.getVisibleCell(lastVisible);

        if(lOff < orientation.minY(firstCell)) {
            return orientation.hitBeforeCells(bOff, lOff - orientation.minY(firstCell));
        } else if(lOff >= orientation.maxY(lastCell)) {
            return orientation.hitAfterCells(bOff, lOff - orientation.maxY(lastCell));
        } else {
            // Linear scan over the visible cells to find the one containing lOff.
            for(int i = firstVisible; i <= lastVisible; ++i) {
                C cell = cellPositioner.getVisibleCell(i);
                if(lOff < orientation.maxY(cell)) {
                    return orientation.cellHit(i, cell, bOff, lOff - orientation.minY(cell));
                }
            }
            throw new AssertionError("unreachable code");
        }
    }

    /**
     * Forces the viewport to acts as though it scrolled from 0 to {@code viewportOffset}). <em>Note:</em> the
     * viewport makes an educated guess as to which cell is actually at {@code viewportOffset} if the viewport's
     * entire content was completely rendered.
     *
     * @param viewportOffset See {@link OrientationHelper} and its implementations for explanation on what the offset
     *                       means based on which implementation is used.
     */
    public void show(double viewportOffset) {
        if(viewportOffset < 0) {
            navigator.scrollCurrentPositionBy(viewportOffset);
        } else if(viewportOffset > sizeTracker.getViewportLength()) {
            navigator.scrollCurrentPositionBy(viewportOffset - sizeTracker.getViewportLength());
        } else {
            // do nothing, offset already in the viewport
        }
    }

    /**
     * Forces the viewport to show the given item by "scrolling" to it
     */
    public void show(int itemIdx) {
        navigator.setTargetPosition(new MinDistanceTo(itemIdx));
    }

    /**
     * Forces the viewport to show the given item as the first visible item as determined by its {@link Gravity}.
     */
    public void showAsFirst(int itemIdx) {
        navigator.setTargetPosition(new StartOffStart(itemIdx, 0.0));
    }

    /**
     * Forces the viewport to show the given item as the last visible item as determined by its {@link Gravity}.
     */
    public void showAsLast(int itemIdx) {
        navigator.setTargetPosition(new EndOffEnd(itemIdx, 0.0));
    }

    /**
     * Forces the viewport to show the given item by "scrolling" to it and then further "scrolling" by {@code offset}
     * in one layout call (e.g., this method does not "scroll" twice)
     *
     * @param offset the offset value as determined by the viewport's {@link OrientationHelper}.
     */
    public void showAtOffset(int itemIdx, double offset) {
        navigator.setTargetPosition(new StartOffStart(itemIdx, offset));
    }

    /**
     * Forces the viewport to show the given item by "scrolling" to it and then further "scrolling," so that the
     * {@code region} is visible, in one layout call (e.g., this method does not "scroll" twice).
     */
    public void show(int itemIndex, Bounds region) {
        navigator.showLengthRegion(itemIndex, orientation.minY(region), orientation.maxY(region));
        showBreadthRegion(orientation.minX(region), orientation.maxX(region));
    }

    // Scrolls along the breadth axis only as far as needed to bring [fromX, toX] into view.
    private void showBreadthRegion(double fromX, double toX) {
        double bOff = breadthOffset0.getValue();
        double spaceBefore = fromX - bOff;
        double spaceAfter = sizeTracker.getViewportBreadth() - toX + bOff;
        if(spaceBefore < 0 && spaceAfter > 0) {
            double shift = Math.min(-spaceBefore, spaceAfter);
            setBreadthOffset(bOff - shift);
        } else if(spaceAfter < 0 && spaceBefore > 0) {
            double shift = Math.max(spaceAfter, -spaceBefore);
            setBreadthOffset(bOff - shift);
        }
    }

    /**
     * Sets the length scroll offset, clamped to [0, total - viewportLength].
     * Small moves (less than one screen) scroll incrementally; larger jumps
     * reposition via an estimated absolute position.
     */
    void setLengthOffset(double pixels) {
        double total = totalLengthEstimateProperty().getOrElse(0.0);
        double length = sizeTracker.getViewportLength();
        double max = Math.max(total - length, 0);
        double current = lengthOffsetEstimate.getValue();

        if(pixels > max) pixels = max;
        if(pixels < 0) pixels = 0;

        double diff = pixels - current;
        if(diff == 0) {
            // do nothing
        } else if(Math.abs(diff) < length) { // distance less than one screen
            navigator.scrollCurrentPositionBy(diff);
        } else {
            jumpToAbsolutePosition(pixels);
        }
    }

    /** Sets the breadth scroll offset, clamped to [0, total - viewportBreadth]. */
    void setBreadthOffset(double pixels) {
        double total = totalBreadthEstimateProperty().getValue();
        double breadth = sizeTracker.getViewportBreadth();
        double max = Math.max(total - breadth, 0);
        double current = breadthOffset0.getValue();

        if(pixels > max) pixels = max;
        if(pixels < 0) pixels = 0;

        if(pixels != current) {
            breadthOffset0.setValue(pixels);
            requestLayout();
            // TODO: could be safely relocated right away?
            // (Does relocation request layout?)
        }
    }

    // Estimates which item sits at the absolute offset `pixels` using the average
    // cell length, then asks the navigator to position it. The estimate is only a
    // guess; the next layout pass will settle actual cell positions.
    private void jumpToAbsolutePosition(double pixels) {
        if(items.isEmpty()) {
            return;
        }

        // guess the first visible cell and its offset in the viewport
        double avgLen = sizeTracker.getAverageLengthEstimate().orElse(0.0);
        if(avgLen == 0.0) return;
        int first = (int) Math.floor(pixels / avgLen);
        double firstOffset = -(pixels % avgLen);

        if(first < items.size()) {
            navigator.setTargetPosition(new StartOffStart(first, firstOffset));
        } else {
            navigator.setTargetPosition(new EndOffEnd(items.size() - 1, 0.0));
        }
    }

    /**
     * The gravity of the virtual flow. When there are not enough cells to fill
     * the full height (vertical virtual flow) or width (horizontal virtual flow),
     * the cells are placed either at the front (vertical: top, horizontal: left),
     * or rear (vertical: bottom, horizontal: right) of the virtual flow, depending
     * on the value of the gravity property.
     *
     * The gravity can also be styled in CSS, using the "-flowless-gravity" property,
     * for example:
     * <pre>.virtual-flow { -flowless-gravity: rear; }</pre>
     */
    public ObjectProperty<Gravity> gravityProperty() {
        return gravity;
    }

    public Gravity getGravity() {
        return gravity.get();
    }

    public void setGravity(Gravity gravity) {
        this.gravity.set(gravity);
    }

    @SuppressWarnings("unchecked") // Because of the cast we have to perform, below
    private static final CssMetaData<VirtualFlow, Gravity> GRAVITY = new CssMetaData<VirtualFlow, Gravity>(
            "-flowless-gravity",
            // JavaFX seems to have an odd return type on getEnumConverter: "? extends Enum<?>", not E as the second generic type.
            // Even though if you look at the source, the EnumConverter type it uses does have the type E.
            // To get round this, we cast on return:
            (StyleConverter<?, Gravity>) StyleConverter.getEnumConverter(Gravity.class),
            Gravity.FRONT) {
        @Override
        public boolean isSettable(VirtualFlow virtualFlow) {
            return !virtualFlow.gravity.isBound();
        }

        @Override
        public StyleableProperty<Gravity> getStyleableProperty(VirtualFlow virtualFlow) {
            return virtualFlow.gravity;
        }
    };

    // All styleable CSS properties supported by this control: Region's plus GRAVITY.
    private static final List<CssMetaData<? extends Styleable, ?>> STYLEABLES;

    static {
        List<CssMetaData<? extends Styleable, ?>> styleables = new ArrayList<>(Region.getClassCssMetaData());
        styleables.add(GRAVITY);
        STYLEABLES = Collections.unmodifiableList(styleables);
    }

    public static List<CssMetaData<? extends Styleable, ?>> getClassCssMetaData() {
        return STYLEABLES;
    }

    @Override
    public List<CssMetaData<? extends Styleable, ?>> getCssMetaData() {
        return getClassCssMetaData();
    }
}
/*
 * Copyright 2006-2015 Bas Leijdekkers
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.siyeh.ig.abstraction;

import com.intellij.codeInsight.daemon.impl.analysis.JavaHighlightUtil;
import com.intellij.codeInspection.ProblemDescriptor;
import com.intellij.codeInspection.ui.MultipleCheckboxOptionsPanel;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.search.searches.OverridingMethodsSearch;
import com.intellij.util.Query;
import com.siyeh.InspectionGadgetsBundle;
import com.siyeh.ig.BaseInspection;
import com.siyeh.ig.BaseInspectionVisitor;
import com.siyeh.ig.InspectionGadgetsFix;
import com.siyeh.ig.psiutils.MethodUtils;
import com.siyeh.ig.psiutils.WeakestTypeFinder;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.util.*;

/**
 * Inspection that reports variables, parameters and methods whose declared type
 * can be weakened to a supertype (e.g. {@code ArrayList} to {@code List}).
 * Offers quick fixes that replace the declared type with each weaker candidate.
 */
public class TypeMayBeWeakenedInspection extends BaseInspection {

  // Fields are public and non-final because the inspection framework serializes
  // them as user-configurable settings; renaming them would break saved profiles.
  @SuppressWarnings({"PublicField"})
  public boolean useRighthandTypeAsWeakestTypeInAssignments = true;

  @SuppressWarnings({"PublicField"})
  public boolean useParameterizedTypeForCollectionMethods = true;

  @SuppressWarnings({"PublicField"})
  public boolean doNotWeakenToJavaLangObject = true;

  @SuppressWarnings({"PublicField"})
  public boolean onlyWeakentoInterface = true;

  @Override
  @NotNull
  public String getDisplayName() {
    return InspectionGadgetsBundle.message("type.may.be.weakened.display.name");
  }

  /**
   * Builds the problem message. {@code infos[0]} is the offending element
   * (field/parameter/method/variable), {@code infos[1]} the weaker candidate classes.
   */
  @Override
  @NotNull
  protected String buildErrorString(Object... infos) {
    final Iterable<PsiClass> weakerClasses = (Iterable<PsiClass>)infos[1];
    // Render the candidates as a quoted, comma-separated list.
    @NonNls final StringBuilder builder = new StringBuilder();
    final Iterator<PsiClass> iterator = weakerClasses.iterator();
    if (iterator.hasNext()) {
      builder.append('\'').append(getClassName(iterator.next())).append('\'');
      while (iterator.hasNext()) {
        builder.append(", '").append(getClassName(iterator.next())).append('\'');
      }
    }
    final Object info = infos[0];
    if (info instanceof PsiField) {
      return InspectionGadgetsBundle.message("type.may.be.weakened.field.problem.descriptor", builder.toString());
    }
    else if (info instanceof PsiParameter) {
      return InspectionGadgetsBundle.message("type.may.be.weakened.parameter.problem.descriptor", builder.toString());
    }
    else if (info instanceof PsiMethod) {
      return InspectionGadgetsBundle.message("type.may.be.weakened.method.problem.descriptor", builder.toString());
    }
    return InspectionGadgetsBundle.message("type.may.be.weakened.problem.descriptor", builder.toString());
  }

  /** Returns the qualified name if available, otherwise the short name (may be null). */
  private static String getClassName(PsiClass aClass) {
    final String qualifiedName = aClass.getQualifiedName();
    if (qualifiedName == null) {
      return aClass.getName();
    }
    return qualifiedName;
  }

  @Override
  @Nullable
  public JComponent createOptionsPanel() {
    final MultipleCheckboxOptionsPanel optionsPanel = new MultipleCheckboxOptionsPanel(this);
    optionsPanel.addCheckbox(InspectionGadgetsBundle.message("type.may.be.weakened.ignore.option"),
                             "useRighthandTypeAsWeakestTypeInAssignments");
    optionsPanel.addCheckbox(InspectionGadgetsBundle.message("type.may.be.weakened.collection.method.option"),
                             "useParameterizedTypeForCollectionMethods");
    optionsPanel.addCheckbox(InspectionGadgetsBundle.message("type.may.be.weakened.do.not.weaken.to.object.option"),
                             "doNotWeakenToJavaLangObject");
    optionsPanel.addCheckbox(InspectionGadgetsBundle.message("only.weaken.to.an.interface"),
                             "onlyWeakentoInterface");
    return optionsPanel;
  }

  /** Creates one quick fix per weaker candidate class. */
  @Override
  @NotNull
  protected InspectionGadgetsFix[] buildFixes(Object... infos) {
    final Iterable<PsiClass> weakerClasses = (Iterable<PsiClass>)infos[1];
    // was: raw `new ArrayList()` — use the diamond to keep the element type checked
    final Collection<InspectionGadgetsFix> fixes = new ArrayList<>();
    for (PsiClass weakestClass : weakerClasses) {
      final String className = getClassName(weakestClass);
      if (className == null) {
        // anonymous/local classes may have no usable name; no fix possible
        continue;
      }
      fixes.add(new TypeMayBeWeakenedFix(className));
    }
    return fixes.toArray(new InspectionGadgetsFix[fixes.size()]);
  }

  /** Quick fix that rewrites the declared type to the given (fully qualified) class. */
  private static class TypeMayBeWeakenedFix extends InspectionGadgetsFix {

    private final String fqClassName;

    TypeMayBeWeakenedFix(@NotNull String fqClassName) {
      this.fqClassName = fqClassName;
    }

    @Override
    @NotNull
    public String getName() {
      return InspectionGadgetsBundle.message("type.may.be.weakened.quickfix", fqClassName);
    }

    @NotNull
    @Override
    public String getFamilyName() {
      return "Weaken type";
    }

    @Override
    protected void doFix(Project project, ProblemDescriptor descriptor) {
      final PsiElement element = descriptor.getPsiElement();
      final PsiElement parent = element.getParent();
      // Locate the type element to replace: a variable's type or a method's return type.
      final PsiTypeElement typeElement;
      if (parent instanceof PsiVariable) {
        final PsiVariable variable = (PsiVariable)parent;
        typeElement = variable.getTypeElement();
      }
      else if (parent instanceof PsiMethod) {
        final PsiMethod method = (PsiMethod)parent;
        typeElement = method.getReturnTypeElement();
      }
      else {
        return;
      }
      if (typeElement == null) {
        return;
      }
      final PsiJavaCodeReferenceElement componentReferenceElement = typeElement.getInnermostComponentReferenceElement();
      if (componentReferenceElement == null) {
        return;
      }
      final PsiType oldType = typeElement.getType();
      if (!(oldType instanceof PsiClassType)) {
        return;
      }
      final PsiClassType oldClassType = (PsiClassType)oldType;
      final PsiType[] parameterTypes = oldClassType.getParameters();
      final JavaPsiFacade facade = JavaPsiFacade.getInstance(project);
      final PsiElementFactory factory = facade.getElementFactory();
      final PsiType type = factory.createTypeFromText(fqClassName, element);
      if (!(type instanceof PsiClassType)) {
        return;
      }
      PsiClassType classType = (PsiClassType)type;
      final PsiClass aClass = classType.resolve();
      if (aClass != null) {
        // Transfer the original type arguments onto the weaker type when the
        // parameter counts match, e.g. ArrayList<String> -> List<String>.
        final PsiTypeParameter[] typeParameters = aClass.getTypeParameters();
        if (typeParameters.length != 0 && typeParameters.length == parameterTypes.length) {
          // was: raw `new HashMap()` — use the diamond to keep key/value types checked
          final Map<PsiTypeParameter, PsiType> typeParameterMap = new HashMap<>();
          for (int i = 0; i < typeParameters.length; i++) {
            final PsiTypeParameter typeParameter = typeParameters[i];
            final PsiType parameterType = parameterTypes[i];
            typeParameterMap.put(typeParameter, parameterType);
          }
          final PsiSubstitutor substitutor = factory.createSubstitutor(typeParameterMap);
          classType = factory.createType(aClass, substitutor);
        }
      }
      final PsiJavaCodeReferenceElement referenceElement = factory.createReferenceElementByType(classType);
      final PsiElement replacement = componentReferenceElement.replace(referenceElement);
      // Shorten the inserted fully qualified name and add imports as needed.
      final JavaCodeStyleManager javaCodeStyleManager = JavaCodeStyleManager.getInstance(project);
      javaCodeStyleManager.shortenClassReferences(replacement);
    }
  }

  @Override
  public BaseInspectionVisitor buildVisitor() {
    return new TypeMayBeWeakenedVisitor();
  }

  private class TypeMayBeWeakenedVisitor extends BaseInspectionVisitor {

    @Override
    public void visitVariable(PsiVariable variable) {
      super.visitVariable(variable);
      if (variable instanceof PsiParameter) {
        final PsiParameter parameter = (PsiParameter)variable;
        final PsiElement declarationScope = parameter.getDeclarationScope();
        if (declarationScope instanceof PsiCatchSection) {
          // do not weaken catch block parameters
          return;
        }
        else if (declarationScope instanceof PsiLambdaExpression && parameter.getTypeElement() == null) {
          //no need to check inferred lambda params
          return;
        }
        else if (declarationScope instanceof PsiMethod) {
          final PsiMethod method = (PsiMethod)declarationScope;
          final PsiClass containingClass = method.getContainingClass();
          if (containingClass == null || containingClass.isInterface()) {
            return;
          }
          if (JavaHighlightUtil.isSerializationRelatedMethod(method, containingClass)) {
            return;
          }
          if (MethodUtils.hasSuper(method)) {
            // do not try to weaken parameters of methods with
            // super methods
            return;
          }
          final Query<PsiMethod> overridingSearch = OverridingMethodsSearch.search(method);
          if (overridingSearch.findFirst() != null) {
            // do not try to weaken parameters of methods with
            // overriding methods.
            return;
          }
        }
      }
      if (isOnTheFly() && variable instanceof PsiField) {
        // checking variables with greater visibility is too expensive
        // for error checking in the editor
        if (!variable.hasModifierProperty(PsiModifier.PRIVATE)) {
          return;
        }
      }
      if (useRighthandTypeAsWeakestTypeInAssignments) {
        // With this option on, only flag declarations whose right-hand side pins
        // the concrete type (a `new` expression or an explicit cast).
        if (variable instanceof PsiParameter) {
          final PsiElement parent = variable.getParent();
          if (parent instanceof PsiForeachStatement) {
            final PsiForeachStatement foreachStatement = (PsiForeachStatement)parent;
            final PsiExpression iteratedValue = foreachStatement.getIteratedValue();
            if (!(iteratedValue instanceof PsiNewExpression) && !(iteratedValue instanceof PsiTypeCastExpression)) {
              return;
            }
          }
        }
        else {
          final PsiExpression initializer = variable.getInitializer();
          if (!(initializer instanceof PsiNewExpression) && !(initializer instanceof PsiTypeCastExpression)) {
            return;
          }
        }
      }
      final Collection<PsiClass> weakestClasses =
        WeakestTypeFinder.calculateWeakestClassesNecessary(variable,
                                                           useRighthandTypeAsWeakestTypeInAssignments,
                                                           useParameterizedTypeForCollectionMethods);
      removeExcludedClasses(weakestClasses, variable);
      if (weakestClasses.isEmpty()) {
        return;
      }
      registerVariableError(variable, variable, weakestClasses);
    }

    @Override
    public void visitMethod(PsiMethod method) {
      super.visitMethod(method);
      if (isOnTheFly() && !method.hasModifierProperty(PsiModifier.PRIVATE) &&
          !ApplicationManager.getApplication().isUnitTestMode()) {
        // checking methods with greater visibility is too expensive.
        // for error checking in the editor
        return;
      }
      if (MethodUtils.hasSuper(method)) {
        // do not try to weaken methods with super methods
        return;
      }
      final Query<PsiMethod> overridingSearch = OverridingMethodsSearch.search(method);
      if (overridingSearch.findFirst() != null) {
        // do not try to weaken methods with overriding methods.
        return;
      }
      final Collection<PsiClass> weakestClasses =
        WeakestTypeFinder.calculateWeakestClassesNecessary(method,
                                                           useRighthandTypeAsWeakestTypeInAssignments,
                                                           useParameterizedTypeForCollectionMethods);
      removeExcludedClasses(weakestClasses, method);
      if (weakestClasses.isEmpty()) {
        return;
      }
      registerMethodError(method, method, weakestClasses);
    }

    /**
     * Applies the "do not weaken to java.lang.Object" and "only weaken to an
     * interface" settings to the candidate set, mutating it in place.
     * Extracted because this logic was previously duplicated verbatim in both
     * {@link #visitVariable} and {@link #visitMethod}.
     *
     * @param weakestClasses the candidate weaker classes; modified in place
     * @param context        element used to obtain the project and resolve scope
     */
    private void removeExcludedClasses(Collection<PsiClass> weakestClasses, PsiElement context) {
      if (doNotWeakenToJavaLangObject) {
        final Project project = context.getProject();
        final JavaPsiFacade facade = JavaPsiFacade.getInstance(project);
        final PsiClass javaLangObjectClass = facade.findClass(CommonClassNames.JAVA_LANG_OBJECT, context.getResolveScope());
        weakestClasses.remove(javaLangObjectClass);
      }
      if (onlyWeakentoInterface) {
        for (Iterator<PsiClass> iterator = weakestClasses.iterator(); iterator.hasNext(); ) {
          final PsiClass weakestClass = iterator.next();
          if (!weakestClass.isInterface()) {
            iterator.remove();
          }
        }
      }
    }
  }
}
/*
 * Copyright 2015 Google Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package as.tra.brayden.blesim;

import android.app.Activity;
import android.bluetooth.BluetoothGatt;
import android.bluetooth.BluetoothGattCharacteristic;
import android.bluetooth.BluetoothGattService;
import android.os.Bundle;
import android.os.ParcelUuid;
import android.util.Log;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.view.inputmethod.EditorInfo;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Spinner;
import android.widget.TextView;
import android.widget.TextView.OnEditorActionListener;
import android.widget.Toast;

import java.util.Arrays;
import java.util.UUID;

/**
 * Fragment that simulates a Bluetooth LE Heart Rate peripheral service.
 * It exposes the standard Heart Rate GATT service with three characteristics
 * (Heart Rate Measurement, Body Sensor Location, Heart Rate Control Point)
 * and a UI for editing the heart-rate and energy-expended values and for
 * notifying connected centrals.
 */
public class HeartRateServiceFragment extends ServiceFragment {
  private static final String TAG = HeartRateServiceFragment.class.getCanonicalName();

  // Valid ranges for unsigned 8-bit and 16-bit characteristic values.
  private static final int MIN_UINT = 0;
  private static final int MAX_UINT8 = (int) Math.pow(2, 8) - 1;
  private static final int MAX_UINT16 = (int) Math.pow(2, 16) - 1;
  /**
   * See <a href="https://developer.bluetooth.org/gatt/services/Pages/ServiceViewer.aspx?u=org.bluetooth.service.heart_rate.xml">
   * Heart Rate Service</a>
   */
  private static final UUID HEART_RATE_SERVICE_UUID = UUID
      .fromString("0000180D-0000-1000-8000-00805f9b34fb");

  /**
   * See <a href="https://developer.bluetooth.org/gatt/characteristics/Pages/CharacteristicViewer.aspx?u=org.bluetooth.characteristic.heart_rate_measurement.xml">
   * Heart Rate Measurement</a>
   */
  private static final UUID HEART_RATE_MEASUREMENT_UUID = UUID
      .fromString("00002A37-0000-1000-8000-00805f9b34fb");
  private static final int HEART_RATE_MEASUREMENT_VALUE_FORMAT = BluetoothGattCharacteristic.FORMAT_UINT8;
  private static final int INITIAL_HEART_RATE_MEASUREMENT_VALUE = 60;
  private static final int EXPENDED_ENERGY_FORMAT = BluetoothGattCharacteristic.FORMAT_UINT16;
  private static final int INITIAL_EXPENDED_ENERGY = 0;
  private static final String HEART_RATE_MEASUREMENT_DESCRIPTION = "Used to send a heart rate "
      + "measurement";

  /**
   * See <a href="https://developer.bluetooth.org/gatt/characteristics/Pages/CharacteristicViewer.aspx?u=org.bluetooth.characteristic.body_sensor_location.xml">
   * Body Sensor Location</a>
   */
  private static final UUID BODY_SENSOR_LOCATION_UUID = UUID
      .fromString("00002A38-0000-1000-8000-00805f9b34fb");
  // "Other" body sensor location per the Body Sensor Location characteristic spec.
  private static final int LOCATION_OTHER = 0;

  /**
   * See <a href="https://developer.bluetooth.org/gatt/characteristics/Pages/CharacteristicViewer.aspx?u=org.bluetooth.characteristic.heart_rate_control_point.xml">
   * Heart Rate Control Point</a>
   */
  private static final UUID HEART_RATE_CONTROL_POINT_UUID = UUID
      .fromString("00002A39-0000-1000-8000-00805f9b34fb");

  // GATT service and its three characteristics, built in the constructor.
  private BluetoothGattService mHeartRateService;
  private BluetoothGattCharacteristic mHeartRateMeasurementCharacteristic;
  private BluetoothGattCharacteristic mBodySensorLocationCharacteristic;
  private BluetoothGattCharacteristic mHeartRateControlPoint;

  // Host activity; set in onAttach, cleared in onDetach.
  private ServiceFragmentDelegate mDelegate;

  private EditText mEditTextHeartRateMeasurement;
  // Validates the typed heart-rate value and writes it into the characteristic
  // at offset 1 (the byte after the flags byte — see setHeartRateMeasurementValue).
  private final OnEditorActionListener mOnEditorActionListenerHeartRateMeasurement = new OnEditorActionListener() {
    @Override
    public boolean onEditorAction(TextView textView, int actionId, KeyEvent event) {
      if (actionId == EditorInfo.IME_ACTION_DONE) {
        String newHeartRateMeasurementValueString = textView.getText().toString();
        if (isValidCharacteristicValue(newHeartRateMeasurementValueString,
            HEART_RATE_MEASUREMENT_VALUE_FORMAT)) {
          int newHeartRateMeasurementValue = Integer.parseInt(newHeartRateMeasurementValueString);
          mHeartRateMeasurementCharacteristic.setValue(newHeartRateMeasurementValue,
              HEART_RATE_MEASUREMENT_VALUE_FORMAT,
              /* offset */ 1);
        } else {
          Toast.makeText(getActivity(), R.string.heartRateMeasurementValueInvalid,
              Toast.LENGTH_SHORT).show();
        }
      }
      return false;
    }
  };
  // Validates the typed energy-expended value and writes it into the characteristic
  // at offset 2 (the uint16 following the heart-rate byte).
  private final OnEditorActionListener mOnEditorActionListenerEnergyExpended = new OnEditorActionListener() {
    @Override
    public boolean onEditorAction(TextView textView, int actionId, KeyEvent event) {
      if (actionId == EditorInfo.IME_ACTION_DONE) {
        String newEnergyExpendedString = textView.getText().toString();
        if (isValidCharacteristicValue(newEnergyExpendedString,
            EXPENDED_ENERGY_FORMAT)) {
          int newEnergyExpended = Integer.parseInt(newEnergyExpendedString);
          mHeartRateMeasurementCharacteristic.setValue(newEnergyExpended,
              EXPENDED_ENERGY_FORMAT,
              /* offset */ 2);
        } else {
          Toast.makeText(getActivity(), R.string.energyExpendedInvalid,
              Toast.LENGTH_SHORT).show();
        }
      }
      return false;
    }
  };
  private EditText mEditTextEnergyExpended;

  private Spinner mSpinnerBodySensorLocation;
  // Spinner position is used directly as the Body Sensor Location value
  // (assumes the spinner items are ordered per the spec — TODO confirm in layout).
  private final OnItemSelectedListener mLocationSpinnerOnItemSelectedListener = new OnItemSelectedListener() {
    @Override
    public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
      setBodySensorLocationValue(position);
    }

    @Override
    public void onNothingSelected(AdapterView<?> parent) {
    }
  };

  // Pushes the current Heart Rate Measurement value to subscribed centrals.
  private final OnClickListener mNotifyButtonListener = new OnClickListener() {
    @Override
    public void onClick(View v) {
      mDelegate.sendNotificationToDevices(mHeartRateMeasurementCharacteristic);
    }
  };

  /**
   * Builds the GATT service tree: a NOTIFY-only Heart Rate Measurement
   * characteristic (with CCC and user-description descriptors), a readable
   * Body Sensor Location, and a writable Heart Rate Control Point.
   */
  public HeartRateServiceFragment() {
    mHeartRateMeasurementCharacteristic =
        new BluetoothGattCharacteristic(HEART_RATE_MEASUREMENT_UUID,
            BluetoothGattCharacteristic.PROPERTY_NOTIFY,
            /* No permissions */ 0);

    mHeartRateMeasurementCharacteristic.addDescriptor(
        PeripheralActivity.getClientCharacteristicConfigurationDescriptor());

    mHeartRateMeasurementCharacteristic.addDescriptor(
        PeripheralActivity.getCharacteristicUserDescriptionDescriptor(HEART_RATE_MEASUREMENT_DESCRIPTION));

    mBodySensorLocationCharacteristic =
        new BluetoothGattCharacteristic(BODY_SENSOR_LOCATION_UUID,
            BluetoothGattCharacteristic.PROPERTY_READ,
            BluetoothGattCharacteristic.PERMISSION_READ);

    mHeartRateControlPoint =
        new BluetoothGattCharacteristic(HEART_RATE_CONTROL_POINT_UUID,
            BluetoothGattCharacteristic.PROPERTY_WRITE,
            BluetoothGattCharacteristic.PERMISSION_WRITE);

    mHeartRateService = new BluetoothGattService(HEART_RATE_SERVICE_UUID,
        BluetoothGattService.SERVICE_TYPE_PRIMARY);
    mHeartRateService.addCharacteristic(mHeartRateMeasurementCharacteristic);
    mHeartRateService.addCharacteristic(mBodySensorLocationCharacteristic);
    mHeartRateService.addCharacteristic(mHeartRateControlPoint);
  }

  // Inflates the UI, wires up listeners, and seeds characteristics with initial values.
  @Override
  public View onCreateView(LayoutInflater inflater, ViewGroup container,
                           Bundle savedInstanceState) {

    View view = inflater.inflate(R.layout.fragment_heart_rate, container, false);

    mSpinnerBodySensorLocation = (Spinner) view.findViewById(R.id.spinner_bodySensorLocation);
    mSpinnerBodySensorLocation.setOnItemSelectedListener(mLocationSpinnerOnItemSelectedListener);
    mEditTextHeartRateMeasurement = (EditText) view
        .findViewById(R.id.editText_heartRateMeasurementValue);
    mEditTextHeartRateMeasurement
        .setOnEditorActionListener(mOnEditorActionListenerHeartRateMeasurement);
    mEditTextEnergyExpended = (EditText) view
        .findViewById(R.id.editText_energyExpended);
    mEditTextEnergyExpended
        .setOnEditorActionListener(mOnEditorActionListenerEnergyExpended);
    Button notifyButton = (Button) view.findViewById(R.id.button_heartRateMeasurementNotify);
    notifyButton.setOnClickListener(mNotifyButtonListener);

    setHeartRateMeasurementValue(INITIAL_HEART_RATE_MEASUREMENT_VALUE,
        INITIAL_EXPENDED_ENERGY);
    setBodySensorLocationValue(LOCATION_OTHER);
    return view;
  }

  @Override
  public void onAttach(Activity activity) {
    super.onAttach(activity);
    try {
      // The host activity must implement the delegate used to send notifications.
      mDelegate = (ServiceFragmentDelegate) activity;
    } catch (ClassCastException e) {
      throw new ClassCastException(activity.toString()
          + " must implement ServiceFragmentDelegate");
    }
  }

  @Override
  public void onDetach() {
    super.onDetach();
    mDelegate = null;
  }

  @Override
  public BluetoothGattService getBluetoothGattService() {
    return mHeartRateService;
  }

  @Override
  public ParcelUuid getServiceUUID() {
    return new ParcelUuid(HEART_RATE_SERVICE_UUID);
  }

  /**
   * Rebuilds the 4-byte Heart Rate Measurement value and mirrors both values
   * into the UI. Byte layout: [flags, heart rate (uint8), energy expended (uint16 LE)].
   * Note: order matters — setValue(byte[]) resets the whole array, then the
   * offset-based setValue calls fill in the individual fields.
   */
  private void setHeartRateMeasurementValue(int heartRateMeasurementValue, int expendedEnergy) {

    Log.d(TAG, Arrays.toString(mHeartRateMeasurementCharacteristic.getValue()));
    /* Set the org.bluetooth.characteristic.heart_rate_measurement
     * characteristic to a byte array of size 4 so
     * we can use setValue(value, format, offset);
     *
     * Flags (8bit) + Heart Rate Measurement Value (uint8) + Energy Expended (uint16) = 4 bytes
     *
     * Flags = 1 << 3:
     *   Heart Rate Format (0) -> UINT8
     *   Sensor Contact Status (00) -> Not Supported
     *   Energy Expended (1) -> Field Present
     *   RR-Interval (0) -> Field not pressent
     *   Unused (000)
     */
    mHeartRateMeasurementCharacteristic.setValue(new byte[]{0b00001000, 0, 0, 0});
    // Characteristic Value: [flags, 0, 0, 0]
    mHeartRateMeasurementCharacteristic.setValue(heartRateMeasurementValue,
        HEART_RATE_MEASUREMENT_VALUE_FORMAT,
        /* offset */ 1);
    // Characteristic Value: [flags, heart rate value, 0, 0]
    mEditTextHeartRateMeasurement.setText(Integer.toString(heartRateMeasurementValue));
    mHeartRateMeasurementCharacteristic.setValue(expendedEnergy,
        EXPENDED_ENERGY_FORMAT,
        /* offset */ 2);
    // Characteristic Value: [flags, heart rate value, energy expended (LSB), energy expended (MSB)]
    mEditTextEnergyExpended.setText(Integer.toString(expendedEnergy));
  }

  /** Writes the location byte to the characteristic and syncs the spinner selection. */
  private void setBodySensorLocationValue(int location) {
    mBodySensorLocationCharacteristic.setValue(new byte[]{(byte) location});
    mSpinnerBodySensorLocation.setSelection(location);
  }

  /**
   * Returns true iff {@code s} parses as an integer within the unsigned range
   * implied by {@code format} (FORMAT_UINT8 or FORMAT_UINT16).
   *
   * @throws IllegalArgumentException if {@code format} is not one of the two supported formats
   */
  private boolean isValidCharacteristicValue(String s, int format) {
    try {
      int value = Integer.parseInt(s);
      if (format == BluetoothGattCharacteristic.FORMAT_UINT8) {
        return (value >= MIN_UINT) && (value <= MAX_UINT8);
      } else if (format == BluetoothGattCharacteristic.FORMAT_UINT16) {
        return (value >= MIN_UINT) && (value <= MAX_UINT16);
      } else {
        throw new IllegalArgumentException(format + " is not a valid argument");
      }
    } catch (NumberFormatException e) {
      return false;
    }
  }

  /**
   * Handles writes to the Heart Rate Control Point. A value whose low bit is set
   * means "reset energy expended"; the UI/characteristic update is posted to the
   * UI thread because this callback arrives on a binder thread.
   */
  @Override
  public int writeCharacteristic(BluetoothGattCharacteristic characteristic, int offset, byte[] value) {
    if (offset != 0) {
      return BluetoothGatt.GATT_INVALID_OFFSET;
    }
    // Heart Rate control point is a 8bit characteristic
    if (value.length != 1) {
      return BluetoothGatt.GATT_INVALID_ATTRIBUTE_LENGTH;
    }
    if ((value[0] & 1) == 1) {
      getActivity().runOnUiThread(new Runnable() {
        @Override
        public void run() {
          mHeartRateMeasurementCharacteristic.setValue(INITIAL_EXPENDED_ENERGY,
              EXPENDED_ENERGY_FORMAT,
              /* offset */ 2);
          mEditTextEnergyExpended.setText(Integer.toString(INITIAL_EXPENDED_ENERGY));
        }
      });
    }
    return BluetoothGatt.GATT_SUCCESS;
  }
}
/*
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with the
License. You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

/* AMCL Weierstrass elliptic curve functions over FP8 */

package org.apache.milagro.amcl.XXX;

/**
 * A point on a Weierstrass curve whose coordinates lie in the extension
 * field FP8, held in projective (X, Y, Z) form.
 *
 * NOTE(review): much of this code is written in a branch-free,
 * fixed-sequence style (cmove/teq, fixed-window scalar multiplication);
 * the exact statement order and the placement of norm()/reduce() calls
 * are significant and must not be reordered.
 */
public final class ECP8 {

    // Projective coordinates; the point at infinity is encoded as
    // X = 0, Z = 0 (see is_infinity() and inf()).
    private FP8 x;
    private FP8 y;
    private FP8 z;

    /* Constructor - set this=O (the point at infinity: X=0, Y=1, Z=0) */
    public ECP8() {
        x = new FP8();
        y = new FP8(1);
        z = new FP8();
    }

    /* Copy constructor - deep-copies all three coordinates of e */
    public ECP8(ECP8 e) {
        this.x = new FP8(e.x);
        this.y = new FP8(e.y);
        this.z = new FP8(e.z);
    }

    /* Test this=O? True when both X and Z are zero. */
    public boolean is_infinity() {
        return (x.iszilch() && z.iszilch());
    }

    /* copy this=P */
    public void copy(ECP8 P) {
        x.copy(P.x);
        y.copy(P.y);
        z.copy(P.z);
    }

    /* set this=O */
    public void inf() {
        x.zero();
        y.one();
        z.zero();
    }

    /* Conditional move of Q to P dependant on d (d=1 moves, d=0 keeps).
       Branch-free on purpose: used by the constant-time table lookups. */
    public void cmove(ECP8 Q, int d) {
        x.cmove(Q.x, d);
        y.cmove(Q.y, d);
        z.cmove(Q.z, d);
    }

    /* return 1 if b==c, no branching (relies on two's-complement
       arithmetic: b^c-1 is all-ones only when b==c) */
    public static int teq(int b, int c) {
        int x = b ^ c;
        x -= 1; // if x=0, x now -1
        return ((x >> 31) & 1);
    }

    /* Constant time select from pre-computed table.
       b is a signed odd digit; every table slot is touched via cmove so
       the access pattern is independent of b. A negative digit selects
       the negated entry. */
    public void select(ECP8 W[], int b) {
        ECP8 MP = new ECP8();
        int m = b >> 31;          // all-ones if b<0, else 0
        int babs = (b ^ m) - m;   // |b|, branch-free
        babs = (babs - 1) / 2;    // map odd digit 1,3,5,... to index 0,1,2,...

        cmove(W[0], teq(babs, 0)); // conditional move
        cmove(W[1], teq(babs, 1));
        cmove(W[2], teq(babs, 2));
        cmove(W[3], teq(babs, 3));
        cmove(W[4], teq(babs, 4));
        cmove(W[5], teq(babs, 5));
        cmove(W[6], teq(babs, 6));
        cmove(W[7], teq(babs, 7));

        MP.copy(this);
        MP.neg();
        cmove(MP, (int) (m & 1)); // negate result when the digit was negative
    }

    /* Test if P == Q. Compares cross-multiplied projective coordinates
       (x1*z2 == x2*z1 and y1*z2 == y2*z1) so no inversion is needed. */
    public boolean equals(ECP8 Q) {
        FP8 a = new FP8(x);
        FP8 b = new FP8(Q.x);
        a.mul(Q.z);
        b.mul(z);
        if (!a.equals(b)) return false;
        a.copy(y);
        a.mul(Q.z);
        b.copy(Q.y);
        b.mul(z);
        if (!a.equals(b)) return false;
        return true;
    }

    /* set this=-this (negate Y only) */
    public void neg() {
        y.norm();
        y.neg();
        y.norm();
        return;
    }

    /* set to Affine - (x,y,z) to (x,y), i.e. scale so that z=1.
       No-op for the point at infinity; skips the inversion when z is
       already 1. */
    public void affine() {
        if (is_infinity()) return;
        FP8 one = new FP8(1);
        if (z.equals(one)) {
            x.reduce();
            y.reduce();
            return;
        }
        z.inverse();

        x.mul(z);
        x.reduce();
        y.mul(z);
        y.reduce();
        z.copy(one);
    }

    /* extract affine x as FP8 (works on a copy; this is not modified) */
    public FP8 getX() {
        ECP8 W = new ECP8(this);
        W.affine();
        return W.x;
    }

    /* extract affine y as FP8 (works on a copy; this is not modified) */
    public FP8 getY() {
        ECP8 W = new ECP8(this);
        W.affine();
        return W.y;
    }

    /* extract projective x */
    public FP8 getx() {
        return x;
    }

    /* extract projective y */
    public FP8 gety() {
        return y;
    }

    /* extract projective z */
    public FP8 getz() {
        return z;
    }

    /* convert to byte array.
       Layout: 16 consecutive MODBYTES-sized limbs — the 8 BIG components
       of affine x (a.a.A, a.a.B, a.b.A, a.b.B, b.a.A, b.a.B, b.b.A, b.b.B)
       followed by the 8 components of affine y in the same order.
       b must therefore be at least 16*MODBYTES long. */
    public void toBytes(byte[] b) {
        byte[] t = new byte[CONFIG_BIG.MODBYTES];
        ECP8 W = new ECP8(this);
        W.affine();
        int MB = CONFIG_BIG.MODBYTES;

        W.x.geta().geta().getA().toBytes(t);
        for (int i = 0; i < MB; i++) { b[i] = t[i]; }
        W.x.geta().geta().getB().toBytes(t);
        for (int i = 0; i < MB; i++) { b[i + MB] = t[i]; }
        W.x.geta().getb().getA().toBytes(t);
        for (int i = 0; i < MB; i++) { b[i + 2 * MB] = t[i]; }
        W.x.geta().getb().getB().toBytes(t);
        for (int i = 0; i < MB; i++) { b[i + 3 * MB] = t[i]; }
        W.x.getb().geta().getA().toBytes(t);
        for (int i = 0; i < MB; i++) { b[i + 4 * MB] = t[i]; }
        W.x.getb().geta().getB().toBytes(t);
        for (int i = 0; i < MB; i++) { b[i + 5 * MB] = t[i]; }
        W.x.getb().getb().getA().toBytes(t);
        for (int i = 0; i < MB; i++) { b[i + 6 * MB] = t[i]; }
        W.x.getb().getb().getB().toBytes(t);
        for (int i = 0; i < MB; i++) { b[i + 7 * MB] = t[i]; }

        W.y.geta().geta().getA().toBytes(t);
        for (int i = 0; i < MB; i++) { b[i + 8 * MB] = t[i]; }
        W.y.geta().geta().getB().toBytes(t);
        for (int i = 0; i < MB; i++) { b[i + 9 * MB] = t[i]; }
        W.y.geta().getb().getA().toBytes(t);
        for (int i = 0; i < MB; i++) { b[i + 10 * MB] = t[i]; }
        W.y.geta().getb().getB().toBytes(t);
        for (int i = 0; i < MB; i++) { b[i + 11 * MB] = t[i]; }
        W.y.getb().geta().getA().toBytes(t);
        for (int i = 0; i < MB; i++) { b[i + 12 * MB] = t[i]; }
        W.y.getb().geta().getB().toBytes(t);
        for (int i = 0; i < MB; i++) { b[i + 13 * MB] = t[i]; }
        W.y.getb().getb().getA().toBytes(t);
        for (int i = 0; i < MB; i++) { b[i + 14 * MB] = t[i]; }
        W.y.getb().getb().getB().toBytes(t);
        for (int i = 0; i < MB; i++) { b[i + 15 * MB] = t[i]; }
    }

    /* convert from byte array to point.
       Inverse of toBytes(): reads 16 MODBYTES-sized limbs, rebuilds
       affine x and y through the FP2 -> FP4 -> FP8 tower, and validates
       the point via the ECP8(FP8,FP8) constructor (which sets O when the
       coordinates are not on the curve). */
    public static ECP8 fromBytes(byte[] b) {
        byte[] t = new byte[CONFIG_BIG.MODBYTES];
        BIG ra;
        BIG rb;
        int MB = CONFIG_BIG.MODBYTES;

        // --- x coordinate: limbs 0..7 ---
        for (int i = 0; i < MB; i++) { t[i] = b[i]; }
        ra = BIG.fromBytes(t);
        for (int i = 0; i < MB; i++) { t[i] = b[i + MB]; }
        rb = BIG.fromBytes(t);
        FP2 ra4 = new FP2(ra, rb);

        for (int i = 0; i < MB; i++) { t[i] = b[i + 2 * MB]; }
        ra = BIG.fromBytes(t);
        for (int i = 0; i < MB; i++) { t[i] = b[i + 3 * MB]; }
        rb = BIG.fromBytes(t);
        FP2 rb4 = new FP2(ra, rb);

        FP4 ra8 = new FP4(ra4, rb4);

        for (int i = 0; i < MB; i++) { t[i] = b[i + 4 * MB]; }
        ra = BIG.fromBytes(t);
        for (int i = 0; i < MB; i++) { t[i] = b[i + 5 * MB]; }
        rb = BIG.fromBytes(t);
        ra4 = new FP2(ra, rb);

        for (int i = 0; i < MB; i++) { t[i] = b[i + 6 * MB]; }
        ra = BIG.fromBytes(t);
        for (int i = 0; i < MB; i++) { t[i] = b[i + 7 * MB]; }
        rb = BIG.fromBytes(t);
        rb4 = new FP2(ra, rb);

        FP4 rb8 = new FP4(ra4, rb4);

        FP8 rx = new FP8(ra8, rb8);

        // --- y coordinate: limbs 8..15 ---
        for (int i = 0; i < MB; i++) { t[i] = b[i + 8 * MB]; }
        ra = BIG.fromBytes(t);
        for (int i = 0; i < MB; i++) { t[i] = b[i + 9 * MB]; }
        rb = BIG.fromBytes(t);
        ra4 = new FP2(ra, rb);

        for (int i = 0; i < MB; i++) { t[i] = b[i + 10 * MB]; }
        ra = BIG.fromBytes(t);
        for (int i = 0; i < MB; i++) { t[i] = b[i + 11 * MB]; }
        rb = BIG.fromBytes(t);
        rb4 = new FP2(ra, rb);

        ra8 = new FP4(ra4, rb4);

        for (int i = 0; i < MB; i++) { t[i] = b[i + 12 * MB]; }
        ra = BIG.fromBytes(t);
        for (int i = 0; i < MB; i++) { t[i] = b[i + 13 * MB]; }
        rb = BIG.fromBytes(t);
        ra4 = new FP2(ra, rb);

        for (int i = 0; i < MB; i++) { t[i] = b[i + 14 * MB]; }
        ra = BIG.fromBytes(t);
        for (int i = 0; i < MB; i++) { t[i] = b[i + 15 * MB]; }
        rb = BIG.fromBytes(t);
        rb4 = new FP2(ra, rb);

        rb8 = new FP4(ra4, rb4);

        FP8 ry = new FP8(ra8, rb8);

        return new ECP8(rx, ry);
    }

    /* convert this to hex string (affine form, or "infinity") */
    public String toString() {
        ECP8 W = new ECP8(this);
        W.affine();
        if (W.is_infinity()) return "infinity";
        return "(" + W.x.toString() + "," + W.y.toString() + ")";
    }

    /* Calculate RHS of twisted curve equation x^3+B/i.
       The constant B is twisted by 1/i or i depending on whether the
       sextic twist is D-type or M-type. */
    public static FP8 RHS(FP8 x) {
        FP8 r = new FP8(x);
        r.sqr();
        FP8 b = new FP8(new FP4(new FP2(new BIG(ROM.CURVE_B))));
        if (CONFIG_CURVE.SEXTIC_TWIST == CONFIG_CURVE.D_TYPE) {
            b.div_i();
        }
        if (CONFIG_CURVE.SEXTIC_TWIST == CONFIG_CURVE.M_TYPE) {
            b.times_i();
        }
        r.mul(x);
        r.add(b);
        r.reduce();
        return r;
    }

    /* construct this from (x,y) - but set to O if not on curve
       (membership check: y^2 must equal RHS(x)) */
    public ECP8(FP8 ix, FP8 iy) {
        x = new FP8(ix);
        y = new FP8(iy);
        z = new FP8(1);
        x.norm();
        FP8 rhs = RHS(x);
        FP8 y2 = new FP8(y);
        y2.sqr();
        if (!y2.equals(rhs)) inf();
    }

    /* construct this from x - but set to O if not on curve
       (y is recovered as sqrt(RHS(x)) when a square root exists) */
    public ECP8(FP8 ix) {
        x = new FP8(ix);
        y = new FP8(1);
        z = new FP8(1);
        x.norm();
        FP8 rhs = RHS(x);
        if (rhs.sqrt()) {
            y.copy(rhs);
        } else { inf(); }
    }

    /* this+=this - projective point doubling, exception-free (also
       correct for O). Always returns 1 (doubling indicator). */
    public int dbl() {
        FP8 iy = new FP8(y);
        if (CONFIG_CURVE.SEXTIC_TWIST == CONFIG_CURVE.D_TYPE) {
            iy.times_i();
        }

        FP8 t0 = new FP8(y);
        t0.sqr();
        if (CONFIG_CURVE.SEXTIC_TWIST == CONFIG_CURVE.D_TYPE) {
            t0.times_i();
        }
        FP8 t1 = new FP8(iy);
        t1.mul(z);
        FP8 t2 = new FP8(z);
        t2.sqr();

        z.copy(t0);
        z.add(t0);
        z.norm();
        z.add(z);
        z.add(z);
        z.norm();

        t2.imul(3 * ROM.CURVE_B_I);
        if (CONFIG_CURVE.SEXTIC_TWIST == CONFIG_CURVE.M_TYPE) {
            t2.times_i();
        }
        FP8 x3 = new FP8(t2);
        x3.mul(z);

        FP8 y3 = new FP8(t0);

        y3.add(t2);
        y3.norm();
        z.mul(t1);
        t1.copy(t2);
        t1.add(t2);
        t2.add(t1);
        t2.norm();
        t0.sub(t2);
        t0.norm();                      // y^2-9bz^2
        y3.mul(t0);
        y3.add(x3);                     // (y^2+3bz^2)(y^2-9bz^2)+3b.z^2.8y^2
        t1.copy(x);
        t1.mul(iy);
        x.copy(t0);
        x.norm();
        x.mul(t1);
        x.add(x);                       // (y^2-9bz^2)xy2
        x.norm();
        y.copy(y3);
        y.norm();

        return 1;
    }

    /* this+=Q - return 0 for add, 1 for double, -1 for O.
       Complete (exception-free) mixed projective addition; the twist
       type decides where the times_i() corrections are applied. */
    public int add(ECP8 Q) {
        int b = 3 * ROM.CURVE_B_I;
        FP8 t0 = new FP8(x);
        t0.mul(Q.x);          // x.Q.x
        FP8 t1 = new FP8(y);
        t1.mul(Q.y);          // y.Q.y

        FP8 t2 = new FP8(z);
        t2.mul(Q.z);
        FP8 t3 = new FP8(x);
        t3.add(y);
        t3.norm();            // t3=X1+Y1
        FP8 t4 = new FP8(Q.x);
        t4.add(Q.y);
        t4.norm();            // t4=X2+Y2
        t3.mul(t4);           // t3=(X1+Y1)(X2+Y2)
        t4.copy(t0);
        t4.add(t1);           // t4=X1.X2+Y1.Y2

        t3.sub(t4);
        t3.norm();
        if (CONFIG_CURVE.SEXTIC_TWIST == CONFIG_CURVE.D_TYPE) {
            t3.times_i();     // t3=(X1+Y1)(X2+Y2)-(X1.X2+Y1.Y2) = X1.Y2+X2.Y1
        }
        t4.copy(y);
        t4.add(z);
        t4.norm();            // t4=Y1+Z1
        FP8 x3 = new FP8(Q.y);
        x3.add(Q.z);
        x3.norm();            // x3=Y2+Z2

        t4.mul(x3);           // t4=(Y1+Z1)(Y2+Z2)
        x3.copy(t1);
        x3.add(t2);           // X3=Y1.Y2+Z1.Z2

        t4.sub(x3);
        t4.norm();
        if (CONFIG_CURVE.SEXTIC_TWIST == CONFIG_CURVE.D_TYPE) {
            t4.times_i();     // t4=(Y1+Z1)(Y2+Z2) - (Y1.Y2+Z1.Z2) = Y1.Z2+Y2.Z1
        }
        x3.copy(x);
        x3.add(z);
        x3.norm();            // x3=X1+Z1
        FP8 y3 = new FP8(Q.x);
        y3.add(Q.z);
        y3.norm();            // y3=X2+Z2
        x3.mul(y3);           // x3=(X1+Z1)(X2+Z2)
        y3.copy(t0);
        y3.add(t2);           // y3=X1.X2+Z1+Z2
        y3.rsub(x3);
        y3.norm();            // y3=(X1+Z1)(X2+Z2) - (X1.X2+Z1.Z2) = X1.Z2+X2.Z1

        if (CONFIG_CURVE.SEXTIC_TWIST == CONFIG_CURVE.D_TYPE) {
            t0.times_i();     // x.Q.x
            t1.times_i();     // y.Q.y
        }
        x3.copy(t0);
        x3.add(t0);
        t0.add(x3);
        t0.norm();
        t2.imul(b);
        if (CONFIG_CURVE.SEXTIC_TWIST == CONFIG_CURVE.M_TYPE) {
            t2.times_i();
        }
        FP8 z3 = new FP8(t1);
        z3.add(t2);
        z3.norm();
        t1.sub(t2);
        t1.norm();
        y3.imul(b);
        if (CONFIG_CURVE.SEXTIC_TWIST == CONFIG_CURVE.M_TYPE) {
            y3.times_i();
        }
        x3.copy(y3);
        x3.mul(t4);
        t2.copy(t3);
        t2.mul(t1);
        x3.rsub(t2);
        y3.mul(t0);
        t1.mul(z3);
        y3.add(t1);
        t0.mul(t3);
        z3.mul(t4);
        z3.add(t0);

        x.copy(x3);
        x.norm();
        y.copy(y3);
        y.norm();
        z.copy(z3);
        z.norm();

        return 0;
    }

    /* set this-=Q (add the negation of Q) */
    public int sub(ECP8 Q) {
        ECP8 NQ = new ECP8(Q);
        NQ.neg();
        int D = add(NQ);
        return D;
    }

    /* Precompute the three FP2 Frobenius constants used by frob().
       Built from ROM.Fra/ROM.Frb; the M-type twist takes an extra
       inverse correction. */
    public static FP2[] frob_constants() {
        BIG Fra = new BIG(ROM.Fra);
        BIG Frb = new BIG(ROM.Frb);
        FP2 X = new FP2(Fra, Frb);

        FP2 F0 = new FP2(X);
        F0.sqr();
        FP2 F2 = new FP2(F0);
        F2.mul_ip();
        F2.norm();
        FP2 F1 = new FP2(F2);
        F1.sqr();
        F2.mul(F1);

        F2.mul_ip();
        F2.norm();

        F1.copy(X);
        if (CONFIG_CURVE.SEXTIC_TWIST == CONFIG_CURVE.M_TYPE) {
            F1.mul_ip();
            F1.inverse();
            F0.copy(F1);
            F0.sqr();
        }
        F0.mul_ip();
        F0.norm();
        F1.mul(F0);

        FP2[] F = {F0, F1, F2};
        return F;
    }

    /* set this*=q, where q is Modulus, using Frobenius.
       Applies the q-power endomorphism n times; F must come from
       frob_constants(). */
    public void frob(FP2 F[], int n) {
        for (int i = 0; i < n; i++) {
            x.frob(F[2]);
            x.qmul(F[0]);
            if (CONFIG_CURVE.SEXTIC_TWIST == CONFIG_CURVE.M_TYPE) {
                x.div_i2();
            }
            if (CONFIG_CURVE.SEXTIC_TWIST == CONFIG_CURVE.D_TYPE) {
                x.times_i2();
            }

            y.frob(F[2]);
            y.qmul(F[1]);
            if (CONFIG_CURVE.SEXTIC_TWIST == CONFIG_CURVE.M_TYPE) {
                y.div_i();
            }
            if (CONFIG_CURVE.SEXTIC_TWIST == CONFIG_CURVE.D_TYPE) {
                y.times_i2();
                y.times_i2();
                y.times_i();
            }

            z.frob(F[2]);
        }
    }

    /* P*=e - scalar multiplication, returns e*this as a new point.
       Fixed-size signed 4-bit windows with a constant-time table
       select; the exponent is forced odd (adding P or 2P) and the
       correction C is subtracted at the end. */
    public ECP8 mul(BIG e) {
        /* fixed size windows */
        int i, b, nb, m, s, ns;
        BIG mt = new BIG();
        BIG t = new BIG();
        ECP8 P = new ECP8();
        ECP8 Q = new ECP8();
        ECP8 C = new ECP8();
        ECP8[] W = new ECP8[8];
        byte[] w = new byte[1 + (BIG.NLEN * CONFIG_BIG.BASEBITS + 3) / 4];

        if (is_infinity()) return new ECP8();

        /* precompute table: W[i] = (2i+1)P */
        Q.copy(this);
        Q.dbl();
        W[0] = new ECP8();
        W[0].copy(this);

        for (i = 1; i < 8; i++) {
            W[i] = new ECP8();
            W[i].copy(W[i - 1]);
            W[i].add(Q);
        }

        /* make exponent odd - add 2P if even, P if odd */
        t.copy(e);
        s = t.parity();
        t.inc(1);
        t.norm();
        ns = t.parity();
        mt.copy(t);
        mt.inc(1);
        mt.norm();
        t.cmove(mt, s);
        Q.cmove(this, ns);
        C.copy(Q);

        nb = 1 + (t.nbits() + 3) / 4;

        /* convert exponent to signed 4-bit window */
        for (i = 0; i < nb; i++) {
            w[i] = (byte) (t.lastbits(5) - 16);
            t.dec(w[i]);
            t.norm();
            t.fshr(4);
        }
        w[nb] = (byte) t.lastbits(5);

        P.copy(W[(w[nb] - 1) / 2]);
        for (i = nb - 1; i >= 0; i--) {
            Q.select(W, w[i]);
            P.dbl();
            P.dbl();
            P.dbl();
            P.dbl();
            P.add(Q);
        }
        P.sub(C);
        P.affine();
        return P;
    }

    /* P=u0.Q0+u1*Q1+u2*Q2+u3*Q3... - 16-way multi-scalar multiplication.
       Requires Q.length==16 and u.length==16; groups of four scalars
       share a joint-sparse table (T1..T4), with T2..T4 derived from T1
       via the 4th/8th/12th Frobenius power. */
    // Bos & Costello https://eprint.iacr.org/2013/458.pdf
    // Faz-Hernandez & Longa & Sanchez https://eprint.iacr.org/2013/158.pdf
    // Side channel attack secure
    public static ECP8 mul16(ECP8[] Q, BIG[] u) {
        int i, j, k, nb, pb1, pb2, pb3, pb4;
        ECP8 W = new ECP8();
        ECP8 P = new ECP8();
        ECP8[] T1 = new ECP8[8];
        ECP8[] T2 = new ECP8[8];
        ECP8[] T3 = new ECP8[8];
        ECP8[] T4 = new ECP8[8];
        BIG mt = new BIG();
        BIG[] t = new BIG[16];

        byte[] w1 = new byte[BIG.NLEN * CONFIG_BIG.BASEBITS + 1];
        byte[] s1 = new byte[BIG.NLEN * CONFIG_BIG.BASEBITS + 1];
        byte[] w2 = new byte[BIG.NLEN * CONFIG_BIG.BASEBITS + 1];
        byte[] s2 = new byte[BIG.NLEN * CONFIG_BIG.BASEBITS + 1];
        byte[] w3 = new byte[BIG.NLEN * CONFIG_BIG.BASEBITS + 1];
        byte[] s3 = new byte[BIG.NLEN * CONFIG_BIG.BASEBITS + 1];
        byte[] w4 = new byte[BIG.NLEN * CONFIG_BIG.BASEBITS + 1];
        byte[] s4 = new byte[BIG.NLEN * CONFIG_BIG.BASEBITS + 1];

        for (i = 0; i < 16; i++) {
            t[i] = new BIG(u[i]);
            t[i].norm();
        }

        // Precomputed table over the first four points (all subset sums)
        T1[0] = new ECP8(); T1[0].copy(Q[0]);              // Q[0]
        T1[1] = new ECP8(); T1[1].copy(T1[0]); T1[1].add(Q[1]);  // Q[0]+Q[1]
        T1[2] = new ECP8(); T1[2].copy(T1[0]); T1[2].add(Q[2]);  // Q[0]+Q[2]
        T1[3] = new ECP8(); T1[3].copy(T1[1]); T1[3].add(Q[2]);  // Q[0]+Q[1]+Q[2]
        T1[4] = new ECP8(); T1[4].copy(T1[0]); T1[4].add(Q[3]);  // Q[0]+Q[3]
        T1[5] = new ECP8(); T1[5].copy(T1[1]); T1[5].add(Q[3]);  // Q[0]+Q[1]+Q[3]
        T1[6] = new ECP8(); T1[6].copy(T1[2]); T1[6].add(Q[3]);  // Q[0]+Q[2]+Q[3]
        T1[7] = new ECP8(); T1[7].copy(T1[3]); T1[7].add(Q[3]);  // Q[0]+Q[1]+Q[2]+Q[3]

        // Use Frobenius to derive the remaining three tables cheaply
        FP2[] F = ECP8.frob_constants();

        for (i = 0; i < 8; i++) {
            T2[i] = new ECP8(); T2[i].copy(T1[i]); T2[i].frob(F, 4);
            T3[i] = new ECP8(); T3[i].copy(T2[i]); T3[i].frob(F, 4);
            T4[i] = new ECP8(); T4[i].copy(T3[i]); T4[i].frob(F, 4);
        }

        // Make it odd (record the parity fix-ups for the final correction)
        pb1 = 1 - t[0].parity();
        t[0].inc(pb1);
        t[0].norm();

        pb2 = 1 - t[4].parity();
        t[4].inc(pb2);
        t[4].norm();

        pb3 = 1 - t[8].parity();
        t[8].inc(pb3);
        t[8].norm();

        pb4 = 1 - t[12].parity();
        t[12].inc(pb4);
        t[12].norm();

        // Number of bits
        mt.zero();
        for (i = 0; i < 16; i++) {
            mt.or(t[i]);
        }
        nb = 1 + mt.nbits();

        // Sign pivot
        s1[nb - 1] = 1;
        s2[nb - 1] = 1;
        s3[nb - 1] = 1;
        s4[nb - 1] = 1;
        for (i = 0; i < nb - 1; i++) {
            t[0].fshr(1);
            s1[i] = (byte) (2 * t[0].parity() - 1);
            t[4].fshr(1);
            s2[i] = (byte) (2 * t[4].parity() - 1);
            t[8].fshr(1);
            s3[i] = (byte) (2 * t[8].parity() - 1);
            t[12].fshr(1);
            s4[i] = (byte) (2 * t[12].parity() - 1);
        }

        // Recoded exponent
        for (i = 0; i < nb; i++) {
            w1[i] = 0;
            k = 1;
            for (j = 1; j < 4; j++) {
                byte bt = (byte) (s1[i] * t[j].parity());
                t[j].fshr(1);
                t[j].dec((int) (bt) >> 1);
                t[j].norm();
                w1[i] += bt * (byte) k;
                k *= 2;
            }

            w2[i] = 0;
            k = 1;
            for (j = 5; j < 8; j++) {
                byte bt = (byte) (s2[i] * t[j].parity());
                t[j].fshr(1);
                t[j].dec((int) (bt) >> 1);
                t[j].norm();
                w2[i] += bt * (byte) k;
                k *= 2;
            }

            w3[i] = 0;
            k = 1;
            for (j = 9; j < 12; j++) {
                byte bt = (byte) (s3[i] * t[j].parity());
                t[j].fshr(1);
                t[j].dec((int) (bt) >> 1);
                t[j].norm();
                w3[i] += bt * (byte) k;
                k *= 2;
            }

            w4[i] = 0;
            k = 1;
            for (j = 13; j < 16; j++) {
                byte bt = (byte) (s4[i] * t[j].parity());
                t[j].fshr(1);
                t[j].dec((int) (bt) >> 1);
                t[j].norm();
                w4[i] += bt * (byte) k;
                k *= 2;
            }
        }

        // Main loop
        P.select(T1, (int) (2 * w1[nb - 1] + 1));
        W.select(T2, (int) (2 * w2[nb - 1] + 1));
        P.add(W);
        W.select(T3, (int) (2 * w3[nb - 1] + 1));
        P.add(W);
        W.select(T4, (int) (2 * w4[nb - 1] + 1));
        P.add(W);
        for (i = nb - 2; i >= 0; i--) {
            P.dbl();
            W.select(T1, (int) (2 * w1[i] + s1[i]));
            P.add(W);
            W.select(T2, (int) (2 * w2[i] + s2[i]));
            P.add(W);
            W.select(T3, (int) (2 * w3[i] + s3[i]));
            P.add(W);
            W.select(T4, (int) (2 * w4[i] + s4[i]));
            P.add(W);
        }

        // apply correction (undo the make-it-odd adjustments, branch-free)
        W.copy(P);
        W.sub(Q[0]);
        P.cmove(W, pb1);

        W.copy(P);
        W.sub(Q[4]);
        P.cmove(W, pb2);

        W.copy(P);
        W.sub(Q[8]);
        P.cmove(W, pb3);

        W.copy(P);
        W.sub(Q[12]);
        P.cmove(W, pb4);

        P.affine();
        return P;
    }

    /* needed for SOK - hash bytes h to a point of the right group.
       Finds the first valid x >= BIG(h) mod p, then clears the cofactor
       using the fast Frobenius-based method for BLS curves. */
    public static ECP8 mapit(byte[] h) {
        BIG q = new BIG(ROM.Modulus);
        BIG x = BIG.fromBytes(h);
        BIG one = new BIG(1);
        FP8 X;
        FP2 X2;
        FP4 X4;
        ECP8 Q;
        x.mod(q);
        while (true) {
            X2 = new FP2(one, x);
            X4 = new FP4(X2);
            X = new FP8(X4);
            Q = new ECP8(X);
            if (!Q.is_infinity()) break;
            x.inc(1);
            x.norm();
        }

        FP2[] F = ECP8.frob_constants();
        x = new BIG(ROM.CURVE_Bnx);

        /* Efficient hash maps to G2 on BLS curves - Budroni, Pintore */

        ECP8 xQ = Q.mul(x);
        ECP8 x2Q = xQ.mul(x);
        ECP8 x3Q = x2Q.mul(x);
        ECP8 x4Q = x3Q.mul(x);
        ECP8 x5Q = x4Q.mul(x);
        ECP8 x6Q = x5Q.mul(x);
        ECP8 x7Q = x6Q.mul(x);
        ECP8 x8Q = x7Q.mul(x);

        if (CONFIG_CURVE.SIGN_OF_X == CONFIG_CURVE.NEGATIVEX) {
            xQ.neg();
            x3Q.neg();
            x5Q.neg();
            x7Q.neg();
        }

        x8Q.sub(x7Q);
        x8Q.sub(Q);

        x7Q.sub(x6Q);
        x7Q.frob(F, 1);

        x6Q.sub(x5Q);
        x6Q.frob(F, 2);

        x5Q.sub(x4Q);
        x5Q.frob(F, 3);

        x4Q.sub(x3Q);
        x4Q.frob(F, 4);

        x3Q.sub(x2Q);
        x3Q.frob(F, 5);

        x2Q.sub(xQ);
        x2Q.frob(F, 6);

        xQ.sub(Q);
        xQ.frob(F, 7);

        Q.dbl();
        Q.frob(F, 8);

        Q.add(x8Q);
        Q.add(x7Q);
        Q.add(x6Q);
        Q.add(x5Q);

        Q.add(x4Q);
        Q.add(x3Q);
        Q.add(x2Q);
        Q.add(xQ);

        Q.affine();
        return Q;
    }

    /* Return the fixed group generator, built from the ROM curve
       constants (the 16 BIG components of the generator's x and y). */
    public static ECP8 generator() {
        return new ECP8(
            new FP8(
                new FP4(
                    new FP2(
                        new BIG(ROM.CURVE_Pxaaa), new BIG(ROM.CURVE_Pxaab)),
                    new FP2(
                        new BIG(ROM.CURVE_Pxaba), new BIG(ROM.CURVE_Pxabb))),
                new FP4(
                    new FP2(
                        new BIG(ROM.CURVE_Pxbaa), new BIG(ROM.CURVE_Pxbab)),
                    new FP2(
                        new BIG(ROM.CURVE_Pxbba), new BIG(ROM.CURVE_Pxbbb)))),
            new FP8(
                new FP4(
                    new FP2(
                        new BIG(ROM.CURVE_Pyaaa), new BIG(ROM.CURVE_Pyaab)),
                    new FP2(
                        new BIG(ROM.CURVE_Pyaba), new BIG(ROM.CURVE_Pyabb))),
                new FP4(
                    new FP2(
                        new BIG(ROM.CURVE_Pybaa), new BIG(ROM.CURVE_Pybab)),
                    new FP2(
                        new BIG(ROM.CURVE_Pybba), new BIG(ROM.CURVE_Pybbb)))));
    }
}
/*
 * Copyright (c) 2002-2011 LWJGL Project
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *
 * * Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 *
 * * Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 *
 * * Neither the name of 'LWJGL' nor the names of
 *   its contributors may be used to endorse or promote products derived
 *   from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package org.lwjgl.opengl;

import org.lwjgl.BufferUtils;
import org.lwjgl.LWJGLException;
import org.lwjgl.LWJGLUtil;
import org.lwjgl.PointerBuffer;
import org.lwjgl.opengles.ContextAttribs;
import org.lwjgl.opengles.*;
import org.lwjgl.opengles.Util;

import static org.lwjgl.opengles.EGL.*;
import static org.lwjgl.opengles.GLES20.*;

/**
 * OpenGL ES drawable backed by an EGL display/config/surface triple.
 * All public state transitions are serialized on {@code GlobalLock.lock}.
 *
 * @author Spasi
 * @since 14/5/2011
 */
abstract class DrawableGLES implements DrawableLWJGL {

	/** The PixelFormat used to create the EGLDisplay. */
	protected org.lwjgl.opengles.PixelFormat pixel_format;

	// EGL resources owned by this drawable; created in initialize(),
	// released in destroy() (and re-created across mode switches).
	protected EGLDisplay eglDisplay;
	protected EGLConfig eglConfig;
	protected EGLSurface eglSurface;

	/** The OpenGL Context. */
	protected ContextGLES context;

	/** The Drawable that shares objects with this Drawable. */
	protected Drawable shared_drawable;

	protected DrawableGLES() {
	}

	/**
	 * Stores the requested pixel format. Must be an
	 * {@code org.lwjgl.opengles.PixelFormat} (cast enforced here).
	 */
	public void setPixelFormat(final PixelFormatLWJGL pf) throws LWJGLException {
		synchronized ( GlobalLock.lock ) {
			this.pixel_format = (org.lwjgl.opengles.PixelFormat)pf;
		}
	}

	public PixelFormatLWJGL getPixelFormat() {
		synchronized ( GlobalLock.lock ) {
			return pixel_format;
		}
	}

	/**
	 * (Re)creates the EGL display, config and window surface for the given
	 * native window handle. Any previously held surface/display is destroyed
	 * first, so this is safe to call again e.g. on full-screen switches.
	 *
	 * @param window         native window handle the surface is created on
	 * @param display_id     native display id (narrowed to int for eglGetDisplay)
	 * @param eglSurfaceType surface type bits passed to the pixel format
	 * @param pf             the pixel format used to choose an EGLConfig
	 *
	 * @throws LWJGLException if no EGLConfig matches the pixel format
	 */
	public void initialize(final long window, final long display_id, final int eglSurfaceType, final org.lwjgl.opengles.PixelFormat pf) throws LWJGLException {
		synchronized ( GlobalLock.lock ) {
			// Tear down any previous surface/display before re-initializing.
			if ( eglSurface != null ) {
				eglSurface.destroy();
				eglSurface = null;
			}

			if ( eglDisplay != null ) {
				eglDisplay.terminate();
				eglDisplay = null;
			}

			final EGLDisplay eglDisplay = eglGetDisplay((int)display_id);

			int[] attribs = {
				EGL_LEVEL, 0,
				EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
				EGL_NATIVE_RENDERABLE, EGL_FALSE,
			};

			final EGLConfig[] configs = eglDisplay.chooseConfig(pf.getAttribBuffer(eglDisplay, eglSurfaceType, attribs), null, BufferUtils.createIntBuffer(1));
			if ( configs.length == 0 )
				throw new LWJGLException("No EGLConfigs found for the specified PixelFormat.");

			final EGLConfig eglConfig = pf.getBestMatch(configs);
			final EGLSurface eglSurface = eglDisplay.createWindowSurface(eglConfig, window, null);
			pf.setSurfaceAttribs(eglSurface);

			this.eglDisplay = eglDisplay;
			this.eglConfig = eglConfig;
			this.eglSurface = eglSurface;

			// This can happen when switching in and out of full-screen mode.
			if ( context != null )
				context.getEGLContext().setDisplay(eglDisplay);
		}
	}

	/**
	 * Creates the GLES context, optionally sharing objects with
	 * {@code shared_drawable}'s context.
	 */
	public void createContext(final ContextAttribs attribs, final Drawable shared_drawable) throws LWJGLException {
		synchronized ( GlobalLock.lock ) {
			this.context = new ContextGLES(this, attribs, shared_drawable != null ? ((DrawableGLES)shared_drawable).getContext() : null);
			this.shared_drawable = shared_drawable;
		}
	}

	Drawable getSharedDrawable() {
		synchronized ( GlobalLock.lock ) {
			return shared_drawable;
		}
	}

	public EGLDisplay getEGLDisplay() {
		synchronized ( GlobalLock.lock ) {
			return eglDisplay;
		}
	}

	public EGLConfig getEGLConfig() {
		synchronized ( GlobalLock.lock ) {
			return eglConfig;
		}
	}

	public EGLSurface getEGLSurface() {
		synchronized ( GlobalLock.lock ) {
			return eglSurface;
		}
	}

	public ContextGLES getContext() {
		synchronized ( GlobalLock.lock ) {
			return context;
		}
	}

	/**
	 * Creates a new context that shares objects with this drawable's
	 * current context, reusing its context attributes.
	 */
	public org.lwjgl.opengl.Context createSharedContext() throws LWJGLException {
		synchronized ( GlobalLock.lock ) {
			checkDestroyed();
			return new ContextGLES(this, context.getContextAttribs(), context);
		}
	}

	public void checkGLError() {
		Util.checkGLError();
	}

	public void setSwapInterval(final int swap_interval) {
		ContextGLES.setSwapInterval(swap_interval);
	}

	public void swapBuffers() throws LWJGLException {
		ContextGLES.swapBuffers();
	}

	/**
	 * Clears the freshly created drawable to the given color so the
	 * desktop does not "show through" before the first real frame.
	 */
	public void initContext(final float r, final float g, final float b) {
		// set background clear color
		glClearColor(r, g, b, 0.0f);
		// Clear window to avoid the desktop "showing through"
		glClear(GL_COLOR_BUFFER_BIT);
	}

	public boolean isCurrent() throws LWJGLException {
		synchronized ( GlobalLock.lock ) {
			checkDestroyed();
			return context.isCurrent();
		}
	}

	public void makeCurrent() throws LWJGLException, PowerManagementEventException {
		synchronized ( GlobalLock.lock ) {
			checkDestroyed();
			context.makeCurrent();
		}
	}

	/** Releases the context if it is current on the calling thread. */
	public void releaseContext() throws LWJGLException, PowerManagementEventException {
		synchronized ( GlobalLock.lock ) {
			checkDestroyed();
			if ( context.isCurrent() )
				context.releaseCurrent();
		}
	}

	/**
	 * Destroys everything this drawable owns, in dependency order:
	 * context first (released, then force-destroyed), then the EGL
	 * surface, then the EGL display. LWJGLExceptions are logged rather
	 * than propagated, since destroy() must not fail.
	 */
	public void destroy() {
		synchronized ( GlobalLock.lock ) {
			try {
				if ( context != null ) {
					try {
						releaseContext();
					} catch (PowerManagementEventException e) {
						// Ignore — we are tearing down anyway.
					}
					context.forceDestroy();
					context = null;
				}

				if ( eglSurface != null ) {
					eglSurface.destroy();
					eglSurface = null;
				}

				if ( eglDisplay != null ) {
					eglDisplay.terminate();
					eglDisplay = null;
				}

				pixel_format = null;
				shared_drawable = null;
			} catch (LWJGLException e) {
				LWJGLUtil.log("Exception occurred while destroying Drawable: " + e);
			}
		}
	}

	/** Guard used by methods that require a live context. */
	protected void checkDestroyed() {
		if ( context == null )
			throw new IllegalStateException("The Drawable has no context available.");
	}

	/** OpenCL interop is not supported by the GLES drawable. */
	public void setCLSharingProperties(final PointerBuffer properties) throws LWJGLException {
		throw new UnsupportedOperationException();
	}

}
package external.fastclasspathscanner; import java.io.BufferedInputStream; import java.io.DataInputStream; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Enumeration; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.regex.Pattern; import java.util.stream.Stream; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; /** * Uber-fast, ultra-lightweight Java classpath scanner. Scans the classpath by parsing the classfile binary * format directly rather than by using reflection. (Reflection causes the classloader to load each class, * which can take an order of magnitude more time than parsing the classfile directly.) * * This classpath scanner is able to scan directories and jar/zip files on the classpath to locate: (1) * classes that subclass a given class or one of its subclasses; (2) classes that implement an interface or * one of its subinterfaces; (3) classes that have a given annotation; and (4) file paths (even for * non-classfiles) anywhere on the classpath that match a given regexp. 
* * * Usage example (with Java 8 lambda expressions): * * <code> * new FastClasspathScanner( * new String[] { "com.xyz.widget", "com.xyz.gizmo" }) // Whitelisted package prefixes to scan * * .matchSubclassesOf(DBModel.class, * // c is a subclass of DBModel * c -> System.out.println("Subclasses DBModel: " + c.getName())) * * .matchClassesImplementing(Runnable.class, * // c is a class that implements Runnable * c -> System.out.println("Implements Runnable: " + c.getName())) * * .matchClassesWithAnnotation(RestHandler.class, * // c is a class annotated with @RestHandler * c -> System.out.println("Has @RestHandler class annotation: " + c.getName())) * * * .matchFilenamePattern("^template/.*\\.html", * // templatePath is a path on the classpath that matches the above pattern; * // inputStream is a stream opened on the file or zipfile entry * // No need to close inputStream before exiting, it is closed by caller. * (absolutePath, relativePath, inputStream) -> { * try { * String template = IOUtils.toString(inputStream, "UTF-8"); * System.out.println("Found template: " + absolutePath * + " (size " + template.length() + ")"); * } catch (IOException e) { * throw new RuntimeException(e); * } * }) * * .scan(); // Actually perform the scan * </code> * * Note that you need to pass a whitelist of package prefixes to scan into the constructor, and the ability * to detect that a class or interface extends another depends upon the entire ancestral path between the two * classes or interfaces having one of the whitelisted package prefixes. 
* * The scanner also records the latest last-modified timestamp of any file or directory encountered, and you * can see if that latest last-modified timestamp has increased (indicating that something on the classpath * has been updated) by calling: * * <code> * boolean classpathContentsModified = fastClassPathScanner.classpathContentsModifiedSinceScan(); * </code> * * This can be used to enable dynamic class-reloading if something on the classpath is updated, for example * to support hot-replace of route handler classes in a webserver. The above call is several times faster * than the original call to scan(), since only modification timestamps need to be checked. * * Hosted at: https://github.com/lukehutch/fast-classpath-scanner * * Inspired by: https://github.com/rmuller/infomas-asl/tree/master/annotation-detector * * See also: http://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.4 * * Let me know if you find this useful! * * @author Luke Hutchison <luke .dot. hutch .at. gmail .dot. com> * * @license MIT * * The MIT License (MIT) * * Copyright (c) 2014 Luke Hutchison * * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and * associated documentation files (the "Software"), to deal in the Software without restriction, * including without limitation the rights to use, copy, modify, merge, Publish, distribute, * sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in allOf copies or * substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT * NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. * */ public class FastClasspathScanner { /** * List of directory path prefixes to scan (produced from list of package prefixes passed into the * constructor) */ private String[] pathsToScan; /** * The latest last-modified timestamp of any file, directory or sub-directory in the classpath, in millis * since the Unix epoch. Does not consider timestamps inside zipfiles/jarfiles, but the timestamp of the * zip/jarfile itself is considered. */ private long lastModified = 0; /** A list of class matchers to call once allOf classes have been read in from classpath. */ private ArrayList<ClassMatcher> classMatchers = new ArrayList<>(); /** * A list of file path matchers to call when a directory or subdirectory on the classpath matches a given * regexp. */ private ArrayList<FilePathMatcher> filePathMatchers = new ArrayList<>(); /** A then from fully-qualified class name to the corresponding ClassInfo object. */ private final HashMap<String, ClassInfo> classNameToClassInfo = new HashMap<>(); /** A then from fully-qualified class name to the corresponding InterfaceInfo object. */ private final HashMap<String, InterfaceInfo> interfaceNameToInterfaceInfo = new HashMap<>(); /** Reverse mapping from annotation to classes that have the annotation */ private final HashMap<String, ArrayList<String>> annotationToClasses = new HashMap<>(); /** Reverse mapping from interface to classes that implement the interface */ private final HashMap<String, ArrayList<String>> interfaceToClasses = new HashMap<>(); // ------------------------------------------------------------------------------------------------------ /** * Initialize a classpath scanner, with a list of package prefixes to scan. 
* * @param pacakagesToScan * A list of package prefixes to scan. */ public FastClasspathScanner(String[] pacakagesToScan) { this.pathsToScan = Stream.of(pacakagesToScan).map(p -> p.replace('.', '/') + "/") .toArray(String[]::new); } // ------------------------------------------------------------------------------------------------------ /** The method to run when a subclass of a specific class is found on the classpath. */ @FunctionalInterface public interface SubclassMatchProcessor<T> { void processMatch(Class<? extends T> matchingClass); } /** * Call the given ClassMatchProcessor if classes are found on the classpath that extend the specified * superclass. * * @param superclass * The superclass to match (i.e. the class that subclasses need to extend to match). * @param classMatchProcessor * the ClassMatchProcessor to call when a match is found. */ @SuppressWarnings("unchecked") public <T> FastClasspathScanner matchSubclassesOf(final Class<T> superclass, final SubclassMatchProcessor<T> classMatchProcessor) { if (superclass.isInterface()) { // No support yet for scanning for interfaces that extend other interfaces throw new IllegalArgumentException(superclass.getName() + " is an interface, not a regular class"); } if (superclass.isAnnotation()) { // No support yet for scanning for interfaces that extend other interfaces throw new IllegalArgumentException(superclass.getName() + " is an annotation, not a regular class"); } classMatchers.add(() -> { ClassInfo superclassInfo = classNameToClassInfo.get(superclass.getName()); boolean foundMatches = false; if (superclassInfo != null) { // For allOf subclasses of the given superclass for (ClassInfo subclassInfo : superclassInfo.allSubclasses) { try { // Load class Class<? extends T> klass = (Class<? 
extends T>) Class.forName(subclassInfo.name); // Process match classMatchProcessor.processMatch(klass); foundMatches = true; } catch (ClassNotFoundException | NoClassDefFoundError e) { throw new RuntimeException(e); } } } if (!foundMatches) { // Log.info("No classes found with superclass " + superclass.getName()); } }); return this; } // ------------------------------------------------------------------------------------------------------ /** The method to run when a class implementing a specific interface is found on the classpath. */ @FunctionalInterface public interface InterfaceMatchProcessor<T> { void processMatch(Class<? extends T> matchingClass); } /** * Call the given ClassMatchProcessor if classes are found on the classpath that implement the specified * interface. * * @param iface * The interface to match (i.e. the interface that classes need to implement to match). * @param interfaceMatchProcessor * the ClassMatchProcessor to call when a match is found. */ @SuppressWarnings("unchecked") public <T> FastClasspathScanner matchClassesImplementing(final Class<T> iface, final InterfaceMatchProcessor<T> interfaceMatchProcessor) { if (!iface.isInterface()) { throw new IllegalArgumentException(iface.getName() + " is not an interface"); } classMatchers.add(() -> { ArrayList<String> classesImplementingIface = interfaceToClasses.get(iface.getName()); if (classesImplementingIface != null) { // For allOf classes implementing the given interface for (String implClass : classesImplementingIface) { try { // Load class Class<? extends T> klass = (Class<? 
extends T>) Class.forName(implClass); // Process match interfaceMatchProcessor.processMatch(klass); } catch (ClassNotFoundException | NoClassDefFoundError e) { throw new RuntimeException(e); } } } else { // Log.info("No classes found implementing interface " + iface.getName()); } }); return this; } // ------------------------------------------------------------------------------------------------------ /** The method to run when a class with the right matching annotation is found on the classpath. */ @FunctionalInterface public interface ClassAnnotationMatchProcessor { void processMatch(Class<?> matchingClass); } /** * Call the given ClassMatchProcessor if classes are found on the classpath that have the given * annotation. * * @param annotation * The class annotation to match. * @param classMatchProcessor * the ClassMatchProcessor to call when a match is found. */ public FastClasspathScanner matchClassesWithAnnotation(final Class<?> annotation, final ClassAnnotationMatchProcessor classMatchProcessor) { if (!annotation.isAnnotation()) { throw new IllegalArgumentException("Class " + annotation.getName() + " is not an annotation"); } classMatchers.add(() -> { ArrayList<String> classesWithAnnotation = annotationToClasses.get(annotation.getName()); if (classesWithAnnotation != null) { // For allOf classes with the given annotation for (String classWithAnnotation : classesWithAnnotation) { try { // Load class Class<?> klass = Class.forName(classWithAnnotation); // Process match classMatchProcessor.processMatch(klass); } catch (ClassNotFoundException | NoClassDefFoundError e) { throw new RuntimeException(e); } } } else { // Log.info("No classes found with annotation " + annotation.getName()); } }); return this; } // ------------------------------------------------------------------------------------------------------ /** * The method to run when a matching file is found on the classpath. 
     */
    @FunctionalInterface
    public interface FileMatchProcessor {
        /**
         * Process a matching file.
         *
         * @param absolutePath
         *            The path of the matching file on the filesystem.
         * @param relativePath
         *            The path of the matching file relative to the classpath entry that contained the match.
         * @param inputStream
         *            An InputStream (either a FileInputStream or a ZipEntry InputStream) opened on the file.
         *            You do not need to close this InputStream before returning, it is closed by the caller.
         */
        void processMatch(String absolutePath, String relativePath, InputStream inputStream);
    }

    /**
     * Call the given FileMatchProcessor if files are found on the classpath with the given regex pattern in
     * their path.
     *
     * @param filenameMatchPattern
     *            The regex to match, e.g. "app/templates/.*\\.html"
     * @param fileMatchProcessor
     *            The FileMatchProcessor to call when each match is found.
     */
    public FastClasspathScanner matchFilenamePattern(final String filenameMatchPattern,
            final FileMatchProcessor fileMatchProcessor) {
        filePathMatchers.add(new FilePathMatcher(Pattern.compile(filenameMatchPattern), fileMatchProcessor));
        return this;
    }

    // ------------------------------------------------------------------------------------------------------

    /** A holder pairing a compiled file path pattern with the processor to call on matching paths. */
    private static class FilePathMatcher {
        Pattern pattern;
        FileMatchProcessor fileMatchProcessor;

        public FilePathMatcher(Pattern pattern, FileMatchProcessor fileMatchProcessor) {
            this.pattern = pattern;
            this.fileMatchProcessor = fileMatchProcessor;
        }
    }

    /** A functional interface used for testing if a class matches specified criteria. */
    @FunctionalInterface
    private interface ClassMatcher {
        void lookForMatches();
    }

    // ------------------------------------------------------------------------------------------------------

    /**
     * An object to hold class information. For speed purposes, this is reconstructed directly from the
     * classfile header without calling the classloader.
     */
    private static class ClassInfo {
        /** Fully-qualified class name */
        String name;

        /**
         * Set to true when this class is encountered in the classpath (false if the class is so far only
         * cited as a superclass)
         */
        boolean encountered;

        /** Direct superclass */
        ClassInfo directSuperclass;

        /** Direct subclasses */
        ArrayList<ClassInfo> directSubclasses = new ArrayList<>();

        /** All superclasses, including java.lang.Object. */
        HashSet<ClassInfo> allSuperclasses = new HashSet<>();

        /** All subclasses */
        HashSet<ClassInfo> allSubclasses = new HashSet<>();

        /** All interfaces implemented by this class */
        HashSet<String> interfaces = new HashSet<>();

        /** All annotations on this class */
        HashSet<String> annotations = new HashSet<>();

        /** Constructor used when this class is itself encountered on the classpath. */
        public ClassInfo(String name, ArrayList<String> interfaces, HashSet<String> annotations) {
            this.name = name;
            this.encounter(interfaces, annotations);
        }

        /**
         * If called by another class, this class was previously cited as a superclass, and now has been
         * itself encountered on the classpath.
         */
        public void encounter(ArrayList<String> interfaces, HashSet<String> annotations) {
            this.encountered = true;
            this.interfaces.addAll(interfaces);
            this.annotations.addAll(annotations);
        }

        /** Constructor used when this class was referenced as a superclass of the given subclass. */
        public ClassInfo(String name, ClassInfo subclass) {
            this.name = name;
            this.encountered = false;
            addSubclass(subclass);
        }

        /** Connect this class to a subclass. */
        public void addSubclass(ClassInfo subclass) {
            // A class has exactly one direct superclass; a second different one indicates corrupt input
            if (subclass.directSuperclass != null && subclass.directSuperclass != this) {
                throw new RuntimeException(subclass.name + " has two superclasses: "
                        + subclass.directSuperclass.name + ", " + this.name);
            }
            subclass.directSuperclass = this;
            subclass.allSuperclasses.add(this);
            this.directSubclasses.add(subclass);
            this.allSubclasses.add(subclass);
        }

        @Override
        public String toString() {
            return name;
        }
    }

    /**
     * Direct and ancestral interfaces of a given interface.
     */
    private static class InterfaceInfo {
        /** Directly-declared superinterfaces of this interface */
        ArrayList<String> superInterfaces = new ArrayList<>();

        /** Transitive closure of superinterfaces; filled in by finalizeInterfaceHierarchyRec() */
        HashSet<String> allSuperInterfaces = new HashSet<>();

        public InterfaceInfo(ArrayList<String> superInterfaces) {
            this.superInterfaces.addAll(superInterfaces);
        }
    }

    // ------------------------------------------------------------------------------------------------------

    /**
     * Recursively find all subclasses for each class; called by finalizeClassHierarchy.
     */
    private static void finalizeClassHierarchyRec(ClassInfo curr) {
        // DFS through subclasses
        for (ClassInfo subclass : curr.directSubclasses) {
            finalizeClassHierarchyRec(subclass);
        }
        // Postorder traversal of curr node to accumulate subclasses
        for (ClassInfo subclass : curr.directSubclasses) {
            curr.allSubclasses.addAll(subclass.allSubclasses);
        }
    }

    /**
     * Recursively find all superinterfaces of each interface; called by finalizeClassHierarchy.
     */
    private void finalizeInterfaceHierarchyRec(InterfaceInfo interfaceInfo) {
        // Interface inheritance is a DAG; don't double-visit nodes
        // (a non-empty allSuperInterfaces set means this node has already been processed)
        if (interfaceInfo.allSuperInterfaces.isEmpty() && !interfaceInfo.superInterfaces.isEmpty()) {
            interfaceInfo.allSuperInterfaces.addAll(interfaceInfo.superInterfaces);
            for (String iface : interfaceInfo.superInterfaces) {
                InterfaceInfo superinterfaceInfo = interfaceNameToInterfaceInfo.get(iface);
                if (superinterfaceInfo != null) {
                    finalizeInterfaceHierarchyRec(superinterfaceInfo);
                    // Merge all ancestral interfaces into the set of all superinterfaces for this interface
                    interfaceInfo.allSuperInterfaces.addAll(superinterfaceInfo.allSuperInterfaces);
                }
            }
        }
    }

    /**
     * Find all superclasses and subclasses for each class once all classes have been read.
*/ private void finalizeClassHierarchy() { if (classNameToClassInfo.isEmpty() && interfaceNameToInterfaceInfo.isEmpty()) { // If no classes or interfaces were matched, there is no hierarchy to build return; } // Find allOf root nodes (most classes and interfaces have java.lang.Object as a superclass) ArrayList<ClassInfo> roots = new ArrayList<>(); for (ClassInfo classInfo : classNameToClassInfo.values()) { if (classInfo.directSuperclass == null) { roots.add(classInfo); } } // Accumulate allOf superclasses and interfaces along each branch of class hierarchy. // Traverse top down / breadth first from roots. LinkedList<ClassInfo> nodes = new LinkedList<>(); nodes.addAll(roots); while (!nodes.isEmpty()) { ClassInfo head = nodes.removeFirst(); if (head.directSuperclass != null) { // Accumulate superclasses from ancestral classes head.allSuperclasses.addAll(head.directSuperclass.allSuperclasses); } // Add subclasses to queue for BFS for (ClassInfo subclass : head.directSubclasses) { nodes.add(subclass); } } // Accumulate allOf subclasses along each branch of class hierarchy. // Traverse depth first, postorder from roots. 
for (ClassInfo root : roots) { finalizeClassHierarchyRec(root); } // Create reverse mapping from annotation to classes that have the annotation for (ClassInfo classInfo : classNameToClassInfo.values()) { for (String annotation : classInfo.annotations) { ArrayList<String> classList = annotationToClasses.get(annotation); if (classList == null) { annotationToClasses.put(annotation, classList = new ArrayList<String>()); } classList.add(classInfo.name); } } for (InterfaceInfo ii : interfaceNameToInterfaceInfo.values()) { finalizeInterfaceHierarchyRec(ii); } // Create reverse mapping from interface to classes that implement the interface for (ClassInfo classInfo : classNameToClassInfo.values()) { // Find allOf interfaces and superinterfaces of a class HashSet<String> interfaceAndSuperinterfaces = new HashSet<>(); for (String iface : classInfo.interfaces) { interfaceAndSuperinterfaces.add(iface); InterfaceInfo ii = interfaceNameToInterfaceInfo.get(iface); if (ii != null) { interfaceAndSuperinterfaces.addAll(ii.allSuperInterfaces); } } // Add a mapping from the interface or super-interface back to the class for (String iface : interfaceAndSuperinterfaces) { ArrayList<String> classList = interfaceToClasses.get(iface); if (classList == null) { interfaceToClasses.put(iface, classList = new ArrayList<String>()); } classList.add(classInfo.name); } } // Classes that subclass another class that implements an interface also implement that interface for (String iface : interfaceToClasses.keySet()) { ArrayList<String> classes = interfaceToClasses.get(iface); HashSet<String> subClasses = new HashSet<String>(classes); for (String klass : classes) { ClassInfo ci = classNameToClassInfo.get(klass); if (ci != null) { for (ClassInfo subci : ci.allSubclasses) { subClasses.add(subci.name); } } } interfaceToClasses.put(iface, new ArrayList<>(subClasses)); } } // ------------------------------------------------------------------------------------------------------ /** * Read annotation entry 
     * from classfile.
     */
    private String readAnnotation(final DataInputStream inp, Object[] constantPool) throws IOException {
        String annotationFieldDescriptor = readRefdString(inp, constantPool);
        String annotationClassName;
        if (annotationFieldDescriptor.charAt(0) == 'L'
                && annotationFieldDescriptor.charAt(annotationFieldDescriptor.length() - 1) == ';') {
            // Lcom/xyz/Annotation; -> com.xyz.Annotation
            annotationClassName = annotationFieldDescriptor.substring(1,
                    annotationFieldDescriptor.length() - 1).replace('/', '.');
        } else {
            // Should not happen; fall back to the raw descriptor
            annotationClassName = annotationFieldDescriptor;
        }
        int numElementValuePairs = inp.readUnsignedShort();
        for (int i = 0; i < numElementValuePairs; i++) {
            inp.skipBytes(2); // element_name_index
            readAnnotationElementValue(inp, constantPool);
        }
        return annotationClassName;
    }

    /**
     * Read annotation element value from classfile. The value itself is discarded; this only advances the
     * stream past the element_value structure.
     */
    private void readAnnotationElementValue(final DataInputStream inp, Object[] constantPool)
            throws IOException {
        int tag = inp.readUnsignedByte();
        switch (tag) {
        case 'B':
        case 'C':
        case 'D':
        case 'F':
        case 'I':
        case 'J':
        case 'S':
        case 'Z':
        case 's':
            // const_value_index
            inp.skipBytes(2);
            break;
        case 'e':
            // enum_const_value
            inp.skipBytes(4);
            break;
        case 'c':
            // class_info_index
            inp.skipBytes(2);
            break;
        case '@':
            // Complex (nested) annotation
            readAnnotation(inp, constantPool);
            break;
        case '[':
            // array_value
            final int count = inp.readUnsignedShort();
            for (int l = 0; l < count; ++l) {
                // Nested annotation element value
                readAnnotationElementValue(inp, constantPool);
            }
            break;
        default:
            throw new ClassFormatError("Invalid annotation element type tag: 0x" + Integer.toHexString(tag));
        }
    }

    /**
     * Read a string reference from a classfile, then look up the string in the constant pool.
*/ private static String readRefdString(DataInputStream inp, Object[] constantPool) throws IOException { int constantPoolIdx = inp.readUnsignedShort(); Object constantPoolObj = constantPool[constantPoolIdx]; return (constantPoolObj instanceof Integer ? (String) constantPool[(Integer) constantPoolObj] : (String) constantPoolObj); } /** * Directly examine contents of classfile binary header. */ private void readClassInfoFromClassfileHeader(final InputStream inputStream) throws IOException { DataInputStream inp = new DataInputStream(new BufferedInputStream(inputStream, 1024)); // Magic if (inp.readInt() != 0xCAFEBABE) { // Not classfile return; } // Minor version inp.readUnsignedShort(); // Major version inp.readUnsignedShort(); // Constant pool count (1-indexed, zeroth entry not used) int cpCount = inp.readUnsignedShort(); // Constant pool Object[] constantPool = new Object[cpCount]; for (int i = 1; i < cpCount; ++i) { final int tag = inp.readUnsignedByte(); switch (tag) { case 1: // Modified UTF8 constantPool[i] = inp.readUTF(); break; case 3: // int case 4: // float inp.skipBytes(4); break; case 5: // long case 6: // double inp.skipBytes(8); i++; // double slot break; case 7: // Class case 8: // String // Forward or backward reference a Modified UTF8 entry constantPool[i] = inp.readUnsignedShort(); break; case 9: // field ref case 10: // method ref case 11: // interface ref case 12: // name and type inp.skipBytes(4); // two shorts break; case 15: // method handle inp.skipBytes(3); break; case 16: // method type inp.skipBytes(2); break; case 18: // invoke dynamic inp.skipBytes(4); break; default: throw new ClassFormatError("Unkown tag value for constant pool entry: " + tag); } } // Access flags int flags = inp.readUnsignedShort(); boolean isInterface = (flags & 0x0200) != 0; // This class name, with slashes replaced with dots String className = readRefdString(inp, constantPool).replace('/', '.'); // Superclass name, with slashes replaced with dots String 
superclassName = readRefdString(inp, constantPool).replace('/', '.'); // Interfaces int interfaceCount = inp.readUnsignedShort(); ArrayList<String> interfaces = new ArrayList<>(); for (int i = 0; i < interfaceCount; i++) { interfaces.add(readRefdString(inp, constantPool).replace('/', '.')); } // Fields int fieldCount = inp.readUnsignedShort(); for (int i = 0; i < fieldCount; i++) { inp.skipBytes(6); // access_flags, name_index, descriptor_index int attributesCount = inp.readUnsignedShort(); for (int j = 0; j < attributesCount; j++) { inp.skipBytes(2); // attribute_name_index int attributeLength = inp.readInt(); inp.skipBytes(attributeLength); } } // Methods int methodCount = inp.readUnsignedShort(); for (int i = 0; i < methodCount; i++) { inp.skipBytes(6); // access_flags, name_index, descriptor_index int attributesCount = inp.readUnsignedShort(); for (int j = 0; j < attributesCount; j++) { inp.skipBytes(2); // attribute_name_index int attributeLength = inp.readInt(); inp.skipBytes(attributeLength); } } // Attributes (including class annotation) HashSet<String> annotations = new HashSet<>(); int attributesCount = inp.readUnsignedShort(); for (int i = 0; i < attributesCount; i++) { String attributeName = readRefdString(inp, constantPool); int attributeLength = inp.readInt(); if ("RuntimeVisibleAnnotations".equals(attributeName)) { int annotationCount = inp.readUnsignedShort(); for (int m = 0; m < annotationCount; m++) { String annotationName = readAnnotation(inp, constantPool); annotations.add(annotationName); } } else { inp.skipBytes(attributeLength); } } if (isInterface) { // Save the info recovered from the classfile for an interface // Look up InterfaceInfo object for this interface InterfaceInfo thisInterfaceInfo = interfaceNameToInterfaceInfo.get(className); if (thisInterfaceInfo == null) { // This interface has not been encountered before on the classpath interfaceNameToInterfaceInfo.put(className, thisInterfaceInfo = new InterfaceInfo(interfaces)); } else { 
// An interface of this fully-qualified name has been encountered already earlier on // the classpath, so this interface is shadowed, ignore it return; } } else { // Save the info recovered from the classfile for a class // Look up ClassInfo object for this class ClassInfo thisClassInfo = classNameToClassInfo.get(className); if (thisClassInfo == null) { // This class has not been encountered before on the classpath classNameToClassInfo.put(className, thisClassInfo = new ClassInfo(className, interfaces, annotations)); } else if (thisClassInfo.encountered) { // A class of this fully-qualified name has been encountered already earlier on // the classpath, so this class is shadowed, ignore it return; } else { // This is the first time this class has been encountered on the classpath, but // it was previously cited as a superclass of another class thisClassInfo.encounter(interfaces, annotations); } // Look up ClassInfo object for superclass, and connect it to this class ClassInfo superclassInfo = classNameToClassInfo.get(superclassName); if (superclassInfo == null) { classNameToClassInfo.put(superclassName, superclassInfo = new ClassInfo(superclassName, thisClassInfo)); } else { superclassInfo.addSubclass(thisClassInfo); } } } // ------------------------------------------------------------------------------------------------------ /** * Scan a file. 
*/ private void scanFile(File file, String absolutePath, String relativePath, boolean scanTimestampsOnly) throws IOException { lastModified = Math.max(lastModified, file.lastModified()); if (!scanTimestampsOnly) { if (relativePath.endsWith(".class")) { // Found a classfile try (InputStream inputStream = new FileInputStream(file)) { // Inspect header of classfile readClassInfoFromClassfileHeader(inputStream); } } else { // For non-classfiles, match file paths against path patterns for (FilePathMatcher fileMatcher : filePathMatchers) { if (fileMatcher.pattern.matcher(relativePath).matches()) { // If there's a match, open the file as a stream and call the match processor try (InputStream inputStream = new FileInputStream(file)) { fileMatcher.fileMatchProcessor.processMatch(absolutePath, relativePath, inputStream); } } } } } } /** * Scan a directory for matching file path patterns. */ private void scanDir(File dir, int ignorePrefixLen, boolean scanTimestampsOnly) throws IOException { String absolutePath = dir.getPath(); String relativePath = ignorePrefixLen > absolutePath.length() ? 
"" : absolutePath.substring(ignorePrefixLen); relativePath = relativePath.replace(File.separatorChar,'/'); boolean scanDirs = false, scanFiles = false; for (String pathToScan : pathsToScan) { if (relativePath.startsWith(pathToScan) || // (relativePath.length() == pathToScan.length() - 1 && pathToScan.startsWith(relativePath))) { // In a path that has a whitelisted path as a prefix -- can start scanning files scanDirs = scanFiles = true; break; } if (pathToScan.startsWith(relativePath)) { // In a path that is a prefix of a whitelisted path -- keep recursively scanning dirs scanDirs = true; } } if (scanDirs || scanFiles) { lastModified = Math.max(lastModified, dir.lastModified()); File[] subFiles = dir.listFiles(); for (final File subFile : subFiles) { if (subFile.isDirectory()) { // Recurse into subdirectory scanDir(subFile, ignorePrefixLen, scanTimestampsOnly); } else if (scanFiles && subFile.isFile()) { // Scan file String leafSuffix = "/" + subFile.getName(); scanFile(subFile, absolutePath + leafSuffix, relativePath + leafSuffix, scanTimestampsOnly); } } } } /** * Scan a zipfile for matching file path patterns. (Does not recurse into zipfiles within zipfiles.) */ private void scanZipfile(final String zipfilePath, final ZipFile zipFile, boolean scanTimestampsOnly) throws IOException { boolean timestampWarning = false; for (Enumeration<? 
extends ZipEntry> entries = zipFile.entries(); entries.hasMoreElements();) { // Scan for matching filenames final ZipEntry entry = entries.nextElement(); if (!entry.isDirectory()) { // Only process file entries (zipfile indices contain both directory entries and // separate file entries for files within each directory, in lexicographic order) String path = entry.getName(); boolean scanFile = false; for (String pathToScan : pathsToScan) { if (path.startsWith(pathToScan)) { // File path has a whitelisted path as a prefix -- can scan file scanFile = true; break; } } if (scanFile) { // Assumes that the clock used to timestamp zipfile entries is in sync with the // clock used to timestamp regular file and directory entries in the classpath. // Just in case, we check entry timestamps against the current time. long entryTime = entry.getTime(); lastModified = Math.max(lastModified, entryTime); if (entryTime > System.currentTimeMillis() && !timestampWarning) { String msg = zipfilePath + " contains modification timestamps after the current time"; // Log.warning(log); System.err.println(msg); // Only warn once timestampWarning = true; } if (!scanTimestampsOnly) { if (path.endsWith(".class")) { // Found a classfile, open it as a stream and inspect header try (InputStream inputStream = zipFile.getInputStream(entry)) { readClassInfoFromClassfileHeader(inputStream); } } else { // For non-classfiles, match file paths against path patterns for (FilePathMatcher fileMatcher : filePathMatchers) { if (fileMatcher.pattern.matcher(path).matches()) { // There's a match, open the file as a stream and call the match processor try (InputStream inputStream = zipFile.getInputStream(entry)) { fileMatcher.fileMatchProcessor.processMatch(path, path, inputStream); } } } } } } } } } // ------------------------------------------------------------------------------------------------------ /** * Get a list of unique elements on the classpath as File objects, preserving order. 
* Classpath elements that do not exist are not returned. */ public static ArrayList<File> getUniqueClasspathElements() { String[] pathElements = System.getProperty("java.class.path").split(File.pathSeparator); HashSet<String> pathElementsSet = new HashSet<>(); ArrayList<File> pathFiles = new ArrayList<>(); for (String pathElement : pathElements) { if (pathElementsSet.add(pathElement)) { File file = new File(pathElement); if (file.exists()) { pathFiles.add(file); } } } return pathFiles; } /** * Scan classpath for matching files. Call this after allOf match processors have been added. */ private void scan(boolean scanTimestampsOnly) { // long scanStart = System.currentTimeMillis(); if (!scanTimestampsOnly) { classNameToClassInfo.clear(); interfaceNameToInterfaceInfo.clear(); annotationToClasses.clear(); interfaceToClasses.clear(); } try { // Iterate through path elements and recursively scan within each directory and zipfile for (File pathElt : getUniqueClasspathElements()) { String path = pathElt.getPath(); if (pathElt.isDirectory()) { // Scan within dir path element scanDir(pathElt, path.length() + 1, scanTimestampsOnly); } else if (pathElt.isFile()) { String pathLower = path.toLowerCase(); if (pathLower.endsWith(".jar") || pathLower.endsWith(".zip")) { // Scan within jar/zipfile path element scanZipfile(path, new ZipFile(pathElt), scanTimestampsOnly); } else { // File listed directly on classpath scanFile(pathElt, path, pathElt.getName(), scanTimestampsOnly); for (FilePathMatcher fileMatcher : filePathMatchers) { if (fileMatcher.pattern.matcher(path).matches()) { // If there's a match, open the file as a stream and call the match processor try (InputStream inputStream = new FileInputStream(pathElt)) { fileMatcher.fileMatchProcessor.processMatch(path, pathElt.getName(), inputStream); } } } } } else { // Log.info("Skipping non-file/non-dir on classpath: " + file.getCanonicalPath()); } } } catch (IOException e) { throw new RuntimeException(e); } if 
(!scanTimestampsOnly) { // Finalize class hierarchy, then look for class matches finalizeClassHierarchy(); for (ClassMatcher classMatcher : classMatchers) { classMatcher.lookForMatches(); } } // Log.info("Classpath " + (scanTimestampsOnly ? "timestamp " : "") + "scanning took: " // + (System.currentTimeMillis() - scanStart) + " ms"); } /** * Scan classpath for matching files. Call this after allOf match processors have been added. */ public void scan() { scan(/* scanTimestampsOnly = */false); } /** * Returns true if the classpath contents have been changed since scan() was last called. Only considers * classpath prefixes whitelisted in the call to the constructor. */ public boolean classpathContentsModifiedSinceScan() { long lastModified = this.lastModified; scan(/* scanTimestampsOnly = */true); return this.lastModified > lastModified; } }
package com.oreumio.james.rest.user; import java.util.List; /** * @author Jhonson choi (jhonsonchoi@gmail.com) */ public class EmlUserConfigVo { private String listDisplayType; private int listSize; private String listPreviewContent; private List<String> listColumns; private String readOpenNewWindow; private String readDisplaySingleLineSubject; private String readDisplayAllRecipients; private String writeOpenNewWindow; private String writeDisplayBCC; private String writeIncludeFooter; private String writeIncludeFooterWhenReply; private String writePreferredSender; private String writeReadConfirmMail; private String writeReceiveReadConfirmMail; private String writeSendConfirmMailType; private String sendPreferredAfterPage; private String useFooter; private int footer; private List<EmlSignatureVo> footers; private String useAbsence; private String absenceStart; private String absenceEnd; private String absenceContent; private String useBlocker; private String blockerAction; private List<EmlBlockerVo> blockers; private String useForwarder; private String forwarderStoreAndForward; private List<EmlForwarderVo> forwarders; private String useClassifier; private String useLabeler; public static class EmlSignatureVo { private int key; private String name; private String content; public int getKey() { return key; } public void setKey(int key) { this.key = key; } public String getName() { return name; } public void setName(String name) { this.name = name; } public String getContent() { return content; } public void setContent(String content) { this.content = content; } } public static class EmlBlockerVo { private List<String> addresses; public List<String> getAddresses() { return addresses; } public void setAddresses(List<String> addresses) { this.addresses = addresses; } @Override public String toString() { return "EmlBlockerVo{" + ", addresses=" + addresses + '}'; } } public static class EmlForwarderVo { private String addresses; public String getAddresses() { return 
// --- tail of inner class EmlForwarderVo; its declaration and fields begin above this chunk ---
addresses; } // closes getAddresses(): plain accessor for the forwarder address list string

        public void setAddresses(String addresses) { this.addresses = addresses; }

        @Override
        public String toString() {
            // NOTE(review): the output is "EmlForwarderVo{, addresses='...'}" — the comma
            // directly after the opening brace looks like a leftover from a removed first
            // field; confirm whether it is intentional before relying on this format.
            return "EmlForwarderVo{" + ", addresses='" + addresses + '\'' + '}';
        }
    }

    /**
     * Default constructor: every feature toggle starts disabled ("no").
     * NOTE(review): toggles are modelled as the Strings "yes"/"no" rather than booleans —
     * presumably to match a backing config format; confirm against the consumer of this VO.
     */
    public EmlUserConfigVo() {
        useAbsence = "no";
        useBlocker = "no";
        useClassifier = "no";
        useFooter = "no";
        useForwarder = "no";
        useLabeler = "no";
    }

    // --- mail-list display preferences ---

    public String getListDisplayType() { return listDisplayType; }

    public void setListDisplayType(String listDisplayType) { this.listDisplayType = listDisplayType; }

    public int getListSize() { return listSize; }

    public void setListSize(int listSize) { this.listSize = listSize; }

    public String getListPreviewContent() { return listPreviewContent; }

    public void setListPreviewContent(String listPreviewContent) { this.listPreviewContent = listPreviewContent; }

    public List<String> getListColumns() { return listColumns; }

    public void setListColumns(List<String> listColumns) { this.listColumns = listColumns; }

    // --- read-view preferences ---

    public String getReadOpenNewWindow() { return readOpenNewWindow; }

    public void setReadOpenNewWindow(String readOpenNewWindow) { this.readOpenNewWindow = readOpenNewWindow; }

    public String getReadDisplaySingleLineSubject() { return readDisplaySingleLineSubject; }

    public void setReadDisplaySingleLineSubject(String readDisplaySingleLineSubject) { this.readDisplaySingleLineSubject = readDisplaySingleLineSubject; }

    public String getReadDisplayAllRecipients() { return readDisplayAllRecipients; }

    public void setReadDisplayAllRecipients(String readDisplayAllRecipients) { this.readDisplayAllRecipients = readDisplayAllRecipients; }

    // --- compose/write preferences ---

    public String getWriteOpenNewWindow() { return writeOpenNewWindow; }

    public void setWriteOpenNewWindow(String writeOpenNewWindow) { this.writeOpenNewWindow = writeOpenNewWindow; }

    public String getWriteDisplayBCC() { return writeDisplayBCC; }

    public void setWriteDisplayBCC(String writeDisplayBCC) { this.writeDisplayBCC = writeDisplayBCC; }

    public String getWriteIncludeFooter() { return writeIncludeFooter; }

    public void setWriteIncludeFooter(String writeIncludeFooter) { this.writeIncludeFooter = writeIncludeFooter; }

    public String getWriteIncludeFooterWhenReply() { return writeIncludeFooterWhenReply; }

    public void setWriteIncludeFooterWhenReply(String writeIncludeFooterWhenReply) { this.writeIncludeFooterWhenReply = writeIncludeFooterWhenReply; }

    public String getWritePreferredSender() { return writePreferredSender; }

    public void setWritePreferredSender(String writePreferredSender) { this.writePreferredSender = writePreferredSender; }

    public String getWriteReadConfirmMail() { return writeReadConfirmMail; }

    public void setWriteReadConfirmMail(String writeReadConfirmMail) { this.writeReadConfirmMail = writeReadConfirmMail; }

    public String getWriteReceiveReadConfirmMail() { return writeReceiveReadConfirmMail; }

    public void setWriteReceiveReadConfirmMail(String writeReceiveReadConfirmMail) { this.writeReceiveReadConfirmMail = writeReceiveReadConfirmMail; }

    public String getWriteSendConfirmMailType() { return writeSendConfirmMailType; }

    public void setWriteSendConfirmMailType(String writeSendConfirmMailType) { this.writeSendConfirmMailType = writeSendConfirmMailType; }

    public String getSendPreferredAfterPage() { return sendPreferredAfterPage; }

    public void setSendPreferredAfterPage(String sendPreferredAfterPage) { this.sendPreferredAfterPage = sendPreferredAfterPage; }

    // --- footer (signature) feature ---

    public String getUseFooter() { return useFooter; }

    public void setUseFooter(String useFooter) { this.useFooter = useFooter; }

    public int getFooter() { return footer; }

    public void setFooter(int footer) { this.footer = footer; }

    public List<EmlSignatureVo> getFooters() { return footers; }

    public void setFooters(List<EmlSignatureVo> footers) { this.footers = footers; }

    // --- absence (out-of-office) feature ---

    public String getUseAbsence() { return useAbsence; }

    public void setUseAbsence(String useAbsence) { this.useAbsence = useAbsence; }

    public String getAbsenceStart() { return absenceStart; }

    public void setAbsenceStart(String absenceStart) { this.absenceStart = absenceStart; }

    public String getAbsenceEnd() { return absenceEnd; }

    public void setAbsenceEnd(String absenceEnd) { this.absenceEnd = absenceEnd; }

    public String getAbsenceContent() { return absenceContent; }

    public void setAbsenceContent(String absenceContent) { this.absenceContent = absenceContent; }

    // --- blocker feature ---

    public String getUseBlocker() { return useBlocker; }

    public void setUseBlocker(String useBlocker) { this.useBlocker = useBlocker; }

    public String getBlockerAction() { return blockerAction; }

    public void setBlockerAction(String blockerAction) { this.blockerAction = blockerAction; }

    public List<EmlBlockerVo> getBlockers() { return blockers; }

    public void setBlockers(List<EmlBlockerVo> blockers) { this.blockers = blockers; }

    // --- forwarder feature ---

    public String getUseForwarder() { return useForwarder; }

    public void setUseForwarder(String useForwarder) { this.useForwarder = useForwarder; }

    public String getForwarderStoreAndForward() { return forwarderStoreAndForward; }

    public void setForwarderStoreAndForward(String forwarderStoreAndForward) { this.forwarderStoreAndForward = forwarderStoreAndForward; }

    public List<EmlForwarderVo> getForwarders() { return forwarders; }

    public void setForwarders(List<EmlForwarderVo> forwarders) { this.forwarders = forwarders; }

    // --- classifier / labeler feature toggles ---

    public String getUseClassifier() { return useClassifier; }

    public void setUseClassifier(String useClassifier) { this.useClassifier = useClassifier; }

    public String getUseLabeler() { return useLabeler; }

    public void setUseLabeler(String useLabeler) { this.useLabeler = useLabeler; }

    /**
     * Debug representation listing every preference field.
     * NOTE(review): absence start/end/content and blockerAction/forwarderStoreAndForward
     * are not included here, although they are part of the object's state.
     */
    @Override
    public String toString() {
        return "EmlUserConfigVo{"
                + "listDisplayType='" + listDisplayType + '\''
                + ", listSize=" + listSize
                + ", listPreviewContent='" + listPreviewContent + '\''
                + ", listColumns=" + listColumns
                + ", readOpenNewWindow='" + readOpenNewWindow + '\''
                + ", readDisplaySingleLineSubject='" + readDisplaySingleLineSubject + '\''
                + ", readDisplayAllRecipients='" + readDisplayAllRecipients + '\''
                + ", writeOpenNewWindow='" + writeOpenNewWindow + '\''
                + ", writeDisplayBCC='" + writeDisplayBCC + '\''
                + ", writeIncludeFooter='" + writeIncludeFooter + '\''
                + ", writeIncludeFooterWhenReply='" + writeIncludeFooterWhenReply + '\''
                + ", writePreferredSender='" + writePreferredSender + '\''
                + ", writeReadConfirmMail='" + writeReadConfirmMail + '\''
                + ", writeReceiveReadConfirmMail='" + writeReceiveReadConfirmMail + '\''
                + ", writeSendConfirmMailType='" + writeSendConfirmMailType + '\''
                + ", sendPreferredAfterPage='" + sendPreferredAfterPage + '\''
                + ", useFooter='" + useFooter + '\''
                + ", footer='" + footer + '\''
                + ", footers=" + footers
                + ", useAbsence='" + useAbsence + '\''
                + ", useBlocker='" + useBlocker + '\''
                + ", blockers=" + blockers
                + ", useForwarder='" + useForwarder + '\''
                + ", forwarders=" + forwarders
                + ", useClassifier='" + useClassifier + '\''
                + ", useLabeler='" + useLabeler + '\''
                + '}';
    }
}
package com.documents;

import static org.hibernate.validator.internal.util.Contracts.assertNotNull;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import java.util.ArrayList;
import java.util.List;

import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.mockito.runners.MockitoJUnitRunner;

import com.documents.models.WithdrawalDocumentRequest;
import com.documents.repositories.WithdrawalDocumentRequestRepository;
import com.documents.services.WithdrawalDocumentRequestService;
import com.documents.services.WithdrawalDocumentRequestServiceImpl;

/**
 * Unit tests for {@code WithdrawalDocumentRequestServiceImpl}, exercising save,
 * findById, delete and findAll with a mocked repository.
 */
@RunWith(MockitoJUnitRunner.class)
public class WithdrawalDocumentRequestServiceTest {

    // Repository collaborator, mocked so no database is touched.
    @Mock
    private WithdrawalDocumentRequestRepository withdrawalDocumentRequestRepository;

    // Service under test; the mock above is injected into it.
    @InjectMocks
    private WithdrawalDocumentRequestServiceImpl withdrawalDocumentRequestServiceImpl = new WithdrawalDocumentRequestServiceImpl();

    // NOTE(review): MockitoJUnitRunner already initializes @Mock/@InjectMocks,
    // so this explicit initMocks call is redundant (but harmless).
    @Before
    public void initializedMockito() {
        MockitoAnnotations.initMocks(this);
    }

    /** Populates a request with a fixed set of field values used by the tests below. */
    public void setup(WithdrawalDocumentRequest withdrawalDocumentRequestToSet) {
        withdrawalDocumentRequestToSet.setId((long) 3);
        withdrawalDocumentRequestToSet.setDocumentName("DocName");
        withdrawalDocumentRequestToSet.setStudentId((long) 34);
        withdrawalDocumentRequestToSet.setSeries("1234");
        withdrawalDocumentRequestToSet.setApproveReason("reason");
        withdrawalDocumentRequestToSet.setDate("01.06.2017");
        withdrawalDocumentRequestToSet.setNumberBAC((long) 1234);
        withdrawalDocumentRequestToSet.setNumberIDCard((long) 12345);
        withdrawalDocumentRequestToSet.setSeriesBAC("1234");
        withdrawalDocumentRequestToSet.setSignature("signature");
        withdrawalDocumentRequestToSet.setNumberSheet((long) 1234);
        withdrawalDocumentRequestToSet.setStudyYear((long) 2);
        withdrawalDocumentRequestToSet.setUniversityYear((long) 2);
        withdrawalDocumentRequestToSet.setTypeOfCourses("IP");
    }

    /** save() should return a non-null entity when the repository returns one. */
    @Test
    public void behavioural_withdrawalDocumentRequest_save_should_return_true() throws Exception {
        //Act
        WithdrawalDocumentRequest withdrawalDocumentRequestAfterSave = new WithdrawalDocumentRequest();
        WithdrawalDocumentRequest withdrawalDocumentRequestToSave = new WithdrawalDocumentRequest();
        setup(withdrawalDocumentRequestToSave);
        when(withdrawalDocumentRequestRepository.save(any(WithdrawalDocumentRequest.class))).thenReturn(withdrawalDocumentRequestToSave);
        withdrawalDocumentRequestAfterSave = withdrawalDocumentRequestServiceImpl.save(withdrawalDocumentRequestToSave);
        //Assert
        assertNotNull(withdrawalDocumentRequestAfterSave);
    }

    /** save() should pass through every field of the saved entity unchanged. */
    @Test
    public void functionality_withdrawalDocumentRequest_save_should_return_withdrawalDocumentRequest() throws Exception {
        //Act
        WithdrawalDocumentRequest withdrawalDocumentRequestAfterSave = new WithdrawalDocumentRequest();
        setup(withdrawalDocumentRequestAfterSave);
        when(withdrawalDocumentRequestRepository.save(any(WithdrawalDocumentRequest.class))).thenReturn(withdrawalDocumentRequestAfterSave);
        WithdrawalDocumentRequest savedWithdrawalDocumentRequest = withdrawalDocumentRequestServiceImpl.save(withdrawalDocumentRequestAfterSave);
        //Assert
        assertEquals(Long.valueOf(3), savedWithdrawalDocumentRequest.getId());
        assertEquals("DocName", savedWithdrawalDocumentRequest.getDocumentName());
        assertEquals(Long.valueOf(34), savedWithdrawalDocumentRequest.getStudentId());
        assertEquals("1234", savedWithdrawalDocumentRequest.getSeries());
        assertEquals(Long.valueOf(12345), savedWithdrawalDocumentRequest.getNumberIDCard());
        assertEquals(Long.valueOf(2), savedWithdrawalDocumentRequest.getStudyYear());
        assertEquals(Long.valueOf(2), savedWithdrawalDocumentRequest.getUniversityYear());
        assertEquals("IP", savedWithdrawalDocumentRequest.getTypeOfCourses());
        assertEquals("reason", savedWithdrawalDocumentRequest.getApproveReason());
        assertEquals("01.06.2017", savedWithdrawalDocumentRequest.getDate());
        assertEquals("signature", savedWithdrawalDocumentRequest.getSignature());
        assertEquals("1234", savedWithdrawalDocumentRequest.getSeriesBAC());
        assertEquals(Long.valueOf(1234), savedWithdrawalDocumentRequest.getNumberBAC());
        assertEquals(Long.valueOf(1234), savedWithdrawalDocumentRequest.getNumberSheet());
    }

    /** findById() should return the entity supplied by the repository. */
    @Test
    public void behavioural_withdrawalDocumentRequest_findById_should_return_true() throws Exception {
        //Act
        WithdrawalDocumentRequest withdrawalDocumentRequestToFind = new WithdrawalDocumentRequest();
        when(withdrawalDocumentRequestRepository.findOne(any(long.class))).thenReturn(withdrawalDocumentRequestToFind);
        withdrawalDocumentRequestToFind = withdrawalDocumentRequestServiceImpl.findById((long) 123);
        //Assert
        Assert.assertNotNull(withdrawalDocumentRequestToFind);
    }

    // NOTE(review): findOne is never stubbed here, so the mock returns null by default —
    // this assertNull passes regardless of whether delete() actually removed anything.
    @Test
    public void behavioural_withdrawalDocumentRequest_deleteById_should_return_true() throws Exception {
        //Act
        WithdrawalDocumentRequest withdrawalDocumentRequest = new WithdrawalDocumentRequest();
        withdrawalDocumentRequest.setId((long) 3);
        withdrawalDocumentRequestServiceImpl.delete(withdrawalDocumentRequest.getId());
        WithdrawalDocumentRequest foundWithdrawalDocumentRegistrationForm = withdrawalDocumentRequestServiceImpl.findById((long) 3);
        //Assert
        assertNull(foundWithdrawalDocumentRegistrationForm);
    }

    /** delete() should delegate to the repository with the same id. */
    @Test
    public void functionality_withdrawalDocumentRequest_deleteById_should_delete_withdrawalDocument() throws Exception {
        //Act
        // NOTE(review): the id is never set, so getId() presumably returns null here;
        // the verify still matches because the same (null) value is forwarded.
        WithdrawalDocumentRequest withdrawalDocumentRequest = new WithdrawalDocumentRequest();
        withdrawalDocumentRequestServiceImpl.delete(withdrawalDocumentRequest.getId());
        //Assert
        verify(withdrawalDocumentRequestRepository).delete(withdrawalDocumentRequest.getId());
    }

    /** findAll() should return a non-null list. */
    @Test
    public void behavioural_withdrawalDocumentRequest_findAll_should_return_true() throws Exception {
        //Act
        List<WithdrawalDocumentRequest> withdrawalDocumentRequests = new ArrayList<>();
        WithdrawalDocumentRequest withdrawalDocumentRequest = new WithdrawalDocumentRequest();
        setup(withdrawalDocumentRequest);
        when(withdrawalDocumentRequestRepository.findAll()).thenReturn(withdrawalDocumentRequests);
        withdrawalDocumentRequests.add(withdrawalDocumentRequest);
        List<WithdrawalDocumentRequest> foundForms;
        foundForms = withdrawalDocumentRequestServiceImpl.findAll();
        //Assert
        Assert.assertNotNull(foundForms);
    }

    /** findAll() should return the repository's list with all fields intact. */
    @Test
    public void functionality_withdrawalDocumentRequest_findAll_should_return_list_of_withdrawalDocumentRequest() throws Exception {
        //Act
        List<WithdrawalDocumentRequest> withdrawalDocumentRequests = new ArrayList<>();
        WithdrawalDocumentRequest withdrawalDocumentRequest = new WithdrawalDocumentRequest();
        setup(withdrawalDocumentRequest);
        when(withdrawalDocumentRequestRepository.findAll()).thenReturn(withdrawalDocumentRequests);
        withdrawalDocumentRequests.add(withdrawalDocumentRequest);
        List<WithdrawalDocumentRequest> foundForms;
        foundForms = withdrawalDocumentRequestServiceImpl.findAll();
        assertEquals(withdrawalDocumentRequests.get(0).getId(), foundForms.get(0).getId());
        assertEquals(withdrawalDocumentRequests.get(0).getDocumentName(), foundForms.get(0).getDocumentName());
        assertEquals(withdrawalDocumentRequests.get(0).getStudentId(), foundForms.get(0).getStudentId());
        assertEquals(withdrawalDocumentRequests.get(0).getSeries(), foundForms.get(0).getSeries());
        assertEquals(withdrawalDocumentRequests.get(0).getNumberIDCard(), foundForms.get(0).getNumberIDCard());
        assertEquals(withdrawalDocumentRequests.get(0).getStudyYear(), foundForms.get(0).getStudyYear());
        assertEquals(withdrawalDocumentRequests.get(0).getUniversityYear(), foundForms.get(0).getUniversityYear());
        assertEquals(withdrawalDocumentRequests.get(0).getTypeOfCourses(), foundForms.get(0).getTypeOfCourses());
        assertEquals(withdrawalDocumentRequests.get(0).getApproveReason(), foundForms.get(0).getApproveReason());
        assertEquals(withdrawalDocumentRequests.get(0).getDate(), foundForms.get(0).getDate());
        assertEquals(withdrawalDocumentRequests.get(0).getSignature(), foundForms.get(0).getSignature());
        assertEquals(withdrawalDocumentRequests.get(0).getSeriesBAC(), foundForms.get(0).getSeriesBAC());
        assertEquals(withdrawalDocumentRequests.get(0).getNumberBAC(), foundForms.get(0).getNumberBAC());
        assertEquals(withdrawalDocumentRequests.get(0).getNumberSheet(), foundForms.get(0).getNumberSheet());
    }
}
package org.grassroot.android.utils;

import android.os.Looper;
import android.support.annotation.NonNull;
import android.util.Log;

import org.grassroot.android.models.Group;
import org.grassroot.android.models.GroupJoinRequest;
import org.grassroot.android.models.LocalGroupEdits;
import org.grassroot.android.models.Member;
import org.grassroot.android.models.Permission;
import org.grassroot.android.models.PreferenceObject;
import org.grassroot.android.models.PublicGroupModel;
import org.grassroot.android.models.ResponseTotalsModel;
import org.grassroot.android.models.ShareModel;
import org.grassroot.android.models.TaskModel;
import org.grassroot.android.models.TaskNotification;
import org.grassroot.android.models.helpers.RealmString;
import org.grassroot.android.models.responses.Token;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;

import io.reactivex.Observable;
import io.reactivex.ObservableEmitter;
import io.reactivex.ObservableOnSubscribe;
import io.reactivex.Scheduler;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.functions.Consumer;
import io.reactivex.schedulers.Schedulers;
import io.realm.Realm;
import io.realm.RealmList;
import io.realm.RealmObject;
import io.realm.RealmQuery;
import io.realm.RealmResults;
import io.realm.Sort;

/**
 * Static helpers wrapping Realm persistence: save/load/delete of model objects,
 * plus Rx wrappers that run queries on the IO scheduler and deliver results on
 * the Android main thread.
 *
 * NOTE(review): most methods open a Realm and close it without try/finally, so an
 * exception mid-query leaks the instance; the Rx emitters also never call
 * onComplete/onError. Left as-is here — changing the lifecycle ordering is not
 * safe without the callers in view.
 */
public class RealmUtils {

    private static final String TAG = RealmUtils.class.getSimpleName();

    /** Unwraps a RealmList of RealmString into a plain List of String; null in, null out. */
    public static List<String> convertListOfRealmStringInListOfString(
            RealmList<RealmString> realmList) {
        if (realmList != null) {
            List<String> list = new ArrayList<>();
            for (RealmString realmString : realmList) {
                list.add(realmString.getString());
            }
            return list;
        }
        return null;
    }

    /** True if any element of the list contains the search term (substring match, not equality). */
    public static boolean realmListContains(String term, RealmList<RealmString> list) {
        if (list == null || list.isEmpty()) {
            return false;
        }
        for (RealmString realmString : list) {
            if (realmString.getString().contains(term)) {
                return true;
            }
        }
        return false;
    }

    /** Wipes every object of every type from the default Realm. */
    public static void deleteAllObjects() {
        Realm realm = Realm.getDefaultInstance();
        realm.beginTransaction();
        realm.deleteAll();
        realm.commitTransaction();
        realm.close();
    }

    /**
     * Deletes all listed model types (but not chat messages), returning the stored
     * mobile number read before the wipe so the caller can keep it.
     * NOTE(review): PreferenceObject is deleted too, despite the method name implying
     * the phone number is retained — it survives only via the return value.
     */
    public static String deleteAllExceptMessagesAndPhone() {
        final String phoneNumber = RealmUtils.loadPreferencesFromDB().getMobileNumber();
        Realm realm = Realm.getDefaultInstance();
        realm.beginTransaction();
        realm.delete(Group.class);
        realm.delete(GroupJoinRequest.class);
        realm.delete(LocalGroupEdits.class);
        realm.delete(Member.class);
        realm.delete(Permission.class);
        realm.delete(PublicGroupModel.class);
        realm.delete(ResponseTotalsModel.class);
        realm.delete(TaskModel.class);
        realm.delete(TaskNotification.class);
        realm.delete(ShareModel.class);
        realm.delete(Token.class);
        realm.delete(PreferenceObject.class);
        realm.commitTransaction();
        realm.close();
        return phoneNumber;
    }

    /**
     * Upserts a list of objects on the IO scheduler, emitting true when done.
     * A null observingThread defaults to the Android main thread.
     */
    public static Observable<Boolean> saveDataToRealm(final List<? extends RealmObject> list,
                                                      Scheduler observingThread) {
        observingThread = (observingThread == null) ? AndroidSchedulers.mainThread() : observingThread;
        return Observable.create(new ObservableOnSubscribe<Boolean>() {
            @Override
            public void subscribe(ObservableEmitter<Boolean> subscriber) {
                Realm realm = Realm.getDefaultInstance();
                realm.beginTransaction();
                realm.copyToRealmOrUpdate(list);
                realm.commitTransaction();
                realm.close();
                subscriber.onNext(true);
            }
        }).subscribeOn(Schedulers.io()).observeOn(observingThread);
    }

    /** Upserts a single object on the IO scheduler; observes on the main thread. */
    public static Observable<Boolean> saveDataToRealm(final RealmObject object) {
        return Observable.create(new ObservableOnSubscribe<Boolean>() {
            @Override
            public void subscribe(ObservableEmitter<Boolean> subscriber) {
                Realm realm = Realm.getDefaultInstance();
                realm.beginTransaction();
                realm.copyToRealmOrUpdate(object);
                realm.commitTransaction();
                realm.close();
                subscriber.onNext(true);
            }
        }).subscribeOn(Schedulers.io()).observeOn(AndroidSchedulers.mainThread());
    }

    /** Synchronous upsert of a single object on the calling thread. */
    public static void saveDataToRealmSync(final RealmObject object) {
        Realm realm = Realm.getDefaultInstance();
        realm.beginTransaction();
        realm.copyToRealmOrUpdate(object);
        realm.commitTransaction();
        realm.close();
    }

    /** Synchronous upsert of a list of objects on the calling thread. */
    public static void saveDataToRealmSync(final List<? extends RealmObject> list) {
        Realm realm = Realm.getDefaultInstance();
        realm.beginTransaction();
        realm.copyToRealmOrUpdate(list);
        realm.commitTransaction();
        realm.close();
    }

    /** Fire-and-forget upsert: subscribes immediately and discards the result. */
    public static void saveDataToRealmWithSubscriber(final RealmObject object) {
        Observable.create(new ObservableOnSubscribe<Boolean>() {
            @Override
            public void subscribe(ObservableEmitter<Boolean> subscriber) {
                Realm realm = Realm.getDefaultInstance();
                realm.beginTransaction();
                realm.copyToRealmOrUpdate(object);
                realm.commitTransaction();
                realm.close();
                subscriber.onNext(true);
            }
        })
            .subscribeOn(Schedulers.io())
            .observeOn(AndroidSchedulers.mainThread())
            .subscribe(new Consumer<Boolean>() {
                @Override
                public void accept(Boolean aBoolean) {
                    // Log.d(TAG, "saved");
                }
            });
    }

    /** Convenience alias for a synchronous group upsert. */
    public static void saveGroupToRealm(Group group) {
        saveDataToRealmSync(group);
    }

    /** Emits all groups sorted by lastMajorChangeMillis, newest first. */
    public static Observable<List<Group>> loadGroupsSorted() {
        return Observable.create(new ObservableOnSubscribe<List<Group>>() {
            @Override
            public void subscribe(ObservableEmitter<List<Group>> subscriber) {
                final Realm realm = Realm.getDefaultInstance();
                List<Group> groups = realm.copyFromRealm(
                        realm.where(Group.class).findAllSorted("lastMajorChangeMillis", Sort.DESCENDING));
                subscriber.onNext(groups);
                realm.close();
            }
        }).subscribeOn(Schedulers.io()).observeOn(AndroidSchedulers.mainThread());
    }

    /**
     * Emits groups matching every filter entry, sorted by the given field/order.
     * Filter values are treated as String when they are Strings, otherwise coerced
     * to Boolean via toString() — non-boolean non-string values will not match as intended.
     */
    public static Observable<List<Group>> loadGroupsFilteredSorted(final Map<String, Object> filterMap,
                                                                   final String sortField,
                                                                   final Sort sortOrder) {
        return Observable.create(new ObservableOnSubscribe<List<Group>>() {
            @Override
            public void subscribe(ObservableEmitter<List<Group>> subscriber) {
                final Realm realm = Realm.getDefaultInstance();
                RealmQuery<? extends RealmObject> query = realm.where(Group.class);
                for (Map.Entry<String, Object> entry : filterMap.entrySet()) {
                    if (entry.getValue() instanceof String) {
                        query.equalTo(entry.getKey(), entry.getValue().toString());
                    } else {
                        query.equalTo(entry.getKey(), Boolean.valueOf(entry.getValue().toString()));
                    }
                }
                List<Group> groups = (List<Group>) realm.copyFromRealm(query.findAllSorted(sortField, sortOrder));
                subscriber.onNext(groups);
                realm.close();
            }
        }).subscribeOn(Schedulers.io()).observeOn(AndroidSchedulers.mainThread());
    }

    /** Emits all detached objects of the given model class; observes on the supplied scheduler. */
    public static <T> Observable loadListFromDB(final Class<? extends RealmObject> model,
                                                Scheduler observingThread) {
        Observable<List<RealmObject>> observable =
                Observable.create(new ObservableOnSubscribe<List<RealmObject>>() {
                    @Override
                    public void subscribe(ObservableEmitter<List<RealmObject>> subscriber) {
                        RealmList<RealmObject> objects = new RealmList<>();
                        Realm realm = Realm.getDefaultInstance();
                        objects.addAll(realm.copyFromRealm(realm.where(model).findAll()));
                        subscriber.onNext(objects);
                        realm.close();
                    }
                }).subscribeOn(Schedulers.io()).observeOn(observingThread);
        return observable;
    }

    /**
     * Emits detached objects where boolean property pName equals pValue.
     * Unlike the other overloads, this closes the Realm before emitting — safe
     * because copyFromRealm detaches the results.
     */
    public static <T> Observable loadListFromDB(final Class<? extends RealmObject> model,
                                                final String pName, final boolean pValue,
                                                Scheduler returnThread) {
        return Observable.create(new ObservableOnSubscribe<List<RealmObject>>() {
            @Override
            public void subscribe(final ObservableEmitter<List<RealmObject>> subscriber) {
                // System.out.println("load list " + Thread.currentThread().getName());
                final Realm realm = Realm.getDefaultInstance();
                List<RealmObject> realmResults = (List<RealmObject>) realm.copyFromRealm(
                        (realm.where(model).equalTo(pName, pValue).findAll()));
                realm.close();
                subscriber.onNext(realmResults);
            }
        }).subscribeOn(Schedulers.io()).observeOn(returnThread);
    }

    /** Rx wrapper around loadListFromDBInline; observes on the main thread. */
    public static Observable loadListFromDB(final Class<? extends RealmObject> model,
                                            final Map<String, Object> map) {
        return Observable.create(new ObservableOnSubscribe<List<RealmObject>>() {
            @Override
            public void subscribe(final ObservableEmitter<List<RealmObject>> subscriber) {
                RealmList<RealmObject> objects = loadListFromDBInline(model, map);
                subscriber.onNext(objects);
            }
        }).subscribeOn(Schedulers.io()).observeOn(AndroidSchedulers.mainThread());
    }

    /**
     * Blocking query returning detached objects matching every map entry
     * (String values match as strings, anything else is coerced to Boolean).
     * Must be called off the main thread; throws if called on it.
     */
    public static <T extends RealmList> T loadListFromDBInline(final Class<? extends RealmObject> model,
                                                               final Map<String, Object> map) {
        if (Looper.myLooper() == Looper.getMainLooper()) {
            throw new UnsupportedOperationException("Error! Calling inline DB query on main thread");
        }
        RealmList<RealmObject> objects = new RealmList<>();
        Realm realm = Realm.getDefaultInstance();
        RealmQuery<? extends RealmObject> query = realm.where(model);
        for (Map.Entry<String, Object> entry : map.entrySet()) {
            if (entry.getValue() instanceof String) {
                query.equalTo(entry.getKey(), entry.getValue().toString());
            } else {
                query.equalTo(entry.getKey(), Boolean.valueOf(entry.getValue().toString()));
            }
        }
        objects.addAll(realm.copyFromRealm(query.findAll()));
        realm.close();
        return (T) objects;
    }

    /** Deletes the first object whose String property pName equals pValue, if any. */
    public static void removeObjectFromDatabase(Class<? extends RealmObject> clazz,
                                                String pName, String pValue) {
        Realm realm = Realm.getDefaultInstance();
        realm.beginTransaction();
        RealmObject object = realm.where(clazz).equalTo(pName, pValue).findFirst();
        if (object != null) object.deleteFromRealm();
        realm.commitTransaction();
        realm.close();
    }

    /** Deletes every object matching all map entries (same String/Boolean coercion as above). */
    public static void removeObjectsFromDatabase(Class<? extends RealmObject> clazz,
                                                 Map<String, Object> map) {
        Realm realm = Realm.getDefaultInstance();
        RealmQuery<? extends RealmObject> query = realm.where(clazz);
        for (Map.Entry<String, Object> entry : map.entrySet()) {
            if (entry.getValue() instanceof String) {
                query.equalTo(entry.getKey(), entry.getValue().toString());
            } else {
                query.equalTo(entry.getKey(), Boolean.valueOf(entry.getValue().toString()));
            }
        }
        realm.beginTransaction();
        if (query.findAll().size() > 0) query.findAll().deleteAllFromRealm();
        realm.commitTransaction();
        realm.close();
    }

    /** Deletes all objects whose pName property matches any of the given values (OR chain). */
    public static void removeObjectsByUid(Class<? extends RealmObject> clazz,
                                          final String pName, List<String> pValues) {
        if (pValues != null && pValues.size() > 0) {
            Realm realm = Realm.getDefaultInstance();
            final int size = pValues.size();
            RealmQuery<? extends RealmObject> query = realm.where(clazz).equalTo(pName, pValues.get(0));
            for (int i = 1; i < size; i++) {
                query = query.or().equalTo(pName, pValues.get(i));
            }
            final RealmResults<? extends RealmObject> results = query.findAll();
            realm.executeTransaction(new Realm.Transaction() {
                @Override
                public void execute(Realm realm) {
                    results.deleteAllFromRealm();
                }
            });
            realm.close();
        }
    }

    /** Returns the first detached object with pName == pValue, or null if none exists. */
    public static <T extends RealmObject> T loadObjectFromDB(Class<? extends RealmObject> model,
                                                             String pName, String pValue) {
        RealmList<RealmObject> objects = new RealmList<>();
        Realm realm = Realm.getDefaultInstance();
        objects.addAll(realm.copyFromRealm(realm.where(model).equalTo(pName, pValue).findAll()));
        realm.close();
        if (!objects.isEmpty()) {
            return (T) objects.get(0);
        } else {
            return null;
        }
    }

    // only call this from background thread
    /** Counts objects matching all map entries; null values are silently skipped. */
    public static long countListInDB(final Class<? extends RealmObject> model,
                                     final Map<String, Object> map) {
        if (Looper.myLooper() == Looper.getMainLooper()) {
            throw new UnsupportedOperationException("Error! Calling inline DB query on main thread");
        }
        Realm realm = Realm.getDefaultInstance();
        RealmQuery<? extends RealmObject> query = realm.where(model);
        for (Map.Entry<String, Object> entry : map.entrySet()) {
            if (entry.getValue() instanceof String) {
                query.equalTo(entry.getKey(), entry.getValue().toString());
            } else if (entry.getValue() != null && entry.getValue() instanceof Boolean) {
                query.equalTo(entry.getKey(), Boolean.valueOf(entry.getValue().toString()));
            }
        }
        long count = query.count();
        realm.close();
        return count;
    }

    /* SECTION : methods for specific entity fetch, count, removal */

    /** Returns the stored preferences, or a fresh empty PreferenceObject if none saved yet. */
    public static PreferenceObject loadPreferencesFromDB() {
        Realm realm = Realm.getDefaultInstance();
        List<PreferenceObject> object = realm.copyFromRealm(realm.where(PreferenceObject.class).findAll());
        realm.close();
        return object.size() > 0 ? object.get(0) : new PreferenceObject();
    }

    /** Looks up a single group by its uid; null if absent. */
    public static Group loadGroupFromDB(final String groupUid) {
        return loadObjectFromDB(Group.class, "groupUid", groupUid);
    }

    /** Total number of groups stored locally. */
    public static long countGroupsInDB() {
        Realm realm = Realm.getDefaultInstance();
        long count = realm
                .where(Group.class)
                .count();
        realm.close();
        return count;
    }

    /** True if a group with this uid exists locally. */
    public static boolean groupExistsInDB(final String groupUid) {
        Realm realm = Realm.getDefaultInstance();
        long count = realm.where(Group.class).equalTo("groupUid", groupUid).count();
        realm.close();
        return count > 0;
    }

    /** Number of members stored for the given group. */
    public static long countGroupMembers(final String groupUid) {
        Realm realm = Realm.getDefaultInstance();
        long count = realm
                .where(Member.class)
                .equalTo("groupUid", groupUid)
                .count();
        realm.close();
        return count;
    }

    /**
     * Emits the group's members sorted by display name; when includeUser is true the
     * current user (matched by stored mobile number) is placed first.
     * NOTE(review): assumes a PreferenceObject row exists — get(0) throws otherwise.
     */
    public static Observable<List<Member>> loadGroupMembers(final String groupUid,
                                                            final boolean includeUser) {
        return Observable.create(new ObservableOnSubscribe<List<Member>>() {
            @Override
            public void subscribe(ObservableEmitter<List<Member>> subscriber) {
                RealmList<Member> members = new RealmList<>();
                final Realm realm = Realm.getDefaultInstance();
                final String userMsisdn = realm.where(PreferenceObject.class)
                        .findAll().get(0).getMobileNumber();
                Log.d(TAG, "REALM: total number of members in DB ... " + realm.where(Member.class).count());
                RealmQuery<Member> query;
                if (includeUser) {
                    query = realm
                            .where(Member.class)
                            .equalTo("groupUid", groupUid)
                            .equalTo("phoneNumber", userMsisdn);
                    if (query.count() != 0) {
                        members.add(realm.copyFromRealm(query.findAll().first()));
                    }
                }
                query = realm
                        .where(Member.class)
                        .equalTo("groupUid", groupUid)
                        .notEqualTo("phoneNumber", userMsisdn);
                members.addAll(realm.copyFromRealm(query.findAllSorted("displayName")));
                subscriber.onNext(members);
                realm.close();
            }
        }).subscribeOn(Schedulers.io()).observeOn(AndroidSchedulers.mainThread());
    }

    /** Number of tasks whose deadline lies in the future. */
    public static long countUpcomingTasksInDB() {
        Realm realm = Realm.getDefaultInstance();
        long count = realm
                .where(TaskModel.class)
                .greaterThan("deadlineDate", new Date())
                .count();
        realm.close();
        return count;
    }

    /** Number of tasks belonging to the given parent entity. */
    public static long countGroupTasksInDB(final String parentUid) {
        Realm realm = Realm.getDefaultInstance();
        long count = realm
                .where(TaskModel.class)
                .equalTo("parentUid", parentUid)
                .count();
        realm.close();
        return count;
    }

    /** Emits future-deadline tasks sorted by deadline, latest first. */
    public static Observable<List<TaskModel>> loadUpcomingTasks() {
        return Observable.create(new ObservableOnSubscribe<List<TaskModel>>() {
            @Override
            public void subscribe(ObservableEmitter<List<TaskModel>> subscriber) {
                RealmList<TaskModel> tasks = new RealmList<>();
                Realm realm = Realm.getDefaultInstance();
                RealmResults<TaskModel> results = realm
                        .where(TaskModel.class)
                        .greaterThan("deadlineDate", new Date())
                        .findAll()
                        .sort("deadlineDate", Sort.DESCENDING);
                tasks.addAll(realm.copyFromRealm(results));
                realm.close();
                subscriber.onNext(tasks);
            }
        }).subscribeOn(Schedulers.io()).observeOn(AndroidSchedulers.mainThread());
    }

    /** Emits a parent's tasks sorted by deadline, latest first. */
    public static Observable<List<TaskModel>> loadTasksSorted(final String parentUid) {
        return Observable.create(new ObservableOnSubscribe<List<TaskModel>>() {
            @Override
            public void subscribe(ObservableEmitter<List<TaskModel>> subscriber) {
                RealmList<TaskModel> tasks = new RealmList<>();
                final Realm realm = Realm.getDefaultInstance();
                RealmResults<TaskModel> results = realm
                        .where(TaskModel.class)
                        .equalTo("parentUid", parentUid)
                        .findAllSorted("deadlineDate", Sort.DESCENDING);
                tasks.addAll(realm.copyFromRealm(results));
                subscriber.onNext(tasks);
                realm.close();
            }
        }).subscribeOn(Schedulers.io()).observeOn(AndroidSchedulers.mainThread());
    }

    /** Total number of stored objects of the given class. */
    public static long countObjectsInDB(Class<? extends RealmObject> clazz) {
        Realm realm = Realm.getDefaultInstance();
        RealmQuery<? extends RealmObject> query = realm.where(clazz);
        long count = query.count();
        realm.close();
        return count;
    }

    /**
     * Emits a group's members (excluding the current user) sorted so that members
     * with invalid numbers come first ("isNumberInvalid" descending).
     */
    public static Observable<List<Member>> loadMembersSortedInvalid(final String groupUid) {
        return Observable.create(new ObservableOnSubscribe<List<Member>>() {
            @Override
            public void subscribe(ObservableEmitter<List<Member>> subscriber) {
                RealmList<Member> members = new RealmList<>();
                final Realm realm = Realm.getDefaultInstance();
                List<PreferenceObject> preferences = realm
                        .where(PreferenceObject.class).findAll();
                final String userMsisdn = preferences.get(0).getMobileNumber();
                RealmResults<Member> results = realm
                        .where(Member.class)
                        .equalTo("groupUid", groupUid)
                        .notEqualTo("phoneNumber", userMsisdn)
                        .findAllSorted("isNumberInvalid", Sort.DESCENDING);
                members.addAll(realm.copyFromRealm(results));
                subscriber.onNext(members);
                realm.close();
            }
        }).subscribeOn(Schedulers.io()).observeOn(AndroidSchedulers.mainThread());
    }

    /**
     * Upserts incoming notifications, then trims storage to the 100 most recent:
     * if more than 100 exist, the newest 100 are re-saved and everything beyond
     * index 100 (sorted by createdDateTime desc) is deleted one by one.
     * NOTE(review): the trim runs via a nested async subscribe, so deletion may
     * complete after this observable has already emitted true.
     */
    public static Observable saveNotificationsToRealm(final List<TaskNotification> notifications) {
        return Observable.create(new ObservableOnSubscribe<Boolean>() {
            @Override
            public void subscribe(ObservableEmitter<Boolean> subscriber) {
                Realm realm = Realm.getDefaultInstance();
                realm.beginTransaction();
                realm.copyToRealmOrUpdate(notifications);
                realm.commitTransaction();
                final List<TaskNotification> savedNotifications = RealmUtils.loadNotificationsSorted();
                if (savedNotifications.size() > 100) {
                    Log.d(TAG, "Saved objects of size " + String.valueOf(savedNotifications.size()));
                    RealmUtils.saveDataToRealm(savedNotifications.subList(0, 100), null)
                            .subscribe(new Consumer<Boolean>() {
                                @Override
                                public void accept(Boolean b) {
                                    Log.d(TAG, "Deleting objects of size " + String.valueOf(savedNotifications.size() - 100));
                                    for (TaskNotification notification : savedNotifications.subList(100, savedNotifications.size())) {
                                        Log.d(TAG, "Deleting objects " + notification.getMessage());
                                        RealmUtils.removeObjectFromDatabase(TaskNotification.class, "uid", notification.getUid());
                                    }
                                }
                            });
                }
                realm.close();
                subscriber.onNext(true);
            }
        }).subscribeOn(Schedulers.io()).observeOn(AndroidSchedulers.mainThread());
    }

    /** Blocking load of all notifications, newest (by createdDateTime) first. */
    public static List<TaskNotification> loadNotificationsSorted() {
        final Realm realm = Realm.getDefaultInstance();
        List<TaskNotification> notifications = realm.copyFromRealm(
                realm.where(TaskNotification.class).findAllSorted("createdDateTime", Sort.DESCENDING));
        realm.close();
        return notifications;
    }

    /**
     * Replaces the locally stored join requests with exactly the given list:
     * upserts the incoming requests and deletes any stored request whose uid is
     * not present in them (or deletes all when the list is empty).
     * No-ops with a log if invoked on the main thread.
     */
    public static void persistFullListJoinRequests(@NonNull final List<GroupJoinRequest> requests) {
        if (Looper.myLooper() == Looper.getMainLooper()) {
            Log.e(TAG, "Error! Calling inline DB query on main thread");
            return;
        }
        final Realm realm = Realm.getDefaultInstance();
        if (realm != null && !realm.isClosed()) {
            realm.executeTransaction(new Realm.Transaction() {
                @Override
                public void execute(Realm realm) {
                    if (!requests.isEmpty()) {
                        realm.copyToRealmOrUpdate(requests);
                        final int size = requests.size();
                        final String[] presentUids = new String[size];
                        for (int i = 0; i < size; i++) {
                            presentUids[i] = requests.get(i).getRequestUid();
                        }
                        // delete everything NOT in the freshly persisted uid set
                        realm.where(GroupJoinRequest.class)
                                .not()
                                .beginGroup()
                                .in("requestUid", presentUids)
                                .endGroup()
                                .findAll()
                                .deleteAllFromRealm();
                    } else {
                        realm.where(GroupJoinRequest.class)
                                .findAll()
                                .deleteAllFromRealm();
                    }
                }
            });
            realm.close();
        }
    }

    /* HELPER METHODS TO DEAL WITH & HANDLE STRINGS */

    /** Wraps each String of the array in a RealmString; null in, null out. */
    private static RealmString[] createRealmStringArrayFromStringArray(String[] array) {
        RealmString[] realmStrings = null;
        if (array != null) {
            realmStrings = new RealmString[array.length];
            for (int i = 0; i < array.length; i++) {
                realmStrings[i] = new RealmString(array[i]);
            }
        }
        return realmStrings;
    }

    /** Converts a plain String list into a RealmList of RealmString; null in, null out. */
    public static RealmList<RealmString> convertListOfStringInRealmListOfString(List<String> list) {
        if (list != null) {
            String[] arrayOfStrings = new String[list.size()];
            list.toArray(arrayOfStrings);
            RealmString[] arrayOfRealmStrings = createRealmStringArrayFromStringArray(arrayOfStrings);
            return new RealmList<>(arrayOfRealmStrings);
        }
        return null;
    }
}
import com.veling.io.*; import java.io.*; import java.util.*; public class Lambda { //statics public static final int NOP = 0; public static final int READ = 1; public static final int LOAD = 2; public static final int ADD = 3; public static final int PRINT = 4; public static final int PUTCHAR = 5; public static final int GETCHAR = 6; public static final int SIGNOF = 7; public static final int INSTRUCTION = 32; public static final int DATA = 34; public static final int RESULT = 36; public static final int VALUE = 255; public static void main (String[] args) { Argument argument = new Argument(); ArrayList<Argument> arguments = new ArrayList<Argument>(1); if (args.length == 0) { printUsage(); } else { for (int i=0; i < args.length; i++) { String arg = args[i]; int argLength = arg.length(); if (arg.charAt(0) == '-' && argLength >= 2) { if (arg.equals(("-raw-mode").substring(0, argLength))) { argument.raw = true; } else if (arg.equals(("-error-messages").substring(0, argLength))) { argument.error = true; } else if (arg.equals(("-debug-messages").substring(0, argLength))) { argument.debug = true; } else if (arg.equals(("-compile-only").substring(0, argLength))) { argument.interpret = false; } else if (arg.equals(("-output").substring(0, argLength))) { argument.output = args[++i]; } } else { argument.input = arg; if (argument.complete()) { arguments.add(argument); } else { System.out.println("Error: could not find input file"); arguments = new ArrayList<Argument>(); break; } } } } if (arguments.size() == 0) { printUsage(); } else { for (int i=0; i < arguments.size(); i++) { argument = arguments.get(i); String compiled = argument.output + ".L"; compile(argument, compiled); load(argument, compiled); go(argument); } } } public static void printUsage() { System.out.println("Usages: \n" + " lambda [file...]\n" + " lambda -code [code]\n" + " lambda -r\n" + " Lambda supports the following flags:\n" + " -r Raw mode: Read raw code from stdin\n" + " -e Enable error messages\n" + " 
-d Enable debug mode\n" + " -c Compile only: Dont interpret\n" + " -o [file] Change output file for current file" ); } public static void compile(Argument argument, String compiledfn) { try { FastInputStream in = new FastInputStream(new FileInputStream(argument.input)); HashMap<String, Integer> labels = new HashMap<String, Integer>(100); labels.put("nop",new Integer(NOP)); labels.put("read",new Integer(READ)); labels.put("load",new Integer(LOAD)); labels.put("add",new Integer(ADD)); labels.put("print",new Integer(PRINT)); labels.put("putchar", new Integer(PUTCHAR)); labels.put("getchar", new Integer(GETCHAR)); labels.put("signof", new Integer(SIGNOF)); labels.put("instruction",new Integer(INSTRUCTION)); labels.put("data",new Integer(DATA)); labels.put("result",new Integer(RESULT)); //put all tokens in a list ArrayList<Object> tokens = new ArrayList<Object>(1000); String line,token; StringTokenizer tokenizer; Integer value; int lastlabel = -1; try { while (!in.eof) { line = in.readLine(); //skip after comment int commentidx = line.indexOf("//"); if (commentidx>=0) { if (commentidx==0) continue; line = line.substring(0,commentidx-1); } if (line.length()==0) continue; if (argument.debug) { System.out.println("tokenizing "+(256+tokens.size())+" ["+line+"]"); } tokenizer = new StringTokenizer(line," ()",false); while (tokenizer.hasMoreTokens()) { token = tokenizer.nextToken(); if (token.endsWith(":")) { //is a label token = token.substring(0,token.length()-1); lastlabel = tokens.size(); labels.put(token,new Integer(256+lastlabel)); //put next address } else { //check for integer literals try { value = Integer.decode(token); //see if is the first after a label (a variable init) tokens.add(value); tokens.add(null); //VALUE tag in postfix //if (lastlabel!=tokens.size()) { //} } catch (NumberFormatException e) { //probably a label anyway //check for value if (token.endsWith("!")) { token = token.substring(0,token.length()-1); tokens.add(token); tokens.add(null); } else { 
tokens.add(token); } } lastlabel = -1; } } } } catch (EOFException e) {} in.close(); System.out.println("compiling"); FastOutputStream out = new FastOutputStream(new FileOutputStream(compiledfn)); Integer address; Object o; for (int i=0; i<tokens.size(); i++) { o = tokens.get(i); if (o==null) { //add value out.writeInt(VALUE); } else if (o instanceof String) { token = (String) o; //first lookup label address = (Integer) labels.get(token); if (address!=null) { //recognized label address out.writeInt(address.intValue()); } else { //invalid if (argument.error) { System.out.println("unknown label "+token); } out.close(); return; } } else if (o instanceof Integer) { value = (Integer) o; out.writeInt(value.intValue()); } else { //error if (argument.error) { System.out.println("Unknown token type found: "+o); } out.close(); return; } } out.close(); System.out.println("compiled"); } catch (IOException e) { if (argument.error) { System.out.println("catched "+e+" with message "+e.getMessage()); } } } protected static int[] memory = new int[1000]; protected static int memorysize; protected static void load(Argument argument, String fn) { //clear memory Arrays.fill(memory,0); memorysize = 256; try { FastInputStream in = new FastInputStream(new FileInputStream(fn)); try { while (!in.eof) { if (memorysize>=memory.length) { int[] newmem = new int[2*memorysize]; System.arraycopy(memory,0,newmem,0,memory.length); memory = newmem; } memory[memorysize++] = in.readInt(); } } catch (EOFException e) {} in.close(); //reset pointers memory[INSTRUCTION] = 256; memory[INSTRUCTION+1] = VALUE; memory[DATA+1] = VALUE; memory[RESULT+1] = VALUE; memory[RESULT+2] = LOAD; memory[RESULT+3] = INSTRUCTION; memory[RESULT+4] = VALUE; memory[RESULT+5] = 0; memory[RESULT+6] = VALUE; //exit if (argument.debug) { System.out.println("read "+(memorysize-256)+" tokens"); } } catch (IOException e) { if (argument.error) { System.out.println("catched "+e+" with message "+e.getMessage()); } } } protected static 
void go(Argument argument) { System.out.println("executing"); StringBuffer buf = new StringBuffer(30); while ((memory[INSTRUCTION]>0) && (memory[INSTRUCTION]<memorysize)) { buf.setLength(0); buf.append(memory[INSTRUCTION]+":"); interpret(argument, buf); if (argument.debug) { System.out.println(buf); } } if (memory[INSTRUCTION]>=memorysize) { System.out.println("INFINITY; ready."); } else { System.out.println("ready."); } } protected static int get(int idx) { if ((idx>=0) && (idx<memorysize)) { return memory[idx]; } else { return 0; } } protected static void put(int idx, int value) { if ((idx>=0) && (idx<memorysize)) { memory[idx] = value; } else { throw new RuntimeException("invalid memory address "+idx+"; cannot put "+value); } } protected static void interpret(Argument argument, StringBuffer buf) { int opcode = get(memory[INSTRUCTION]); //lookahead if (get(memory[INSTRUCTION]+1)==VALUE) { //so this one is a value after all memory[RESULT] = opcode; buf.append(" "+memory[RESULT]); memory[INSTRUCTION]+=2; return; } //if i get here, then is not a value but operand/address int address, value; switch (opcode) { case NOP: memory[INSTRUCTION]++; buf.append(" nop"); break; case READ: //evaluate next and interpret result as mem address //give back content of that address memory[INSTRUCTION]++; buf.append(" (read"); interpret(argument, buf); buf.append(")"); memory[RESULT] = get(memory[RESULT]); break; case LOAD: //evaluate two parameters and interpret first one as //address, second as value to load in it memory[INSTRUCTION]++; buf.append(" load ("); interpret(argument, buf); buf.append(") ("); address = memory[RESULT]; interpret(argument, buf); buf.append(")"); value = memory[RESULT]; //System.out.println("load buffer: "+buf); put(address,value); if (argument.debug) { System.out.println("["+address+"]<--"+value); } break; case ADD: //evaluate next two, interpret first as address, 2nd as value //add 2nd to value at 1st address and give back as result as well 
memory[INSTRUCTION]++; buf.append(" add ("); interpret(argument, buf); buf.append(") ("); address = memory[RESULT]; interpret(argument, buf); buf.append(")"); value = memory[RESULT]; put(address,get(address)+value); memory[RESULT] = get(address); if (argument.debug) { System.out.println("["+address+"]+="+value); } break; case PRINT: //evaluate what's next and print that value memory[INSTRUCTION]++; buf.append(" print"); interpret(argument, buf); if (argument.debug) { System.out.println("-->"+memory[RESULT]); } else { System.out.println(memory[RESULT]); } break; case PUTCHAR: //evaluate what's next and print that value as a character memory[INSTRUCTION]++; buf.append(" putchar"); interpret(argument, buf); value = memory[RESULT]; if (argument.debug) { System.out.println("-->'"+Character.toString((char)value)+"'"); } else { System.out.print(Character.toString((char)value)); } break; case GETCHAR: //get a character from input and put it in the adress given with argument one memory[INSTRUCTION]++; buf.append(" getchar"); interpret(argument, buf); address = memory[RESULT]; try { do { value = System.in.read(); } while (value == 13); put(address, value); //System.out.println("getchar buffer: "+buf); //evaluate what's next and print that value if (argument.debug) { System.out.println("["+address+"]<--" + value); } } catch (IOException e) { if (argument.error) { System.out.println("catched "+e+" with message "+e.getMessage()); } } break; case SIGNOF: //evalute next and give 1 as result if its not zero memory[INSTRUCTION]++; buf.append(" signof"); interpret(argument, buf); value = memory[RESULT]; memory[RESULT] = (int)Math.signum(value); if (argument.debug) { System.out.println("["+RESULT+"]="+"signof("+value+")"); } break; default: if (get(opcode+1)==VALUE) { //reference to variable value; do directly buf.append(" read "+opcode); memory[RESULT] = get(opcode); memory[INSTRUCTION]++; } else { buf.append(" jump "+opcode); //this is a goto memory[DATA] = memory[INSTRUCTION] + 1; 
//memory[RESULT] = get(opcode); memory[INSTRUCTION] = opcode; buf.append(" ("); interpret(argument, buf); buf.append(")"); } break; } } }
/** *============================================================================ * The Ohio State University Research Foundation, Emory University, * the University of Minnesota Supercomputing Institute * * Distributed under the OSI-approved BSD 3-Clause License. * See http://ncip.github.com/cagrid-grid-incubation/LICENSE.txt for details. *============================================================================ **/ /** *============================================================================ *============================================================================ **/ package org.cagrid.workflow.helper.service.globus.resource; import gov.nih.nci.cagrid.common.Utils; import gov.nih.nci.cagrid.advertisement.AdvertisementClient; import gov.nih.nci.cagrid.advertisement.exceptions.UnregistrationException; import org.cagrid.workflow.helper.common.WorkflowHelperConstants; import org.cagrid.workflow.helper.stubs.WorkflowHelperResourceProperties; import org.cagrid.workflow.helper.service.WorkflowHelperConfiguration; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.net.URL; import java.util.Calendar; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import javax.naming.Context; import javax.naming.InitialContext; import javax.xml.namespace.QName; import gov.nih.nci.cagrid.introduce.servicetools.FilePersistenceHelper; import gov.nih.nci.cagrid.introduce.servicetools.PersistenceHelper; import gov.nih.nci.cagrid.introduce.servicetools.ReflectionResource; import org.apache.axis.MessageContext; import org.apache.axis.message.MessageElement; import org.apache.axis.message.addressing.EndpointReferenceType; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.globus.mds.aggregator.types.AggregatorConfig; import 
org.globus.mds.aggregator.types.AggregatorContent; import org.globus.mds.aggregator.types.GetMultipleResourcePropertiesPollType; import org.globus.mds.servicegroup.client.ServiceGroupRegistrationParameters; import org.globus.wsrf.InvalidResourceKeyException; import org.globus.wsrf.NoSuchResourceException; import org.globus.wsrf.Constants; import org.globus.wsrf.Resource; import org.globus.wsrf.ResourceException; import org.globus.wsrf.RemoveCallback; import org.globus.wsrf.PersistenceCallback; import org.globus.wsrf.ResourceContext; import org.globus.wsrf.ResourceException; import org.globus.wsrf.ResourceContextException; import org.globus.wsrf.ResourceIdentifier; import org.globus.wsrf.ResourceKey; import org.globus.wsrf.ResourceLifetime; import org.globus.wsrf.ResourceProperties; import org.globus.wsrf.ResourceProperty; import org.globus.wsrf.ResourcePropertySet; import org.globus.wsrf.config.ContainerConfig; import org.globus.wsrf.container.ServiceHost; import org.globus.wsrf.encoding.DeserializationException; import org.globus.wsrf.encoding.ObjectDeserializer; import org.globus.wsrf.impl.ReflectionResourceProperty; import org.globus.wsrf.impl.SimpleResourceProperty; import org.globus.wsrf.impl.SimpleResourcePropertyMetaData; import org.globus.wsrf.impl.SimpleResourcePropertySet; import org.globus.wsrf.impl.security.descriptor.ResourceSecurityDescriptor; import org.globus.wsrf.impl.servicegroup.client.ServiceGroupRegistrationClient; import org.globus.wsrf.jndi.Initializable; import org.globus.wsrf.security.SecureResource; import org.globus.wsrf.utils.AddressingUtils; import org.globus.wsrf.Topic; import org.globus.wsrf.TopicList; import org.globus.wsrf.TopicListAccessor; import org.globus.wsrf.utils.SubscriptionPersistenceUtils; import org.globus.wsrf.impl.ResourcePropertyTopic; import org.globus.wsrf.impl.SimpleTopicList; import org.oasis.wsrf.lifetime.TerminationNotification; /** * DO NOT EDIT: This class is autogenerated! 
 *
 * This class is the base class of the resource type created for this service.
 * It contains accessor and utility methods for managing any resource properties
 * of these resource as well as code for registering any properties selected
 * to the index service.
 *
 * @created by Introduce Toolkit version 1.2
 *
 */
public abstract class WorkflowHelperResourceBase extends ReflectionResource implements Resource {

    static final Log logger = LogFactory.getLog(WorkflowHelperResourceBase.class);

    // Lazily resolved JNDI configuration for this service; see getConfiguration().
    private WorkflowHelperResourceConfiguration configuration;

    // this can be used to cancel the registration renewal
    private AdvertisementClient registrationClient;

    // Container base URL captured at the last (attempted) registration;
    // compared against the current container URL to detect when
    // re-registration is needed.  See refreshRegistration().
    private URL baseURL;

    private boolean beingLoaded = false;

    public WorkflowHelperResourceBase() {
    }

    /**
     * Initializes the underlying ReflectionResource, populates any resource
     * properties, and performs the initial index-service registration.
     *
     * @see org.globus.wsrf.jndi.Initializable#initialize()
     */
    public void initialize(Object resourceBean, QName resourceElementQName, Object id) throws ResourceException {
        // Call the super initialize on the ReflectionResource
        super.initialize(resourceBean, resourceElementQName, id);
        // this loads the metadata from XML files if this is the main service
        populateResourceProperties();
        // register the service to the index service
        refreshRegistration(true);
    }

    /**
     * Returns this service's configuration, looking it up via JNDI on first
     * use and caching it afterwards.  The JNDI name is derived from the
     * current message context's target service path.
     *
     * @return the configuration, or null if the JNDI lookup failed
     */
    public WorkflowHelperResourceConfiguration getConfiguration() {
        if (this.configuration != null) {
            return this.configuration;
        }
        MessageContext ctx = MessageContext.getCurrentContext();
        String servicePath = ctx.getTargetService();
        // replace the last path segment with the WorkflowHelper service name
        servicePath = servicePath.substring(0, servicePath.lastIndexOf("/"));
        servicePath += "/WorkflowHelper";
        String jndiName = Constants.JNDI_SERVICES_BASE_NAME + servicePath + "/configuration";
        logger.debug("Will read configuration from jndi name: " + jndiName);
        try {
            Context initialContext = new InitialContext();
            this.configuration = (WorkflowHelperResourceConfiguration) initialContext.lookup(jndiName);
        } catch (Exception e) {
            logger.error("when performing JNDI lookup for " + jndiName + ": " + e, e);
        }
        return this.configuration;
    }

    /**
     * This checks the configuration file, and attempts to register to the
     * IndexService if shouldPerformRegistration==true. It will first read the
     * current container URL, and compare it against the saved value. If the
     * value exists, it will only try to reregister if the values are different.
     * This exists to handle fixing the registration URL which may be incorrect
     * during initialization, then later corrected during invocation. The
     * existence of baseURL does not imply successful registration (a non-null
     * registrationClient does). We will only attempt to reregister when the URL
     * changes (to prevent attempting registration with each invocation if there
     * is a configuration problem).
     *
     * @param forceRefresh when true, re-register even if the container URL
     *                     has not changed since the last attempt
     */
    public void refreshRegistration(boolean forceRefresh) {
        if (getConfiguration().shouldPerformRegistration()) {
            // first check to see if there are any resource properties that
            // require registration
            ResourceContext ctx;
            try {
                MessageContext msgContext = MessageContext.getCurrentContext();
                if (msgContext == null) {
                    logger.error("Unable to determine message context!");
                    return;
                }
                ctx = ResourceContext.getResourceContext(msgContext);
            } catch (ResourceContextException e) {
                logger.error("Could not get ResourceContext: " + e, e);
                return;
            }
            EndpointReferenceType epr;
            try {
                // since this is a singleton, pretty sure we dont't want to
                // register the key (allows multiple instances of same service
                // on successive restarts)
                epr = AddressingUtils.createEndpointReference(ctx, null);
            } catch (Exception e) {
                logger.error("Could not form EPR: " + e, e);
                return;
            }
            ServiceGroupRegistrationParameters params = null;
            File registrationFile = new File(ContainerConfig.getBaseDirectory() + File.separator
                    + getConfiguration().getRegistrationTemplateFile());
            if (registrationFile.exists() && registrationFile.canRead()) {
                // NOTE(review): the log message text below looks garbled by a
                // search/replace ("argumentsrmation"); left as-is since it is
                // a runtime string.
                logger.debug("Loading registration argumentsrmation from:" + registrationFile);
                try {
                    params = ServiceGroupRegistrationClient.readParams(registrationFile.getAbsolutePath());
                } catch (Exception e) {
                    logger.error("Unable to read registration file:" + registrationFile, e);
                }
                // set our service's EPR as the registrant, or use the specified
                // value
                // NOTE(review): if readParams threw above, params is still null
                // here and the next line throws NullPointerException — confirm
                // whether this branch should bail out instead.
                EndpointReferenceType registrantEpr = params.getRegistrantEPR();
                if (registrantEpr == null) {
                    params.setRegistrantEPR(epr);
                }
            } else {
                logger.error("Unable to read registration file:" + registrationFile);
            }
            if (params != null) {
                // extract the poll type describing which resource properties
                // should be pushed to the index service
                AggregatorContent content = (AggregatorContent) params.getContent();
                AggregatorConfig config = content.getAggregatorConfig();
                MessageElement[] elements = config.get_any();
                GetMultipleResourcePropertiesPollType pollType = null;
                try {
                    pollType = (GetMultipleResourcePropertiesPollType) ObjectDeserializer.toObject(
                            elements[0], GetMultipleResourcePropertiesPollType.class);
                } catch (DeserializationException e1) {
                    // TODO Auto-generated catch block
                    e1.printStackTrace();
                }
                if (pollType != null) {
                    // if there are properties names that need to be registered then
                    // register them to the index service
                    if (pollType.getResourcePropertyNames() != null
                            && pollType.getResourcePropertyNames().length != 0) {
                        URL currentContainerURL = null;
                        try {
                            currentContainerURL = ServiceHost.getBaseURL();
                        } catch (IOException e) {
                            logger.error("Unable to determine container's URL! Skipping registration.", e);
                            return;
                        }
                        if (this.baseURL != null) {
                            // we've tried to register before (or we are being
                            // forced to retry)
                            // do a string comparison as we don't want to do DNS
                            // lookups for comparison
                            if (forceRefresh || !this.baseURL.equals(currentContainerURL)) {
                                // we've tried to register before, and we have a
                                // different URL now.. so cancel the old
                                // registration (if it exists), and try to redo it.
                                if (registrationClient != null) {
                                    try {
                                        this.registrationClient.unregister();
                                    } catch (UnregistrationException e) {
                                        logger.error("Problem unregistering existing registration:"
                                                + e.getMessage(), e);
                                    }
                                }
                                // save the new value
                                this.baseURL = currentContainerURL;
                                logger.info("Refreshing existing registration [container URL="
                                        + this.baseURL + "].");
                            } else {
                                // URLs are the same (and we weren't forced), so
                                // don't try to reregister
                                return;
                            }
                        } else {
                            // we've never saved the baseURL (and therefore
                            // haven't tried to register)
                            this.baseURL = currentContainerURL;
                            logger.info("Attempting registration for the first time[container URL="
                                    + this.baseURL + "].");
                        }
                        try {
                            // perform the registration for this service
                            this.registrationClient = new AdvertisementClient(params);
                            this.registrationClient.register();
                        } catch (Exception e) {
                            logger.error("Exception when trying to register service (" + epr + "): " + e, e);
                        }
                    } else {
                        logger.info("No resource properties to register for service (" + epr + ")");
                    }
                } else {
                    logger.warn("Registration file deserialized with no poll type (" + epr + ")");
                }
            } else {
                logger.warn("Registration file deserialized with returned null SeviceGroupParams");
            }
        } else {
            logger.info("Skipping registration.");
        }
    }

    // Hook for loading metadata resource properties; intentionally empty in
    // this autogenerated base class.
    private void populateResourceProperties() {
    }
}
/******************************************************************************* * Copyright (c) 2000, 2011 IBM Corporation and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * IBM Corporation - initial API and implementation *******************************************************************************/ package org.eclipse.jdt.internal.corext.refactoring.typeconstraints.types; import java.util.ArrayList; import java.util.List; import java.util.Map; import org.eclipse.core.runtime.Assert; import org.eclipse.jdt.core.dom.ITypeBinding; /** * TTypes are lightweight fully-resolved type objects that stand for {@link ITypeBinding}s. * TTypes can answer basic questions about the relationship between types. * * They do not hold references to their corresponding {@link ITypeBinding}s, and they * don't carry any information about members of a type. 
* * @see TypeEnvironment * @see TType#canAssignTo(TType) * @see HierarchyType#isSubType(HierarchyType) * @see TType#getSubTypes() */ public abstract class TType { public static final int NULL_TYPE= 1; public static final int VOID_TYPE= 2; public static final int PRIMITIVE_TYPE= 3; public static final int ARRAY_TYPE= 4; public static final int STANDARD_TYPE= 5; public static final int GENERIC_TYPE= 6; public static final int PARAMETERIZED_TYPE= 7; public static final int RAW_TYPE= 8; public static final int UNBOUND_WILDCARD_TYPE= 9; public static final int SUPER_WILDCARD_TYPE= 10; public static final int EXTENDS_WILDCARD_TYPE= 11; public static final int TYPE_VARIABLE= 12; public static final int CAPTURE_TYPE= 13; protected static final int WILDCARD_TYPE_SHIFT= 3; protected static final int ARRAY_TYPE_SHIFT= 5; private static final int F_IS_CLASS= 1 << 0; private static final int F_IS_INTERFACE= 1 << 1; private static final int F_IS_ENUM= 1 << 2; private static final int F_IS_ANNOTATION= 1 << 3; private static final int F_IS_TOP_LEVEL= 1 << 4; private static final int F_IS_NESTED= 1 << 5; private static final int F_IS_MEMBER= 1 << 6; private static final int F_IS_LOCAL= 1 << 7; private static final int F_IS_ANONYMOUS= 1 << 8; protected static final TType[] EMPTY_TYPE_ARRAY= new TType[0]; private TypeEnvironment fEnvironment; private String fBindingKey; private int fModifiers; private int fFlags; /** * Creates a new type with the given environment as an owner. * The type environment <em>must</em> call {@link #initialize(ITypeBinding)} after using this constructor. * * @param environment owner */ protected TType(TypeEnvironment environment) { fEnvironment= environment; } /** * Creates a new type with the given environment as an owner. * The type environment must <em>not</em> call {@link #initialize(ITypeBinding)} after using this constructor. 
* * @param environment owner * @param key this type's binding key */ protected TType(TypeEnvironment environment, String key) { this(environment); Assert.isNotNull(key); fBindingKey= key; } /** * Initialized the type from the given binding * * @param binding the binding to initialize from */ protected void initialize(ITypeBinding binding) { fBindingKey= binding.getKey(); Assert.isNotNull(fBindingKey); fModifiers= binding.getModifiers(); if (binding.isClass()) { fFlags= F_IS_CLASS; // the annotation test has to be done before test for interface // since annotations are interfaces as well. } else if (binding.isAnnotation()) { fFlags= F_IS_ANNOTATION | F_IS_INTERFACE; } else if (binding.isInterface()) { fFlags= F_IS_INTERFACE; } else if (binding.isEnum()) { fFlags= F_IS_ENUM; } if (binding.isTopLevel()) { fFlags|= F_IS_TOP_LEVEL; } else if (binding.isNested()) { fFlags|= F_IS_NESTED; if (binding.isMember()) { fFlags|= F_IS_MEMBER; } else if (binding.isLocal()) { fFlags|= F_IS_LOCAL; } else if (binding.isAnonymous()) { fFlags|= F_IS_ANONYMOUS; } } } /** * Returns the type's environment * * @return the types's environment */ public TypeEnvironment getEnvironment() { return fEnvironment; } /** * Returns the key of the binding from which this type * got constructed. * * @return the binding key */ public String getBindingKey() { return fBindingKey; } /** * Returns the modifiers for this type. * * @return the bit-wise or of <code>Modifier</code> constants * @see org.eclipse.jdt.core.dom.IBinding#getModifiers() * @see org.eclipse.jdt.core.dom.Modifier */ public int getModifiers() { return fModifiers; } /** * Returns the element kind * * @return the element kind. */ public abstract int getKind(); /** * Returns whether this type represents <code>java.lang.Object</code> or * not. 
* * @return whether this type is <code>java.lang.Object</code> or not */ public boolean isJavaLangObject() { return false; } /** * Returns whether this type represents <code>java.lang.Cloneable</code> * or not. * * @return whether this type is <code>java.lang.Cloneable</code> or not */ public boolean isJavaLangCloneable() { return false; } /** * Returns whether this type represents <code>java.io.Serializable</code> * or not. * * @return whether this type is <code>java.io.Serializable</code> or not */ public boolean isJavaIoSerializable() { return false; } /** * Returns <code>true</code> if the given type represents the null type. * Otherwise <code>false</code> is returned. * * @return whether this type is the null type or not */ public final boolean isNullType() { return getKind() == NULL_TYPE; } /** * Returns <code>true</code> if the given type represents the void type. * Otherwise <code>false</code> is returned. * * @return whether this type is the void type or not */ public final boolean isVoidType() { return getKind() == VOID_TYPE; } /** * Returns <code>true</code> if the given type represents a primitive type. * Otherwise <code>false</code> is returned. * * @return whether this type is a primitive type or not */ public final boolean isPrimitiveType() { return getKind() == PRIMITIVE_TYPE; } /** * Returns <code>true</code> if the given type represents an array type. * Otherwise <code>false</code> is returned. * * @return whether this type is an array type or not */ public final boolean isArrayType() { return getKind() == ARRAY_TYPE; } /** * Returns <code>true</code> if the given type represents a hierarchy type. * Otherwise <code>false</code> is returned. 
* * @return whether this type is a hierarchy type or not */ public final boolean isHierarchyType() { int elementType= getKind(); return elementType == RAW_TYPE || elementType == PARAMETERIZED_TYPE || elementType == GENERIC_TYPE || elementType == STANDARD_TYPE; } /** * Returns <code>true</code> if the given type represents a standard type. * Otherwise <code>false</code> is returned. * * @return whether this type is a standard type or not */ public final boolean isStandardType() { return getKind() == STANDARD_TYPE; } /** * Returns <code>true</code> if the given type represents a raw type. * Otherwise <code>false</code> is returned. * * @return whether this type is a raw type or not */ public final boolean isRawType() { return getKind() == RAW_TYPE; } /** * Returns <code>true</code> if the given type represents a parameterized type. * Otherwise <code>false</code> is returned. * * @return whether this type is a parameterized type or not */ public final boolean isParameterizedType() { return getKind() == PARAMETERIZED_TYPE; } /** * Returns <code>true</code> if the given type represents a generic type. * Otherwise <code>false</code> is returned. * * @return whether this type is a generic type or not */ public final boolean isGenericType() { return getKind() == GENERIC_TYPE; } /** * Returns <code>true</code> if the given type represents a type variable. * Otherwise <code>false</code> is returned. * * @return whether this type is a type variable or not */ public final boolean isTypeVariable() { return getKind() == TYPE_VARIABLE; } /** * Returns <code>true</code> if the given type represents a capture type. * Otherwise <code>false</code> is returned. * * @return whether this type is a capture type or not */ public final boolean isCaptureType() { return getKind() == CAPTURE_TYPE; } /** * Returns <code>true</code> if the given type represents a wildcard type. * Otherwise <code>false</code> is returned. 
* @return whether this type is a wildcard type or not
 */
public final boolean isWildcardType() {
    int elementType= getKind();
    // A wildcard is any of the three JLS3 forms: "? extends X", "?", "? super X".
    return elementType == EXTENDS_WILDCARD_TYPE || elementType == UNBOUND_WILDCARD_TYPE || elementType == SUPER_WILDCARD_TYPE;
}

/**
 * Returns <code>true</code> if the given type represents an unbound wildcard type.
 * Otherwise <code>false</code> is returned.
 *
 * @return whether this type is an unbound wildcard type or not
 */
public final boolean isUnboundWildcardType() {
    return getKind() == UNBOUND_WILDCARD_TYPE;
}

/**
 * Returns <code>true</code> if the given type represents an extends wildcard type.
 * Otherwise <code>false</code> is returned.
 *
 * @return whether this type is an extends wildcard type or not
 */
public final boolean isExtendsWildcardType() {
    return getKind() == EXTENDS_WILDCARD_TYPE;
}

/**
 * Returns <code>true</code> if the given type represents a super wildcard type.
 * Otherwise <code>false</code> is returned.
 *
 * @return whether this type is a super wildcard type or not
 */
public final boolean isSuperWildcardType() {
    return getKind() == SUPER_WILDCARD_TYPE;
}

/**
 * Returns whether this type represents a class.
 *
 * @return whether this type represents a class
 * @see ITypeBinding#isClass()
 */
public final boolean isClass() {
    return (fFlags & F_IS_CLASS) != 0;
}

/**
 * Returns whether this type represents an interface.
 *
 * @return whether this type represents an interface
 * @see ITypeBinding#isInterface()
 */
public final boolean isInterface() {
    return (fFlags & F_IS_INTERFACE) != 0;
}

/**
 * Returns whether this type represents an enumeration.
 *
 * @return whether this type represents an enumeration
 * @see ITypeBinding#isEnum()
 */
public final boolean isEnum() {
    return (fFlags & F_IS_ENUM) != 0;
}

/**
 * Returns whether this type represents an annotation.
 *
 * @return whether this type represents an annotation
 * @see ITypeBinding#isAnnotation()
 */
public final boolean isAnnotation() {
    return (fFlags & F_IS_ANNOTATION) != 0;
}

/**
 * Returns whether this type represents a top level type.
 *
 * @return whether this type represents a top level type
 * @see ITypeBinding#isTopLevel()
 */
public final boolean isTopLevel() {
    return (fFlags & F_IS_TOP_LEVEL) != 0;
}

/**
 * Returns whether this type represents a nested type.
 *
 * @return whether this type represents a nested type
 * @see ITypeBinding#isNested()
 */
public final boolean isNested() {
    return (fFlags & F_IS_NESTED) != 0;
}

/**
 * Returns whether this type represents a member type.
 *
 * @return whether this type represents a member type
 * @see ITypeBinding#isMember()
 */
public final boolean isMember() {
    return (fFlags & F_IS_MEMBER) != 0;
}

/**
 * Returns whether this type represents a local type.
 *
 * @return whether this type represents a local type
 * @see ITypeBinding#isLocal()
 */
public final boolean isLocal() {
    return (fFlags & F_IS_LOCAL) != 0;
}

/**
 * Returns whether this type represents an anonymous type.
 *
 * @return whether this type represents an anonymous type
 * @see ITypeBinding#isAnonymous()
 */
public final boolean isAnonymous() {
    return (fFlags & F_IS_ANONYMOUS) != 0;
}

/**
 * Returns the super class of this type or <code>null</code>.
 * This base implementation always returns <code>null</code>; subclasses that
 * model class types are expected to override.
 *
 * @return the super class of this type
 */
public TType getSuperclass() {
    return null;
}

/**
 * Returns the interfaces this type implements or extends.
 * This base implementation returns an empty array; subclasses override as needed.
 *
 * @return the "super" interfaces or an empty array
 */
public TType[] getInterfaces() {
    return EMPTY_TYPE_ARRAY;
}

// Compares this type with an ITypeBinding by binding key; null bindings never match.
public boolean isEqualTo(ITypeBinding binding) {
    if (binding == null)
        return false;
    return binding.getKey().equals(fBindingKey);
}

/**
 * {@inheritDoc}
 */
@Override
public final boolean equals(Object other) {
    if (this == other)
        return true;
    if (!(other instanceof TType))
        return false;
    TType otherType= (TType)other;
    // Kinds must match before delegating to the subclass-specific comparison.
    if (getKind() != otherType.getKind())
        return false;
    return doEquals(otherType);
}

// Subclasses must supply a hashCode consistent with doEquals.
@Override
public abstract int hashCode();

/**
 * Performs the actual equals check.
 *
 * @param type The right hand side of the equals operation. The dynamic type
 *         of the actual argument must be the same as the receiver type.
 * @return <code>true</code> iff this type is the same as the argument
 */
protected abstract boolean doEquals(TType type);

/**
 * Returns the erasure of this type as defined by ITypeBinding#getErasure().
 * The base implementation returns the receiver itself.
 *
 * @return the erasure of this type
 */
public TType getErasure() {
    return this;
}

/**
 * Returns the type for the type declaration corresponding to this type.
 * The base implementation returns the receiver itself.
 *
 * @return the type representing the declaration of this type
 * @see ITypeBinding#getTypeDeclaration()
 */
public TType getTypeDeclaration() {
    return this;
}

/**
 * @return direct subtypes of this type
 * @throws IllegalStateException if this type's TypeEnvironment
 *         was not created with rememberSubtypes == true
 */
public TType[] getSubTypes() throws IllegalStateException {
    Map<TType, ArrayList<TType>> subTypes= fEnvironment.getSubTypes();
    if (subTypes == null)
        throw new IllegalStateException("This TypeEnvironment does not remember subtypes"); //$NON-NLS-1$
    List<TType> subtypes= subTypes.get(this);
    if (subtypes == null)
        return EMPTY_TYPE_ARRAY;
    else
        return subtypes.toArray(new TType[subtypes.size()]);
}

/**
 * Answer <code>true</code> if the receiver of this method can be assigned
 * to the argument lhs (e.g lhs= this is a valid assignment).
 *
 * @param lhs the left hand side of the assignment
 * @return whether or not this type can be assigned to lhs
 */
public final boolean canAssignTo(TType lhs) {
    // Type-equivalent types are trivially assignable; otherwise defer to subclass.
    if (this.isTypeEquivalentTo(lhs))
        return true;
    return doCanAssignTo(lhs);
}

/**
 * Returns whether the receiver type is type equivalent to the other type.
 * This method considers the erasure for generic, raw and parameterized
 * types.
 *
 * @param other the other type
 * @return whether the receiver is type equivalent to other
 */
protected boolean isTypeEquivalentTo(TType other) {
    return this.equals(other);
}

/**
 * Checks whether the <code>this</code> left hand side type interpreted as
 * a type argument of a parameterized type is compatible with the given type
 * <code>rhs</code>. For example if
 * <code>List&lt;this&gt;= List&lt;rhs&gt;</code> is a valid assignment.
 *
 * @param rhs the right-hand-side type
 * @return <code>true</code> iff <code>this</code> contains <code>rhs</code> according to JLS3 4.5.1.1
 */
protected boolean checkTypeArgument(TType rhs) {
    return this.equals(rhs);
}

/**
 * Hook method to perform the actual can assign test
 *
 * @param lhs the left hand side of the assignment
 * @return whether or not this type can be assigned to lhs
 */
protected abstract boolean doCanAssignTo(TType lhs);

/**
 * Returns the name of this type as defined by {@link ITypeBinding#getName()}.
 *
 * @return the name of this type
 * @see ITypeBinding#getName()
 */
public abstract String getName();

/**
 * Returns a signature of this type which can be presented to the user.
 *
 * @return a pretty signature for this type
 */
public String getPrettySignature() {
    return getPlainPrettySignature();
}

/**
 * Computes a plain pretty signature. For type with bounds (e.g
 * type variables and wildcards) the plain signature is different
 * than the full pretty signature.
 *
 * @return a plain pretty signature for this type
 */
protected abstract String getPlainPrettySignature();

/**
 * {@inheritDoc}
 */
@Override
public String toString() {
    return getPrettySignature();
}
}
/*
 * Copyright (C) 2017 The Guava Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */

package com.google.common.primitives;

import static com.google.common.testing.SerializableTester.reserialize;
import static com.google.common.truth.Truth.assertThat;

import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ObjectArrays;
import com.google.common.collect.testing.ListTestSuiteBuilder;
import com.google.common.collect.testing.SampleElements;
import com.google.common.collect.testing.TestListGenerator;
import com.google.common.collect.testing.features.CollectionFeature;
import com.google.common.collect.testing.features.CollectionSize;
import com.google.common.testing.EqualsTester;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.LongStream;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;

/**
 * Tests for {@code ImmutableLongArray}: creation paths, accessors, sub-array views,
 * trimming, equality and serialization, plus a GWT-incompatible collection test suite
 * that exercises {@code asList()} views.
 *
 * @author Kevin Bourrillion
 */
@GwtCompatible(emulated = true)
public class ImmutableLongArrayTest extends TestCase {
  // Test all creation paths very lazily: by assuming asList() works

  public void testOf0() {
    assertThat(ImmutableLongArray.of().asList()).isEmpty();
  }

  public void testOf1() {
    assertThat(ImmutableLongArray.of(0).asList()).containsExactly(0L);
  }

  public void testOf2() {
    assertThat(ImmutableLongArray.of(0, 1).asList()).containsExactly(0L, 1L).inOrder();
  }

  public void testOf3() {
    assertThat(ImmutableLongArray.of(0, 1, 3).asList()).containsExactly(0L, 1L, 3L).inOrder();
  }

  public void testOf4() {
    assertThat(ImmutableLongArray.of(0, 1, 3, 6).asList())
        .containsExactly(0L, 1L, 3L, 6L)
        .inOrder();
  }

  public void testOf5() {
    assertThat(ImmutableLongArray.of(0, 1, 3, 6, 10).asList())
        .containsExactly(0L, 1L, 3L, 6L, 10L)
        .inOrder();
  }

  public void testOf6() {
    assertThat(ImmutableLongArray.of(0, 1, 3, 6, 10, 15).asList())
        .containsExactly(0L, 1L, 3L, 6L, 10L, 15L)
        .inOrder();
  }

  public void testOf7() {
    assertThat(ImmutableLongArray.of(0, 1, 3, 6, 10, 15, 21).asList())
        .containsExactly(0L, 1L, 3L, 6L, 10L, 15L, 21L)
        .inOrder();
  }

  public void testCopyOf_array_empty() {
    /*
     * We don't guarantee the same-as property, so we aren't obligated to test it. However, it's
     * useful in testing - when two things are the same then one can't have bugs the other doesn't.
     */
    assertThat(ImmutableLongArray.copyOf(new long[0])).isSameAs(ImmutableLongArray.of());
  }

  public void testCopyOf_array_nonempty() {
    long[] array = new long[] {0, 1, 3};
    ImmutableLongArray iia = ImmutableLongArray.copyOf(array);
    // Mutating the source array afterwards must not affect the copy.
    array[2] = 2;
    assertThat(iia.asList()).containsExactly(0L, 1L, 3L).inOrder();
  }

  public void testCopyOf_iterable_notCollection_empty() {
    Iterable<Long> iterable = iterable(Collections.<Long>emptySet());
    assertThat(ImmutableLongArray.copyOf(iterable)).isSameAs(ImmutableLongArray.of());
  }

  public void testCopyOf_iterable_notCollection_nonempty() {
    List<Long> list = Arrays.asList(0L, 1L, 3L);
    ImmutableLongArray iia = ImmutableLongArray.copyOf(iterable(list));
    // Mutating the source list afterwards must not affect the copy.
    list.set(2, 2L);
    assertThat(iia.asList()).containsExactly(0L, 1L, 3L).inOrder();
  }

  public void testCopyOf_iterable_collection_empty() {
    Iterable<Long> iterable = Collections.emptySet();
    assertThat(ImmutableLongArray.copyOf(iterable)).isSameAs(ImmutableLongArray.of());
  }

  public void testCopyOf_iterable_collection_nonempty() {
    List<Long> list = Arrays.asList(0L, 1L, 3L);
    // Static type is Iterable so the copyOf(Iterable) overload is exercised.
    ImmutableLongArray iia = ImmutableLongArray.copyOf((Iterable<Long>) list);
    list.set(2, 2L);
    assertThat(iia.asList()).containsExactly(0L, 1L, 3L).inOrder();
  }

  public void testCopyOf_collection_empty() {
    Collection<Long> iterable = Collections.emptySet();
    assertThat(ImmutableLongArray.copyOf(iterable)).isSameAs(ImmutableLongArray.of());
  }

  public void testCopyOf_collection_nonempty() {
    List<Long> list = Arrays.asList(0L, 1L, 3L);
    ImmutableLongArray iia = ImmutableLongArray.copyOf(list);
    list.set(2, 2L);
    assertThat(iia.asList()).containsExactly(0L, 1L, 3L).inOrder();
  }

  public void testCopyOf_stream() {
    assertThat(ImmutableLongArray.copyOf(LongStream.empty())).isSameAs(ImmutableLongArray.of());
    assertThat(ImmutableLongArray.copyOf(LongStream.of(0, 1, 3)).asList())
        .containsExactly(0L, 1L, 3L)
        .inOrder();
  }

  public void testBuilder_presize_zero() {
    // A zero-capacity builder must still grow to accept elements.
    ImmutableLongArray.Builder builder = ImmutableLongArray.builder(0);
    builder.add(5L);
    ImmutableLongArray array = builder.build();
    assertThat(array.asList()).containsExactly(5L);
  }

  public void testBuilder_presize_negative() {
    try {
      ImmutableLongArray.builder(-1);
      fail();
    } catch (IllegalArgumentException expected) {
    }
  }

  /**
   * If there's a bug in builder growth, we wouldn't know how to expose it. So, brute force the hell
   * out of it for a while and see what happens.
   */
  public void testBuilder_bruteForce() {
    for (long i = 0; i < 100; i++) {
      ImmutableLongArray.Builder builder = ImmutableLongArray.builder(RANDOM.nextInt(20));
      AtomicLong counter = new AtomicLong(0);
      while (counter.get() < 1000) {
        BuilderOp op = BuilderOp.randomOp();
        op.doIt(builder, counter);
      }
      ImmutableLongArray iia = builder.build();
      // The ops append 0, 1, 2, ... in order, so index j must hold value j.
      for (int j = 0; j < iia.length(); j++) {
        assertThat(iia.get(j)).isEqualTo((long) j);
      }
    }
  }

  // Each op appends the next run of consecutive counter values via a different addAll overload.
  private enum BuilderOp {
    ADD_ONE {
      @Override
      void doIt(ImmutableLongArray.Builder builder, AtomicLong counter) {
        builder.add(counter.getAndIncrement());
      }
    },
    ADD_ARRAY {
      @Override
      void doIt(ImmutableLongArray.Builder builder, AtomicLong counter) {
        long[] array = new long[RANDOM.nextInt(10)];
        for (int i = 0; i < array.length; i++) {
          array[i] = counter.getAndIncrement();
        }
        builder.addAll(array);
      }
    },
    ADD_COLLECTION {
      @Override
      void doIt(ImmutableLongArray.Builder builder, AtomicLong counter) {
        List<Long> list = new ArrayList<>();
        long num = RANDOM.nextInt(10);
        for (int i = 0; i < num; i++) {
          list.add(counter.getAndIncrement());
        }
        builder.addAll(list);
      }
    },
    ADD_ITERABLE {
      @Override
      void doIt(ImmutableLongArray.Builder builder, AtomicLong counter) {
        List<Long> list = new ArrayList<>();
        long num = RANDOM.nextInt(10);
        for (int i = 0; i < num; i++) {
          list.add(counter.getAndIncrement());
        }
        builder.addAll(iterable(list));
      }
    },
    ADD_STREAM {
      @Override
      void doIt(ImmutableLongArray.Builder builder, AtomicLong counter) {
        long[] array = new long[RANDOM.nextInt(10)];
        for (int i = 0; i < array.length; i++) {
          array[i] = counter.getAndIncrement();
        }
        builder.addAll(Arrays.stream(array));
      }
    },
    ADD_IIA {
      @Override
      void doIt(ImmutableLongArray.Builder builder, AtomicLong counter) {
        long[] array = new long[RANDOM.nextInt(10)];
        for (int i = 0; i < array.length; i++) {
          array[i] = counter.getAndIncrement();
        }
        builder.addAll(ImmutableLongArray.copyOf(array));
      }
    },
    ADD_LARGER_ARRAY {
      @Override
      void doIt(ImmutableLongArray.Builder builder, AtomicLong counter) {
        // Large enough to force at least one growth step per call.
        long[] array = new long[RANDOM.nextInt(200) + 200];
        for (int i = 0; i < array.length; i++) {
          array[i] = counter.getAndIncrement();
        }
        builder.addAll(array);
      }
    },
    ;

    static final BuilderOp[] values = values();

    static BuilderOp randomOp() {
      return values[RANDOM.nextInt(values.length)];
    }

    abstract void doIt(ImmutableLongArray.Builder builder, AtomicLong counter);
  }

  // Fixed seed keeps the brute-force test deterministic across runs.
  private static final Random RANDOM = new Random(42);

  public void testLength() {
    assertThat(ImmutableLongArray.of().length()).isEqualTo(0);
    assertThat(ImmutableLongArray.of(0).length()).isEqualTo(1);
    assertThat(ImmutableLongArray.of(0, 1, 3).length()).isEqualTo(3);
    assertThat(ImmutableLongArray.of(0, 1, 3).subArray(1, 1).length()).isEqualTo(0);
    assertThat(ImmutableLongArray.of(0, 1, 3).subArray(1, 2).length()).isEqualTo(1);
  }

  public void testIsEmpty() {
    assertThat(ImmutableLongArray.of().isEmpty()).isTrue();
    assertThat(ImmutableLongArray.of(0).isEmpty()).isFalse();
    assertThat(ImmutableLongArray.of(0, 1, 3).isEmpty()).isFalse();
    assertThat(ImmutableLongArray.of(0, 1, 3).subArray(1, 1).isEmpty()).isTrue();
    assertThat(ImmutableLongArray.of(0, 1, 3).subArray(1, 2).isEmpty()).isFalse();
  }

  public void testGet_good() {
    ImmutableLongArray iia = ImmutableLongArray.of(0, 1, 3);
    assertThat(iia.get(0)).isEqualTo(0L);
    assertThat(iia.get(2)).isEqualTo(3L);
    assertThat(iia.subArray(1, 3).get(1)).isEqualTo(3L);
  }

  public void testGet_bad() {
    ImmutableLongArray iia = ImmutableLongArray.of(0, 1, 3);
    try {
      iia.get(-1);
      fail();
    } catch (IndexOutOfBoundsException expected) {
    }
    try {
      iia.get(3);
      fail();
    } catch (IndexOutOfBoundsException expected) {
    }

    // Sub-array views must range-check against their own bounds too.
    iia = iia.subArray(1, 2);
    try {
      iia.get(-1);
      fail();
    } catch (IndexOutOfBoundsException expected) {
    }
  }

  public void testIndexOf() {
    ImmutableLongArray iia = ImmutableLongArray.of(1, 1, 2, 3, 5, 8);
    assertThat(iia.indexOf(1)).isEqualTo(0);
    assertThat(iia.indexOf(8)).isEqualTo(5);
    assertThat(iia.indexOf(4)).isEqualTo(-1);
    assertThat(ImmutableLongArray.of(13).indexOf(13)).isEqualTo(0);
    assertThat(ImmutableLongArray.of().indexOf(21)).isEqualTo(-1);
    assertThat(iia.subArray(1, 5).indexOf(1)).isEqualTo(0);
  }

  public void testLastIndexOf() {
    ImmutableLongArray iia = ImmutableLongArray.of(1, 1, 2, 3, 5, 8);
    assertThat(iia.lastIndexOf(1)).isEqualTo(1);
    assertThat(iia.lastIndexOf(8)).isEqualTo(5);
    assertThat(iia.lastIndexOf(4)).isEqualTo(-1);
    assertThat(ImmutableLongArray.of(13).lastIndexOf(13)).isEqualTo(0);
    assertThat(ImmutableLongArray.of().lastIndexOf(21)).isEqualTo(-1);
    assertThat(iia.subArray(1, 5).lastIndexOf(1)).isEqualTo(0);
  }

  public void testContains() {
    ImmutableLongArray iia = ImmutableLongArray.of(1, 1, 2, 3, 5, 8);
    assertThat(iia.contains(1)).isTrue();
    assertThat(iia.contains(8)).isTrue();
    assertThat(iia.contains(4)).isFalse();
    assertThat(ImmutableLongArray.of(13).contains(13)).isTrue();
    assertThat(ImmutableLongArray.of().contains(21)).isFalse();
    assertThat(iia.subArray(1, 5).contains(1)).isTrue();
  }

  public void testForEach() {
    ImmutableLongArray.of().forEach(i -> fail());
    ImmutableLongArray.of(0, 1, 3).subArray(1, 1).forEach(i -> fail());

    AtomicLong count = new AtomicLong(0);
    ImmutableLongArray.of(0, 1, 2, 3)
        .forEach(i -> assertThat(i).isEqualTo(count.getAndIncrement()));
    assertEquals(4, count.get());
  }

  public void testStream() {
    ImmutableLongArray.of().stream().forEach(i -> fail());
    ImmutableLongArray.of(0, 1, 3).subArray(1, 1).stream().forEach(i -> fail());
    assertThat(ImmutableLongArray.of(0, 1, 3).stream().toArray()).isEqualTo(new long[] {0, 1, 3});
  }

  public void testSubArray() {
    ImmutableLongArray iia0 = ImmutableLongArray.of();
    ImmutableLongArray iia1 = ImmutableLongArray.of(5);
    ImmutableLongArray iia3 = ImmutableLongArray.of(5, 25, 125);

    assertThat(iia0.subArray(0, 0)).isSameAs(ImmutableLongArray.of());
    assertThat(iia1.subArray(0, 0)).isSameAs(ImmutableLongArray.of());
    assertThat(iia1.subArray(1, 1)).isSameAs(ImmutableLongArray.of());
    assertThat(iia1.subArray(0, 1).asList()).containsExactly(5L);
    assertThat(iia3.subArray(0, 2).asList()).containsExactly(5L, 25L).inOrder();
    assertThat(iia3.subArray(1, 3).asList()).containsExactly(25L, 125L).inOrder();

    try {
      iia3.subArray(-1, 1);
      fail();
    } catch (IndexOutOfBoundsException expected) {
    }
    try {
      iia3.subArray(1, 4);
      fail();
    } catch (IndexOutOfBoundsException expected) {
    }
  }

  /*
   * Whenever an implementation uses `instanceof` on a parameter instance, the test has to know that
   * (so much for "black box") and try instances that both do and don't pass the check. The "don't"
   * half of that is more awkward to arrange...
   */
  private static <T> Iterable<T> iterable(final Collection<T> collection) {
    // return collection::iterator;
    return new Iterable<T>() {
      @Override
      public Iterator<T> iterator() {
        return collection.iterator();
      }
    };
  }

  public void testEquals() {
    new EqualsTester()
        .addEqualityGroup(ImmutableLongArray.of())
        .addEqualityGroup(
            ImmutableLongArray.of(1, 2),
            reserialize(ImmutableLongArray.of(1, 2)),
            ImmutableLongArray.of(0, 1, 2, 3).subArray(1, 3))
        .addEqualityGroup(ImmutableLongArray.of(1, 3))
        .addEqualityGroup(ImmutableLongArray.of(1, 2, 3))
        .testEquals();
  }

  /**
   * This is probably a weird and hacky way to test what we're really trying to test, but hey, it
   * caught a bug.
   */
  public void testTrimmed() {
    ImmutableLongArray iia = ImmutableLongArray.of(0, 1, 3);
    assertDoesntActuallyTrim(iia);
    assertDoesntActuallyTrim(iia.subArray(0, 3));
    assertActuallyTrims(iia.subArray(0, 2));
    assertActuallyTrims(iia.subArray(1, 3));

    ImmutableLongArray rightSized = ImmutableLongArray.builder(3).add(0).add(1).add(3).build();
    assertDoesntActuallyTrim(rightSized);

    ImmutableLongArray overSized = ImmutableLongArray.builder(3).add(0).add(1).build();
    assertActuallyTrims(overSized);

    ImmutableLongArray underSized = ImmutableLongArray.builder(2).add(0).add(1).add(3).build();
    assertActuallyTrims(underSized);
  }

  @GwtIncompatible // SerializableTester
  public void testSerialization() {
    assertThat(reserialize(ImmutableLongArray.of())).isSameAs(ImmutableLongArray.of());
    assertThat(reserialize(ImmutableLongArray.of(0, 1).subArray(1, 1)))
        .isSameAs(ImmutableLongArray.of());

    ImmutableLongArray iia = ImmutableLongArray.of(0, 1, 3, 6).subArray(1, 3);
    ImmutableLongArray iia2 = reserialize(iia);
    assertThat(iia2).isEqualTo(iia);
    assertDoesntActuallyTrim(iia2);
  }

  private static void assertActuallyTrims(ImmutableLongArray iia) {
    ImmutableLongArray trimmed = iia.trimmed();
    assertThat(trimmed).isNotSameAs(iia);

    // Yes, this is apparently how you check array equality in Truth
    assertThat(trimmed.toArray()).isEqualTo(iia.toArray());
  }

  private static void assertDoesntActuallyTrim(ImmutableLongArray iia) {
    assertThat(iia.trimmed()).isSameAs(iia);
  }

  @GwtIncompatible // suite
  public static Test suite() {
    List<ListTestSuiteBuilder<Long>> builders =
        ImmutableList.of(
            ListTestSuiteBuilder.using(new ImmutableLongArrayAsListGenerator())
                .named("ImmutableLongArray.asList"),
            ListTestSuiteBuilder.using(new ImmutableLongArrayHeadSubListAsListGenerator())
                .named("ImmutableLongArray.asList, head subList"),
            ListTestSuiteBuilder.using(new ImmutableLongArrayTailSubListAsListGenerator())
                .named("ImmutableLongArray.asList, tail subList"),
            ListTestSuiteBuilder.using(new ImmutableLongArrayMiddleSubListAsListGenerator())
                .named("ImmutableLongArray.asList, middle subList"));

    TestSuite suite = new TestSuite();
    for (ListTestSuiteBuilder<Long> builder : builders) {
      suite.addTest(
          builder
              .withFeatures(
                  CollectionSize.ZERO,
                  CollectionSize.ONE,
                  CollectionSize.SEVERAL,
                  CollectionFeature.ALLOWS_NULL_QUERIES,
                  CollectionFeature.RESTRICTS_ELEMENTS,
                  CollectionFeature.KNOWN_ORDER /*, SERIALIZABLE_INCLUDING_VIEWS*/)
              .createTestSuite());
    }
    return suite;
  }

  @GwtIncompatible // used only from suite
  private static ImmutableLongArray makeArray(Long[] values) {
    return ImmutableLongArray.copyOf(Arrays.asList(values));
  }

  // Test generators. To let the GWT test suite generator access them, they need to be public named
  // classes with a public default constructor (not that we run these suites under GWT yet).

  @GwtIncompatible // used only from suite
  public static final class ImmutableLongArrayAsListGenerator extends TestLongListGenerator {
    @Override
    protected List<Long> create(Long[] elements) {
      return makeArray(elements).asList();
    }
  }

  @GwtIncompatible // used only from suite
  public static final class ImmutableLongArrayHeadSubListAsListGenerator
      extends TestLongListGenerator {
    @Override
    protected List<Long> create(Long[] elements) {
      Long[] suffix = {Long.MIN_VALUE, Long.MAX_VALUE};
      Long[] all = concat(elements, suffix);
      return makeArray(all).subArray(0, elements.length).asList();
    }
  }

  @GwtIncompatible // used only from suite
  public static final class ImmutableLongArrayTailSubListAsListGenerator
      extends TestLongListGenerator {
    @Override
    protected List<Long> create(Long[] elements) {
      Long[] prefix = {86L, 99L};
      Long[] all = concat(prefix, elements);
      return makeArray(all).subArray(2, elements.length + 2).asList();
    }
  }

  @GwtIncompatible // used only from suite
  public static final class ImmutableLongArrayMiddleSubListAsListGenerator
      extends TestLongListGenerator {
    @Override
    protected List<Long> create(Long[] elements) {
      Long[] prefix = {Long.MIN_VALUE, Long.MAX_VALUE};
      Long[] suffix = {86L, 99L};
      Long[] all = concat(concat(prefix, elements), suffix);
      return makeArray(all).subArray(2, elements.length + 2).asList();
    }
  }

  @GwtIncompatible // used only from suite
  private static Long[] concat(Long[] a, Long[] b) {
    return ObjectArrays.concat(a, b, Long.class);
  }

  @GwtIncompatible // used only from suite
  public abstract static class TestLongListGenerator implements TestListGenerator<Long> {
    @Override
    public SampleElements<Long> samples() {
      return new SampleLongs();
    }

    @Override
    public List<Long> create(Object... elements) {
      Long[] array = new Long[elements.length];
      int i = 0;
      for (Object e : elements) {
        array[i++] = (Long) e;
      }
      return create(array);
    }

    /**
     * Creates a new collection containing the given elements; implement this method instead of
     * {@link #create(Object...)}.
     */
    protected abstract List<Long> create(Long[] elements);

    @Override
    public Long[] createArray(int length) {
      return new Long[length];
    }

    /** Returns the original element list, unchanged. */
    @Override
    public List<Long> order(List<Long> insertionOrder) {
      return insertionOrder;
    }
  }

  @GwtIncompatible // used only from suite
  public static class SampleLongs extends SampleElements<Long> {
    public SampleLongs() {
      super(1L << 31, 1L << 33, 1L << 36, 1L << 40, 1L << 45);
    }
  }
}
/*

Derby - Class org.apache.derbyTesting.functionTests.tests.lang.PrepareExecuteDDL

Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements.  See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License.  You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

*/
/*
 * Changes for GemFireXD distributed data platform (some marked by "GemStone changes")
 *
 * Portions Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you
 * may not use this file except in compliance with the License. You
 * may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License. See accompanying
 * LICENSE file.
 */
package org.apache.derbyTesting.functionTests.tests.lang;

import org.apache.derbyTesting.junit.BaseJDBCTestCase;
import org.apache.derbyTesting.junit.JDBC;
import org.apache.derbyTesting.junit.TestConfiguration;

import java.sql.*;

import junit.framework.Test;
import junit.framework.TestSuite;

/**
 * Test the dependency system for active statements when
 * a DDL is executed in a separate connection after the
 * prepare but before the execute.
 */
public class PrepareExecuteDDL extends BaseJDBCTestCase {

    /**
     * Connection to execute the DDL on. Needs
     * to be different to the single connection
     * provided by the super-class. This connection
     * is used to execute DDL while the other connection
     * has open objects dependent on the object changed by the DDL.
     */
    private Connection connDDL;

    /**
     * List of statements that are prepared and then executed.
     * The testPrepareExecute method prepares each statement
     * in this list, executes one DDL, executes each prepared
     * statement and then checks the result.
     * <BR>
     * The result checking is driven off the initial text
     * of the statement.
     */
    private static final String[] STMTS =
    {
        "SELECT * FROM APP.PED001",
        "SELECT A, B FROM APP.PED001",
        "GRANT SELECT ON APP.PED001 TO U_PED_001",
        "GRANT SELECT(A,B) ON APP.PED001 TO U_PED_001",
        "REVOKE SELECT(A,B) ON APP.PED001 FROM U_PED_001",
        "REVOKE SELECT ON APP.PED001 FROM U_PED_001",
    };

    /**
     * All the DDL commands that will be executed, one per
     * fixture, as the mutation between the prepare and execute.
     */
    private static final String[] DDL =
    {
        "ALTER TABLE APP.PED001 ADD COLUMN D BIGINT",
        "ALTER TABLE APP.PED001 ADD CONSTRAINT PED001_PK PRIMARY KEY (A)",
        //GemFireXD does not support ALTER ... LOCKSIZE
        //"ALTER TABLE PED001 LOCKSIZE ROW",
        //"ALTER TABLE PED001 LOCKSIZE TABLE",
        "DROP TABLE APP.PED001",
    };

    /**
     * Create a suite of tests, one per statement in DDL.
     * This test is for testing the embedded dependency system
     * though possibly it could be used for testing in client
     * as well.
     */
    public static Test suite() {
        TestSuite suite = new TestSuite("PrepareExecuteDDL");
        for (int i = 0; i < DDL.length; i++)
            suite.addTest(new PrepareExecuteDDL("testPrepareExcute", DDL[i]));
        return TestConfiguration.sqlAuthorizationDecorator(suite);
    }

    /** The single DDL statement this fixture executes between prepare and execute. */
    private final String ddl;

    private PrepareExecuteDDL(String name, String ddl) {
        super(name);
        this.ddl = ddl;
    }

    /** Returns true if this fixture's DDL drops the table under test. */
    private boolean tableDropped() {
        return ddl.startsWith("DROP TABLE ");
    }

    /**
     * Prepare every statement in STMTS, run this fixture's DDL on the
     * second connection, then execute each prepared statement and check
     * that it either succeeds or fails consistently with the DDL applied.
     */
    public void testPrepareExcute() throws SQLException {
        Connection conn = getConnection();

        PreparedStatement[] psa = new PreparedStatement[STMTS.length];
        for (int i = 0; i < STMTS.length; i++) {
            String sql = STMTS[i];
            psa[i] = conn.prepareStatement(sql);
        }

        connDDL.createStatement().execute(ddl);

        for (int i = 0; i < STMTS.length; i++) {
            String sql = STMTS[i];
            if (sql.startsWith("SELECT "))
                checkSelect(psa[i], sql);
            else if (sql.startsWith("GRANT ") || sql.startsWith("REVOKE "))
                checkGrantRevoke(psa[i], sql);
            else
                // BUG FIX: message previously read "unknown SQLSELECT ..." with no separator.
                fail("unknown SQL: " + sql);

            psa[i].close();
        }
    }

    /**
     * Execute a prepared SELECT. If the table was dropped by the DDL the
     * execute must fail with SQLState 42X05 (table does not exist);
     * otherwise the result set metadata must match the database metadata
     * and the results are drained.
     */
    private void checkSelect(PreparedStatement ps, String sql)
        throws SQLException {
        assertEquals(true, sql.startsWith("SELECT "));

        boolean result;
        try {
            result = ps.execute();
        } catch (SQLException e) {
            //TODO: Use DMD to see if table exists or not.
            assertSQLState("42X05", e);
            assertTrue(tableDropped());
            return;
        }
        assertTrue(result);

        ResultSet rs = ps.getResultSet();

        DatabaseMetaData dmd = connDDL.getMetaData();
        JDBC.assertMetaDataMatch(dmd, rs.getMetaData());

        // BUG FIX: removed dead code "boolean isSelectStar = ...; if (isSelectStar) ;"
        // (an empty-statement if with an otherwise-unused local). The original Derby
        // test presumably intended an extra column check for SELECT * here, but the
        // statement body was empty, so it had no effect.

        JDBC.assertDrainResults(rs);
    }

    /**
     * Execute a prepared GRANT/REVOKE. If the table was dropped by the DDL
     * the execute must fail with SQLState 42X05; otherwise it must
     * complete without producing a result set.
     */
    private void checkGrantRevoke(PreparedStatement ps, String sql)
        throws SQLException {
        assertEquals(true, sql.startsWith("GRANT ")
                || sql.startsWith("REVOKE "));

        try {
            assertFalse(ps.execute());
        } catch (SQLException e) {
            assertSQLState("42X05", e);
            assertTrue(tableDropped());
            return;
        }
    }

    /**
     * Set the fixture up with a clean, standard table PED001.
     */
    protected void setUp() throws SQLException {
        setSystemProperty("gemfirexd.sql-authorization", "true");
        connDDL = openDefaultConnection();
        Statement s = connDDL.createStatement();

        s.execute(
        "CREATE TABLE APP.PED001 (A INT NOT NULL, B DECIMAL(6,4), C VARCHAR(20))");

        s.close();
    }

    /**
     * Tear-down the fixture by removing the table (if it still
     * exists).
     */
    protected void tearDown() throws Exception {
// GemStone changes BEGIN
        super.preTearDown();
// GemStone changes END
        Statement s = connDDL.createStatement();
        try {
            s.execute("DROP TABLE APP.PED001");
        } catch (SQLException e) {
            // 42Y55: table does not exist — expected when the fixture's DDL
            // already dropped it; anything else is a real failure.
            assertSQLState("42Y55", e);
        }
        s.close();
        JDBC.cleanup(connDDL);
        connDDL = null;

        super.tearDown();
    }
}
/* * Copyright 2000-2014 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package git4idea.checkin; import com.intellij.CommonBundle; import com.intellij.openapi.components.ServiceManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.progress.Task; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.DialogWrapper; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.util.Computable; import com.intellij.openapi.util.Couple; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.Ref; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vcs.CheckinProjectPanel; import com.intellij.openapi.vcs.FilePath; import com.intellij.openapi.vcs.ProjectLevelVcsManager; import com.intellij.openapi.vcs.VcsException; import com.intellij.openapi.vcs.changes.ChangesUtil; import com.intellij.openapi.vcs.changes.CommitExecutor; import com.intellij.openapi.vcs.checkin.CheckinHandler; import com.intellij.openapi.vcs.checkin.VcsCheckinHandlerFactory; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.util.PairConsumer; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.ui.UIUtil; import com.intellij.xml.util.XmlStringUtil; import git4idea.GitUtil; import git4idea.GitVcs; import git4idea.commands.Git; import 
git4idea.config.GitConfigUtil;
import git4idea.config.GitVcsSettings;
import git4idea.config.GitVersion;
import git4idea.config.GitVersionSpecialty;
import git4idea.crlf.GitCrlfDialog;
import git4idea.crlf.GitCrlfProblemsDetector;
import git4idea.crlf.GitCrlfUtil;
import git4idea.i18n.GitBundle;
import git4idea.repo.GitRepository;
import git4idea.repo.GitRepositoryManager;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.*;
import java.util.concurrent.atomic.AtomicReference;

/**
 * Prohibits committing with an empty message, warns if committing into detached HEAD, checks if user name and correct CRLF attributes
 * are set.
 *
 * @author Kirill Likhodedov
 */
public class GitCheckinHandlerFactory extends VcsCheckinHandlerFactory {

  private static final Logger LOG = Logger.getInstance(GitCheckinHandlerFactory.class);

  public GitCheckinHandlerFactory() {
    super(GitVcs.getKey());
  }

  @NotNull
  @Override
  protected CheckinHandler createVcsHandler(final CheckinProjectPanel panel) {
    return new MyCheckinHandler(panel);
  }

  /** Performs the pre-commit checks for the Git roots selected in the commit dialog. */
  private class MyCheckinHandler extends CheckinHandler {
    @NotNull private final CheckinProjectPanel myPanel;
    @NotNull private final Project myProject;

    public MyCheckinHandler(@NotNull CheckinProjectPanel panel) {
      myPanel = panel;
      myProject = myPanel.getProject();
    }

    /**
     * Runs the checks in order: non-empty message, user.name/user.email defined, CRLF problems, detached HEAD.
     * The first check that doesn't return COMMIT aborts the commit with that result.
     */
    @Override
    public ReturnResult beforeCheckin(@Nullable CommitExecutor executor, PairConsumer<Object, Object> additionalDataConsumer) {
      if (emptyCommitMessage()) {
        return ReturnResult.CANCEL;
      }

      // run the Git-specific checks only for a plain commit or "Commit and Push"; other executors are skipped
      if (commitOrCommitAndPush(executor)) {
        ReturnResult result = checkUserName();
        if (result != ReturnResult.COMMIT) {
          return result;
        }
        result = warnAboutCrlfIfNeeded();
        if (result != ReturnResult.COMMIT) {
          return result;
        }
        return warnAboutDetachedHeadIfNeeded();
      }
      return ReturnResult.COMMIT;
    }

    /**
     * Detects CRLF problems in the files being committed and, if found, asks the user what to do:
     * set core.autocrlf globally, commit anyway (optionally never warning again), or cancel.
     */
    @NotNull
    private ReturnResult warnAboutCrlfIfNeeded() {
      GitVcsSettings settings = GitVcsSettings.getInstance(myProject);
      if (!settings.warnAboutCrlf()) {
        return ReturnResult.COMMIT;
      }

      final Git git = ServiceManager.getService(Git.class);
      final Collection<VirtualFile> files = myPanel.getVirtualFiles(); // deleted files aren't included, but for them we don't care about CRLFs.

      // detection runs under a cancellable modal progress; the reference stays null if the user cancels
      final AtomicReference<GitCrlfProblemsDetector> crlfHelper = new AtomicReference<>();
      ProgressManager.getInstance().run(
        new Task.Modal(myProject, "Checking for line separator issues...", true) {
          @Override
          public void run(@NotNull ProgressIndicator indicator) {
            crlfHelper.set(GitCrlfProblemsDetector.detect(GitCheckinHandlerFactory.MyCheckinHandler.this.myProject, git, files));
          }
        });

      if (crlfHelper.get() == null) { // detection cancelled
        return ReturnResult.CANCEL;
      }

      if (crlfHelper.get().shouldWarn()) {
        // the dialog must be shown on the EDT; block here until the user decides
        Pair<Integer, Boolean> codeAndDontWarn = UIUtil.invokeAndWaitIfNeeded(new Computable<Pair<Integer, Boolean>>() {
          @Override
          public Pair<Integer, Boolean> compute() {
            final GitCrlfDialog dialog = new GitCrlfDialog(myProject);
            dialog.show();
            return Pair.create(dialog.getExitCode(), dialog.dontWarnAgain());
          }
        });
        int decision = codeAndDontWarn.first;
        boolean dontWarnAgain = codeAndDontWarn.second;

        if (decision == GitCrlfDialog.CANCEL) {
          return ReturnResult.CANCEL;
        }
        else {
          if (decision == GitCrlfDialog.SET) {
            VirtualFile anyRoot = myPanel.getRoots().iterator().next(); // config will be set globally => any root will do.
            setCoreAutoCrlfAttribute(anyRoot);
          }
          else {
            if (dontWarnAgain) {
              settings.setWarnAboutCrlf(false);
            }
          }
          return ReturnResult.COMMIT;
        }
      }
      return ReturnResult.COMMIT;
    }

    /** Sets core.autocrlf to the recommended value in the global git config. */
    private void setCoreAutoCrlfAttribute(@NotNull VirtualFile aRoot) {
      try {
        GitConfigUtil.setValue(myProject, aRoot, GitConfigUtil.CORE_AUTOCRLF, GitCrlfUtil.RECOMMENDED_VALUE, "--global");
      }
      catch (VcsException e) {
        // it is not critical: the user just will get the dialog again next time
        LOG.warn("Couldn't globally set core.autocrlf in " + aRoot, e);
      }
    }

    /**
     * Checks that user.name and user.email are defined for every selected root.
     * If some roots miss them, shows a dialog proposing to define the values
     * (pre-filled from another root's config when available) and writes them on confirmation.
     */
    private ReturnResult checkUserName() {
      final Project project = myPanel.getProject();
      GitVcs vcs = GitVcs.getInstance(project);
      assert vcs != null;

      Collection<VirtualFile> affectedRoots = getSelectedRoots();
      Map<VirtualFile, Couple<String>> defined = getDefinedUserNames(project, affectedRoots, false);

      Collection<VirtualFile> allRoots = new ArrayList<>(Arrays.asList(ProjectLevelVcsManager.getInstance(project).getRootsUnderVcs(vcs)));
      Collection<VirtualFile> notDefined = new ArrayList<>(affectedRoots);
      notDefined.removeAll(defined.keySet());

      if (notDefined.isEmpty()) {
        return ReturnResult.COMMIT;
      }

      // old Windows Git builds can't resolve the global .gitconfig without %HOME%; committing would use wrong identity
      GitVersion version = vcs.getVersion();
      if (System.getenv("HOME") == null && GitVersionSpecialty.DOESNT_DEFINE_HOME_ENV_VAR.existsIn(version)) {
        Messages.showErrorDialog(project,
                                 "You are using Git " + version + " which doesn't define %HOME% environment variable properly.\n" +
                                 "Consider updating Git to a newer version " +
                                 "or define %HOME% to point to the place where the global .gitconfig is stored \n" +
                                 "(it is usually %USERPROFILE% or %HOMEDRIVE%%HOMEPATH%).",
                                 "HOME Variable Is Not Defined");
        return ReturnResult.CANCEL;
      }

      // try to find a root with defined user name among other roots - to propose this user name in the dialog
      if (defined.isEmpty() && allRoots.size() > affectedRoots.size()) {
        allRoots.removeAll(affectedRoots);
        defined.putAll(getDefinedUserNames(project, allRoots, true));
      }

      final GitUserNameNotDefinedDialog dialog = new GitUserNameNotDefinedDialog(project, notDefined, affectedRoots, defined);
      if (dialog.showAndGet()) {
        return setUserNameUnderProgress(project, notDefined, dialog) ? ReturnResult.COMMIT : ReturnResult.CANCEL;
      }
      return ReturnResult.CLOSE_WINDOW;
    }

    /**
     * Reads user.name/user.email from git config for the given roots, under a modal progress.
     *
     * @param stopWhenFoundFirst if true, stop as soon as one root with both values defined is found
     * @return map of root -> (name, email) for the roots where both values are defined
     */
    @NotNull
    private Map<VirtualFile, Couple<String>> getDefinedUserNames(@NotNull final Project project,
                                                                 @NotNull final Collection<VirtualFile> roots,
                                                                 final boolean stopWhenFoundFirst) {
      final Map<VirtualFile, Couple<String>> defined = ContainerUtil.newHashMap();
      ProgressManager.getInstance().run(new Task.Modal(project, "Checking Git user name...", true) {
        @Override
        public void run(@NotNull ProgressIndicator pi) {
          for (VirtualFile root : roots) {
            try {
              Couple<String> nameAndEmail = getUserNameAndEmailFromGitConfig(project, root);
              String name = nameAndEmail.getFirst();
              String email = nameAndEmail.getSecond();
              if (name != null && email != null) {
                defined.put(root, nameAndEmail);
                if (stopWhenFoundFirst) {
                  return;
                }
              }
            }
            catch (VcsException e) {
              LOG.error("Couldn't get user.name and user.email for root " + root, e);
              // doing nothing - let commit with possibly empty user.name/email
            }
          }
        }
      });
      return defined;
    }

    /**
     * Writes the name/email chosen in the dialog into git config, globally or per root, under a modal progress.
     *
     * @return true on success, false if writing the config failed (an error dialog is shown in that case)
     */
    private boolean setUserNameUnderProgress(@NotNull final Project project,
                                             @NotNull final Collection<VirtualFile> notDefined,
                                             @NotNull final GitUserNameNotDefinedDialog dialog) {
      final Ref<String> error = Ref.create();
      ProgressManager.getInstance().run(new Task.Modal(project, "Setting Git User Name...", true) {
        @Override
        public void run(@NotNull ProgressIndicator pi) {
          try {
            if (dialog.isGlobal()) {
              // global config: any root will do, take the first one
              GitConfigUtil.setValue(project, notDefined.iterator().next(), GitConfigUtil.USER_NAME, dialog.getUserName(), "--global");
              GitConfigUtil.setValue(project, notDefined.iterator().next(), GitConfigUtil.USER_EMAIL, dialog.getUserEmail(), "--global");
            }
            else {
              for (VirtualFile root : notDefined) {
                GitConfigUtil.setValue(project, root, GitConfigUtil.USER_NAME, dialog.getUserName());
                GitConfigUtil.setValue(project, root, GitConfigUtil.USER_EMAIL, dialog.getUserEmail());
              }
            }
          }
          catch (VcsException e) {
            String message = "Couldn't set user.name and user.email";
            LOG.error(message, e);
            error.set(message);
          }
        }
      });
      if (error.isNull()) {
        return true;
      }
      else {
        Messages.showErrorDialog(myPanel.getComponent(), error.get());
        return false;
      }
    }

    /** @return the (possibly null) user.name and user.email values from git config for the given root. */
    @NotNull
    private Couple<String> getUserNameAndEmailFromGitConfig(@NotNull Project project,
                                                            @NotNull VirtualFile root) throws VcsException {
      String name = GitConfigUtil.getValue(project, root, GitConfigUtil.USER_NAME);
      String email = GitConfigUtil.getValue(project, root, GitConfigUtil.USER_EMAIL);
      return Couple.of(name, email);
    }

    /** @return true (after showing an error dialog) if the commit message is empty or whitespace-only. */
    private boolean emptyCommitMessage() {
      if (myPanel.getCommitMessage().trim().isEmpty()) {
        Messages.showMessageDialog(myPanel.getComponent(), GitBundle.message("git.commit.message.empty"),
                                   GitBundle.message("git.commit.message.empty.title"), Messages.getErrorIcon());
        return true;
      }
      return false;
    }

    /**
     * Warns the user if a selected root is on a detached HEAD (distinguishing an unfinished rebase
     * from a plain detached checkout) and lets him proceed, abort, or suppress the warning.
     */
    private ReturnResult warnAboutDetachedHeadIfNeeded() {
      // Warning: commit on a detached HEAD
      DetachedRoot detachedRoot = getDetachedRoot();
      if (detachedRoot == null || !GitVcsSettings.getInstance(myProject).warnAboutDetachedHead()) {
        return ReturnResult.COMMIT;
      }

      final String title;
      final String message;
      final CharSequence rootPath = StringUtil.last(detachedRoot.myRoot.getPresentableUrl(), 50, true);
      final String messageCommonStart = "The Git repository <code>" + rootPath + "</code>";
      if (detachedRoot.myRebase) {
        title = "Unfinished rebase process";
        message = messageCommonStart + " <br/> has an <b>unfinished rebase</b> process. <br/>" +
                  "You probably want to <b>continue rebase</b> instead of committing. <br/>" +
                  "Committing during rebase may lead to the commit loss. <br/>" +
                  readMore("http://www.kernel.org/pub/software/scm/git/docs/git-rebase.html", "Read more about Git rebase");
      }
      else {
        title = "Commit in detached HEAD may be dangerous";
        message = messageCommonStart + " is in the <b>detached HEAD</b> state. <br/>" +
                  "You can look around, make experimental changes and commit them, but be sure to checkout a branch not to lose your work. <br/>" +
                  "Otherwise you risk losing your changes. <br/>" +
                  readMore("http://gitolite.com/detached-head.html", "Read more about detached HEAD");
      }

      DialogWrapper.DoNotAskOption dontAskAgain = new DialogWrapper.DoNotAskOption.Adapter() {
        @Override
        public void rememberChoice(boolean isSelected, int exitCode) {
          GitVcsSettings.getInstance(myProject).setWarnAboutDetachedHead(!isSelected);
        }

        @NotNull
        @Override
        public String getDoNotShowMessage() {
          return "Don't warn again";
        }
      };
      int choice = Messages.showOkCancelDialog(myProject, XmlStringUtil.wrapInHtml(message), title, "Commit",
                                               CommonBundle.getCancelButtonText(), Messages.getWarningIcon(), dontAskAgain);
      if (choice == Messages.OK) {
        return ReturnResult.COMMIT;
      }
      else {
        return ReturnResult.CLOSE_WINDOW;
      }
    }

    /** @return true if this is a plain commit (no executor) or a "Commit and Push". */
    private boolean commitOrCommitAndPush(@Nullable CommitExecutor executor) {
      return executor == null || executor instanceof GitCommitAndPushExecutor;
    }

    /** Formats an HTML "read more" link for the warning messages above. */
    private String readMore(String link, String message) {
      return String.format("<a href='%s'>%s</a>.", link, message);
    }

    /**
     * Scans the Git roots, selected for commit, for the root which is on a detached HEAD.
     * Returns null, if all repositories are on the branch.
     * There might be several detached repositories, - in that case only one is returned.
     * This is because the situation is very rare, while it requires a lot of additional effort of making a well-formed message.
     */
    @Nullable
    private DetachedRoot getDetachedRoot() {
      GitRepositoryManager repositoryManager = GitUtil.getRepositoryManager(myPanel.getProject());
      for (VirtualFile root : getSelectedRoots()) {
        GitRepository repository = repositoryManager.getRepositoryForRoot(root);
        if (repository == null) {
          continue;
        }
        if (!repository.isOnBranch()) {
          return new DetachedRoot(root, repository.isRebaseInProgress());
        }
      }
      return null;
    }

    /** @return the VCS roots of the changes currently selected in the commit dialog. */
    @NotNull
    private Collection<VirtualFile> getSelectedRoots() {
      ProjectLevelVcsManager vcsManager = ProjectLevelVcsManager.getInstance(myProject);
      Collection<VirtualFile> result = new HashSet<>();
      for (FilePath path : ChangesUtil.getPaths(myPanel.getSelectedChanges())) {
        VirtualFile root = vcsManager.getVcsRootFor(path);
        if (root != null) {
          result.add(root);
        }
      }
      return result;
    }

    /** A Git root that is on a detached HEAD, together with the reason-flag. */
    private class DetachedRoot {
      final VirtualFile myRoot;
      final boolean myRebase; // rebase in progress, or just detached due to a checkout of a commit.

      public DetachedRoot(@NotNull VirtualFile root, boolean rebase) {
        myRoot = root;
        myRebase = rebase;
      }
    }
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.irc;

import java.net.URI;
import java.net.URLEncoder;
import java.util.Dictionary;
import java.util.List;

import org.apache.camel.Endpoint;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Test;

/**
 * Tests the IRC component's endpoint URI parsing: conversion of deprecated URI forms
 * (path-based channels, username/password/keys query parameters) into the sanitized
 * canonical form, and population of {@link IrcConfiguration} from various URI formats.
 */
public class IrcConfigurationTest extends CamelTestSupport {

    /**
     * Creates an endpoint from a deprecated URI form and asserts that the component
     * sanitizes it into the expected canonical URI, which must also parse as a valid
     * {@link java.net.URI}.
     *
     * @param component the IRC component under test
     * @param deprecated the legacy endpoint URI to convert
     * @param expected the expected sanitized endpoint URI
     */
    private void assertSanitized(IrcComponent component, String deprecated, String expected) throws Exception {
        Endpoint endpoint = component.createEndpoint(deprecated);
        assertEquals(expected, endpoint.getEndpointUri());
        // Note: valid URIs won't throw on new URI(endpoint.getEndpointUri())
        assertNotNull(new URI(endpoint.getEndpointUri()));
    }

    @Test
    public void testInvalidUriConversion() throws Exception {
        IrcComponent component = context.getComponent("irc", IrcComponent.class);

        // Test conversion of the URI path to @channel parameter (drop the '#')
        assertSanitized(component,
                        "irc://camelbot@irc.freenode.net:1234/#camel",
                        "irc://camelbot@irc.freenode.net:1234?channel=camel");

        // Test conversion of the URI path to @channel parameter (encode the double '##')
        assertSanitized(component,
                        "irc://camelbot@irc.freenode.net/##camel",
                        "irc://camelbot@irc.freenode.net?channel=%23%23camel");

        // Test drop path and both path and @channels are specified
        assertSanitized(component,
                        "irc://camelbot@irc.freenode.net/#karaf?channels=#camel,#cxf",
                        "irc://camelbot@irc.freenode.net?channel=camel&channel=cxf");

        // Test multiple channels, no keys
        assertSanitized(component,
                        "irc://camelbot@irc.freenode.net?channels=#camel,#cxf",
                        "irc://camelbot@irc.freenode.net?channel=camel&channel=cxf");

        // Test multiple channels, with keys
        assertSanitized(component,
                        "irc://camelbot@irc.freenode.net?channels=#camel,#cxf&keys=foo,bar",
                        "irc://camelbot@irc.freenode.net?channel=camel!foo&channel=cxf!bar");

        // Test multiple channels, with keys (last key is empty)
        assertSanitized(component,
                        "irc://camelbot@irc.freenode.net?channels=#camel,#cxf&keys=foo,",
                        "irc://camelbot@irc.freenode.net?channel=camel!foo&channel=cxf");

        // Test multiple channels, deprecated @username
        assertSanitized(component,
                        "irc://irc.freenode.net?keys=,foo&channels=#camel,#cxf&username=camelbot",
                        "irc://camelbot@irc.freenode.net?channel=camel&channel=cxf!foo");

        // Test multiple channels, deprecated @username and @password
        assertSanitized(component,
                        "irc://irc.freenode.net?keys=,foo&channels=#camel,#cxf&username=camelbot&password=secret",
                        "irc://camelbot:secret@irc.freenode.net?channel=camel&channel=cxf!foo");

        // Test multiple channels, drop @nickname same as @username
        assertSanitized(component,
                        "irc://irc.freenode.net?channels=#camel,#cxf&nickname=camelbot",
                        "irc://camelbot@irc.freenode.net?channel=camel&channel=cxf");

        // Test with encoding of @realname
        assertSanitized(component,
                        "irc://user@irc.freenode.net?keys=foo,&channels=#camel,#cxf&realname=Camel Bot&username=user&nickname=camelbot",
                        "irc://user@irc.freenode.net?channel=camel!foo&channel=cxf&nickname=camelbot&realname=Camel%20Bot");
    }

    @Test
    public void testConfigureFormat1() throws Exception {
        IrcComponent component = context.getComponent("irc", IrcComponent.class);

        // irc:nick@host[:port]/#room[?options]
        IrcEndpoint endpoint = (IrcEndpoint) component.createEndpoint("irc://camelbot@irc.freenode.net/#camel");

        IrcConfiguration conf = endpoint.getConfiguration();
        assertEquals("camelbot", conf.getNickname());
        assertEquals("irc.freenode.net", conf.getHostname());
        List<IrcChannel> channels = conf.getChannels();
        assertEquals(1, channels.size());
        assertEquals("#camel", channels.get(0).getName());
    }

    @Test
    public void testConfigureFormat2() throws Exception {
        IrcComponent component = context.getComponent("irc", IrcComponent.class);

        // irc:nick@host[:port]/#room[?options]
        IrcEndpoint endpoint = (IrcEndpoint) component.createEndpoint("irc://camelbot@irc.freenode.net?channels=#camel");

        IrcConfiguration conf = endpoint.getConfiguration();
        assertEquals("camelbot", conf.getNickname());
        assertEquals("irc.freenode.net", conf.getHostname());
        List<IrcChannel> channels = conf.getChannels();
        assertEquals(1, channels.size());
        assertEquals("#camel", channels.get(0).getName());
    }

    @Test
    public void testConfigureFormat3() throws Exception {
        IrcComponent component = context.getComponent("irc", IrcComponent.class);

        // irc:nick@host[:port]/#room[?options]
        IrcEndpoint endpoint = (IrcEndpoint) component.createEndpoint("irc://irc.freenode.net?channels=#camel&nickname=camelbot");

        IrcConfiguration conf = endpoint.getConfiguration();
        assertEquals("camelbot", conf.getNickname());
        assertEquals("irc.freenode.net", conf.getHostname());
        List<IrcChannel> channels = conf.getChannels();
        assertEquals(1, channels.size());
        assertEquals("#camel", channels.get(0).getName());
    }

    @Test
    public void testConfigureFormat4() throws Exception {
        IrcComponent component = context.getComponent("irc", IrcComponent.class);

        // irc:nick@host[:port]/#room[?options] -- channels may be URL-encoded (%23 == '#')
        IrcEndpoint endpoint = (IrcEndpoint) component.createEndpoint("irc://irc.freenode.net?keys=,foo&channels=%23camel,%23smx&nickname=camelbot");

        IrcConfiguration conf = endpoint.getConfiguration();
        assertEquals("camelbot", conf.getNickname());
        assertEquals("irc.freenode.net", conf.getHostname());
        List<IrcChannel> channels = conf.getChannels();
        assertEquals(2, channels.size());
        assertNotNull(conf.findChannel("#camel"));
        assertNotNull(conf.findChannel("#smx"));
        // keys are positional: the empty first key leaves #camel without a key, "foo" goes to #smx
        assertEquals("foo", conf.findChannel("#smx").getKey());
    }

    @Test
    public void testConfigureFormat5() throws Exception {
        IrcComponent component = context.getComponent("irc", IrcComponent.class);

        // irc:nick@host[:port]/#room[?options] -- explicit nickname overrides the URI user part
        IrcEndpoint endpoint = (IrcEndpoint) component.
            createEndpoint("irc://badnick@irc.freenode.net?keys=foo,&channels=#camel,#smx&realname=Camel+Bot&nickname=camelbot");

        IrcConfiguration conf = endpoint.getConfiguration();
        assertEquals("camelbot", conf.getNickname());
        assertEquals("irc.freenode.net", conf.getHostname());
        List<IrcChannel> channels = conf.getChannels();
        assertEquals(2, channels.size());
        assertNotNull(conf.findChannel("#camel"));
        assertEquals("foo", conf.findChannel("#camel").getKey());
        assertEquals("Camel Bot", conf.getRealname());
    }

    @Test
    public void testConfigureFormat6() throws Exception {
        IrcComponent component = context.getComponent("irc", IrcComponent.class);

        // irc:nick@host[:port]/#room[?options] -- one key per channel, in order
        IrcEndpoint endpoint = (IrcEndpoint) component.
            createEndpoint("irc://badnick@irc.freenode.net?keys=foo,bar&channels=#camel,#smx&realname=Camel+Bot&nickname=camelbot");

        IrcConfiguration conf = endpoint.getConfiguration();
        assertEquals("camelbot", conf.getNickname());
        assertEquals("irc.freenode.net", conf.getHostname());
        List<IrcChannel> channels = conf.getChannels();
        assertEquals(2, channels.size());
        assertNotNull(conf.findChannel("#camel"));
        assertNotNull(conf.findChannel("#smx"));
        assertEquals("foo", conf.findChannel("#camel").getKey());
        assertEquals("bar", conf.findChannel("#smx").getKey());
        assertEquals("Camel Bot", conf.getRealname());
    }
}
package com.tumblr.jumblr.types;

import java.util.List;
import java.util.Map;

/**
 * This class represents an individual Tumbelog
 * @author jc
 */
public class Blog extends Resource {

    // NOTE(review): these field names (e.g. ask_anon) look like they mirror the Tumblr API
    // response keys -- do not rename without checking how instances are deserialized.
    private String name;
    private String title;
    private String description;
    private int posts, likes, followers, drafts, queue;
    private Long updated;
    private boolean ask, ask_anon, followed;

    /**
     * Get the description of this blog
     * @return String description
     */
    public String getDescription() {
        return this.description;
    }

    /**
     * Can we ask questions on this blog?
     * @return boolean
     */
    public boolean canAsk() {
        return this.ask;
    }

    /**
     * Can we ask questions on this blog anonymously?
     * @return boolean
     */
    public boolean canAskAnonymously() {
        return this.ask_anon;
    }

    /**
     * Is this blog followed?
     * @return boolean
     */
    public boolean isFollowed() {
        return this.followed;
    }

    /**
     * Get the number of posts for this blog
     * @return Integer the number of posts
     */
    public Integer getPostCount() {
        return this.posts;
    }

    /**
     * Get the number of likes for this blog
     * @return Integer the number of likes
     */
    public Integer getLikeCount() {
        return this.likes;
    }

    /**
     * Get the number of drafts for this blog
     * @return Integer the number of drafts
     */
    public Integer getDraftCount() {
        return this.drafts;
    }

    /**
     * Get the number of Queued Posts for this blog
     * @return Integer the number of Queued Posts
     */
    public Integer getQueuedCount() {
        return this.queue;
    }

    /**
     * Get the time of the most recent post (in seconds since epoch)
     * @return Long of time
     */
    public Long getUpdated() {
        return updated;
    }

    /**
     * Get the title of this blog
     * @return The title of the blog
     */
    public String getTitle() {
        return title;
    }

    /**
     * Get the name of this blog
     * @return The name of the blog
     */
    public String getName() {
        return name;
    }

    /**
     * Get the avatar for this blog (of a given size)
     * @param size the size to get the avatar for
     * @return A string URL for the avatar
     */
    public String avatar(Integer size) {
        return client.blogAvatar(this.name, size);
    }

    /**
     * Get the avatar for this blog at the default size
     * @return A string URL for the avatar
     */
    public String avatar() {
        return this.avatar(null);
    }

    /**
     * Get the number of followers for this blog
     * @return Integer the number of followers
     */
    public Integer getFollowersCount() {
        return this.followers;
    }

    /**
     * Get followers for this blog
     * @param options a map of options (or null)
     * @return A List of users
     */
    public List<User> followers(Map<String, ?> options) {
        return client.blogFollowers(this.name, options);
    }

    /**
     * Get followers for this blog with the default options
     * @return A List of users
     */
    public List<User> followers() {
        return this.followers(null);
    }

    /**
     * Get the posts for this blog
     * @param options a map of options (or null)
     * @return A List of posts
     */
    public List<Post> posts(Map<String, ?> options) {
        return client.blogPosts(name, options);
    }

    /**
     * Get the posts for this blog with the default options
     * @return A List of posts
     */
    public List<Post> posts() {
        return this.posts(null);
    }

    /**
     * Get an individual post by id
     * @param postId the id of the post to retrieve
     * @return the post (or null)
     */
    public Post getPost(Long postId) {
        return client.blogPost(name, postId);
    }

    /**
     * Get likes posts for this blog
     * @param options a map of options (or null)
     * @return A List of posts
     */
    public List<Post> likedPosts(Map<String, ?> options) {
        return client.blogLikes(this.name, options);
    }

    /**
     * Get liked posts for this blog with the default options
     * @return A List of posts
     */
    public List<Post> likedPosts() {
        return this.likedPosts(null);
    }

    /**
     * Follow this blog
     */
    public void follow() {
        client.follow(this.name);
    }

    /**
     * Unfollow this blog
     */
    public void unfollow() {
        client.unfollow(this.name);
    }

    /**
     * Get the queued posts for this blog
     * @param options the options (or null)
     * @return a List of posts
     */
    public List<Post> queuedPosts(Map<String, ?> options) {
        return client.blogQueuedPosts(name, options);
    }

    /**
     * Get the queued posts for this blog with the default options
     * @return a List of posts
     */
    public List<Post> queuedPosts() {
        return this.queuedPosts(null);
    }

    /**
     * Get the draft posts for this blog
     * @param options the options (or null)
     * @return a List of posts
     */
    public List<Post> draftPosts(Map<String, ?> options) {
        return client.blogDraftPosts(name, options);
    }

    /**
     * Get the draft posts for this blog with the default options
     * @return a List of posts
     */
    public List<Post> draftPosts() {
        return this.draftPosts(null);
    }

    /**
     * Get the submissions for this blog
     * @param options the options (or null)
     * @return a List of posts
     */
    public List<Post> submissions(Map<String, ?> options) {
        return client.blogSubmissions(name, options);
    }

    /**
     * Get the submissions for this blog with the default options
     * @return a List of posts
     */
    public List<Post> submissions() {
        return this.submissions(null);
    }

    /**
     * Create a new post of a given type for this blog
     * @param klass the class of the post to make
     * @param <T> the class of the post to make
     * @return new post
     * @throws IllegalAccessException if class instantiation fails
     * @throws InstantiationException if class instantiation fails
     */
    public <T extends Post> T newPost(Class<T> klass) throws IllegalAccessException, InstantiationException {
        return client.newPost(name, klass);
    }

    /**
     * Set the name of this blog
     * @param name the name to set
     */
    public void setName(String name) {
        this.name = name;
    }
}
/*
 * CPAchecker is a tool for configurable software verification.
 * This file is part of CPAchecker.
 *
 * Copyright (C) 2007-2013 Dirk Beyer
 * All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 *
 * CPAchecker web page:
 * http://cpachecker.sosy-lab.org
 */
package org.sosy_lab.cpachecker.cpa.bdd;

import java.util.regex.Pattern;

import javax.annotation.Nullable;

import org.sosy_lab.common.configuration.Configuration;
import org.sosy_lab.common.configuration.InvalidConfigurationException;
import org.sosy_lab.common.configuration.Option;
import org.sosy_lab.common.configuration.Options;
import org.sosy_lab.cpachecker.cfa.model.CFANode;
import org.sosy_lab.cpachecker.core.interfaces.Precision;
import org.sosy_lab.cpachecker.cpa.explicit.ExplicitState.MemoryLocation;
import org.sosy_lab.cpachecker.util.VariableClassification;

import com.google.common.base.Optional;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.Multimap;

/**
 * Precision for the BDD CPA: decides which variables are tracked, based on the
 * variable classification (boolean / equality-only / arithmetic variables), an optional
 * force-tracking name pattern, and a CEGAR-refined per-variable mapping.
 */
@Options(prefix = "cpa.bdd")
public class BDDPrecision implements Precision {

  @Option(description = "track boolean variables from cfa")
  private boolean trackBoolean = true;

  @Option(description = "track variables from cfa, that are only compared " +
      "for equality, they are tracked as (small) bitvectors")
  private boolean trackIntEqual = true;

  @Option(description = "track variables, only used in simple calculations " +
      "(add, sub, gt, lt, eq,...) from cfa as bitvectors with (default) 32 bits")
  private boolean trackIntAdd = true;

  @Option(name = "forceTrackingPattern",
      description = "Pattern for variablenames that will always be tracked with BDDs." +
          "This pattern should only be used for known variables, i.e. for boolean vars.")
  private String forceTrackingPatternStr = "";

  @Option(name = "precision.refinement.useScopedInterpolation",
      description = "whether or not to add newly-found variables " +
          "only to the exact program location or to the whole scope of the variable.")
  private boolean useScopedInterpolation = false;

  @Option(description = "whether the precision is initially empty (this should be set to true when refinement is used)")
  private boolean initiallyEmptyPrecision = false;

  // compiled from forceTrackingPatternStr; null when no pattern is configured
  private final Pattern forceTrackingPattern;
  private final CegarPrecision cegarPrecision;
  private final Optional<VariableClassification> varClass;

  /** initial constructor */
  public BDDPrecision(Configuration config, Optional<VariableClassification> vc)
      throws InvalidConfigurationException {
    config.inject(this);
    if (!forceTrackingPatternStr.isEmpty()) {
      this.forceTrackingPattern = Pattern.compile(forceTrackingPatternStr);
    } else {
      this.forceTrackingPattern = null;
    }

    // with refinement the precision starts empty and grows; otherwise everything is tracked
    if (initiallyEmptyPrecision) {
      this.cegarPrecision = new CegarPrecision(useScopedInterpolation);
    } else {
      this.cegarPrecision = new CegarPrecision();
    }
    this.varClass = vc;
  }

  /** copy-constructor, that allows to add new variables to cegar-precision. */
  public BDDPrecision(BDDPrecision original, Multimap<CFANode, MemoryLocation> increment) {
    this.varClass = original.varClass;
    this.forceTrackingPattern = original.forceTrackingPattern;
    this.trackBoolean = original.trackBoolean;
    this.trackIntEqual = original.trackIntEqual;
    this.trackIntAdd = original.trackIntAdd;
    this.cegarPrecision = original.cegarPrecision.withAdditionalMappings(increment);
  }

  /**
   * Returns true if this precision cannot possibly track any variable,
   * so the analysis can be skipped entirely.
   */
  public boolean isDisabled() {
    if (forceTrackingPattern != null) { return false; }
    if (cegarPrecision.isEmpty()) { return true; }
    if (!varClass.isPresent()) { return true; }

    boolean trackSomeIntBools = trackBoolean && !varClass.get().getIntBoolVars().isEmpty();
    boolean trackSomeIntEquals = trackIntEqual && !varClass.get().getIntEqualVars().isEmpty();
    boolean trackSomeIntAdds = trackIntAdd && !varClass.get().getIntAddVars().isEmpty();

    return !(trackSomeIntBools || trackSomeIntEquals || trackSomeIntAdds);
  }

  /**
   * This method tells if the precision demands the given variable to be tracked.
   *
   * @param function function of the current scope, or null if the variable is global
   * @param var the name of the variable to check
   * @return true, if the variable has to be tracked, else false
   */
  public boolean isTracking(@Nullable String function, String var) {
    // this pattern should only be used, if we know the class of the matching variables
    if (this.forceTrackingPattern != null &&
        this.forceTrackingPattern.matcher(var).matches()) {
      return true;
    }

    if (!cegarPrecision.allowsTrackingAt(function, var)) {
      return false;
    }

    return isInTrackedClass(function, var);
  }

  /** Checks whether the variable belongs to one of the enabled variable classes. */
  private boolean isInTrackedClass(@Nullable String function, String var) {
    if (!varClass.isPresent()) { return false; }

    final boolean isIntBool = varClass.get().getIntBoolVars().containsEntry(function, var);
    final boolean isIntEq = varClass.get().getIntEqualVars().containsEntry(function, var);
    final boolean isIntAdd = varClass.get().getIntAddVars().containsEntry(function, var);

    final boolean isTrackedBoolean = trackBoolean && isIntBool;
    final boolean isTrackedIntEqual = trackIntEqual && isIntEq;
    final boolean isTrackedIntAdd = trackIntAdd && isIntAdd;

    return isTrackedBoolean || isTrackedIntEqual || isTrackedIntAdd;
  }

  public CegarPrecision getCegarPrecision() {
    return cegarPrecision;
  }

  /** The CEGAR-refined part of the precision: which variables may be tracked (and where). */
  public static class CegarPrecision {

    /** the collection that determines which variables are tracked at
     * a specific location - if it is null, all variables are tracked */
    private final Multimap<CFANode, MemoryLocation> mapping;

    private final boolean useScopedInterpolation;

    /** Constructor for creating a precision that tracks all variables. */
    public CegarPrecision() {
      mapping = null;
      useScopedInterpolation = false; // value does not matter.
    }

    /** Constructor for creating a precision that tracks no variables. */
    public CegarPrecision(boolean pUseScopedInterpolation) {
      mapping = ImmutableMultimap.of();
      useScopedInterpolation = pUseScopedInterpolation;
    }

    /** copy constructor */
    private CegarPrecision(Multimap<CFANode, MemoryLocation> pMapping,
        boolean pUseScopedInterpolation) {
      mapping = HashMultimap.create(pMapping);
      useScopedInterpolation = pUseScopedInterpolation;
    }

    /** returns, if nothing should be tracked. */
    public boolean isEmpty() {
      return mapping != null && mapping.isEmpty();
    }

    /**
     * This method determines if the given variable is being
     * tracked at the given location.
     *
     * @param function the function of the variable or null, if global scope
     * @param var the variable to check for
     * @return if the given variable is being tracked at the given location
     */
    public boolean allowsTrackingAt(@Nullable String function, String var) {
      if (mapping == null) {
        return true;
      }

      // MemoryLocations are built with offset 0; globals have no function qualifier
      final MemoryLocation variable = function == null
          ? MemoryLocation.valueOf(var, 0)
          : MemoryLocation.valueOf(function, var, 0);

      // when using scoped interpolation, it suffices to have the (scoped) variable identifier in the precision
      if (useScopedInterpolation) {
        return mapping.containsValue(variable);
      }

      // when not using scoped interpolation, there must be a pair of location -> variable identifier in the mapping
      else {
        return mapping.containsValue(variable);
        // TODO support for location-based tracking
        // return mapping.containsEntry(location, variable);
      }
    }

    /**
     * This method adds the additional mapping to the current mapping,
     * i.e., this precision can only grow in size, and never gets smaller.
     *
     * @param additionalMapping to be added to the current mapping
     */
    private CegarPrecision withAdditionalMappings(Multimap<CFANode, MemoryLocation> additionalMapping) {
      if (mapping == null) {
        // all variables are tracked anyway
        return this;
      }

      CegarPrecision result = new CegarPrecision(mapping, useScopedInterpolation);
      result.mapping.putAll(additionalMapping);
      return result;
    }
  }
}
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates and open the template
 * in the editor.
 */
package com.progressiveaccess.cdkRenderer;

import org.openscience.cdk.geometry.GeometryTools;
import org.openscience.cdk.interfaces.IAtomContainer;
import org.openscience.cdk.renderer.BoundsCalculator;
import org.openscience.cdk.renderer.RendererModel;
import org.openscience.cdk.renderer.elements.AtomSymbolElement;
import org.openscience.cdk.renderer.elements.ElementGroup;
import org.openscience.cdk.renderer.elements.IRenderingElement;
import org.openscience.cdk.renderer.elements.LineElement;
import org.openscience.cdk.renderer.elements.MarkedElement;
import org.openscience.cdk.renderer.elements.OvalElement;
import org.openscience.cdk.renderer.elements.WedgeLineElement;
import org.openscience.cdk.renderer.generators.BasicSceneGenerator;
import org.openscience.cdk.renderer.generators.BasicSceneGenerator.Scale;
import org.openscience.cdk.renderer.generators.BasicSceneGenerator.ZoomFactor;
import org.openscience.cdk.renderer.generators.IGenerator;

import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Font;
import java.awt.Stroke;
import java.awt.geom.AffineTransform;
import java.awt.geom.Rectangle2D;
import java.util.List;

import javax.vecmath.Point2d;

/**
 * Base class for rendering a CDK molecule diagram into an output element of
 * type {@code T} (e.g. an SVG node or an AWT shape; the concrete type is
 * chosen by the subclass).
 *
 * <p>The renderer maintains an {@link AffineTransform} that maps model
 * (chemical) coordinates to drawing coordinates: it translates the model
 * centre to the drawing centre, flips the y axis, and applies the scale and
 * zoom parameters from the {@link RendererModel}.
 *
 * @author sacko
 * @param <T> the concrete rendering element type produced by this renderer
 */
public abstract class AbstractRenderer<T> {

  /** Fallback scale used when the model has no usable average bond length. */
  protected static final double DEFAULT_SCALE = 30;

  /** Default horizontal padding (model units). */
  protected static final double DEFAULT_XPAD = 2;

  /** Default vertical padding (model units). */
  protected static final double DEFAULT_YPAD = 2;

  /** Default font for atom symbol rendering. */
  protected static final Font DEFAULT_FONT = new Font("serif", Font.PLAIN, 15);

  /** Current drawing colour. */
  private Color color;

  /** Current drawing stroke; only {@link BasicStroke} instances are kept. */
  private BasicStroke stroke;

  /** Model-to-drawing coordinate transform; rebuilt by updateTransformer(). */
  protected AffineTransform transform;

  /** Renderer model holding scale/zoom and generator parameters. */
  private final RendererModel model;

  /** Centre of the molecule in model coordinates. */
  private Point2d modelCentre = new Point2d(0, 0);

  /** Centre of the output canvas in drawing coordinates. */
  private Point2d drawingCentre = new Point2d(100, 100);

  /** Bounds of the atom container last passed to render(IAtomContainer). */
  private Rectangle2D boundBox;

  /** Generators that produce the rendering elements for an atom container. */
  private final List<IGenerator<IAtomContainer>> generators;

  /**
   * Creates a renderer, registering every generator's parameters with the
   * model and initialising the coordinate transform.
   *
   * @param model the renderer model to read scale/zoom parameters from
   * @param generators the diagram generators to apply in render()
   */
  protected AbstractRenderer(final RendererModel model,
      final List<IGenerator<IAtomContainer>> generators) {
    this.model = model;
    this.generators = generators;
    for (final IGenerator<IAtomContainer> generator : this.generators) {
      this.model.registerParameters(generator);
    }
    this.updateTransformer();
  }

  /**
   * @return the current drawing colour
   */
  public Color getColor() {
    return this.color;
  }

  /**
   * @param color the drawing colour to use
   */
  public void setColor(final Color color) {
    this.color = color;
  }

  /**
   * @return the molecule centre in model coordinates
   */
  public Point2d getModelCentre() {
    return this.modelCentre;
  }

  /**
   * Sets the molecule centre in model coordinates and rebuilds the transform.
   *
   * @param modelCentre the new model centre
   */
  public void setModelCentre(final Point2d modelCentre) {
    this.modelCentre = modelCentre;
    this.updateTransformer();
  }

  /**
   * @return the canvas centre in drawing coordinates
   */
  public Point2d getDrawingCentre() {
    return this.drawingCentre;
  }

  /**
   * Sets the canvas centre in drawing coordinates and rebuilds the transform.
   *
   * @param drawingCentre the new drawing centre
   */
  public void setDrawingCentre(final Point2d drawingCentre) {
    this.drawingCentre = drawingCentre;
    this.updateTransformer();
  }

  /**
   * @return the current stroke, or {@code null} if none has been set
   */
  public final Stroke getStroke() {
    return this.stroke;
  }

  /**
   * Sets the drawing stroke. Non-{@link BasicStroke} instances are silently
   * ignored because the renderer stores the stroke as a BasicStroke.
   *
   * @param stroke the stroke to use
   */
  public final void setStroke(final Stroke stroke) {
    if (stroke instanceof BasicStroke) {
      this.stroke = (BasicStroke) stroke;
    }
  }

  /**
   * @return the scale parameter currently held by the renderer model
   */
  public final Double getScale() {
    return this.getModel().getParameter(Scale.class).getValue();
  }

  /**
   * Derives the scale from the atom container's average bond length, stores
   * it in the model, and rebuilds the transform.
   *
   * @param atomContainer the molecule to compute the scale for
   */
  public final void setScale(final IAtomContainer atomContainer) {
    this.getModel()
        .getParameter(Scale.class)
        .setValue(
            this.calculateScaleForBondLength(GeometryTools
                .getBondLengthAverage(atomContainer)));
    this.updateTransformer();
  }

  /**
   * @return the zoom factor currently held by the renderer model
   */
  public final Double getZoom() {
    return this.getModel().getParameter(ZoomFactor.class).getValue();
  }

  /**
   * Sets the zoom factor in the model and rebuilds the transform.
   *
   * @param value the new zoom factor
   */
  public final void setZoom(final Double value) {
    this.getModel().getParameter(ZoomFactor.class).setValue(value);
    this.updateTransformer();
  }

  /**
   * @return the renderer model backing this renderer
   */
  public RendererModel getModel() {
    return this.model;
  }

  /**
   * @return the diagram generators applied by render()
   */
  public List<IGenerator<IAtomContainer>> getGenerators() {
    return this.generators;
  }

  /**
   * Transforms a point from model coordinates to drawing coordinates.
   *
   * @param x model x coordinate
   * @param y model y coordinate
   * @return the corresponding point in drawing coordinates
   */
  protected Point2d XY(final Double x, final Double y) {
    final double[] i = new double[] { x, y };
    this.transform.transform(i, 0, i, 0, 1);
    return new Point2d(i);
  }

  /**
   * Computes the width/height extent of a rendered element.
   *
   * @param element the rendered element to measure
   * @return the element's extent as a (width, height) point
   */
  protected abstract Point2d WH(T element);

  /**
   * Rebuilds the model-to-drawing transform from the current centres, scale
   * and zoom. Order matters: translate to drawing centre, flip y, scale,
   * zoom, then translate the model centre to the origin.
   */
  protected final void updateTransformer() {
    this.transform = new AffineTransform();
    this.transform.translate(this.drawingCentre.x, this.drawingCentre.y);
    this.transform.scale(1, -1);
    this.transform.scale(this.getScale(), this.getScale());
    this.transform.scale(this.getZoom(), this.getZoom());
    this.transform.translate(-this.modelCentre.x, -this.modelCentre.y);
  }

  /**
   * Given a bond length for a model, calculate the scale that will transform
   * this length to the on screen bond length in RendererModel.
   *
   * @param bondLength the average bond length of the model
   * @return the scale necessary to transform this to a screen bond
   */
  public double calculateScaleForBondLength(final Double bondLength) {
    // A NaN or zero average bond length (e.g. single atom, no bonds) cannot
    // be used as a divisor; fall back to the default scale.
    if (Double.isNaN(bondLength) || bondLength == 0) {
      return DEFAULT_SCALE;
    } else {
      return this.getModel().getParameter(
          BasicSceneGenerator.BondLength.class).getValue() / bondLength;
    }
  }

  /**
   * Renders a complete atom container: computes its bounds, derives the
   * scale, centres the drawing, runs every generator, and renders the
   * resulting diagram.
   *
   * @param atomContainer the molecule to render
   * @return the rendered output element
   */
  public T render(final IAtomContainer atomContainer) {
    this.boundBox = BoundsCalculator.calculateBounds(atomContainer);
    this.setScale(atomContainer);
    this.setDrawingCentre(new Point2d(this.getWidth() / 2,
        this.getHeight() / 2));
    final ElementGroup diagram = new ElementGroup();
    for (final IGenerator<IAtomContainer> generator : this.getGenerators()) {
      diagram.add(generator.generate(atomContainer, this.getModel()));
    }
    return this.render(diagram, atomContainer);
  }

  /**
   * Renders an element after centring the model on the atom container's
   * bounding box.
   *
   * @param element the rendering element to draw
   * @param atomContainer the molecule whose bounds define the model centre
   * @return the rendered output element
   */
  protected T render(final IRenderingElement element,
      final IAtomContainer atomContainer) {
    final Rectangle2D boundBox = BoundsCalculator
        .calculateBounds(atomContainer);
    this.setModelCentre(new Point2d(boundBox.getCenterX(), boundBox
        .getCenterY()));
    return this.render(element);
  }

  /**
   * Dispatches a rendering element to the type-specific render method,
   * unwrapping {@link MarkedElement} wrappers first. The colour and stroke
   * are saved before and restored after so nested renders cannot leak state.
   *
   * @param element the element to render
   * @return the rendered output element
   * @throws UnsupportedOperationException if the element type is not handled
   */
  protected T render(IRenderingElement element) {
    // save current colours/stroke
    final Color pColor = this.getColor();
    final Stroke pStroke = this.getStroke();
    T result;
    // generate the result
    if (element instanceof MarkedElement) {
      element = ((MarkedElement) element).element();
    }
    if (element instanceof WedgeLineElement) {
      result = this.render((WedgeLineElement) element);
    } else if (element instanceof LineElement) {
      result = this.render((LineElement) element);
    } else if (element instanceof ElementGroup) {
      result = this.render((ElementGroup) element);
    } else if (element instanceof AtomSymbolElement) {
      result = this.render((AtomSymbolElement) element);
    } else if (element instanceof OvalElement) {
      result = this.render((OvalElement) element);
    } else {
      throw new UnsupportedOperationException(
          "The rendering of " + element.getClass().getCanonicalName()
              + " is not supported.");
    }
    // restore the colours/strokes
    this.setColor(pColor);
    this.setStroke(pStroke);
    return result;
  }

  /**
   * @param element the wedge line to render
   * @return the rendered output element
   */
  protected abstract T render(WedgeLineElement element);

  /**
   * Renders a marked element. NOTE(review): this overload is never reached
   * from the dispatcher above, which unwraps MarkedElement before
   * dispatching; kept (returning {@code null}) for subclass overrides.
   *
   * @param element the marked element to render
   * @return {@code null} unless overridden
   */
  protected T render(MarkedElement element) {
    return null;
  }

  /**
   * @param element the line to render
   * @return the rendered output element
   */
  protected abstract T render(LineElement element);

  /**
   * @param element the element group to render
   * @return the rendered output element
   */
  protected abstract T render(ElementGroup element);

  /**
   * @param element the atom symbol to render
   * @return the rendered output element
   */
  protected abstract T render(AtomSymbolElement element);

  /**
   * @param element the oval to render
   * @return the rendered output element
   */
  protected abstract T render(OvalElement element);

  /**
   * Applies the current fill colour to an output element.
   *
   * @param element the output element to fill
   */
  protected abstract void setFill(T element);

  /**
   * Applies the current stroke to an output element.
   *
   * @param element the output element to stroke
   */
  protected abstract void setStroke(T element);

  /** @return drawing width: scaled bound-box width plus fixed 50px padding */
  protected final Double getWidth() {
    return this.boundBox.getWidth() * this.getScale() + 50;
  }

  /** @return drawing height: scaled bound-box height plus fixed 50px padding */
  protected final Double getHeight() {
    return this.boundBox.getHeight() * this.getScale() + 50;
  }
}
/*L
 * Copyright Oracle Inc
 *
 * Distributed under the OSI-approved BSD 3-Clause License.
 * See http://ncip.github.com/cadsr-cgmdr-nci-uk/LICENSE.txt for details.
 */
/*
 * eXist Open Source Native XML Database
 * Copyright (C) 2001-07 The eXist Project
 * http://exist-db.org
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
 *
 * $Id: JMXClient.java 7641 2008-04-20 19:39:58Z wolfgang_m $
 */
package org.exist.management.client;

import org.apache.avalon.excalibur.cli.CLArgsParser;
import org.apache.avalon.excalibur.cli.CLOption;
import org.apache.avalon.excalibur.cli.CLOptionDescriptor;
import org.apache.avalon.excalibur.cli.CLUtil;

import javax.management.Attribute;
import javax.management.AttributeList;
import javax.management.AttributeNotFoundException;
import javax.management.InstanceNotFoundException;
import javax.management.MBeanException;
import javax.management.MBeanServerConnection;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectName;
import javax.management.ReflectionException;
import javax.management.openmbean.CompositeData;
import javax.management.openmbean.TabularData;
import javax.management.remote.JMXConnector;
import javax.management.remote.JMXConnectorFactory;
import javax.management.remote.JMXServiceURL;

import java.io.IOException;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Command-line JMX client for an eXist database instance. Connects to the
 * server's RMI-based JMX agent and prints memory, cache, broker, lock and
 * sanity-check statistics to stdout.
 */
public class JMXClient {

    /** Connection to the remote MBean server; set by {@link #connect}. */
    private MBeanServerConnection connection;

    /** Database instance id, used to build the MBean object names. */
    private String instance;

    /**
     * @param instanceName the eXist database instance id (e.g. "exist")
     */
    public JMXClient(String instanceName) {
        this.instance = instanceName;
    }

    /**
     * Connects to the JMX agent at the given RMI address/port using the
     * built-in guest credentials.
     *
     * @param address host name of the server
     * @param port RMI registry port
     * @throws IOException if the connection cannot be established
     */
    public void connect(String address, int port) throws IOException {
        JMXServiceURL url = new JMXServiceURL(
                "service:jmx:rmi:///jndi/rmi://" + address + ":" + port + "/jmxrmi");
        Map<String, Object> env = new HashMap<>();
        String[] creds = {"guest", "guest"};
        env.put(JMXConnector.CREDENTIALS, creds);
        JMXConnector jmxc = JMXConnectorFactory.connect(url, env);
        connection = jmxc.getMBeanServerConnection();
        echo("Connected to MBean server.");
    }

    /**
     * Prints JVM heap usage (used/committed/max) from the platform Memory
     * MXBean.
     */
    public void memoryStats() {
        try {
            ObjectName name = new ObjectName("java.lang:type=Memory");
            CompositeData composite =
                    (CompositeData) connection.getAttribute(name, "HeapMemoryUsage");
            if (composite != null) {
                echo("\nMEMORY:");
                echo(String.format("Current heap: %,12d k        Committed memory: %,12d k",
                        ((Long) composite.get("used")) / 1024,
                        ((Long) composite.get("committed")) / 1024));
                echo(String.format("Max memory: %,12d k",
                        ((Long) composite.get("max")) / 1024));
            }
        } catch (Exception e) {
            error(e);
        }
    }

    /**
     * Prints instance-level statistics: reserved/cache memory, broker pool
     * counts, and the currently active broker threads.
     */
    public void instanceStats() {
        try {
            echo("\nINSTANCE:");
            ObjectName name = new ObjectName("org.exist.management." + instance + ":type=Database");
            Long memReserved = (Long) connection.getAttribute(name, "ReservedMem");
            echo(String.format("%25s: %10d k", "Reserved memory", memReserved.longValue() / 1024));
            Long memCache = (Long) connection.getAttribute(name, "CacheMem");
            echo(String.format("%25s: %10d k", "Cache memory", memCache.longValue() / 1024));
            Long memCollCache = (Long) connection.getAttribute(name, "CollectionCacheMem");
            echo(String.format("%25s: %10d k", "Collection cache memory", memCollCache.longValue() / 1024));

            String[] cols = {"MaxBrokers", "AvailableBrokers", "ActiveBrokers"};
            echo(String.format("\n%17s %17s %17s", cols[0], cols[1], cols[2]));
            AttributeList attrs = connection.getAttributes(name, cols);
            Object[] values = getValues(attrs);
            echo(String.format("%17d %17d %17d", values[0], values[1], values[2]));

            TabularData table =
                    (TabularData) connection.getAttribute(name, "ActiveBrokersMap");
            if (table.size() > 0)
                echo("\nCurrently active threads:");
            for (Object row : table.values()) {
                CompositeData data = (CompositeData) row;
                echo(String.format("\t%20s: %3d", data.get("owner"), data.get("referenceCount")));
            }
        } catch (Exception e) {
            error(e);
        }
    }

    /**
     * Prints the global cache limits plus a per-cache table of size, usage,
     * hit and fail counts.
     */
    public void cacheStats() {
        try {
            ObjectName name = new ObjectName("org.exist.management." + instance + ":type=CacheManager");
            String[] cols = {"MaxTotal", "CurrentSize"};
            AttributeList attrs = connection.getAttributes(name, cols);
            Object[] values = getValues(attrs);
            echo(String.format("\nCACHE [%8d pages max. / %8d pages allocated]", values[0], values[1]));

            Set<ObjectName> beans = connection.queryNames(
                    new ObjectName("org.exist.management." + instance + ":type=CacheManager.Cache,*"), null);
            cols = new String[] {"Type", "FileName", "Size", "Used", "Hits", "Fails"};
            echo(String.format("%10s %20s %10s %10s %10s %10s",
                    cols[0], cols[1], cols[2], cols[3], cols[4], cols[5]));
            for (ObjectName bean : beans) {
                attrs = connection.getAttributes(bean, cols);
                values = getValues(attrs);
                echo(String.format("%10s %20s %,10d %,10d %,10d %,10d",
                        values[0], values[1], values[2], values[3], values[4], values[5]));
            }
        } catch (IOException | MalformedObjectNameException
                | InstanceNotFoundException | ReflectionException e) {
            error(e);
        }
    }

    /**
     * Prints all threads currently waiting for a lock, together with lock
     * type/mode/id, the owner, and the read/write wait queues. Useful for
     * debugging deadlocks; empty during normal operation.
     */
    public void lockTable() {
        echo("\nList of threads currently waiting for a lock:");
        echo("-----------------------------------------------");
        try {
            TabularData table = (TabularData) connection.getAttribute(
                    new ObjectName("org.exist.management:type=LockManager"), "WaitingThreads");
            for (Object row : table.values()) {
                CompositeData data = (CompositeData) row;
                echo("Thread " + data.get("waitingThread"));
                echo(String.format("%20s: %s", "Lock type", data.get("lockType")));
                echo(String.format("%20s: %s", "Lock mode", data.get("lockMode")));
                echo(String.format("%20s: %s", "Lock id", data.get("id")));
                echo(String.format("%20s: %s", "Held by",
                        Arrays.toString((String[]) data.get("owner"))));
                String[] readers = (String[]) data.get("waitingForRead");
                if (readers.length > 0) {
                    echo(String.format("%20s: %s", "Wait for read", Arrays.toString(readers)));
                }
                String[] writers = (String[]) data.get("waitingForWrite");
                if (writers.length > 0) {
                    echo(String.format("%20s: %s", "Wait for write", Arrays.toString(writers)));
                }
            }
        } catch (MBeanException | AttributeNotFoundException | InstanceNotFoundException
                | ReflectionException | IOException | MalformedObjectNameException e) {
            error(e);
        }
    }

    /**
     * Prints the result of the last database sanity check: status, start/end
     * times, duration, and any reported errors.
     */
    public void sanityReport() {
        echo("\nSanity report");
        echo("-----------------------------------------------");
        try {
            ObjectName name = new ObjectName(
                    "org.exist.management." + instance + ".tasks:type=SanityReport");
            String status = (String) connection.getAttribute(name, "Status");
            Date lastCheckStart = (Date) connection.getAttribute(name, "LastCheckStart");
            Date lastCheckEnd = (Date) connection.getAttribute(name, "LastCheckEnd");
            echo(String.format("%22s: %s", "Status", status));
            echo(String.format("%22s: %s", "Last check start", lastCheckStart));
            echo(String.format("%22s: %s", "Last check end", lastCheckEnd));
            if (lastCheckStart != null && lastCheckEnd != null)
                echo(String.format("%22s: %dms", "Check took",
                        (lastCheckEnd.getTime() - lastCheckStart.getTime())));

            TabularData table = (TabularData) connection.getAttribute(name, "Errors");
            for (Object row : table.values()) {
                CompositeData data = (CompositeData) row;
                echo(String.format("%22s: %s", "Error code", data.get("errcode")));
                echo(String.format("%22s: %s", "Description", data.get("description")));
            }
        } catch (MBeanException | AttributeNotFoundException | InstanceNotFoundException
                | ReflectionException | IOException | MalformedObjectNameException e) {
            // consistent with the other stats methods: report via error()
            error(e);
        }
    }

    /**
     * Extracts the plain values from an {@link AttributeList} in order.
     *
     * @param attribs attribute list returned by getAttributes()
     * @return the attribute values, in list order
     */
    private Object[] getValues(AttributeList attribs) {
        Object[] v = new Object[attribs.size()];
        for (int i = 0; i < attribs.size(); i++) {
            v[i] = ((Attribute) attribs.get(i)).getValue();
        }
        return v;
    }

    /** Writes a message to stdout. */
    private void echo(String msg) {
        System.out.println(msg);
    }

    /** Writes an error (message + stack trace) to stderr. */
    private void error(Exception e) {
        System.err.println("ERROR: " + e.getMessage());
        e.printStackTrace();
    }

    private final static int HELP_OPT = 'h';
    private final static int CACHE_OPT = 'c';
    private final static int DB_OPT = 'd';
    private final static int WAIT_OPT = 'w';
    private final static int LOCK_OPT = 'l';
    private final static int MEMORY_OPT = 'm';
    private final static int PORT_OPT = 'p';
    private final static int INSTANCE_OPT = 'i';
    private final static int ADDRESS_OPT = 'a';
    private final static int SANITY_OPT = 's';

    /** Command-line option descriptors for the excalibur CLI parser. */
    private final static CLOptionDescriptor OPTIONS[] = new CLOptionDescriptor[] {
        new CLOptionDescriptor("help", CLOptionDescriptor.ARGUMENT_DISALLOWED, HELP_OPT,
                "print help on command line options and exit."),
        new CLOptionDescriptor("cache", CLOptionDescriptor.ARGUMENT_DISALLOWED, CACHE_OPT,
                "displays server statistics on cache and memory usage."),
        new CLOptionDescriptor("db", CLOptionDescriptor.ARGUMENT_DISALLOWED, DB_OPT,
                "display general info about the db instance."),
        new CLOptionDescriptor("wait", CLOptionDescriptor.ARGUMENT_REQUIRED, WAIT_OPT,
                "while displaying server statistics: keep retrieving statistics, but wait the "
                        + "specified number of seconds between calls."),
        new CLOptionDescriptor("locks", CLOptionDescriptor.ARGUMENT_DISALLOWED, LOCK_OPT,
                "lock manager: display locking information on all threads currently waiting for a lock on a resource "
                        + "or collection. Useful to debug deadlocks. During normal operation, the list will usually be empty (means: no "
                        + "blocked threads)."),
        new CLOptionDescriptor("memory", CLOptionDescriptor.ARGUMENT_DISALLOWED, MEMORY_OPT,
                "display info on free and total memory. Can be combined with other parameters."),
        new CLOptionDescriptor("port", CLOptionDescriptor.ARGUMENT_REQUIRED, PORT_OPT,
                "RMI port of the server"),
        new CLOptionDescriptor("address", CLOptionDescriptor.ARGUMENT_REQUIRED, ADDRESS_OPT,
                "RMI address of the server"),
        new CLOptionDescriptor("instance", CLOptionDescriptor.ARGUMENT_REQUIRED, INSTANCE_OPT,
                "the ID of the database instance to connect to"),
        new CLOptionDescriptor("report", CLOptionDescriptor.ARGUMENT_DISALLOWED, SANITY_OPT,
                "retrieve sanity check report from the db")
    };

    private final static int MODE_STATS = 0;
    private final static int MODE_LOCKS = 1;

    /**
     * Entry point: parses the command line, connects to the server, then
     * prints the requested statistics — once, or repeatedly when --wait is
     * given.
     */
    public static void main(String[] args) {
        CLArgsParser optParser = new CLArgsParser(args, OPTIONS);
        if (optParser.getErrorString() != null) {
            System.err.println("ERROR: " + optParser.getErrorString());
            return;
        }
        String dbInstance = "exist";
        long waitTime = 0;
        List<?> opt = optParser.getArguments();
        int size = opt.size();
        CLOption option;
        int mode = -1;
        int port = 1099;
        String address = "localhost";
        boolean displayMem = false;
        boolean displayInstance = false;
        boolean displayReport = false;
        for (int i = 0; i < size; i++) {
            option = (CLOption) opt.get(i);
            switch (option.getId()) {
                case HELP_OPT:
                    System.out.println(CLUtil.describeOptions(OPTIONS).toString());
                    return;
                case WAIT_OPT:
                    try {
                        // multiply as long to avoid int overflow for large waits
                        waitTime = Integer.parseInt(option.getArgument()) * 1000L;
                    } catch (NumberFormatException e) {
                        System.err.println("option -w|--wait requires a numeric argument");
                        return;
                    }
                    break;
                case CACHE_OPT:
                    mode = MODE_STATS;
                    break;
                case LOCK_OPT:
                    mode = MODE_LOCKS;
                    break;
                case PORT_OPT:
                    try {
                        port = Integer.parseInt(option.getArgument());
                    } catch (NumberFormatException e) {
                        System.err.println("option -p|--port requires a numeric argument");
                        return;
                    }
                    break;
                case ADDRESS_OPT:
                    // plain string argument; no parsing needed (the original
                    // dead NumberFormatException catch was removed)
                    address = option.getArgument();
                    break;
                case MEMORY_OPT:
                    displayMem = true;
                    break;
                case DB_OPT:
                    displayInstance = true;
                    break;
                case INSTANCE_OPT:
                    dbInstance = option.getArgument();
                    break;
                case SANITY_OPT:
                    displayReport = true;
                    break;
            }
        }
        try {
            JMXClient stats = new JMXClient(dbInstance);
            stats.connect(address, port);
            stats.memoryStats();
            while (true) {
                switch (mode) {
                    case MODE_STATS:
                        stats.cacheStats();
                        break;
                    case MODE_LOCKS:
                        stats.lockTable();
                        break;
                }
                if (displayInstance) stats.instanceStats();
                if (displayMem) stats.memoryStats();
                if (displayReport) stats.sanityReport();
                if (waitTime > 0) {
                    synchronized (stats) {
                        try {
                            stats.wait(waitTime);
                        } catch (InterruptedException e) {
                            System.err.println("INTERRUPTED: " + e.getMessage());
                        }
                    }
                } else
                    return;
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.builder;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.camel.CamelContext;
import org.apache.camel.Endpoint;
import org.apache.camel.Expression;
import org.apache.camel.NoSuchEndpointException;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.model.language.ExchangePropertyExpression;
import org.apache.camel.model.language.HeaderExpression;
import org.apache.camel.model.language.JoorExpression;
import org.apache.camel.model.language.JsonPathExpression;
import org.apache.camel.model.language.XPathExpression;
import org.apache.camel.support.builder.Namespaces;
import org.apache.camel.util.ObjectHelper;

/**
 * Base class for implementation inheritance for different clauses in the
 * <a href="http://camel.apache.org/dsl.html">Java DSL</a>
 */
public abstract class BuilderSupport {

    private CamelContext context;
    private ErrorHandlerBuilder errorHandlerBuilder;

    protected BuilderSupport() {
    }

    protected BuilderSupport(CamelContext context) {
        this.context = context;
    }

    // Builder methods
    // -------------------------------------------------------------------------

    /**
     * Returns a value builder for the given header
     */
    public ValueBuilder header(String name) {
        Expression exp = new HeaderExpression(name);
        return new ValueBuilder(exp);
    }

    /**
     * Returns a value builder for the given exchange property
     */
    public ValueBuilder exchangeProperty(String name) {
        Expression exp = new ExchangePropertyExpression(name);
        return new ValueBuilder(exp);
    }

    /**
     * Returns a predicate and value builder for the inbound body on an exchange
     */
    public ValueBuilder body() {
        return Builder.body();
    }

    /**
     * Returns a predicate and value builder for the inbound message body as a specific type
     */
    public <T> ValueBuilder bodyAs(Class<T> type) {
        return Builder.bodyAs(type);
    }

    /**
     * Returns a value builder for the given system property
     */
    public ValueBuilder systemProperty(String name) {
        return Builder.systemProperty(name);
    }

    /**
     * Returns a value builder for the given system property
     */
    public ValueBuilder systemProperty(String name, String defaultValue) {
        return Builder.systemProperty(name, defaultValue);
    }

    /**
     * Returns a constant expression value builder
     */
    public ValueBuilder constant(Object value) {
        return Builder.constant(value);
    }

    /**
     * Returns a JOOR expression value builder
     */
    public ValueBuilder joor(String value) {
        JoorExpression exp = new JoorExpression(value);
        return new ValueBuilder(exp);
    }

    /**
     * Returns a JOOR expression value builder
     */
    public ValueBuilder joor(String value, Class<?> resultType) {
        JoorExpression exp = new JoorExpression(value);
        exp.setResultType(resultType);
        return new ValueBuilder(exp);
    }

    /**
     * Returns a JSonPath expression value builder
     *
     * @param value      The JSonPath expression
     * @param resultType The result type that the JSonPath expression will return.
     */
    public ValueBuilder jsonpath(String value, Class<?> resultType) {
        JsonPathExpression exp = new JsonPathExpression(value);
        exp.setResultType(resultType);
        return new ValueBuilder(exp);
    }

    /**
     * Returns a simple expression value builder
     */
    public SimpleBuilder simple(String value) {
        return SimpleBuilder.simple(value);
    }

    /**
     * Returns a simple expression value builder
     */
    public SimpleBuilder simple(String value, Class<?> resultType) {
        return SimpleBuilder.simple(value, resultType);
    }

    /**
     * Returns a simple expression value builder, using String.format style
     */
    public SimpleBuilder simpleF(String format, Object... values) {
        return SimpleBuilder.simpleF(format, values);
    }

    /**
     * Returns a simple expression value builder, using String.format style
     */
    public SimpleBuilder simpleF(String format, Class<?> resultType, Object... values) {
        return SimpleBuilder.simpleF(format, resultType, values);
    }

    /**
     * Returns a xpath expression value builder
     *
     * @param  value the XPath expression
     * @return       the builder
     */
    public ValueBuilder xpath(String value) {
        return xpath(value, null, null);
    }

    /**
     * Returns a xpath expression value builder
     *
     * @param  value      the XPath expression
     * @param  resultType the result type that the XPath expression will return.
     * @return            the builder
     */
    public ValueBuilder xpath(String value, Class<?> resultType) {
        return xpath(value, resultType, null);
    }

    /**
     * Returns a xpath expression value builder
     *
     * @param  value      the XPath expression
     * @param  namespaces namespace mappings
     * @return            the builder
     */
    public ValueBuilder xpath(String value, Namespaces namespaces) {
        return xpath(value, null, namespaces);
    }

    /**
     * Returns a xpath expression value builder
     *
     * @param  value      the XPath expression
     * @param  resultType the result type that the XPath expression will return.
     * @param  namespaces namespace mappings
     * @return            the builder
     */
    public ValueBuilder xpath(String value, Class<?> resultType, Namespaces namespaces) {
        // the value may contain property placeholders as it may be used
        // directly from Java DSL
        try {
            value = getContext().resolvePropertyPlaceholders(value);
        } catch (Exception e) {
            throw RuntimeCamelException.wrapRuntimeCamelException(e);
        }
        XPathExpression exp = new XPathExpression(value);
        exp.setResultType(resultType);
        if (namespaces != null) {
            exp.setNamespaces(namespaces.getNamespaces());
        }
        return new ValueBuilder(exp);
    }

    /**
     * Returns a <a href="http://camel.apache.org/bean-language.html">method call expression</a> value builder
     * <p/>
     * This method accepts dual parameters. Either an bean instance or a reference to a bean (String).
     *
     * @param  beanOrBeanRef either an instanceof a bean or a reference to bean to lookup in the Registry
     * @return               the builder
     */
    public ValueBuilder method(Object beanOrBeanRef) {
        return method(beanOrBeanRef, null);
    }

    /**
     * Returns a <a href="http://camel.apache.org/bean-language.html">method call expression</a> value builder
     * <p/>
     * This method accepts dual parameters. Either an bean instance or a reference to a bean (String).
     *
     * @param  beanOrBeanRef either an instanceof a bean or a reference to bean to lookup in the Registry
     * @param  method        name of method to invoke
     * @return               the builder
     */
    public ValueBuilder method(Object beanOrBeanRef, String method) {
        return Builder.bean(beanOrBeanRef, method);
    }

    /**
     * Returns a <a href="http://camel.apache.org/bean-language.html">method call expression</a> value builder
     *
     * @param  beanType the Class of the bean which we want to invoke
     * @return          the builder
     */
    public ValueBuilder method(Class<?> beanType) {
        return Builder.bean(beanType);
    }

    /**
     * Returns a <a href="http://camel.apache.org/bean-language.html">method call expression</a> value builder
     *
     * @param  beanType the Class of the bean which we want to invoke
     * @param  method   name of method to invoke
     * @return          the builder
     */
    public ValueBuilder method(Class<?> beanType, String method) {
        return Builder.bean(beanType, method);
    }

    /**
     * Returns an expression value builder that replaces all occurrences of the regular expression with the given
     * replacement
     */
    public ValueBuilder regexReplaceAll(Expression content, String regex, String replacement) {
        return Builder.regexReplaceAll(content, regex, replacement);
    }

    /**
     * Returns an expression value builder that replaces all occurrences of the regular expression with the given
     * replacement
     */
    public ValueBuilder regexReplaceAll(Expression content, String regex, Expression replacement) {
        return Builder.regexReplaceAll(content, regex, replacement);
    }

    /**
     * Returns a exception expression value builder
     */
    public ValueBuilder exceptionMessage() {
        return Builder.exceptionMessage();
    }

    /**
     * Resolves the given URI to an endpoint
     *
     * @param  uri                     the uri to resolve
     * @throws NoSuchEndpointException if the endpoint URI could not be resolved
     * @return                         the endpoint
     */
    public Endpoint endpoint(String uri) throws NoSuchEndpointException {
        ObjectHelper.notNull(uri, "uri");
        Endpoint endpoint = getContext().getEndpoint(uri);
        if (endpoint == null) {
            throw new NoSuchEndpointException(uri);
        }
        return endpoint;
    }

    /**
     * Resolves the given URI to an endpoint of the specified type
     *
     * @param  uri                     the uri to resolve
     * @param  type                    the excepted type of the endpoint
     * @throws NoSuchEndpointException if the endpoint URI could not be resolved
     * @return                         the endpoint
     */
    public <T extends Endpoint> T endpoint(String uri, Class<T> type) throws NoSuchEndpointException {
        ObjectHelper.notNull(uri, "uri");
        T endpoint = getContext().getEndpoint(uri, type);
        if (endpoint == null) {
            throw new NoSuchEndpointException(uri);
        }
        return endpoint;
    }

    /**
     * Resolves the list of URIs into a list of {@link Endpoint} instances
     *
     * @param  uris                    list of endpoints to resolve
     * @throws NoSuchEndpointException if an endpoint URI could not be resolved
     * @return                         list of endpoints
     */
    public List<Endpoint> endpoints(String... uris) throws NoSuchEndpointException {
        List<Endpoint> endpoints = new ArrayList<>();
        for (String uri : uris) {
            endpoints.add(endpoint(uri));
        }
        return endpoints;
    }

    /**
     * Helper method to create a list of {@link Endpoint} instances
     *
     * @param  endpoints endpoints
     * @return           list of the given endpoints
     */
    public List<Endpoint> endpoints(Endpoint... endpoints) {
        // copy constructor instead of create-then-addAll
        return new ArrayList<>(Arrays.asList(endpoints));
    }

    /**
     * Creates a default <a href="http://camel.apache.org/error-handler.html">error handler</a>.
     *
     * @return the builder
     */
    public DefaultErrorHandlerBuilder defaultErrorHandler() {
        return new DefaultErrorHandlerBuilder();
    }

    /**
     * Creates a disabled <a href="http://camel.apache.org/error-handler.html">error handler</a> for removing the
     * default error handler
     *
     * @return the builder
     */
    public NoErrorHandlerBuilder noErrorHandler() {
        return new NoErrorHandlerBuilder();
    }

    /**
     * <a href="http://camel.apache.org/dead-letter-channel.html">Dead Letter Channel EIP:</a> is a error handler for
     * handling messages that could not be delivered to it's intended destination.
     *
     * @param  deadLetterUri uri to the dead letter endpoint storing dead messages
     * @return               the builder
     */
    public DeadLetterChannelBuilder deadLetterChannel(String deadLetterUri) {
        return deadLetterChannel(endpoint(deadLetterUri));
    }

    /**
     * <a href="http://camel.apache.org/dead-letter-channel.html">Dead Letter Channel EIP:</a> is a error handler for
     * handling messages that could not be delivered to it's intended destination.
     *
     * @param  deadLetterEndpoint dead letter endpoint storing dead messages
     * @return                    the builder
     */
    public DeadLetterChannelBuilder deadLetterChannel(Endpoint deadLetterEndpoint) {
        return new DeadLetterChannelBuilder(deadLetterEndpoint);
    }

    // Properties
    // -------------------------------------------------------------------------

    public CamelContext getContext() {
        return context;
    }

    public void setContext(CamelContext context) {
        ObjectHelper.notNull(context, "CamelContext", this);
        this.context = context;
    }

    public ErrorHandlerBuilder getErrorHandlerBuilder() {
        // lazily created on first access
        if (errorHandlerBuilder == null) {
            errorHandlerBuilder = createErrorHandlerBuilder();
        }
        return errorHandlerBuilder;
    }

    protected ErrorHandlerBuilder createErrorHandlerBuilder() {
        return new DefaultErrorHandlerBuilder();
    }

    /**
     * Sets the error handler to use with processors created by this builder
     */
    public void setErrorHandlerBuilder(ErrorHandlerBuilder errorHandlerBuilder) {
        this.errorHandlerBuilder = errorHandlerBuilder;
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.gora.store; import java.io.IOException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.gora.GoraTestDriver; import org.apache.gora.examples.generated.Employee; import org.apache.gora.examples.generated.WebPage; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; /** * A base class for {@link DataStore} tests. This is just a convenience * class, which actually only uses {@link DataStoreTestUtil} methods to * run the tests. Not all test cases can extend this class (like TestHBaseStore), * so all test logic should reside in DataStoreTestUtil class. 
*/ public abstract class DataStoreTestBase { public static final Logger log = LoggerFactory.getLogger(DataStoreTestBase.class); protected static GoraTestDriver testDriver; protected DataStore<String,Employee> employeeStore; protected DataStore<String,WebPage> webPageStore; /** junit annoyingly forces BeforeClass to be static, so this method * should be called from a static block */ protected static void setTestDriver(GoraTestDriver driver) { testDriver = driver; } private static boolean setUpClassCalled = false; @BeforeClass public static void setUpClass() throws Exception { if(testDriver != null && !setUpClassCalled) { log.info("setting up class"); testDriver.setUpClass(); setUpClassCalled = true; } } @AfterClass public static void tearDownClass() throws Exception { if(testDriver != null) { log.info("tearing down class"); testDriver.tearDownClass(); } } @Before public void setUp() throws Exception { //There is an issue in JUnit 4 tests in Eclipse where TestSqlStore static //methods are not called BEFORE setUpClass. I think this is a bug in //JUnitRunner in Eclipse. Below is a workaround for that problem. 
if(!setUpClassCalled) { setUpClass(); } log.info("setting up test"); if(testDriver != null) { employeeStore = testDriver.createDataStore(String.class, Employee.class); webPageStore = testDriver.createDataStore(String.class, WebPage.class); testDriver.setUp(); } } @After public void tearDown() throws Exception { log.info("tearing down test"); if(testDriver != null) { testDriver.tearDown(); } //employeeStore.close(); //webPageStore.close(); } @Test public void testNewInstance() throws Exception { log.info("test method: testNewInstance"); DataStoreTestUtil.testNewPersistent(employeeStore); } @Test public void testCreateSchema() throws Exception { log.info("test method: testCreateSchema"); DataStoreTestUtil.testCreateEmployeeSchema(employeeStore); assertSchemaExists("Employee"); } // Override this to assert that schema is created correctly public void assertSchemaExists(String schemaName) throws Exception { } @Test public void testAutoCreateSchema() throws Exception { log.info("test method: testAutoCreateSchema"); DataStoreTestUtil.testAutoCreateSchema(employeeStore); assertAutoCreateSchema(); } public void assertAutoCreateSchema() throws Exception { assertSchemaExists("Employee"); } @Test public void testTruncateSchema() throws Exception { log.info("test method: testTruncateSchema"); DataStoreTestUtil.testTruncateSchema(webPageStore); assertSchemaExists("WebPage"); } @Test public void testDeleteSchema() throws Exception { log.info("test method: testDeleteSchema"); DataStoreTestUtil.testDeleteSchema(webPageStore); } @Test public void testSchemaExists() throws Exception { log.info("test method: testSchemaExists"); DataStoreTestUtil.testSchemaExists(webPageStore); } @Test public void testPut() throws Exception { log.info("test method: testPut"); Employee employee = DataStoreTestUtil.testPutEmployee(employeeStore); assertPut(employee); } public void assertPut(Employee employee) throws IOException { } @Test public void testPutNested() throws Exception { log.info("test 
method: testPutNested"); DataStoreTestUtil.testPutNested(webPageStore); } @Test public void testPutArray() throws Exception { log.info("test method: testPutArray"); DataStoreTestUtil.testPutArray(webPageStore); assertPutArray(); } public void assertPutArray() throws IOException { } @Test public void testPutBytes() throws Exception { log.info("test method: testPutBytes"); byte[] contentBytes = DataStoreTestUtil.testPutBytes(webPageStore); assertPutBytes(contentBytes); } public void assertPutBytes(byte[] contentBytes) throws IOException { } @Test public void testPutMap() throws Exception { log.info("test method: testPutMap"); DataStoreTestUtil.testPutMap(webPageStore); assertPutMap(); } public void assertPutMap() throws IOException { } @Test public void testPutMixedMaps() throws Exception { log.info("Testing put of map objects with different union data types"); DataStoreTestUtil.testPutMixedMapTypes(webPageStore); } @Test public void testUpdate() throws Exception { log.info("test method: testUpdate"); DataStoreTestUtil.testUpdateEmployee(employeeStore); DataStoreTestUtil.testUpdateWebPagePutToArray(webPageStore); DataStoreTestUtil.testUpdateWebPagePutToNotNullableMap(webPageStore); DataStoreTestUtil.testUpdateWebPagePutToNullableMap(webPageStore); DataStoreTestUtil.testUpdateWebPageRemoveMapEntry(webPageStore); DataStoreTestUtil.testUpdateWebPageRemoveField(webPageStore); } public void testEmptyUpdate() throws Exception { DataStoreTestUtil.testEmptyUpdateEmployee(employeeStore); } @Test public void testExists() throws Exception { log.info("test method: testExists"); DataStoreTestUtil.testExistsEmployee(employeeStore); } @Test public void testBenchmarkExists() throws Exception { log.info("test method: testBenchmarkExists"); DataStoreTestUtil.testBenchmarkGetExists(employeeStore); } @Test public void testGet() throws Exception { log.info("test method: testGet"); DataStoreTestUtil.testGetEmployee(employeeStore); } @Test /** * Tests put and get a record with a nested 
recursive record * Employee with a boss (nested). * @throws IOException * @throws Exception */ public void testGetRecursive() throws Exception { log.info("test method: testGetRecursive") ; DataStoreTestUtil.testGetEmployeeRecursive(employeeStore) ; } @Test /** * Tests put and get a record with a double nested recursive record * Employee with a boss (nested). * @throws IOException * @throws Exception */ public void testGetDoubleRecursive() throws Exception { log.info("test method: testGetDoubleRecursive") ; DataStoreTestUtil.testGetEmployeeDoubleRecursive(employeeStore) ; } @Test /** * Tests put and get of an {@link org.apache.gora.examples.generated.Employee} * record with a nested {@link org.apache.gora.examples.generated.WegPage} record (not recursive) * the webpage of an Employee. * @throws IOException * @throws Exception */ public void testGetNested() throws Exception { log.info("test method: testGetNested") ; DataStoreTestUtil.testGetEmployeeNested(employeeStore) ; } @Test /** * Tests put and get a record with a 3 types union, and * having the value of the 3rd type. 
* @throws IOException * @throws Exception */ public void testGet3UnionField() throws Exception { log.info("test method: testGet3UnionField") ; DataStoreTestUtil.testGetEmployee3UnionField(employeeStore) ; } @Test public void testGetWithFields() throws Exception { log.info("test method: testGetWithFields"); DataStoreTestUtil.testGetEmployeeWithFields(employeeStore); } @Test public void testGetWebPage() throws Exception { log.info("test method: testGetWebPage"); DataStoreTestUtil.testGetWebPage(webPageStore); } @Test public void testGetWebPageDefaultFields() throws Exception { log.info("test method: testGetWebPageDefaultFields"); DataStoreTestUtil.testGetWebPageDefaultFields(webPageStore); } @Test public void testGetNonExisting() throws Exception { log.info("test method: testGetNonExisting"); DataStoreTestUtil.testGetEmployeeNonExisting(employeeStore); } @Test public void testQuery() throws Exception { log.info("test method: testQuery"); DataStoreTestUtil.testQueryWebPages(webPageStore); } @Test public void testQueryStartKey() throws Exception { log.info("test method: testQueryStartKey"); DataStoreTestUtil.testQueryWebPageStartKey(webPageStore); } @Test public void testQueryEndKey() throws Exception { log.info("test method: testQueryEndKey"); DataStoreTestUtil.testQueryWebPageEndKey(webPageStore); } @Test public void testQueryKeyRange() throws Exception { log.info("test method: testQueryKetRange"); DataStoreTestUtil.testQueryWebPageKeyRange(webPageStore); } @Test public void testQueryWebPageSingleKey() throws Exception { log.info("test method: testQueryWebPageSingleKey"); DataStoreTestUtil.testQueryWebPageSingleKey(webPageStore); } @Test public void testQueryWebPageSingleKeyDefaultFields() throws Exception { log.info("test method: testQuerySingleKeyDefaultFields"); DataStoreTestUtil.testQueryWebPageSingleKeyDefaultFields(webPageStore); } @Test public void testQueryWebPageQueryEmptyResults() throws Exception { log.info("test method: testQueryEmptyResults"); 
DataStoreTestUtil.testQueryWebPageEmptyResults(webPageStore); } @Test public void testDelete() throws Exception { log.info("test method: testDelete"); DataStoreTestUtil.testDelete(webPageStore); } @Test public void testDeleteByQuery() throws Exception { log.info("test method: testDeleteByQuery"); DataStoreTestUtil.testDeleteByQuery(webPageStore); } @Test public void testDeleteByQueryFields() throws Exception { log.info("test method: testQueryByQueryFields"); DataStoreTestUtil.testDeleteByQueryFields(webPageStore); } @Test public void testGetPartitions() throws Exception { log.info("test method: testGetPartitions"); DataStoreTestUtil.testGetPartitions(webPageStore); } @Test public void testResultSize() throws Exception { log.info("test method: testResultSize"); DataStoreTestUtil.testResultSizeWebPages(webPageStore); } @Test public void testResultSizeStartKey() throws Exception { log.info("test method: testResultSizeStartKey"); DataStoreTestUtil.testResultSizeWebPagesStartKey(webPageStore); } @Test public void testResultSizeEndKey() throws Exception { log.info("test method: testResultSizeEndKey"); DataStoreTestUtil.testResultSizeWebPagesEndKey(webPageStore); } @Test public void testResultSizeKeyRange() throws Exception { log.info("test method: testResultSizeKeyRange"); DataStoreTestUtil.testResultSizeWebPagesKeyRange(webPageStore); } @Test public void testResultSizeWithLimit() throws Exception { log.info("test method: testResultSizeWithLimit"); DataStoreTestUtil.testResultSizeWebPagesWithLimit(webPageStore); } @Test public void testResultSizeStartKeyWithLimit() throws Exception { log.info("test method: testResultSizeStartKeyWithLimit"); DataStoreTestUtil.testResultSizeWebPagesStartKeyWithLimit(webPageStore); } @Test public void testResultSizeEndKeyWithLimit() throws Exception { log.info("test method: testResultSizeEndKeyWithLimit"); DataStoreTestUtil.testResultSizeWebPagesEndKeyWithLimit(webPageStore); } @Test public void testResultSizeKeyRangeWithLimit() throws 
Exception { log.info("test method: testResultSizeKeyRangeWithLimit"); DataStoreTestUtil.testResultSizeWebPagesKeyRangeWithLimit(webPageStore); } @Test public void testObjectFieldValue() throws Exception { //GORA-538 log.info("test method: testObjectFieldValue"); DataStoreTestUtil.testObjectFieldValue(employeeStore); } }
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/contactcenterinsights/v1/contact_center_insights.proto
// NOTE(review): machine-generated protobuf message class; do not hand-modify —
// regenerate from the .proto instead.

package com.google.cloud.contactcenterinsights.v1;

/**
 * The request to list analyses.
 *
 * <p>Protobuf type {@code google.cloud.contactcenterinsights.v1.ListAnalysesRequest}
 */
public final class ListAnalysesRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.contactcenterinsights.v1.ListAnalysesRequest)
    ListAnalysesRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ListAnalysesRequest.newBuilder() to construct.
  private ListAnalysesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default instance: all string fields empty, page size zero.
  private ListAnalysesRequest() {
    parent_ = "";
    pageToken_ = "";
    filter_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListAnalysesRequest();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor (older protobuf-java generated style):
  // reads tags until EOF, preserving unrecognized fields in unknownFields.
  private ListAnalysesRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10: // field 1 (parent), wire type 2
            {
              java.lang.String s = input.readStringRequireUtf8();
              parent_ = s;
              break;
            }
          case 16: // field 2 (page_size), wire type 0
            {
              pageSize_ = input.readInt32();
              break;
            }
          case 26: // field 3 (page_token), wire type 2
            {
              java.lang.String s = input.readStringRequireUtf8();
              pageToken_ = s;
              break;
            }
          case 34: // field 4 (filter), wire type 2
            {
              java.lang.String s = input.readStringRequireUtf8();
              filter_ = s;
              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.contactcenterinsights.v1.ContactCenterInsightsProto
        .internal_static_google_cloud_contactcenterinsights_v1_ListAnalysesRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.contactcenterinsights.v1.ContactCenterInsightsProto
        .internal_static_google_cloud_contactcenterinsights_v1_ListAnalysesRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest.class,
            com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest.Builder.class);
  }

  public static final int PARENT_FIELD_NUMBER = 1;
  // Holds either a String or a lazily-decoded ByteString (standard protobuf caching).
  private volatile java.lang.Object parent_;

  /**
   * Required. The parent resource of the analyses.
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }

  /**
   * Required. The parent resource of the analyses.
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int PAGE_SIZE_FIELD_NUMBER = 2;
  private int pageSize_;

  /**
   * The maximum number of analyses to return in the response. If this
   * value is zero, the service will select a default size. A call might return
   * fewer objects than requested. A non-empty `next_page_token` in the response
   * indicates that more data is available.
   *
   * @return The pageSize.
   */
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }

  public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
  private volatile java.lang.Object pageToken_;

  /**
   * The value returned by the last `ListAnalysesResponse`; indicates
   * that this is a continuation of a prior `ListAnalyses` call and
   * the system should return the next page of data.
   *
   * @return The pageToken.
   */
  @java.lang.Override
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s;
      return s;
    }
  }

  /**
   * The value returned by the last `ListAnalysesResponse`.
   *
   * @return The bytes for pageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int FILTER_FIELD_NUMBER = 4;
  private volatile java.lang.Object filter_;

  /**
   * A filter to reduce results to a specific subset. Useful for querying
   * conversations with specific properties.
   *
   * @return The filter.
   */
  @java.lang.Override
  public java.lang.String getFilter() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      filter_ = s;
      return s;
    }
  }

  /**
   * A filter to reduce results to a specific subset.
   *
   * @return The bytes for filter.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getFilterBytes() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      filter_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes only non-default fields, per proto3 semantics.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest other =
        (com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest) obj;

    if (!getParent().equals(other.getParent())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getFilter().equals(other.getFilter())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Standard static parse entry points for every supported input type.
  public static com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * The request to list analyses.
   *
   * <p>Protobuf type {@code google.cloud.contactcenterinsights.v1.ListAnalysesRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.contactcenterinsights.v1.ListAnalysesRequest)
      com.google.cloud.contactcenterinsights.v1.ListAnalysesRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.contactcenterinsights.v1.ContactCenterInsightsProto
          .internal_static_google_cloud_contactcenterinsights_v1_ListAnalysesRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.contactcenterinsights.v1.ContactCenterInsightsProto
          .internal_static_google_cloud_contactcenterinsights_v1_ListAnalysesRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest.class,
              com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest.Builder.class);
    }

    // Construct using com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      parent_ = "";

      pageSize_ = 0;

      pageToken_ = "";

      filter_ = "";

      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.contactcenterinsights.v1.ContactCenterInsightsProto
          .internal_static_google_cloud_contactcenterinsights_v1_ListAnalysesRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest
        getDefaultInstanceForType() {
      return com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest build() {
      com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest buildPartial() {
      com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest result =
          new com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest(this);
      result.parent_ = parent_;
      result.pageSize_ = pageSize_;
      result.pageToken_ = pageToken_;
      result.filter_ = filter_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest) {
        return mergeFrom((com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Merges only non-default fields from `other`, per proto3 merge semantics.
    public Builder mergeFrom(com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest other) {
      if (other == com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        onChanged();
      }
      if (!other.getFilter().isEmpty()) {
        filter_ = other.filter_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage =
            (com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private java.lang.Object parent_ = "";

    /**
     * Required. The parent resource of the analyses.
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * Required. The parent resource of the analyses.
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * Required. The parent resource of the analyses.
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }

      parent_ = value;
      onChanged();
      return this;
    }

    /**
     * Required. The parent resource of the analyses.
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      onChanged();
      return this;
    }

    /**
     * Required. The parent resource of the analyses.
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);

      parent_ = value;
      onChanged();
      return this;
    }

    private int pageSize_;

    /**
     * The maximum number of analyses to return in the response. If this
     * value is zero, the service will select a default size. A call might return
     * fewer objects than requested. A non-empty `next_page_token` in the response
     * indicates that more data is available.
     *
     * @return The pageSize.
     */
    @java.lang.Override
    public int getPageSize() {
      return pageSize_;
    }

    /**
     * The maximum number of analyses to return in the response.
     *
     * @param value The pageSize to set.
     * @return This builder for chaining.
     */
    public Builder setPageSize(int value) {

      pageSize_ = value;
      onChanged();
      return this;
    }

    /**
     * The maximum number of analyses to return in the response.
     *
     * @return This builder for chaining.
     */
    public Builder clearPageSize() {

      pageSize_ = 0;
      onChanged();
      return this;
    }

    private java.lang.Object pageToken_ = "";

    /**
     * The value returned by the last `ListAnalysesResponse`; indicates
     * that this is a continuation of a prior `ListAnalyses` call and
     * the system should return the next page of data.
     *
     * @return The pageToken.
     */
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * The value returned by the last `ListAnalysesResponse`.
     *
     * @return The bytes for pageToken.
     */
    public com.google.protobuf.ByteString getPageTokenBytes() {
      java.lang.Object ref = pageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        pageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * The value returned by the last `ListAnalysesResponse`.
     *
     * @param value The pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }

      pageToken_ = value;
      onChanged();
      return this;
    }

    /**
     * The value returned by the last `ListAnalysesResponse`.
     *
     * @return This builder for chaining.
     */
    public Builder clearPageToken() {
      pageToken_ = getDefaultInstance().getPageToken();
      onChanged();
      return this;
    }

    /**
     * The value returned by the last `ListAnalysesResponse`.
     *
     * @param value The bytes for pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);

      pageToken_ = value;
      onChanged();
      return this;
    }

    private java.lang.Object filter_ = "";

    /**
     * A filter to reduce results to a specific subset. Useful for querying
     * conversations with specific properties.
     *
     * @return The filter.
     */
    public java.lang.String getFilter() {
      java.lang.Object ref = filter_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        filter_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * A filter to reduce results to a specific subset.
     *
     * @return The bytes for filter.
     */
    public com.google.protobuf.ByteString getFilterBytes() {
      java.lang.Object ref = filter_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        filter_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * A filter to reduce results to a specific subset.
     *
     * @param value The filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilter(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }

      filter_ = value;
      onChanged();
      return this;
    }

    /**
     * A filter to reduce results to a specific subset.
     *
     * @return This builder for chaining.
     */
    public Builder clearFilter() {
      filter_ = getDefaultInstance().getFilter();
      onChanged();
      return this;
    }

    /**
     * A filter to reduce results to a specific subset.
     *
     * @param value The bytes for filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilterBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);

      filter_ = value;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.contactcenterinsights.v1.ListAnalysesRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.contactcenterinsights.v1.ListAnalysesRequest)
  private static final com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest();
  }

  public static com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<ListAnalysesRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListAnalysesRequest>() {
        @java.lang.Override
        public ListAnalysesRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new ListAnalysesRequest(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<ListAnalysesRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListAnalysesRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.contactcenterinsights.v1.ListAnalysesRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.plugins.groovy.lang.psi.impl.statements.expressions;

import com.intellij.lang.ASTNode;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.PropertyUtilBase;
import com.intellij.psi.util.TypeConversionUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.SmartList;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.groovy.lang.lexer.GroovyTokenTypes;
import org.jetbrains.plugins.groovy.lang.lexer.TokenSets;
import org.jetbrains.plugins.groovy.lang.psi.GrReferenceElement;
import org.jetbrains.plugins.groovy.lang.psi.GroovyElementVisitor;
import org.jetbrains.plugins.groovy.lang.psi.GroovyFile;
import org.jetbrains.plugins.groovy.lang.psi.GroovyPsiElementFactory;
import org.jetbrains.plugins.groovy.lang.psi.api.GroovyMethodResult;
import org.jetbrains.plugins.groovy.lang.psi.api.GroovyReference;
import org.jetbrains.plugins.groovy.lang.psi.api.GroovyResolveResult;
import org.jetbrains.plugins.groovy.lang.psi.api.SpreadState;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrConstructorInvocation;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrField;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrVariable;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrExpression;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrMethodCall;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrParenthesizedExpression;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrReferenceExpression;
import org.jetbrains.plugins.groovy.lang.psi.api.toplevel.imports.GrImportStatement;
import org.jetbrains.plugins.groovy.lang.psi.dataFlow.types.TypeInferenceHelper;
import org.jetbrains.plugins.groovy.lang.psi.impl.GrReferenceElementImpl;
import org.jetbrains.plugins.groovy.lang.psi.impl.GroovyTargetElementEvaluator;
import org.jetbrains.plugins.groovy.lang.psi.impl.PsiImplUtil;
import org.jetbrains.plugins.groovy.lang.psi.impl.statements.expressions.literals.GrLiteralImpl;
import org.jetbrains.plugins.groovy.lang.psi.typeEnhancers.GrReferenceTypeEnhancer;
import org.jetbrains.plugins.groovy.lang.psi.util.*;
import org.jetbrains.plugins.groovy.lang.resolve.DependentResolver;
import org.jetbrains.plugins.groovy.lang.resolve.GroovyResolver;
import org.jetbrains.plugins.groovy.lang.resolve.ResolveUtil;
import org.jetbrains.plugins.groovy.lang.resolve.api.GroovyProperty;
import org.jetbrains.plugins.groovy.lang.resolve.references.GrStaticExpressionReference;
import org.jetbrains.plugins.groovy.lang.typing.GrTypeCalculator;

import java.util.*;

import static com.intellij.psi.util.PsiUtilCore.ensureValid;
import static java.util.Collections.emptyList;
import static org.jetbrains.plugins.groovy.lang.psi.GroovyTokenSets.REFERENCE_DOTS;
import static org.jetbrains.plugins.groovy.lang.psi.util.GroovyLValueUtil.isLValue;
import static org.jetbrains.plugins.groovy.lang.psi.util.GroovyLValueUtil.isRValue;
import static org.jetbrains.plugins.groovy.lang.resolve.impl.IncompleteKt.resolveIncomplete;
import static org.jetbrains.plugins.groovy.lang.typing.DefaultMethodCallTypeCalculatorKt.getTypeFromCandidate;

/**
 * PSI implementation of a Groovy reference expression (e.g. {@code a.b}, {@code foo}).
 * <p>
 * A single reference expression owns three cooperating references:
 * a static-import/static-member reference ({@code myStaticReference}), an r-value
 * reference and an l-value reference; resolution merges their results depending on
 * whether the expression is read, written, or both. Type computation is delegated to
 * {@link TypeInferenceHelper}'s current inference context so results are cached per
 * data-flow context.
 *
 * @author ilyas
 */
public class GrReferenceExpressionImpl extends GrReferenceElementImpl<GrExpression> implements GrReferenceExpression {

  private static final Logger LOG = Logger.getInstance(GrReferenceExpressionImpl.class);

  public GrReferenceExpressionImpl(@NotNull ASTNode node) {
    super(node);
  }

  // The three underlying references; see class comment. All are stateless wrappers around `this`.
  private final GroovyReference myStaticReference = new GrStaticExpressionReference(this);
  private final GroovyReference myRValueReference = new GrRValueExpressionReference(this);
  private final GroovyReference myLValueReference = new GrLValueExpressionReference(this);

  @Override
  public void accept(@NotNull GroovyElementVisitor visitor) {
    visitor.visitReferenceExpression(this);
  }

  /** Returns the child node holding the reference name, or null if absent. */
  @Override
  @Nullable
  public PsiElement getReferenceNameElement() {
    return findChildByType(TokenSets.REFERENCE_NAMES);
  }

  @Override
  @Nullable
  public GrExpression getQualifier() {
    return getQualifierExpression();
  }

  /**
   * Returns the referenced name. For string-literal names (e.g. {@code a.'foo'})
   * the literal value is returned instead of the raw quoted text.
   */
  @Override
  @Nullable
  public String getReferenceName() {
    PsiElement nameElement = getReferenceNameElement();
    if (nameElement != null) {
      IElementType nodeType = nameElement.getNode().getElementType();
      if (TokenSets.STRING_LITERAL_SET.contains(nodeType)) {
        final Object value = GrLiteralImpl.getLiteralValue(nameElement);
        if (value instanceof String) {
          return (String)value;
        }
      }
      return nameElement.getText();
    }
    return null;
  }

  /**
   * Renames the reference. A name that is not a valid identifier is written as a
   * string literal; renaming through a property accessor maps the accessor name
   * back to the property name. 'this'/'super' references are left untouched.
   */
  @Override
  public PsiElement handleElementRename(@NotNull String newElementName) throws IncorrectOperationException {
    if (!PsiUtil.isValidReferenceName(newElementName)) {
      final PsiElement old = getReferenceNameElement();
      if (old == null) throw new IncorrectOperationException("ref has no name element");
      PsiElement element = GroovyPsiElementFactory.getInstance(getProject()).createStringLiteralForReference(newElementName);
      old.replace(element);
      return this;
    }
    if (PsiUtil.isThisOrSuperRef(this)) return this;

    final GroovyResolveResult result = advancedResolve();
    if (result.isInvokedOnProperty()) {
      // ref was written as a property but resolves to an accessor: rename by property name
      final String name = GroovyPropertyUtils.getPropertyNameByAccessorName(newElementName);
      if (name != null) {
        newElementName = name;
      }
    }
    return super.handleElementRename(newElementName);
  }

  @NotNull
  @Override
  protected GrReferenceElement<GrExpression> createQualifiedRef(@NotNull String qName) {
    return GroovyPsiElementFactory.getInstance(getProject()).createReferenceExpressionFromText(qName);
  }

  /** A reference chain is fully qualified when its innermost qualifier resolves to a package. */
  @Override
  public boolean isFullyQualified() {
    if (!hasMemberPointer() && !ResolveUtil.canResolveToMethod(this) && resolve() instanceof PsiPackage) return true;

    final GrExpression qualifier = getQualifier();
    if (!(qualifier instanceof GrReferenceExpressionImpl)) return false;
    return ((GrReferenceExpressionImpl)qualifier).isFullyQualified();
  }

  @Override
  public String toString() {
    return "Reference expression";
  }

  @Override
  @Nullable
  public PsiType getNominalType() {
    return getNominalType(false);
  }

  /**
   * Declared (non-flow) type of the reference. Extension-provided enhancers get
   * first say; otherwise the type is derived from the resolve result and then
   * substituted/normalized (generics substitutor + spread state).
   */
  @Nullable
  private PsiType getNominalType(boolean rValue) {
    final GroovyResolveResult resolveResult = PsiImplUtil.extractUniqueResult(lrResolve(rValue));
    PsiElement resolved = resolveResult.getElement();

    for (GrReferenceTypeEnhancer enhancer : GrReferenceTypeEnhancer.EP_NAME.getExtensions()) {
      PsiType type = enhancer.getReferenceType(this, resolved);
      if (type != null) {
        return type;
      }
    }

    PsiType result = getNominalTypeInner(resolveResult);
    if (result == null) return null;

    result = TypesUtil.substituteAndNormalizeType(result, resolveResult.getSubstitutor(), resolveResult.getSpreadState(), this);
    return result;
  }

  /**
   * Maps a resolve result to a raw nominal type, by the kind of resolved element:
   * Groovy property, class (this/super/class-literal cases), variable, method
   * (setter/call candidate/return type), or unresolved ({@code .class} and
   * spread-dot fallbacks).
   */
  @Nullable
  private PsiType getNominalTypeInner(GroovyResolveResult result) {
    PsiElement resolved = result.getElement();
    if (resolved instanceof GroovyProperty) {
      return ((GroovyProperty)resolved).getPropertyType();
    }
    if (resolved == null && !"class".equals(getReferenceName())) {
      resolved = resolve();
    }

    if (resolved instanceof PsiClass) {
      final PsiElementFactory factory = JavaPsiFacade.getInstance(getProject()).getElementFactory();
      if (PsiUtil.isInstanceThisRef(this)) {
        // inside a category, `this` gets the category's target type
        final PsiClassType categoryType = GdkMethodUtil.getCategoryType((PsiClass)resolved);
        if (categoryType != null) {
          return categoryType;
        }
        else {
          return factory.createType((PsiClass)resolved);
        }
      }
      else if (PsiUtil.isSuperReference(this)) {
        PsiClass contextClass = PsiUtil.getContextClass(this);
        if (GrTraitUtil.isTrait(contextClass)) {
          // `super` in a trait is an intersection of all supertypes
          PsiClassType[] extendsTypes = contextClass.getExtendsListTypes();
          PsiClassType[] implementsTypes = contextClass.getImplementsListTypes();

          PsiClassType[] superTypes = ArrayUtil.mergeArrays(implementsTypes, extendsTypes, PsiClassType.ARRAY_FACTORY);
          if (superTypes.length > 0) {
            return PsiIntersectionType.createIntersection(ArrayUtil.reverseArray(superTypes));
          }
        }
        return factory.createType((PsiClass)resolved);
      }
      // a bare class reference evaluates to java.lang.Class<T>
      return TypesUtil.createJavaLangClassType(factory.createType((PsiClass)resolved), this);
    }

    if (resolved instanceof GrVariable) {
      return ((GrVariable)resolved).getDeclaredType();
    }

    if (resolved instanceof PsiVariable) {
      return ((PsiVariable)resolved).getType();
    }

    if (resolved instanceof PsiMethod) {
      PsiMethod method = (PsiMethod)resolved;
      if (PropertyUtilBase.isSimplePropertySetter(method) && !method.getName().equals(getReferenceName())) {
        // property write through a setter: the "type" is the setter's parameter type
        return method.getParameterList().getParameters()[0].getType();
      }
      if (result instanceof GroovyMethodResult) {
        return getTypeFromCandidate((GroovyMethodResult)result, this);
      }
      return PsiUtil.getSmartReturnType(method);
    }

    if (resolved == null) {
      if ("class".equals(getReferenceName())) {
        final PsiType fromClassRef = getTypeFromClassRef();
        if (fromClassRef != null) {
          return fromClassRef;
        }
      }
      final PsiType fromSpreadOperator = getTypeFromSpreadOperator(this);
      if (fromSpreadOperator != null) {
        return fromSpreadOperator;
      }
    }
    return null;
  }

  /** A spread-dot access (`*.`) evaluates to a java.util.List. */
  @Nullable
  private static PsiType getTypeFromSpreadOperator(@NotNull GrReferenceExpressionImpl ref) {
    if (ref.getDotTokenType() == GroovyTokenTypes.mSPREAD_DOT) {
      return TypesUtil.createType(CommonClassNames.JAVA_UTIL_LIST, ref);
    }
    return null;
  }

  /** Type of an unresolved `.class` reference: Class<qualifier type> (or null outside @CompileStatic with no qualifier type). */
  @Nullable
  private PsiType getTypeFromClassRef() {
    PsiType qualifierType = PsiImplUtil.getQualifierType(this);
    if (qualifierType == null && !CompileStaticUtil.isCompileStatic(this)) return null;
    return TypesUtil.createJavaLangClassType(qualifierType, this);
  }

  /**
   * Full type computation: custom calculators first, then class references, then a
   * reconciliation of the nominal (declared) type and the flow-inferred type.
   * The nominal type wins over an inferred type that is not assignable to it when
   * the target variable has an explicit type element.
   */
  @Nullable
  private static PsiType calculateType(@NotNull GrReferenceExpressionImpl refExpr) {
    final Collection<? extends GroovyResolveResult> results = refExpr.lrResolve(true);
    final GroovyResolveResult result = PsiImplUtil.extractUniqueResult(results);
    final PsiElement resolved = result.getElement();

    PsiType typeFromCalculators = GrTypeCalculator.getTypeFromCalculators(refExpr);
    if (typeFromCalculators != null) return typeFromCalculators;

    if (ResolveUtil.isClassReference(refExpr)) {
      GrExpression qualifier = refExpr.getQualifier();
      LOG.assertTrue(qualifier != null);
      return qualifier.getType();
    }

    final PsiType nominal = refExpr.getNominalType(true);
    final PsiType inferred = getInferredTypes(refExpr, resolved);
    if (inferred == null) {
      return nominal == null ? getDefaultType(refExpr, result) : nominal;
    }
    if (nominal == null) {
      if (inferred.equals(PsiType.NULL) && CompileStaticUtil.isCompileStatic(refExpr)) {
        // @CompileStatic never yields the NULL type; fall back to Object
        return TypesUtil.getJavaLangObject(refExpr);
      }
      else {
        return inferred;
      }
    }
    if (!TypeConversionUtil.isAssignable(TypeConversionUtil.erasure(nominal), inferred, false)) {
      if (resolved instanceof GrVariable) {
        if (((GrVariable)resolved).getTypeElementGroovy() != null) {
          return nominal;
        }
      }
      else if (resolved instanceof PsiVariable) {
        return nominal;
      }
    }
    return inferred;
  }

  /** Flow-inferred type; only applies to unqualified references to local-ish targets. */
  @Nullable
  private static PsiType getInferredTypes(@NotNull GrReferenceExpressionImpl refExpr, @Nullable PsiElement resolved) {
    final GrExpression qualifier = refExpr.getQualifier();
    if (qualifier != null || resolved instanceof PsiClass || resolved instanceof PsiPackage || resolved instanceof PsiEnumConstant) {
      return null;
    }
    return TypeInferenceHelper.getCurrentContext().getVariableType(refExpr);
  }

  /** Fallback type when neither nominal nor inferred types are available (fields and variables only). */
  @Nullable
  private static PsiType getDefaultType(@NotNull GrReferenceExpression refExpr, @NotNull GroovyResolveResult result) {
    final PsiElement resolved = result.getElement();
    if (resolved instanceof GrField) {
      ensureValid(resolved);
      if (CompileStaticUtil.isCompileStatic(refExpr)) {
        return TypesUtil.getJavaLangObject(refExpr);
      }
      else {
        return SpreadState.apply(((GrVariable)resolved).getTypeGroovy(), result.getSpreadState(), refExpr.getProject());
      }
    }
    else if (resolved instanceof GrVariable) {
      ensureValid(resolved);
      PsiType typeGroovy = SpreadState.apply(((GrVariable)resolved).getTypeGroovy(), result.getSpreadState(), refExpr.getProject());
      if (typeGroovy == null && CompileStaticUtil.isCompileStatic(refExpr)) {
        return TypesUtil.getJavaLangObject(refExpr);
      }
      else {
        return typeGroovy;
      }
    }
    else {
      return null;
    }
  }

  @Nullable
  @Override
  public PsiType getType() {
    // cached per inference context; calculateType does the real work
    return TypeInferenceHelper.getCurrentContext().getExpressionType(this, e -> calculateType(e));
  }

  @Override
  public GrExpression replaceWithExpression(@NotNull GrExpression newExpr, boolean removeUnnecessaryParentheses) {
    return PsiImplUtil.replaceExpression(this, newExpr, removeUnnecessaryParentheses);
  }

  @Override
  @NotNull
  public String getCanonicalText() {
    return getRangeInElement().substring(getElement().getText());
  }

  @Override
  public boolean hasAt() {
    return false;
  }

  @Override
  public boolean hasMemberPointer() {
    return false;
  }

  @NotNull
  @Override
  public GroovyReference getStaticReference() {
    return myStaticReference;
  }

  /** Reference of the enclosing explicit method call, if this expression is its invoked expression. */
  @Nullable
  private GroovyReference getCallReference() {
    PsiElement parent = getParent();
    return parent instanceof GrMethodCall ? ((GrMethodCall)parent).getExplicitCallReference() : null;
  }

  @Nullable
  @Override
  public GroovyReference getRValueReference() {
    return isRValue(this) ? myRValueReference : null;
  }

  @Nullable
  @Override
  public GroovyReference getLValueReference() {
    return isLValue(this) ? myLValueReference : null;
  }

  /**
   * True when any resolve result, its corrected search target, or (for methods)
   * a super method of the target, is equivalent to {@code element}.
   */
  @Override
  public boolean isReferenceTo(@NotNull PsiElement element) {
    GroovyResolveResult[] results = multiResolve(false);
    for (GroovyResolveResult result : results) {
      PsiElement baseTarget = result.getElement();
      if (baseTarget == null) continue;
      if (getManager().areElementsEquivalent(element, baseTarget)) {
        return true;
      }

      PsiElement target = GroovyTargetElementEvaluator.correctSearchTargets(baseTarget);
      if (target != baseTarget && getManager().areElementsEquivalent(element, target)) {
        return true;
      }

      if (element instanceof PsiMethod && target instanceof PsiMethod) {
        PsiMethod[] superMethods = ((PsiMethod)target).findSuperMethods(false);
        if (Arrays.asList(superMethods).contains(element)) {
          return true;
        }
      }
    }
    return false;
  }

  @Override
  public boolean isSoft() {
    return false;
  }

  @Override
  @Nullable
  public GrExpression getQualifierExpression() {
    return findExpressionChild(this);
  }

  @Override
  @Nullable
  public PsiElement getDotToken() {
    return findChildByType(REFERENCE_DOTS);
  }

  /** Swaps the dot token (., ?., *., .&, .@) in place; no-op if either token is absent or invalid. */
  @Override
  public void replaceDotToken(PsiElement newDot) {
    if (newDot == null) return;
    if (!TokenSets.DOTS.contains(newDot.getNode().getElementType())) return;
    final PsiElement oldDot = getDotToken();
    if (oldDot == null) return;

    getNode().replaceChild(oldDot.getNode(), newDot.getNode());
  }

  @Override
  @Nullable
  public IElementType getDotTokenType() {
    PsiElement dot = getDotToken();
    return dot == null ? null : dot.getNode().getElementType();
  }

  @Override
  public PsiReference getReference() {
    return this;
  }

  /**
   * Shared resolver. Dependencies (all reference expressions inside the qualifier)
   * are collected so they resolve first; then l-value/r-value results are merged,
   * deduplicated by resolved element with r-value results taking precedence.
   */
  private static final GroovyResolver<GrReferenceExpressionImpl> RESOLVER = new DependentResolver<>() {

    @Nullable
    @Override
    public Collection<PsiPolyVariantReference> collectDependencies(@NotNull GrReferenceExpressionImpl expression) {
      final GrExpression qualifier = expression.getQualifier();
      if (qualifier == null) return null;

      final List<PsiPolyVariantReference> result = new SmartList<>();
      qualifier.accept(new PsiRecursiveElementWalkingVisitor() {
        @Override
        public void visitElement(@NotNull PsiElement element) {
          // only descend into constructs that can contain dependent references
          if (element instanceof GrReferenceExpression) {
            super.visitElement(element);
          }
          else if (element instanceof GrMethodCall) {
            super.visitElement(((GrMethodCall)element).getInvokedExpression());
          }
          else if (element instanceof GrParenthesizedExpression) {
            GrExpression operand = ((GrParenthesizedExpression)element).getOperand();
            if (operand != null) super.visitElement(operand);
          }
        }

        @Override
        protected void elementFinished(PsiElement element) {
          if (element instanceof GrReferenceExpression) {
            result.add(((GrReferenceExpression)element));
          }
        }
      });
      return result;
    }

    @NotNull
    @Override
    public Collection<GroovyResolveResult> doResolve(@NotNull GrReferenceExpressionImpl ref, boolean incomplete) {
      if (incomplete) {
        return resolveIncomplete(ref);
      }
      final GroovyReference rValueRef = ref.getRValueReference();
      final GroovyReference lValueRef = ref.getLValueReference();
      if (rValueRef != null && lValueRef != null) {
        // merge results from both references
        final Map<PsiElement, GroovyResolveResult> results = new HashMap<>();
        for (GroovyResolveResult result : rValueRef.resolve(false)) {
          results.putIfAbsent(result.getElement(), result);
        }
        for (GroovyResolveResult result : lValueRef.resolve(false)) {
          results.putIfAbsent(result.getElement(), result);
        }
        return new SmartList<>(results.values());
      }
      else if (rValueRef != null) {
        // r-value only
        return new SmartList<>(rValueRef.resolve(false));
      }
      else if (lValueRef != null) {
        // l-value only
        return new SmartList<>(lValueRef.resolve(false));
      }
      else {
        LOG.error("Reference expression has no references");
        return emptyList();
      }
    }
  };

  /**
   * Resolution entry point. Order of precedence: constructor-invocation delegation,
   * static reference, enclosing call reference, then the cached RESOLVER.
   */
  @NotNull
  @Override
  public Collection<? extends GroovyResolveResult> resolve(boolean incomplete) {
    final PsiElement parent = getParent();
    if (parent instanceof GrConstructorInvocation) {
      // Archaeology notice.
      //
      // GrConstructorInvocation only consists of 'this'/'super' keyword and argument list.
      // It has own fake reference, while this GrReferenceExpression exists so user may click on something,
      // i.e. this GrReferenceExpression provides text range for GrConstructorInvocation.
      //
      // GrConstructorInvocation might have had its own real reference with proper range,
      // but instead it returns this GrReferenceExpression as invoked one.
      return ((GrConstructorInvocation)parent).getConstructorReference().resolve(incomplete);
    }
    final Collection<? extends GroovyResolveResult> staticResults = getStaticReference().resolve(false);
    if (!staticResults.isEmpty()) {
      return staticResults;
    }
    final GroovyReference callReference = getCallReference();
    if (callReference != null) {
      return callReference.resolve(incomplete);
    }
    return TypeInferenceHelper.getCurrentContext().resolve(this, incomplete, RESOLVER);
  }

  /**
   * Resolves considering read/write direction: static and call references first,
   * then only the r-value (rValue == true) or l-value reference.
   */
  @NotNull
  protected Collection<? extends GroovyResolveResult> lrResolve(boolean rValue) {
    final Collection<? extends GroovyResolveResult> staticResults = getStaticReference().resolve(false);
    if (!staticResults.isEmpty()) {
      return staticResults;
    }
    final GroovyReference callReference = getCallReference();
    if (callReference != null) {
      return callReference.resolve(false);
    }
    final GroovyReference ref = rValue ? getRValueReference() : getLValueReference();
    return ref == null ? emptyList() : ref.resolve(false);
  }

  /**
   * Binds this (unqualified, named) reference to {@code member} by adding a static
   * import for it to the containing Groovy file. The reference text itself is unchanged.
   */
  @Override
  public GrReferenceExpression bindToElementViaStaticImport(@NotNull PsiMember member) {
    if (getQualifier() != null) {
      throw new IncorrectOperationException("Reference has qualifier");
    }
    if (StringUtil.isEmpty(getReferenceName())) {
      throw new IncorrectOperationException("Reference has empty name");
    }
    PsiClass containingClass = member.getContainingClass();
    if (containingClass == null) {
      throw new IncorrectOperationException("Member has no containing class");
    }
    final PsiFile file = getContainingFile();
    if (file instanceof GroovyFile) {
      GroovyPsiElementFactory factory = GroovyPsiElementFactory.getInstance(getProject());
      String text = "import static " + containingClass.getQualifiedName() + "." + member.getName();
      final GrImportStatement statement = factory.createImportStatementFromText(text);
      ((GroovyFile)file).addImport(statement);
    }
    return this;
  }

  @Override
  public boolean isImplicitCallReceiver() {
    // `a.&foo()` compiles into `new MethodClosure(a, "foo").call()` as if `call` was explicitly in the code
    // `a.@foo()` compiles into `a@.foo.call()` as if `call` was explicitly in the code
    return hasAt() || hasMemberPointer() || myStaticReference.resolve() instanceof GrVariable;
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.apache.skywalking.oap.log.analyzer.dsl.spec.extractor;

import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableMap;
import groovy.lang.Closure;
import groovy.lang.DelegatesTo;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.stream.Collectors;
import lombok.experimental.Delegate;
import org.apache.commons.lang3.StringUtils;
import org.apache.skywalking.apm.network.common.v3.KeyStringValuePair;
import org.apache.skywalking.apm.network.logging.v3.LogData;
import org.apache.skywalking.apm.network.logging.v3.TraceContext;
import org.apache.skywalking.oap.log.analyzer.dsl.spec.AbstractSpec;
import org.apache.skywalking.oap.log.analyzer.provider.LogAnalyzerModuleConfig;
import org.apache.skywalking.oap.meter.analyzer.MetricConvert;
import org.apache.skywalking.oap.meter.analyzer.dsl.Sample;
import org.apache.skywalking.oap.meter.analyzer.dsl.SampleFamily;
import org.apache.skywalking.oap.meter.analyzer.dsl.SampleFamilyBuilder;
import org.apache.skywalking.oap.server.core.CoreModule;
import org.apache.skywalking.oap.server.core.analysis.meter.MeterSystem;
import org.apache.skywalking.oap.server.library.module.ModuleManager;
import org.apache.skywalking.oap.server.library.module.ModuleStartException;
import org.apache.skywalking.oap.server.library.util.CollectionUtils;

import static java.util.Objects.nonNull;
import static org.apache.skywalking.oap.server.library.util.StringUtil.isNotBlank;

/**
 * The {@code extractor} section of the log analysis DSL: each public method is a
 * DSL verb that writes one aspect (service, tags, trace context, metrics, ...)
 * of the log record currently held in {@code BINDING} (inherited from
 * {@link AbstractSpec}; presumably a per-evaluation binding — confirm there).
 * Every verb is a no-op once the binding is flagged to abort.
 */
public class ExtractorSpec extends AbstractSpec {
    // MAL converters built from the module's meter configs; used when no
    // explicit metrics container is present on the binding.
    private final List<MetricConvert> metricConverts;

    public ExtractorSpec(final ModuleManager moduleManager,
                         final LogAnalyzerModuleConfig moduleConfig) throws ModuleStartException {
        super(moduleManager, moduleConfig);

        final MeterSystem meterSystem = moduleManager.find(CoreModule.NAME).provider().getService(MeterSystem.class);

        metricConverts = moduleConfig.malConfigs()
                                     .stream()
                                     .map(it -> new MetricConvert(it, meterSystem))
                                     .collect(Collectors.toList());
    }

    /** Sets the log's service name; null values are ignored. */
    @SuppressWarnings("unused")
    public void service(final String service) {
        if (BINDING.get().shouldAbort()) {
            return;
        }
        if (nonNull(service)) {
            BINDING.get().log().setService(service);
        }
    }

    /** Sets the log's service instance; null values are ignored. */
    @SuppressWarnings("unused")
    public void instance(final String instance) {
        if (BINDING.get().shouldAbort()) {
            return;
        }
        if (nonNull(instance)) {
            BINDING.get().log().setServiceInstance(instance);
        }
    }

    /** Sets the log's endpoint; null values are ignored. */
    @SuppressWarnings("unused")
    public void endpoint(final String endpoint) {
        if (BINDING.get().shouldAbort()) {
            return;
        }
        if (nonNull(endpoint)) {
            BINDING.get().log().setEndpoint(endpoint);
        }
    }

    /**
     * Appends key/value tags to the log record. Entries with a blank key or a
     * null/blank value are dropped; collection values are joined with ","
     * (nulls skipped). Note the tags message is rebuilt via toBuilder() and the
     * modified builder is stored back into the binding at the end.
     */
    @SuppressWarnings("unused")
    public void tag(final Map<String, ?> kv) {
        if (BINDING.get().shouldAbort()) {
            return;
        }
        if (CollectionUtils.isEmpty(kv)) {
            return;
        }
        final LogData.Builder logData = BINDING.get().log();
        logData.setTags(
            logData.getTags()
                   .toBuilder()
                   .addAllData(
                       kv.entrySet()
                         .stream()
                         .filter(it -> isNotBlank(it.getKey()))
                         .filter(it -> nonNull(it.getValue()) && isNotBlank(Objects.toString(it.getValue())))
                         .map(it -> {
                             final Object val = it.getValue();
                             String valStr = Objects.toString(val);
                             if (Collection.class.isAssignableFrom(val.getClass())) {
                                 valStr = Joiner.on(",").skipNulls().join((Collection<?>) val);
                             }
                             return KeyStringValuePair.newBuilder()
                                                      .setKey(it.getKey())
                                                      .setValue(valStr)
                                                      .build();
                         })
                         .collect(Collectors.toList())
                   )
        );
        BINDING.get().log(logData);
    }

    /** Sets the trace ID on the log's trace context; null values are ignored. */
    @SuppressWarnings("unused")
    public void traceId(final String traceId) {
        if (BINDING.get().shouldAbort()) {
            return;
        }
        if (nonNull(traceId)) {
            final LogData.Builder logData = BINDING.get().log();
            final TraceContext.Builder traceContext = logData.getTraceContext().toBuilder();
            traceContext.setTraceId(traceId);
            logData.setTraceContext(traceContext);
        }
    }

    /** Sets the trace segment ID on the log's trace context; null values are ignored. */
    @SuppressWarnings("unused")
    public void segmentId(final String segmentId) {
        if (BINDING.get().shouldAbort()) {
            return;
        }
        if (nonNull(segmentId)) {
            final LogData.Builder logData = BINDING.get().log();
            final TraceContext.Builder traceContext = logData.getTraceContext().toBuilder();
            traceContext.setTraceSegmentId(segmentId);
            logData.setTraceContext(traceContext);
        }
    }

    /**
     * Sets the span ID on the log's trace context; null values are ignored.
     * NOTE(review): Integer.parseInt throws NumberFormatException on a non-numeric
     * spanId — presumably handled by the DSL evaluator; confirm upstream.
     */
    @SuppressWarnings("unused")
    public void spanId(final String spanId) {
        if (BINDING.get().shouldAbort()) {
            return;
        }
        if (nonNull(spanId)) {
            final LogData.Builder logData = BINDING.get().log();
            final TraceContext.Builder traceContext = logData.getTraceContext().toBuilder();
            traceContext.setSpanId(Integer.parseInt(spanId));
            logData.setTraceContext(traceContext);
        }
    }

    /** Sets the log timestamp; ignored unless the string is purely numeric. */
    @SuppressWarnings("unused")
    public void timestamp(final String timestamp) {
        if (BINDING.get().shouldAbort()) {
            return;
        }
        if (nonNull(timestamp) && StringUtils.isNumeric(timestamp)) {
            BINDING.get().log().setTimestamp(Long.parseLong(timestamp));
        }
    }

    /** Sets the log's layer; null values are ignored. */
    @SuppressWarnings("unused")
    public void layer(final String layer) {
        if (BINDING.get().shouldAbort()) {
            return;
        }
        if (nonNull(layer)) {
            final LogData.Builder logData = BINDING.get().log();
            logData.setLayer(layer);
        }
    }

    /**
     * Builds one metric sample from the closure (delegated to SampleBuilder) and
     * either appends it to the binding's metrics container (when present) or
     * feeds it through every configured MAL converter.
     */
    @SuppressWarnings("unused")
    public void metrics(@DelegatesTo(SampleBuilder.class) final Closure<?> cl) {
        if (BINDING.get().shouldAbort()) {
            return;
        }
        final SampleBuilder builder = new SampleBuilder();
        cl.setDelegate(builder);
        cl.call();

        final Sample sample = builder.build();
        final SampleFamily sampleFamily = SampleFamilyBuilder.newBuilder(sample).build();
        final Optional<List<SampleFamily>> possibleMetricsContainer = BINDING.get().metricsContainer();
        if (possibleMetricsContainer.isPresent()) {
            possibleMetricsContainer.get().add(sampleFamily);
        } else {
            metricConverts.forEach(it -> it.toMeter(
                ImmutableMap.<String, SampleFamily>builder()
                            .put(sample.getName(), sampleFamily)
                            .build()
            ));
        }
    }

    /**
     * Closure delegate for the metrics verb; all Sample.SampleBuilder setters are
     * exposed via Lombok's @Delegate, with labels() overridden to drop blank keys
     * and null values and stringify the rest.
     */
    public static class SampleBuilder {
        @Delegate
        private final Sample.SampleBuilder sampleBuilder = Sample.builder();

        @SuppressWarnings("unused")
        public Sample.SampleBuilder labels(final Map<String, ?> labels) {
            final Map<String, String> filtered =
                labels.entrySet()
                      .stream()
                      .filter(it -> isNotBlank(it.getKey()) && nonNull(it.getValue()))
                      .collect(
                          Collectors.toMap(Map.Entry::getKey, it -> Objects.toString(it.getValue()))
                      );
            return sampleBuilder.labels(ImmutableMap.copyOf(filtered));
        }
    }
}
/*
 * Copyright (c) 2011, 2021, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation. Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */
package com.sun.org.apache.xalan.internal.utils;

import jdk.xml.internal.JdkConstants;
import jdk.xml.internal.SecuritySupport;

/**
 * This is the base class for features and properties.
 * <p>
 * A concrete subclass supplies the set of managed properties (via
 * {@link #getIndex(String)}); values and their provenance are tracked in the
 * parallel {@code values}/{@code states} arrays, indexed by the property's
 * enum ordinal. A value may only be overwritten by a source of equal or
 * higher precedence, as defined by the declaration order of {@link State}.
 *
 * @LastModified: May 2021
 */
public abstract class FeaturePropertyBase {

    /**
     * States of the settings of a property, in the order: default value, value
     * set by FEATURE_SECURE_PROCESSING, jaxp.properties file, jaxp system
     * properties, and jaxp api properties.
     * <p>
     * Declaration order matters: {@code setValue} compares ordinals, so a
     * later constant overrides an earlier one.
     */
    public static enum State {
        //this order reflects the overriding order
        DEFAULT, FSP, JAXPDOTPROPERTIES, SYSTEMPROPERTY, APIPROPERTY
    }

    /**
     * Values of the properties as defined in enum Properties.
     * Allocated by the concrete subclass (null until then).
     */
    String[] values = null;

    /**
     * States of the settings for each property in Properties above.
     * NOTE(review): sized for exactly two managed properties here — subclasses
     * managing a different count presumably re-assign this array; confirm.
     */
    State[] states = {State.DEFAULT, State.DEFAULT};

    /**
     * Set the value for a specific property.
     *
     * @param property the property
     * @param state the state of the property
     * @param value the value of the property
     */
    public void setValue(Enum<?> property, State state, String value) {
        //only update if it shall override (new state >= recorded state)
        if (state.compareTo(states[property.ordinal()]) >= 0) {
            values[property.ordinal()] = value;
            states[property.ordinal()] = state;
        }
    }

    /**
     * Set the value of a property by its index.
     *
     * @param index the index of the property
     * @param state the state of the property
     * @param value the value of the property
     */
    public void setValue(int index, State state, String value) {
        //only update if it shall override
        if (state.compareTo(states[index]) >= 0) {
            values[index] = value;
            states[index] = state;
        }
    }

    /**
     * Set value by property name and state.
     *
     * @param propertyName property name
     * @param state the state of the property
     * @param value the value of the property; cast to String (callers are
     *              expected to pass String values through this overload)
     * @return true if the property is managed by the security property manager;
     *         false if otherwise.
     */
    public boolean setValue(String propertyName, State state, Object value) {
        int index = getIndex(propertyName);
        if (index > -1) {
            setValue(index, state, (String)value);
            return true;
        }
        return false;
    }

    /**
     * Set value by property name and state (boolean form, mapped to the
     * JdkConstants true/false feature strings).
     *
     * @param propertyName property name
     * @param state the state of the property
     * @param value the value of the property
     * @return true if the property is managed by the security property manager;
     *         false if otherwise.
     */
    public boolean setValue(String propertyName, State state, boolean value) {
        int index = getIndex(propertyName);
        if (index > -1) {
            if (value) {
                setValue(index, state, JdkConstants.FEATURE_TRUE);
            } else {
                setValue(index, state, JdkConstants.FEATURE_FALSE);
            }
            return true;
        }
        return false;
    }

    /**
     * Return the value of the specified property.
     *
     * @param property the property
     * @return the value of the property
     */
    public String getValue(Enum<?> property) {
        return values[property.ordinal()];
    }

    /**
     * Return the value of the specified property by name.
     *
     * @param property the property
     * @return the value of the property, or null if the property is not managed
     */
    public String getValue(String property) {
        int index = getIndex(property);
        if (index > -1) {
            return getValueByIndex(index);
        }
        return null;
    }

    /**
     * Return the value of the specified property.
     * (Identical behavior to {@link #getValue(String)}; kept as a separate
     * entry point of the public API.)
     *
     * @param propertyName the property name
     * @return the value of the property as a string. If a property is managed
     *         by this manager, its value shall not be null.
     */
    public String getValueAsString(String propertyName) {
        int index = getIndex(propertyName);
        if (index > -1) {
            return getValueByIndex(index);
        }
        return null;
    }

    /**
     * Return the value of a property by its ordinal.
     *
     * @param index the index of a property
     * @return value of a property
     */
    public String getValueByIndex(int index) {
        return values[index];
    }

    /**
     * Get the index by property name.
     *
     * @param propertyName property name
     * @return the index of the property if found; return -1 if not
     */
    public abstract int getIndex(String propertyName);

    /**
     * Generic lookup: matches a property name against the string form of the
     * constants of the given enum class.
     *
     * @param property the enum class enumerating the managed properties
     * @param propertyName property name
     * @return the ordinal of the matching constant (used internally as the
     *         index); -1 if no constant matches
     */
    public <E extends Enum<E>> int getIndex(Class<E> property, String propertyName) {
        for (Enum<E> enumItem : property.getEnumConstants()) {
            if (enumItem.toString().equals(propertyName)) {
                //internally, ordinal is used as index
                return enumItem.ordinal();
            }
        }
        return -1;
    };

    /**
     * Read from system properties, or those in jaxp.properties.
     * System properties take precedence over jaxp.properties (checked first;
     * returns early on a hit), and each records the corresponding State.
     *
     * @param property the property
     * @param systemProperty the name of the system property
     */
    void getSystemProperty(Enum<?> property, String systemProperty) {
        try {
            String value = SecuritySupport.getSystemProperty(systemProperty);
            if (value != null) {
                values[property.ordinal()] = value;
                states[property.ordinal()] = State.SYSTEMPROPERTY;
                return;
            }
            value = SecuritySupport.readJAXPProperty(systemProperty);
            if (value != null) {
                values[property.ordinal()] = value;
                states[property.ordinal()] = State.JAXPDOTPROPERTIES;
            }
        } catch (NumberFormatException e) {
            //invalid setting ignored
        }
    }

}
package org.domeos.framework.api.service.loadBalancer.impl;

import org.domeos.basemodel.ResultStat;
import org.domeos.framework.api.biz.OperationHistory;
import org.domeos.framework.api.biz.collection.CollectionBiz;
import org.domeos.framework.api.biz.loadBalancer.LoadBalancerCollectionBiz;
import org.domeos.framework.api.consolemodel.loadBalancer.LoadBalancerCollectionDraft;
import org.domeos.framework.api.consolemodel.loadBalancer.LoadBalancerCollectionInfo;
import org.domeos.framework.api.controller.exception.ApiException;
import org.domeos.framework.api.model.auth.related.Role;
import org.domeos.framework.api.model.collection.CollectionAuthorityMap;
import org.domeos.framework.api.model.collection.CollectionResourceMap;
import org.domeos.framework.api.model.collection.related.ResourceType;
import org.domeos.framework.api.model.loadBalancer.LoadBalancerCollection;
import org.domeos.framework.api.model.operation.OperationRecord;
import org.domeos.framework.api.model.operation.OperationType;
import org.domeos.framework.api.service.loadBalancer.LoadBalancerCollectionService;
import org.domeos.framework.engine.AuthUtil;
import org.domeos.global.ClientConfigure;
import org.domeos.global.CurrentThreadInfo;

import java.util.*;
import java.util.concurrent.Callable;

import org.domeos.util.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

/**
 * CRUD service for load-balancer collections.
 * <p>
 * Every mutating operation (1) validates the input draft, (2) verifies the
 * caller's authority via {@link AuthUtil}, (3) delegates persistence to the
 * biz layer, and (4) appends an {@link OperationRecord} to the operation
 * history. Read operations verify authority only.
 *
 * Created by jackfan on 17/2/27.
 */
@Service
public class LoadBalancerCollectionServiceImpl implements LoadBalancerCollectionService {
    @Autowired
    LoadBalancerCollectionBiz lbcBiz;
    @Autowired
    CollectionBiz collectionBiz;
    @Autowired
    OperationHistory operationHistory;

    // All authority/history records created here are scoped to this resource type.
    private final ResourceType resourceType = ResourceType.LOADBALANCER_COLLECTION;

    /**
     * Creates a new load-balancer collection from the draft.
     * <p>
     * Rejects a null or illegal draft, and rejects a duplicate (same name AND
     * same type as an existing collection). On success the creator is granted
     * MASTER authority on the new collection, a SET operation record is
     * written, and the draft is returned with its generated id filled in.
     *
     * @param lbcDraft the collection to create
     * @return the same draft, with {@code id} populated
     * @throws ApiException if the draft is null/illegal or a duplicate exists
     */
    @Override
    public LoadBalancerCollectionDraft createLoadBalancerCollection(LoadBalancerCollectionDraft lbcDraft) {
        if (lbcDraft == null) {
            throw ApiException.wrapMessage(ResultStat.LOADBALANCER_COLLECTION_NOT_LEGAL, "loadBalancerCollection is null");
        }
        String error = lbcDraft.checkLegality();
        if (!StringUtils.isBlank(error)) {
            throw ApiException.wrapMessage(ResultStat.LOADBALANCER_COLLECTION_NOT_LEGAL, error);
        }
        // Duplicate check: lookup by name, then compare types — the same name
        // is allowed as long as the type differs.
        List<LoadBalancerCollection> lbcOlds = lbcBiz.getLoadBalancerCollection(lbcDraft.getName());
        if (lbcOlds != null) {
            for (LoadBalancerCollection lbc : lbcOlds) {
                if (lbcDraft.getType() == lbc.getType()) {
                    throw ApiException.wrapMessage(ResultStat.LOADBALANCER_COLLECTION_EXIST,
                            "loadBalancerCollection name have been exist.");
                }
            }
        }
        LoadBalancerCollection lbc = lbcDraft.toLoadBalancerConnection();
        lbc.setCreatorId(CurrentThreadInfo.getUserId());
        lbcBiz.createLoadBalancerCollection(lbc);
        // Persisting assigns the id; propagate it back to the draft for the caller.
        lbcDraft.setId(lbc.getId());
        // The creator becomes MASTER of the new collection.
        CollectionAuthorityMap collectionAuthorityMap = new CollectionAuthorityMap(lbc.getId(),
                resourceType,
                CurrentThreadInfo.getUserId(),
                Role.MASTER,
                System.currentTimeMillis()
        );
        collectionBiz.addAuthority(collectionAuthorityMap);
        operationHistory.insertRecord(new OperationRecord(
                lbc.getId(),
                resourceType,
                OperationType.SET,
                CurrentThreadInfo.getUserId(),
                CurrentThreadInfo.getUserName(),
                "OK",
                "",
                System.currentTimeMillis()
        ));
        return lbcDraft;
    }

    /**
     * Deletes an empty load-balancer collection.
     * <p>
     * Refuses to delete a collection that still contains load balancers; on
     * success the collection, its authority mappings, and a DELETE operation
     * record are all handled.
     *
     * @param lbcId id of the collection to delete
     * @throws ApiException if the caller lacks authority or the collection
     *         still has load balancers attached
     */
    @Override
    public void deleteLoadBalancerCollection(int lbcId) {
        int userId = CurrentThreadInfo.getUserId();
        AuthUtil.collectionVerify(userId, lbcId, resourceType, OperationType.DELETE, -1);
        List<CollectionResourceMap> collectionResourceMaps = collectionBiz.getResourcesByCollectionIdAndResourceType(
                lbcId, ResourceType.LOADBALANCER);
        if (collectionResourceMaps != null && collectionResourceMaps.size() > 0) {
            throw ApiException.wrapMessage(ResultStat.CANNOT_DELETE_LOADBALANCER_COLLECTION,
                    "You cannot delete a loadBalancer collection" + " with loadBalancer exists");
        }
        lbcBiz.deleteLoadBalancerCollection(lbcId);
        collectionBiz.deleteAuthoritiesByCollectionIdAndResourceType(lbcId, resourceType);
        operationHistory.insertRecord(new OperationRecord(
                lbcId,
                resourceType,
                OperationType.DELETE,
                CurrentThreadInfo.getUserId(),
                CurrentThreadInfo.getUserName(),
                "OK",
                "",
                System.currentTimeMillis()
        ));
    }

    /**
     * Updates an existing load-balancer collection.
     * <p>
     * Validates the draft, verifies MODIFY authority, rejects a rename that
     * would collide with another collection of the same type, and only allows
     * a type change while the collection contains no load balancers. Writes a
     * MODIFY operation record on success.
     *
     * @param lbcDraft new name/description/type for the collection
     * @return the draft that was applied
     * @throws ApiException on validation, authority, existence, duplicate, or
     *         non-empty-type-change failures
     */
    @Override
    public LoadBalancerCollectionDraft updateLoadBalancerCollection(LoadBalancerCollectionDraft lbcDraft) {
        if (lbcDraft == null) {
            throw ApiException.wrapMessage(ResultStat.LOADBALANCER_COLLECTION_NOT_LEGAL, "loadBalancerCollection is null");
        }
        String error = lbcDraft.checkLegality();
        if (!StringUtils.isBlank(error)) {
            throw ApiException.wrapMessage(ResultStat.LOADBALANCER_COLLECTION_NOT_LEGAL, error);
        }
        int userId = CurrentThreadInfo.getUserId();
        AuthUtil.collectionVerify(userId, lbcDraft.getId(), resourceType, OperationType.MODIFY, -1);
        LoadBalancerCollection lbcOld = lbcBiz.getLoadBalancerCollection(lbcDraft.getId());
        if (lbcOld == null) {
            throw ApiException.wrapMessage(ResultStat.LOADBALANCER_COLLECTION_NOT_EXIST,
                    "loadBalancer collection is not exist");
        }
        // Name/type uniqueness check — skip the collection being updated itself.
        List<LoadBalancerCollection> lbcOlds = lbcBiz.getLoadBalancerCollection(lbcDraft.getName());
        if (lbcOlds != null) {
            for (LoadBalancerCollection lbc : lbcOlds) {
                if (lbcDraft.getType() == lbc.getType() && lbcDraft.getId() != lbc.getId()) {
                    throw ApiException.wrapMessage(ResultStat.LOADBALANCER_COLLECTION_EXIST,
                            "loadBalancerCollection name have been exist.");
                }
            }
        }
        if (lbcOld.getType() != lbcDraft.getType()) {
            // A type change is only legal while the collection is empty.
            List<CollectionResourceMap> collectionResourceMaps = collectionBiz.getResourcesByCollectionIdAndResourceType(
                    lbcDraft.getId(), ResourceType.LOADBALANCER);
            if (collectionResourceMaps != null && collectionResourceMaps.size() > 0) {
                throw ApiException.wrapMessage(ResultStat.CANNOT_UPDATE_LOADBALANCER_COLLECTION,
                        "You cannot update loadBalancerCollection type " + "with loadBalancer exists");
            }
            lbcOld.setType(lbcDraft.getType());
        }
        lbcOld.setName(lbcDraft.getName());
        lbcOld.setDescription(lbcDraft.getDescription());
        lbcBiz.updateLoadBalancerCollection(lbcOld);
        operationHistory.insertRecord(new OperationRecord(
                lbcDraft.getId(),
                resourceType,
                OperationType.MODIFY,
                CurrentThreadInfo.getUserId(),
                CurrentThreadInfo.getUserName(),
                "OK",
                "",
                System.currentTimeMillis()
        ));
        return lbcDraft;
    }

    /**
     * Lists every load-balancer collection visible to the current user,
     * enriching each entry concurrently (one {@link GetLoadBalancerCollectionInfoTask}
     * per collection, run through {@link ClientConfigure#executeCompletionService}).
     *
     * @return the enriched, sorted list, or null if the user has access to
     *         no collections
     */
    @Override
    public List<LoadBalancerCollectionInfo> listLoadBalancerCollection() {
        int userId = CurrentThreadInfo.getUserId();
        List<CollectionAuthorityMap> authorityMaps = AuthUtil.getCollectionList(userId, resourceType);
        if (authorityMaps == null) {
            return null;
        }
        List<GetLoadBalancerCollectionInfoTask> lbcTasks = new LinkedList<GetLoadBalancerCollectionInfoTask>();
        // Admin status is resolved once here and shared by all tasks.
        Boolean isAdmin = AuthUtil.isAdmin(userId);
        for (CollectionAuthorityMap authorityMap : authorityMaps) {
            lbcTasks.add(new GetLoadBalancerCollectionInfoTask(authorityMap, isAdmin));
        }
        List<LoadBalancerCollectionInfo> lbcInfos = ClientConfigure.executeCompletionService(lbcTasks);
        Collections.sort(lbcInfos, new LoadBalancerCollectionInfo.LoadBalancerCollectionComparator());
        return lbcInfos;
    }

    /**
     * Task that builds the full {@link LoadBalancerCollectionInfo} for one
     * collection: creator name, load-balancer count, member count, and the
     * caller's effective role (admins always see MASTER).
     */
    private class GetLoadBalancerCollectionInfoTask implements Callable<LoadBalancerCollectionInfo> {
        Boolean isAdmin;
        CollectionAuthorityMap authorityMap;

        private GetLoadBalancerCollectionInfoTask(CollectionAuthorityMap authorityMap, boolean isAdmin) {
            this.isAdmin = isAdmin;
            this.authorityMap = authorityMap;
        }

        @Override
        public LoadBalancerCollectionInfo call() throws Exception {
            LoadBalancerCollection lbc = lbcBiz.getLoadBalancerCollection(authorityMap.getCollectionId());
            LoadBalancerCollectionInfo lbcInfo = new LoadBalancerCollectionInfo(lbc);
            String userName = AuthUtil.getUserNameById(lbcInfo.getCreatorId());
            lbcInfo.setCreatorName(userName);
            // Count of load balancers currently attached to the collection.
            List<CollectionResourceMap> resourceMaps = collectionBiz.
                    getResourcesByCollectionIdAndResourceType(lbc.getId(), ResourceType.LOADBALANCER);
            if (resourceMaps == null) {
                lbcInfo.setLoadBalancerCount(0);
            } else {
                lbcInfo.setLoadBalancerCount(resourceMaps.size());
            }
            // Count of users holding any authority on the collection.
            List<CollectionAuthorityMap> authorityMaps =
                    collectionBiz.getAuthoritiesByCollectionIdAndResourceType(lbc.getId(), resourceType);
            if (authorityMaps == null) {
                lbcInfo.setMemberCount(0);
            } else {
                lbcInfo.setMemberCount(authorityMaps.size());
            }
            if (isAdmin) {
                lbcInfo.setRole(Role.MASTER);
            } else {
                lbcInfo.setRole(authorityMap.getRole());
            }
            return lbcInfo;
        }
    }

    /**
     * Fetches a single collection (after verifying GET authority) with the
     * creator name resolved.
     *
     * @param lbcId id of the collection
     * @return the collection info
     * @throws ApiException if the caller lacks authority or the collection
     *         does not exist
     */
    @Override
    public LoadBalancerCollectionInfo getLoadBalancerCollection(int lbcId) {
        int userId = CurrentThreadInfo.getUserId();
        AuthUtil.collectionVerify(userId, lbcId, resourceType, OperationType.GET, -1);
        LoadBalancerCollection lbcOld = lbcBiz.getLoadBalancerCollection(lbcId);
        if (lbcOld == null) {
            throw ApiException.wrapResultStat(ResultStat.LOADBALANCER_COLLECTION_NOT_EXIST);
        }
        LoadBalancerCollectionInfo lbcInfo = new LoadBalancerCollectionInfo(lbcOld);
        String userName = AuthUtil.getUserNameById(lbcInfo.getCreatorId());
        lbcInfo.setCreatorName(userName);
        return lbcInfo;
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.io;

import java.io.*;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ReflectionUtils;

import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;

/**
 * Static helpers for reading and writing Writable-style data: length-prefixed
 * (optionally GZIP-compressed) byte arrays and strings, zero-compressed
 * variable-length ints/longs (vint/vlong), enums, and assorted stream
 * utilities. All string encodings on this class use UTF-8.
 */
public final class WritableUtils  {

  public static final int INT_LENGTH_BYTES = 4;

  /**
   * Reads a length-prefixed, GZIP-compressed byte array written by
   * {@link #writeCompressedByteArray(DataOutput, byte[])}.
   *
   * @param in stream to read from
   * @return the decompressed bytes, or null if the recorded length was -1
   * @throws IOException on stream or decompression failure
   */
  public static byte[] readCompressedByteArray(DataInput in) throws IOException {
    int length = in.readInt();
    if (length == -1) return null;
    byte[] buffer = new byte[length];
    in.readFully(buffer);      // could/should use readFully(buffer,0,length)?
    GZIPInputStream gzi = new GZIPInputStream(new ByteArrayInputStream(buffer, 0, buffer.length));
    byte[] outbuf = new byte[length];
    ByteArrayOutputStream bos =  new ByteArrayOutputStream();
    int len;
    while((len=gzi.read(outbuf, 0, outbuf.length)) != -1){
      bos.write(outbuf, 0, len);
    }
    byte[] decompressed =  bos.toByteArray();
    bos.close();
    gzi.close();
    return decompressed;
  }

  /**
   * Skips over a compressed byte array record without decompressing it.
   *
   * @param in stream to skip in
   * @throws IOException on stream failure
   */
  public static void skipCompressedByteArray(DataInput in) throws IOException {
    int length = in.readInt();
    if (length != -1) {
      skipFully(in, length);
    }
  }

  /**
   * GZIP-compresses {@code bytes} and writes it as a length-prefixed record;
   * writes the single int -1 for a null array.
   *
   * @param out stream to write to
   * @param bytes bytes to compress and write, may be null
   * @return compression ratio as a percentage (compressed*100/original), or
   *         0 for an empty input, or -1 for a null input
   * @throws IOException on stream failure
   */
  public static int  writeCompressedByteArray(DataOutput out,
                                              byte[] bytes) throws IOException {
    if (bytes != null) {
      ByteArrayOutputStream bos =  new ByteArrayOutputStream();
      GZIPOutputStream gzout = new GZIPOutputStream(bos);
      gzout.write(bytes, 0, bytes.length);
      gzout.close();
      byte[] buffer = bos.toByteArray();
      int len = buffer.length;
      out.writeInt(len);
      out.write(buffer, 0, len);
      /* debug only! Once we have confidence, can lose this. */
      return ((bytes.length != 0) ? (100*buffer.length)/bytes.length : 0);
    } else {
      out.writeInt(-1);
      return -1;
    }
  }

  /* Ugly utility, maybe someone else can do this better  */
  public static String readCompressedString(DataInput in) throws IOException {
    byte[] bytes = readCompressedByteArray(in);
    if (bytes == null) return null;
    return new String(bytes, "UTF-8");
  }

  /**
   * Writes a string as a compressed UTF-8 byte array (null-safe).
   *
   * @see #writeCompressedByteArray(DataOutput, byte[])
   */
  public static int  writeCompressedString(DataOutput out, String s) throws IOException {
    return writeCompressedByteArray(out, (s != null) ? s.getBytes("UTF-8") : null);
  }

  /*
   *
   * Write a String as a Network Int n, followed by n Bytes
   * Alternative to 16 bit read/writeUTF.
   * Encoding standard is... ?
   *
   */
  public static void writeString(DataOutput out, String s) throws IOException {
    if (s != null) {
      byte[] buffer = s.getBytes("UTF-8");
      int len = buffer.length;
      out.writeInt(len);
      out.write(buffer, 0, len);
    } else {
      out.writeInt(-1);
    }
  }

  /*
   * Read a String as a Network Int n, followed by n Bytes
   * Alternative to 16 bit read/writeUTF.
   * Encoding standard is... ?
   *
   */
  public static String readString(DataInput in) throws IOException{
    int length = in.readInt();
    if (length == -1) return null;
    byte[] buffer = new byte[length];
    in.readFully(buffer);      // could/should use readFully(buffer,0,length)?
    return new String(buffer,"UTF-8");
  }

  /*
   * Write a String array as a Nework Int N, followed by Int N Byte Array Strings.
   * Could be generalised using introspection.
   *
   */
  public static void writeStringArray(DataOutput out,
                                      String[] s) throws IOException{
    out.writeInt(s.length);
    for(int i = 0; i < s.length; i++) {
      writeString(out, s[i]);
    }
  }

  /*
   * Write a String array as a Nework Int N, followed by Int N Byte Array of
   * compressed Strings. Handles also null arrays and null values.
   * Could be generalised using introspection.
   *
   */
  public static void writeCompressedStringArray(DataOutput out,
                                                String[] s) throws IOException{
    if (s == null) {
      out.writeInt(-1);
      return;
    }
    out.writeInt(s.length);
    for(int i = 0; i < s.length; i++) {
      writeCompressedString(out, s[i]);
    }
  }

  /*
   * Write a String array as a Nework Int N, followed by Int N Byte Array Strings.
   * Could be generalised using introspection. Actually this bit couldn't...
   *
   */
  public static String[] readStringArray(DataInput in) throws IOException {
    int len = in.readInt();
    if (len == -1) return null;
    String[] s = new String[len];
    for(int i = 0; i < len; i++) {
      s[i] = readString(in);
    }
    return s;
  }

  /*
   * Write a String array as a Nework Int N, followed by Int N Byte Array Strings.
   * Could be generalised using introspection. Handles null arrays and null values.
   *
   */
  public static String[] readCompressedStringArray(DataInput in) throws IOException {
    int len = in.readInt();
    if (len == -1) return null;
    String[] s = new String[len];
    for(int i = 0; i < len; i++) {
      s[i] = readCompressedString(in);
    }
    return s;
  }

  /*
   *
   * Test Utility Method Display Byte Array.
   *
   */
  public static void displayByteArray(byte[] record){
    // FIX: the previous implementation unconditionally printed
    // record[record.length - 1] after the loop, so an empty array threw
    // ArrayIndexOutOfBoundsException. Print just the trailing newline then.
    if (record.length == 0) {
      System.out.println();
      return;
    }
    int i;
    for(i=0;i < record.length -1; i++){
      if (i % 16 == 0) {
        System.out.println();
      }
      System.out.print(Integer.toHexString(record[i] >> 4 & 0x0F));
      System.out.print(Integer.toHexString(record[i] & 0x0F));
      System.out.print(",");
    }
    // last byte: printed without a trailing comma
    System.out.print(Integer.toHexString(record[i] >> 4 & 0x0F));
    System.out.print(Integer.toHexString(record[i] & 0x0F));
    System.out.println();
  }

  /**
   * Make a copy of a writable object using serialization to a buffer.
   * @param orig The object to copy
   * @return The copied object
   */
  public static <T extends Writable> T clone(T orig, Configuration conf) {
    try {
      @SuppressWarnings("unchecked") // Unchecked cast from Class to Class<T>
      T newInst = ReflectionUtils.newInstance((Class<T>) orig.getClass(), conf);
      ReflectionUtils.copy(conf, orig, newInst);
      return newInst;
    } catch (IOException e) {
      throw new RuntimeException("Error writing/reading clone buffer", e);
    }
  }

  /**
   * Make a copy of the writable object using serialiation to a buffer
   * @param dst the object to copy from
   * @param src the object to copy into, which is destroyed
   * @throws IOException
   * @deprecated use ReflectionUtils.cloneInto instead.
   */
  @Deprecated
  public static void cloneInto(Writable dst, Writable src) throws IOException {
    ReflectionUtils.cloneWritableInto(dst, src);
  }

  /**
   * Serializes an integer to a binary stream with zero-compressed encoding.
   * For -120 &lt;= i &lt;= 127, only one byte is used with the actual value.
   * For other values of i, the first byte value indicates whether the
   * integer is positive or negative, and the number of bytes that follow.
   * If the first byte value v is between -121 and -124, the following integer
   * is positive, with number of bytes that follow are -(v+120).
   * If the first byte value v is between -125 and -128, the following integer
   * is negative, with number of bytes that follow are -(v+124). Bytes are
   * stored in the high-non-zero-byte-first order.
   *
   * @param stream Binary output stream
   * @param i Integer to be serialized
   * @throws java.io.IOException
   */
  public static void writeVInt(DataOutput stream, int i) throws IOException {
    writeVLong(stream, i);
  }

  /**
   * Serializes a long to a binary stream with zero-compressed encoding.
   * For -112 &lt;= i &lt;= 127, only one byte is used with the actual value.
   * For other values of i, the first byte value indicates whether the
   * long is positive or negative, and the number of bytes that follow.
   * If the first byte value v is between -113 and -120, the following long
   * is positive, with number of bytes that follow are -(v+112).
   * If the first byte value v is between -121 and -128, the following long
   * is negative, with number of bytes that follow are -(v+120). Bytes are
   * stored in the high-non-zero-byte-first order.
   *
   * @param stream Binary output stream
   * @param i Long to be serialized
   * @throws java.io.IOException
   */
  public static void writeVLong(DataOutput stream, long i) throws IOException {
    if (i >= -112 && i <= 127) {
      stream.writeByte((byte)i);
      return;
    }

    int len = -112;
    if (i < 0) {
      i ^= -1L; // take one's complement'
      len = -120;
    }

    long tmp = i;
    while (tmp != 0) {
      tmp = tmp >> 8;
      len--;
    }

    stream.writeByte((byte)len);

    len = (len < -120) ? -(len + 120) : -(len + 112);

    for (int idx = len; idx != 0; idx--) {
      int shiftbits = (idx - 1) * 8;
      long mask = 0xFFL << shiftbits;
      stream.writeByte((byte)((i & mask) >> shiftbits));
    }
  }

  /**
   * Reads a zero-compressed encoded long from input stream and returns it.
   * @param stream Binary input stream
   * @throws java.io.IOException
   * @return deserialized long from stream.
   */
  public static long readVLong(DataInput stream) throws IOException {
    byte firstByte = stream.readByte();
    int len = decodeVIntSize(firstByte);
    if (len == 1) {
      return firstByte;
    }
    long i = 0;
    for (int idx = 0; idx < len-1; idx++) {
      byte b = stream.readByte();
      i = i << 8;
      i = i | (b & 0xFF);
    }
    return (isNegativeVInt(firstByte) ? (i ^ -1L) : i);
  }

  /**
   * Reads a zero-compressed encoded integer from input stream and returns it.
   * @param stream Binary input stream
   * @throws java.io.IOException
   * @return deserialized integer from stream.
   */
  public static int readVInt(DataInput stream) throws IOException {
    return (int) readVLong(stream);
  }

  /**
   * Given the first byte of a vint/vlong, determine the sign
   * @param value the first byte
   * @return is the value negative
   */
  public static boolean isNegativeVInt(byte value) {
    return value < -120 || (value >= -112 && value < 0);
  }

  /**
   * Parse the first byte of a vint/vlong to determine the number of bytes
   * @param value the first byte of the vint/vlong
   * @return the total number of bytes (1 to 9)
   */
  public static int decodeVIntSize(byte value) {
    if (value >= -112) {
      return 1;
    } else if (value < -120) {
      return -119 - value;
    }
    return -111 - value;
  }

  /**
   * Get the encoded length if an integer is stored in a variable-length format
   * @return the encoded length
   */
  public static int getVIntSize(long i) {
    if (i >= -112 && i <= 127) {
      return 1;
    }

    if (i < 0) {
      i ^= -1L; // take one's complement'
    }
    // find the number of bytes with non-leading zeros
    int dataBits = Long.SIZE - Long.numberOfLeadingZeros(i);
    // find the number of data bytes + length byte
    return (dataBits + 7) / 8 + 1;
  }

  /**
   * Read an Enum value from DataInput, Enums are read and written
   * using String values.
   * @param <T> Enum type
   * @param in DataInput to read from
   * @param enumType Class type of Enum
   * @return Enum represented by String read from DataInput
   * @throws IOException
   */
  public static <T extends Enum<T>> T readEnum(DataInput in, Class<T> enumType)
    throws IOException{
    return T.valueOf(enumType, Text.readStringOpt(in));
  }

  /**
   * writes String value of enum to DataOutput.
   * @param out Dataoutput stream
   * @param enumVal enum value
   * @throws IOException
   */
  public static void writeEnum(DataOutput out,  Enum<?> enumVal)
    throws IOException{
    Text.writeStringOpt(out, enumVal.name());
  }

  /**
   * Skip <i>len</i> number of bytes in input stream<i>in</i>
   * @param in input stream
   * @param len number of bytes to skip
   * @throws IOException when skipped less number of bytes
   */
  public static void skipFully(DataInput in, int len) throws IOException {
    int total = 0;
    int cur = 0;

    while ((total<len) && ((cur = in.skipBytes(len-total)) > 0)) {
      total += cur;
    }

    if (total<len) {
      throw new IOException("Not able to skip " + len + " bytes, possibly " +
                            "due to end of input.");
    }
  }

  /** Convert writables to a byte array */
  public static byte[] toByteArray(Writable... writables) {
    final DataOutputBuffer out = new DataOutputBuffer();
    try {
      for(Writable w : writables) {
        w.write(out);
      }
      out.close();
    } catch (IOException e) {
      throw new RuntimeException("Fail to convert writables to a byte array",e);
    }
    return out.getData();
  }

  /**
   * Read a string, but check it for sanity. The format consists of a vint
   * followed by the given number of bytes.
   * @param in the stream to read from
   * @param maxLength the largest acceptable length of the encoded string
   * @return the bytes as a string
   * @throws IOException if reading from the DataInput fails
   * @throws IllegalArgumentException if the encoded byte size for string
   * is negative or larger than maxSize. Only the vint is read.
   */
  public static String readStringSafely(DataInput in,
                                        int maxLength
                                        ) throws IOException,
                                                 IllegalArgumentException {
    int length = readVInt(in);
    if (length < 0 || length > maxLength) {
      throw new IllegalArgumentException("Encoded byte size for String was " + length +
                                         ", which is outside of 0.." +
                                         maxLength + " range.");
    }
    byte [] bytes = new byte[length];
    in.readFully(bytes, 0, length);
    return Text.decode(bytes);
  }
}
/**
 * Copyright 2004-2005 Sun Microsystems, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.acre.visualizer.ui.pdmcomposer;

import org.acre.common.AcreStringUtil;
import org.acre.dao.PDMValidator;
import org.acre.visualizer.ui.Main;

import javax.swing.DefaultComboBoxModel;
import javax.swing.JDialog;

/**
 * Form panel for entering a single PDM role argument (sequence, type, name,
 * value), normally hosted in a modal {@link JDialog} set via
 * {@link #setDialog(JDialog)}. Save validates that name, value, and type are
 * all present before hiding the dialog; Cancel sets the
 * {@link #isOperationCanceled()} flag.
 *
 * @author Administrator
 */
public class PDMRoleArgumentForm extends javax.swing.JPanel {

    /** Creates new form PDMRoleArgumentForm */
    public PDMRoleArgumentForm() {
        initComponents();
    }

    /** This method is called from within the constructor to
     * initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is
     * always regenerated by the Form Editor.
     */
    private void initComponents() {//GEN-BEGIN:initComponents
        java.awt.GridBagConstraints gridBagConstraints;

        sequenceLabel = new javax.swing.JLabel();
        sequenceTF = new javax.swing.JTextField();
        typeLabel = new javax.swing.JLabel();
        typeCB = new javax.swing.JComboBox();
        nameLabel = new javax.swing.JLabel();
        nameTF = new javax.swing.JTextField();
        valueLabel = new javax.swing.JLabel();
        valueTF = new javax.swing.JTextField();
        buttonPanel = new javax.swing.JPanel();
        saveButton = new javax.swing.JButton();
        cancelButton = new javax.swing.JButton();

        setLayout(new java.awt.GridBagLayout());

        setPreferredSize(new java.awt.Dimension(300, 175));
        sequenceLabel.setText("Sequence");
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.anchor = java.awt.GridBagConstraints.EAST;
        gridBagConstraints.insets = new java.awt.Insets(0, 0, 3, 5);
        add(sequenceLabel, gridBagConstraints);

        sequenceTF.setColumns(3);
        sequenceTF.setEditable(false);
        sequenceTF.setToolTipText("Sequence number for this argument");
        sequenceTF.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                sequenceTFActionPerformed(evt);
            }
        });

        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridwidth = java.awt.GridBagConstraints.REMAINDER;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
        gridBagConstraints.insets = new java.awt.Insets(0, 5, 3, 0);
        add(sequenceTF, gridBagConstraints);

        typeLabel.setText("Argument Type");
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 1;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.EAST;
        gridBagConstraints.insets = new java.awt.Insets(0, 0, 3, 5);
        add(typeLabel, gridBagConstraints);

        initTypeCB();
        typeCB.setToolTipText("Type of Argument");
        typeCB.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                typeCBActionPerformed(evt);
            }
        });

        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 1;
        gridBagConstraints.gridy = 1;
        gridBagConstraints.gridwidth = java.awt.GridBagConstraints.REMAINDER;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
        gridBagConstraints.insets = new java.awt.Insets(0, 5, 3, 0);
        add(typeCB, gridBagConstraints);

        nameLabel.setText("Argument Name");
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 2;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.EAST;
        gridBagConstraints.insets = new java.awt.Insets(0, 0, 3, 5);
        add(nameLabel, gridBagConstraints);

        nameTF.setColumns(15);
        nameTF.setToolTipText("Name for the Argument");
        nameTF.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                nameTFActionPerformed(evt);
            }
        });

        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 1;
        gridBagConstraints.gridy = 2;
        gridBagConstraints.gridwidth = java.awt.GridBagConstraints.REMAINDER;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
        gridBagConstraints.insets = new java.awt.Insets(0, 5, 3, 0);
        add(nameTF, gridBagConstraints);

        valueLabel.setText("Argument Value");
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 3;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.EAST;
        gridBagConstraints.insets = new java.awt.Insets(0, 0, 3, 5);
        add(valueLabel, gridBagConstraints);

        valueTF.setColumns(15);
        valueTF.setToolTipText("Value for this argument");
        valueTF.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                valueTFActionPerformed(evt);
            }
        });

        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 1;
        gridBagConstraints.gridy = 3;
        gridBagConstraints.gridwidth = java.awt.GridBagConstraints.REMAINDER;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
        gridBagConstraints.insets = new java.awt.Insets(0, 5, 3, 0);
        add(valueTF, gridBagConstraints);

        saveButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/acre/visualizer/ui/icons/buttons/SaveButton.gif")));
        saveButton.setMnemonic('S');
        saveButton.setToolTipText("Save this argument");
        saveButton.setBorderPainted(false);
        saveButton.setMargin(new java.awt.Insets(0, 0, 0, 0));
        saveButton.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                saveButtonActionPerformed(evt);
            }
        });

        buttonPanel.add(saveButton);

        cancelButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/acre/visualizer/ui/icons/buttons/CancelButton.gif")));
        cancelButton.setMnemonic('C');
        cancelButton.setToolTipText("Cancel this window");
        cancelButton.setBorderPainted(false);
        cancelButton.setMargin(new java.awt.Insets(0, 0, 0, 0));
        cancelButton.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                cancelButtonActionPerformed(evt);
            }
        });

        buttonPanel.add(cancelButton);

        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 4;
        gridBagConstraints.gridwidth = java.awt.GridBagConstraints.REMAINDER;
        gridBagConstraints.gridheight = java.awt.GridBagConstraints.REMAINDER;
        gridBagConstraints.insets = new java.awt.Insets(5, 0, 0, 0);
        add(buttonPanel, gridBagConstraints);

    }//GEN-END:initComponents

    private void cancelButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_cancelButtonActionPerformed
        cancel();
    }//GEN-LAST:event_cancelButtonActionPerformed

    private void saveButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_saveButtonActionPerformed
        save();
    }//GEN-LAST:event_saveButtonActionPerformed

    private void valueTFActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_valueTFActionPerformed
        // TODO add your handling code here:
    }//GEN-LAST:event_valueTFActionPerformed

    private void nameTFActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_nameTFActionPerformed
        // TODO add your handling code here:
    }//GEN-LAST:event_nameTFActionPerformed

    private void sequenceTFActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_sequenceTFActionPerformed
        // TODO add your handling code here:
    }//GEN-LAST:event_sequenceTFActionPerformed

    private void typeCBActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_typeCBActionPerformed
        // TODO add your handling code here:
    }//GEN-LAST:event_typeCBActionPerformed

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JPanel buttonPanel;
    private javax.swing.JButton cancelButton;
    private javax.swing.JLabel nameLabel;
    private javax.swing.JTextField nameTF;
    private javax.swing.JButton saveButton;
    private javax.swing.JLabel sequenceLabel;
    private javax.swing.JTextField sequenceTF;
    private javax.swing.JComboBox typeCB;
    private javax.swing.JLabel typeLabel;
    private javax.swing.JLabel valueLabel;
    private javax.swing.JTextField valueTF;
    // End of variables declaration//GEN-END:variables

    // True when the user dismissed the dialog via Cancel (or save validation
    // never completed); reset to false by a successful save.
    boolean operationCanceled = false;
    // Host dialog; may be null if the panel is used outside a dialog.
    JDialog dialog;

    /** Populates the type combo box from the PDM validator's role argument types. */
    private void initTypeCB() {
        Object [] values = PDMValidator.getInstance().getRoleArgumentTypes().toArray();
        DefaultComboBoxModel model = new DefaultComboBoxModel(values);
        this.typeCB.setModel(model);
    }

    /** Marks the operation as canceled and hides the host dialog. */
    private void cancel() {
        operationCanceled = true;
        hideMe();
    }

    /**
     * Validates that name, value, and type are all present; shows an error and
     * keeps the dialog open otherwise. On success clears the canceled flag and
     * hides the dialog.
     */
    private void save() {
        if (AcreStringUtil.isEmpty(getArgumentName())) {
            Main.showMainError("Argument Name cannot be null", "Save Role Argument");
            return;
        }
        if (AcreStringUtil.isEmpty(getArgumentValue())) {
            Main.showMainError("Argument Value cannot be null", "Save Role Argument");
            return;
        }
        if (AcreStringUtil.isEmpty(getType())) {
            Main.showMainError("Argument Type must be selected", "Save Role Argument");
            return;
        }
        operationCanceled = false;
        hideMe();
    }

    /** Shows the host dialog, if one has been set. */
    public void showMe() {
        if (dialog != null)
            // FIX: Component.show() is deprecated; setVisible(true) is the
            // supported equivalent.
            dialog.setVisible(true);
    }

    /** Hides the host dialog, if one has been set. */
    private void hideMe() {
        if (dialog != null)
            // FIX: Component.hide() is deprecated; setVisible(false) is the
            // supported equivalent.
            dialog.setVisible(false);
    }

    public JDialog getDialog() {
        return dialog;
    }

    /**
     * Attaches the host dialog. Close is disabled on the dialog so that the
     * Save/Cancel buttons are the only way to dismiss it.
     */
    public void setDialog(JDialog dialog) {
        this.dialog = dialog;
        this.dialog.setDefaultCloseOperation(JDialog.DO_NOTHING_ON_CLOSE);
    }

    /** @return true if the last dismissal was a cancel rather than a save */
    public boolean isOperationCanceled() {
        return operationCanceled;
    }

    public String getSequence() {
        return sequenceTF.getText();
    }

    /** @return the selected argument type, or "" if nothing is selected */
    public String getType() {
        if (typeCB.getSelectedItem() != null)
            return typeCB.getSelectedItem().toString();
        else
            return "";
    }

    public String getArgumentName() {
        return nameTF.getText();
    }

    public String getArgumentValue() {
        return valueTF.getText();
    }

    /** Clears the name and value fields (sequence and type are left as-is). */
    public void clear() {
        nameTF.setText("");
        valueTF.setText("");
    }

    public void setSequence(String s) {
        sequenceTF.setText(s);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.nifi.web.api.dto;

import io.swagger.annotations.ApiModelProperty;
import org.apache.nifi.web.api.dto.util.TimestampAdapter;

import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import java.util.Date;

/**
 * A request to drop the contents of a connection.
 */
@XmlType(name = "dropRequest")
public class DropRequestDTO {

    private String id;
    private String uri;
    private Date submissionTime;
    private Date lastUpdated;
    private Integer percentCompleted;
    private Boolean finished;
    private String failureReason;

    private Integer currentCount;
    private Long currentSize;
    private String current;
    private Integer originalCount;
    private Long originalSize;
    private String original;
    private Integer droppedCount;
    private Long droppedSize;
    private String dropped;

    private String state;

    /**
     * Null-safe defensive copy of a mutable {@link Date}. java.util.Date is
     * mutable, so storing/returning the caller's instance directly would let
     * external code alter this DTO's state after the fact.
     */
    private static Date defensiveCopy(final Date date) {
        return (date == null) ? null : new Date(date.getTime());
    }

    /**
     * The id for this drop request.
     *
     * @return The id
     */
    @ApiModelProperty(
            value = "The id for this drop request."
    )
    public String getId() {
        return this.id;
    }

    public void setId(final String id) {
        this.id = id;
    }

    /**
     * The uri for linking to this drop request in this NiFi.
     *
     * @return The uri
     */
    @ApiModelProperty(
            value = "The URI for future requests to this drop request."
    )
    public String getUri() {
        return uri;
    }

    public void setUri(String uri) {
        this.uri = uri;
    }

    /**
     * @return time the query was submitted (defensive copy)
     */
    @XmlJavaTypeAdapter(TimestampAdapter.class)
    @ApiModelProperty(
            value = "The timestamp when the query was submitted.",
            dataType = "string"
    )
    public Date getSubmissionTime() {
        return defensiveCopy(submissionTime);
    }

    public void setSubmissionTime(Date submissionTime) {
        this.submissionTime = defensiveCopy(submissionTime);
    }

    /**
     * @return percent completed
     */
    @ApiModelProperty(
            value = "The current percent complete."
    )
    public Integer getPercentCompleted() {
        return percentCompleted;
    }

    public void setPercentCompleted(Integer percentCompleted) {
        this.percentCompleted = percentCompleted;
    }

    /**
     * @return whether the query has finished
     */
    @ApiModelProperty(
            value = "Whether the query has finished."
    )
    public Boolean isFinished() {
        return finished;
    }

    public void setFinished(Boolean finished) {
        this.finished = finished;
    }

    /**
     * @return the reason, if any, that this drop request failed
     */
    @ApiModelProperty(
            value = "The reason, if any, that this drop request failed."
    )
    public String getFailureReason() {
        return failureReason;
    }

    public void setFailureReason(String failureReason) {
        this.failureReason = failureReason;
    }

    /**
     * @return the time this request was last updated (defensive copy)
     */
    @XmlJavaTypeAdapter(TimestampAdapter.class)
    @ApiModelProperty(
            value = "The last time this drop request was updated.",
            dataType = "string"
    )
    public Date getLastUpdated() {
        return defensiveCopy(lastUpdated);
    }

    public void setLastUpdated(Date lastUpdated) {
        this.lastUpdated = defensiveCopy(lastUpdated);
    }

    /**
     * @return the number of flow files currently queued.
     */
    @ApiModelProperty(
            value = "The number of flow files currently queued."
    )
    public Integer getCurrentCount() {
        return currentCount;
    }

    public void setCurrentCount(Integer currentCount) {
        this.currentCount = currentCount;
    }

    /**
     * @return the size of the flow files currently queued in bytes.
     */
    @ApiModelProperty(
            value = "The size of flow files currently queued in bytes."
    )
    public Long getCurrentSize() {
        return currentSize;
    }

    public void setCurrentSize(Long currentSize) {
        this.currentSize = currentSize;
    }

    /**
     * @return the count and size of the currently queued flow files.
     */
    @ApiModelProperty(
            value = "The count and size of flow files currently queued."
    )
    public String getCurrent() {
        return current;
    }

    public void setCurrent(String current) {
        this.current = current;
    }

    /**
     * @return the number of flow files to be dropped as a result of this request.
     */
    @ApiModelProperty(
            value = "The number of flow files to be dropped as a result of this request."
    )
    public Integer getOriginalCount() {
        return originalCount;
    }

    public void setOriginalCount(Integer originalCount) {
        this.originalCount = originalCount;
    }

    /**
     * @return the size of the flow files to be dropped as a result of this request in bytes.
     */
    @ApiModelProperty(
            value = "The size of flow files to be dropped as a result of this request in bytes."
    )
    public Long getOriginalSize() {
        return originalSize;
    }

    public void setOriginalSize(Long originalSize) {
        this.originalSize = originalSize;
    }

    /**
     * @return the count and size of flow files to be dropped as a result of this request.
     */
    @ApiModelProperty(
            value = "The count and size of flow files to be dropped as a result of this request."
    )
    public String getOriginal() {
        return original;
    }

    public void setOriginal(String original) {
        this.original = original;
    }

    /**
     * @return the number of flow files that have been dropped thus far.
     */
    @ApiModelProperty(
            value = "The number of flow files that have been dropped thus far."
    )
    public Integer getDroppedCount() {
        return droppedCount;
    }

    public void setDroppedCount(Integer droppedCount) {
        this.droppedCount = droppedCount;
    }

    /**
     * @return the size of the flow files that have been dropped thus far in bytes.
     */
    @ApiModelProperty(
            value = "The size of flow files that have been dropped thus far in bytes."
    )
    public Long getDroppedSize() {
        return droppedSize;
    }

    public void setDroppedSize(Long droppedSize) {
        this.droppedSize = droppedSize;
    }

    /**
     * @return the count and size of the flow files that have been dropped thus far.
     */
    @ApiModelProperty(
            value = "The count and size of flow files that have been dropped thus far."
    )
    public String getDropped() {
        return dropped;
    }

    public void setDropped(String dropped) {
        this.dropped = dropped;
    }

    /**
     * @return the current state of the drop request.
     */
    @ApiModelProperty(
            value = "The current state of the drop request."
    )
    public String getState() {
        return state;
    }

    public void setState(String state) {
        this.state = state;
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.query;

import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.geo.builders.ShapeBuilder;

/**
 * A static factory for simple "import static" usage.
 * Every method delegates to the corresponding {@code *FilterBuilder}
 * constructor; this class holds no state and cannot be instantiated.
 */
public abstract class FilterBuilders {

    /**
     * A filter that matches all documents.
     */
    public static MatchAllFilterBuilder matchAllFilter() {
        return new MatchAllFilterBuilder();
    }

    /**
     * A filter that limits the results to the provided limit value (per shard!).
     * @deprecated Use {@link SearchRequestBuilder#setTerminateAfter(int)} instead.
     */
    @Deprecated
    public static LimitFilterBuilder limitFilter(int limit) {
        return new LimitFilterBuilder(limit);
    }

    /**
     * A filter that runs a query against documents nested under the given path.
     */
    public static NestedFilterBuilder nestedFilter(String path, QueryBuilder query) {
        return new NestedFilterBuilder(path, query);
    }

    /**
     * A filter that runs a filter against documents nested under the given path.
     */
    public static NestedFilterBuilder nestedFilter(String path, FilterBuilder filter) {
        return new NestedFilterBuilder(path, filter);
    }

    /**
     * Creates a new ids filter with the provided doc/mapping types.
     *
     * @param types The types to match the ids against.
     */
    public static IdsFilterBuilder idsFilter(@Nullable String... types) {
        return new IdsFilterBuilder(types);
    }

    /**
     * A filter based on doc/mapping type.
     */
    public static TypeFilterBuilder typeFilter(String type) {
        return new TypeFilterBuilder(type);
    }

    /**
     * A filter for a field based on a term.
     *
     * @param name  The field name
     * @param value The term value
     */
    public static TermFilterBuilder termFilter(String name, String value) {
        return new TermFilterBuilder(name, value);
    }

    /**
     * A filter for a field based on a term.
     *
     * @param name  The field name
     * @param value The term value
     */
    public static TermFilterBuilder termFilter(String name, int value) {
        return new TermFilterBuilder(name, value);
    }

    /**
     * A filter for a field based on a term.
     *
     * @param name  The field name
     * @param value The term value
     */
    public static TermFilterBuilder termFilter(String name, long value) {
        return new TermFilterBuilder(name, value);
    }

    /**
     * A filter for a field based on a term.
     *
     * @param name  The field name
     * @param value The term value
     */
    public static TermFilterBuilder termFilter(String name, float value) {
        return new TermFilterBuilder(name, value);
    }

    /**
     * A filter for a field based on a term.
     *
     * @param name  The field name
     * @param value The term value
     */
    public static TermFilterBuilder termFilter(String name, double value) {
        return new TermFilterBuilder(name, value);
    }

    /**
     * A filter for a field based on a term.
     *
     * @param name  The field name
     * @param value The term value
     */
    public static TermFilterBuilder termFilter(String name, Object value) {
        return new TermFilterBuilder(name, value);
    }

    /**
     * A filter for a field based on several terms matching on any of them.
     *
     * @param name   The field name
     * @param values The terms
     */
    public static TermsFilterBuilder termsFilter(String name, String... values) {
        return new TermsFilterBuilder(name, values);
    }

    /**
     * A filter for a field based on several terms matching on any of them.
     *
     * @param name   The field name
     * @param values The terms
     */
    public static TermsFilterBuilder termsFilter(String name, int... values) {
        return new TermsFilterBuilder(name, values);
    }

    /**
     * A filter for a field based on several terms matching on any of them.
     *
     * @param name   The field name
     * @param values The terms
     */
    public static TermsFilterBuilder termsFilter(String name, long... values) {
        return new TermsFilterBuilder(name, values);
    }

    /**
     * A filter for a field based on several terms matching on any of them.
     *
     * @param name   The field name
     * @param values The terms
     */
    public static TermsFilterBuilder termsFilter(String name, float... values) {
        return new TermsFilterBuilder(name, values);
    }

    /**
     * A filter for a field based on several terms matching on any of them.
     *
     * @param name   The field name
     * @param values The terms
     */
    public static TermsFilterBuilder termsFilter(String name, double... values) {
        return new TermsFilterBuilder(name, values);
    }

    /**
     * A filter for a field based on several terms matching on any of them.
     *
     * @param name   The field name
     * @param values The terms
     */
    public static TermsFilterBuilder termsFilter(String name, Object... values) {
        return new TermsFilterBuilder(name, values);
    }

    /**
     * A filter for a field based on several terms matching on any of them.
     *
     * @param name   The field name
     * @param values The terms
     */
    public static TermsFilterBuilder termsFilter(String name, Iterable<?> values) {
        return new TermsFilterBuilder(name, values);
    }

    /**
     * A terms lookup filter for the provided field name. A lookup terms filter can
     * extract the terms to filter by from another doc in an index.
     */
    public static TermsLookupFilterBuilder termsLookupFilter(String name) {
        return new TermsLookupFilterBuilder(name);
    }

    /**
     * A filter for a field based on several terms matching on any of them.
     * ("in" is an alias for the terms filter.)
     *
     * @param name   The field name
     * @param values The terms
     */
    public static TermsFilterBuilder inFilter(String name, String... values) {
        return new TermsFilterBuilder(name, values);
    }

    /**
     * A filter for a field based on several terms matching on any of them.
     *
     * @param name   The field name
     * @param values The terms
     */
    public static TermsFilterBuilder inFilter(String name, int... values) {
        return new TermsFilterBuilder(name, values);
    }

    /**
     * A filter for a field based on several terms matching on any of them.
     *
     * @param name   The field name
     * @param values The terms
     */
    public static TermsFilterBuilder inFilter(String name, long... values) {
        return new TermsFilterBuilder(name, values);
    }

    /**
     * A filter for a field based on several terms matching on any of them.
     *
     * @param name   The field name
     * @param values The terms
     */
    public static TermsFilterBuilder inFilter(String name, float... values) {
        return new TermsFilterBuilder(name, values);
    }

    /**
     * A filter for a field based on several terms matching on any of them.
     *
     * @param name   The field name
     * @param values The terms
     */
    public static TermsFilterBuilder inFilter(String name, double... values) {
        return new TermsFilterBuilder(name, values);
    }

    /**
     * A filter for a field based on several terms matching on any of them.
     *
     * @param name   The field name
     * @param values The terms
     */
    public static TermsFilterBuilder inFilter(String name, Object... values) {
        return new TermsFilterBuilder(name, values);
    }

    /**
     * A filter that restricts search results to values that have a matching prefix in a given
     * field.
     *
     * @param name   The field name
     * @param prefix The prefix
     */
    public static PrefixFilterBuilder prefixFilter(String name, String prefix) {
        return new PrefixFilterBuilder(name, prefix);
    }

    /**
     * A filter that restricts search results to field values that match a given regular expression.
     *
     * @param name   The field name
     * @param regexp The regular expression
     */
    public static RegexpFilterBuilder regexpFilter(String name, String regexp) {
        return new RegexpFilterBuilder(name, regexp);
    }

    /**
     * A filter that restricts search results to values that are within the given range.
     *
     * @param name The field name
     */
    public static RangeFilterBuilder rangeFilter(String name) {
        return new RangeFilterBuilder(name);
    }

    /**
     * A filter that restricts search results to values that are within the given numeric range. Uses the
     * field data cache (loading all the values for the specified field into memory)
     *
     * @param name The field name
     * @deprecated The numeric_range filter will be removed at some point in time in favor for the range filter with
     * the execution mode <code>fielddata</code>.
     */
    @Deprecated
    public static NumericRangeFilterBuilder numericRangeFilter(String name) {
        return new NumericRangeFilterBuilder(name);
    }

    /**
     * A filter that simply wraps a query.
     *
     * @param queryBuilder The query to wrap as a filter
     */
    public static QueryFilterBuilder queryFilter(QueryBuilder queryBuilder) {
        return new QueryFilterBuilder(queryBuilder);
    }

    /**
     * A builder for filter based on a script.
     *
     * @param script The script to filter by.
     */
    public static ScriptFilterBuilder scriptFilter(String script) {
        return new ScriptFilterBuilder(script);
    }

    /**
     * A filter to filter based on a specific distance from a specific geo location / point.
     *
     * @param name The location field name.
     */
    public static GeoDistanceFilterBuilder geoDistanceFilter(String name) {
        return new GeoDistanceFilterBuilder(name);
    }

    /**
     * A filter to filter based on a specific range from a specific geo location / point.
     *
     * @param name The location field name.
     */
    public static GeoDistanceRangeFilterBuilder geoDistanceRangeFilter(String name) {
        return new GeoDistanceRangeFilterBuilder(name);
    }

    /**
     * A filter to filter based on a bounding box defined by top left and bottom right locations / points
     *
     * @param name The location field name.
     */
    public static GeoBoundingBoxFilterBuilder geoBoundingBoxFilter(String name) {
        return new GeoBoundingBoxFilterBuilder(name);
    }

    /**
     * A filter based on a bounding box defined by geohash. The field this filter is applied to
     * must have <code>{&quot;type&quot;:&quot;geo_point&quot;, &quot;geohash&quot;:true}</code>
     * to work.
     *
     * @param name The geo point field name.
     */
    public static GeohashCellFilter.Builder geoHashCellFilter(String name) {
        return new GeohashCellFilter.Builder(name);
    }

    /**
     * A filter based on a bounding box defined by geohash. The field this filter is applied to
     * must have <code>{&quot;type&quot;:&quot;geo_point&quot;, &quot;geohash&quot;:true}</code>
     * to work.
     *
     * @param name    The geo point field name.
     * @param geohash The Geohash to filter
     */
    public static GeohashCellFilter.Builder geoHashCellFilter(String name, String geohash) {
        return new GeohashCellFilter.Builder(name, geohash);
    }

    /**
     * A filter based on a bounding box defined by geohash. The field this filter is applied to
     * must have <code>{&quot;type&quot;:&quot;geo_point&quot;, &quot;geohash&quot;:true}</code>
     * to work.
     *
     * @param name  The geo point field name.
     * @param point a geo point within the geohash bucket
     */
    public static GeohashCellFilter.Builder geoHashCellFilter(String name, GeoPoint point) {
        return new GeohashCellFilter.Builder(name, point);
    }

    /**
     * A filter based on a bounding box defined by geohash. The field this filter is applied to
     * must have <code>{&quot;type&quot;:&quot;geo_point&quot;, &quot;geohash&quot;:true}</code>
     * to work.
     *
     * @param name      The geo point field name
     * @param geohash   The Geohash to filter
     * @param neighbors should the neighbor cell also be filtered
     */
    public static GeohashCellFilter.Builder geoHashCellFilter(String name, String geohash, boolean neighbors) {
        return new GeohashCellFilter.Builder(name, geohash, neighbors);
    }

    /**
     * A filter to filter based on a polygon defined by a set of locations / points.
     *
     * @param name The location field name.
     */
    public static GeoPolygonFilterBuilder geoPolygonFilter(String name) {
        return new GeoPolygonFilterBuilder(name);
    }

    /**
     * A filter based on the relationship of a shape and indexed shapes
     *
     * @param name     The shape field name
     * @param shape    Shape to use in the filter
     * @param relation relation of the shapes
     */
    public static GeoShapeFilterBuilder geoShapeFilter(String name, ShapeBuilder shape, ShapeRelation relation) {
        return new GeoShapeFilterBuilder(name, shape, relation);
    }

    /**
     * A filter based on the relationship between a shape already indexed
     * (referenced by id and type) and the shapes in the given field.
     */
    public static GeoShapeFilterBuilder geoShapeFilter(String name, String indexedShapeId, String indexedShapeType, ShapeRelation relation) {
        return new GeoShapeFilterBuilder(name, indexedShapeId, indexedShapeType, relation);
    }

    /**
     * A filter to filter indexed shapes intersecting with shapes
     *
     * @param name  The shape field name
     * @param shape Shape to use in the filter
     */
    public static GeoShapeFilterBuilder geoIntersectionFilter(String name, ShapeBuilder shape) {
        return geoShapeFilter(name, shape, ShapeRelation.INTERSECTS);
    }

    /**
     * A filter to filter indexed shapes intersecting with the indexed shape
     * referenced by id and type.
     */
    public static GeoShapeFilterBuilder geoIntersectionFilter(String name, String indexedShapeId, String indexedShapeType) {
        return geoShapeFilter(name, indexedShapeId, indexedShapeType, ShapeRelation.INTERSECTS);
    }

    /**
     * A filter to filter indexed shapes that are contained by a shape
     *
     * @param name  The shape field name
     * @param shape Shape to use in the filter
     */
    public static GeoShapeFilterBuilder geoWithinFilter(String name, ShapeBuilder shape) {
        return geoShapeFilter(name, shape, ShapeRelation.WITHIN);
    }

    /**
     * A filter to filter indexed shapes that are contained by the indexed shape
     * referenced by id and type.
     */
    public static GeoShapeFilterBuilder geoWithinFilter(String name, String indexedShapeId, String indexedShapeType) {
        return geoShapeFilter(name, indexedShapeId, indexedShapeType, ShapeRelation.WITHIN);
    }

    /**
     * A filter to filter indexed shapes that are not intersecting with the query shape
     *
     * @param name  The shape field name
     * @param shape Shape to use in the filter
     */
    public static GeoShapeFilterBuilder geoDisjointFilter(String name, ShapeBuilder shape) {
        return geoShapeFilter(name, shape, ShapeRelation.DISJOINT);
    }

    /**
     * A filter to filter indexed shapes that are disjoint from the indexed shape
     * referenced by id and type.
     */
    public static GeoShapeFilterBuilder geoDisjointFilter(String name, String indexedShapeId, String indexedShapeType) {
        return geoShapeFilter(name, indexedShapeId, indexedShapeType, ShapeRelation.DISJOINT);
    }

    /**
     * A filter to filter only documents where a field exists in them.
     *
     * @param name The name of the field
     */
    public static ExistsFilterBuilder existsFilter(String name) {
        return new ExistsFilterBuilder(name);
    }

    /**
     * A filter to filter only documents where a field does not exists in them.
     *
     * @param name The name of the field
     */
    public static MissingFilterBuilder missingFilter(String name) {
        return new MissingFilterBuilder(name);
    }

    /**
     * Constructs a child filter, with the child type and the query to run against child documents, with
     * the result of the filter being the *parent* documents.
     *
     * @param type  The child type
     * @param query The query to run against the child type
     */
    public static HasChildFilterBuilder hasChildFilter(String type, QueryBuilder query) {
        return new HasChildFilterBuilder(type, query);
    }

    /**
     * Constructs a child filter, with the child type and the filter to run against child documents, with
     * the result of the filter being the *parent* documents.
     *
     * @param type   The child type
     * @param filter The query to run against the child type
     */
    public static HasChildFilterBuilder hasChildFilter(String type, FilterBuilder filter) {
        return new HasChildFilterBuilder(type, filter);
    }

    /**
     * Constructs a parent filter, with the parent type and the query to run against parent documents, with
     * the result of the filter being the *child* documents.
     *
     * @param parentType The parent type
     * @param query      The query to run against the parent type
     */
    public static HasParentFilterBuilder hasParentFilter(String parentType, QueryBuilder query) {
        return new HasParentFilterBuilder(parentType, query);
    }

    /**
     * Constructs a parent filter, with the parent type and the filter to run against parent documents, with
     * the result of the filter being the *child* documents.
     *
     * @param parentType The parent type
     * @param filter     The filter to run against the parent type
     */
    public static HasParentFilterBuilder hasParentFilter(String parentType, FilterBuilder filter) {
        return new HasParentFilterBuilder(parentType, filter);
    }

    /**
     * A compound filter combining must/should/mustNot clauses.
     */
    public static BoolFilterBuilder boolFilter() {
        return new BoolFilterBuilder();
    }

    /**
     * A compound filter matching documents that match all of the given filters.
     */
    public static AndFilterBuilder andFilter(FilterBuilder... filters) {
        return new AndFilterBuilder(filters);
    }

    /**
     * A compound filter matching documents that match any of the given filters.
     */
    public static OrFilterBuilder orFilter(FilterBuilder... filters) {
        return new OrFilterBuilder(filters);
    }

    /**
     * A filter matching documents that do NOT match the given filter.
     */
    public static NotFilterBuilder notFilter(FilterBuilder filter) {
        return new NotFilterBuilder(filter);
    }

    /**
     * A filter applying the wrapped filter only on the given indices.
     */
    public static IndicesFilterBuilder indicesFilter(FilterBuilder filter, String... indices) {
        return new IndicesFilterBuilder(filter, indices);
    }

    /**
     * A Filter builder which allows building a query thanks to a JSON string or binary data.
     */
    public static WrapperFilterBuilder wrapperFilter(String filter) {
        return new WrapperFilterBuilder(filter);
    }

    /**
     * A Filter builder which allows building a query thanks to a JSON string or binary data.
     */
    public static WrapperFilterBuilder wrapperFilter(byte[] data, int offset, int length) {
        return new WrapperFilterBuilder(data, offset, length);
    }

    /**
     * A Filter builder which allows building a query thanks to a JSON string or binary data.
     */
    public static WrapperFilterBuilder wrapperFilter(BytesReference source) {
        return new WrapperFilterBuilder(source);
    }

    /**
     * Constructs a bytes filter to generate a filter from a {@link BytesReference} source
     * @deprecated replace with {@link #wrapperFilter(byte[], int, int)}
     *
     * @param source The filter source
     */
    @Deprecated
    public static BytesFilterBuilder bytesFilter(BytesReference source) {
        return new BytesFilterBuilder(source);
    }

    // Static-factory holder: not meant to be instantiated.
    private FilterBuilders() {
    }
}
package com.solidnw.gametimer.database;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

import android.content.Context;
import android.database.SQLException;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteException;
import android.database.sqlite.SQLiteOpenHelper;

/**
 * The DatabaseLayer class provides the lowest level interaction with the database.
 * The database is placed in the asset directory of the project.
 * It places the database in the default directory for databases on the phone (/data/data/...)
 *
 * @author SickSta
 * @since 14:57:28 - 27.10.2012
 * @project AndroBlocks
 */
public class DatabaseLayer extends SQLiteOpenHelper {
    // TODO: Rewrite this class so it does not load a predefined database from assets
    // but rather creates one itself (without content?)

    // ===========================================================
    // Constants
    // ===========================================================

    private static final String DB_NAME = "database";
    private static final String DB_EXTENSION = ".db";
    private static final int DB_VERSION = 1;

    // ===========================================================
    // Fields
    // ===========================================================

    private Context context;
    private SQLiteDatabase database;
    // Absolute path of the on-device database file (no extension, matching the
    // name this class copies the asset to).
    private String dbPath;

    // ===========================================================
    // Constructors
    // ===========================================================

    /**
     * Creates (if necessary) and opens the database. Errors during creation or
     * opening are reported to stdout rather than propagated, preserving the
     * original best-effort behavior.
     */
    public DatabaseLayer(Context context) {
        super(context, DatabaseLayer.DB_NAME, null, DatabaseLayer.DB_VERSION);

        this.context = context;
        // NOTE(review): hardcoded /data/data path; Context.getDatabasePath()
        // would be more portable — left as-is to avoid changing the location.
        this.dbPath = "/data/data/" + getContext().getPackageName() + "/databases/" + DatabaseLayer.DB_NAME;

        try {
            this.createDatabase();
            this.openDatabase();
        } catch (IOException ioe) {
            System.out.println("IOException in DatabaseLayer: " + ioe.getMessage());
        } catch (SQLException sqle) {
            System.out.println("SQLException in DatabaseLayer: " + sqle.getMessage());
        }
    }

    // ===========================================================
    // Methods for/from SuperClass/Interfaces
    // ===========================================================

    public void onCreate(SQLiteDatabase db) {
        // Nothing to do here
    }

    public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
        // Nothing to do here
    }

    public synchronized void close() {
        if (database != null) {
            database.close();
        }
        super.close();
    }

    // ===========================================================
    // Methods
    // ===========================================================

    /**
     * Creates the database on the system. If it already exists it does nothing.
     *
     * @throws IOException if copying the database from the assets fails
     */
    public void createDatabase() throws IOException {
        if (!this.checkDatabase()) {
            // This creates an empty database in the default location so the
            // databases directory exists before we overwrite it with the copy.
            getReadableDatabase();
            // Propagate the original IOException directly instead of wrapping
            // it in a second IOException that hides the useful message.
            copyDatabase();
        }
    }

    /** Opens the copied database read/write and caches the handle. */
    public void openDatabase() throws SQLException {
        this.database = SQLiteDatabase.openDatabase(getDbPath(), null, SQLiteDatabase.OPEN_READWRITE);
    }

    /**
     * Checks whether the database exists or not
     * @return true if the database already exists
     */
    private boolean checkDatabase() {
        SQLiteDatabase db = null;
        boolean check = false;

        try {
            String path = getDbPath();
            // A lot of errors get generated here, but you can ignore them
            db = SQLiteDatabase.openDatabase(path, null, SQLiteDatabase.OPEN_READONLY);
        } catch (SQLiteException e) {
            // Nothing do to here: an unopenable database simply means "absent".
        }

        if (db != null) {
            check = true;
            db.close();
        }
        return check;
    }

    /**
     * Copies the database from the assets-directory of the project onto the device.
     * It uses an byte-stream so no information is lost. Streams are closed in
     * finally blocks so they are not leaked when the copy fails midway.
     *
     * @throws IOException if reading the asset or writing the target file fails
     */
    private void copyDatabase() throws IOException {
        // Standard directory for databases. Maybe rethink this for devices
        // with small memory and put it on the sd-card.
        File f = new File("/data/data/" + getContext().getPackageName() + "/databases/");
        if (!f.exists()) {
            f.mkdir();
        }

        InputStream is = getContext().getAssets().open(DB_NAME + DB_EXTENSION);
        try {
            OutputStream os = new FileOutputStream(getDbPath());
            try {
                byte[] buffer = new byte[1024];
                int length;
                while ((length = is.read(buffer)) > 0) {
                    os.write(buffer, 0, length);
                }
                os.flush();
            } finally {
                os.close();
            }
        } finally {
            is.close();
        }
    }

    // ===========================================================
    // Getter & Setter
    // ===========================================================

    /**
     * @return the context
     */
    private Context getContext() {
        return this.context;
    }

    /**
     * @return the dbPath
     */
    private String getDbPath() {
        return this.dbPath;
    }

    /**
     * @return the database
     */
    public SQLiteDatabase getDatabase() {
        return database;
    }

    /**
     * @param context
     *            the context to set
     */
    // NOTE(review): currently unused; kept for API symmetry with getContext().
    private void setContext(Context context) {
        this.context = context;
    }

    // ===========================================================
    // Inner and Anonymous Classes
    // ===========================================================
}
/******************************************************************************* * * Pentaho Data Integration * * Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.ui.job.entries.pgpdecryptfiles; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.CCombo; import org.eclipse.swt.custom.CTabFolder; import org.eclipse.swt.custom.CTabItem; import org.eclipse.swt.events.ModifyEvent; import org.eclipse.swt.events.ModifyListener; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.events.ShellAdapter; import org.eclipse.swt.events.ShellEvent; import org.eclipse.swt.layout.FormAttachment; import org.eclipse.swt.layout.FormData; import org.eclipse.swt.layout.FormLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.MessageBox; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Group; import org.eclipse.swt.widgets.Event; import org.eclipse.swt.widgets.TableItem; import org.eclipse.swt.widgets.FileDialog; import org.eclipse.swt.widgets.DirectoryDialog; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.Shell; import 
org.eclipse.swt.widgets.Text; import org.pentaho.di.core.Const; import org.pentaho.di.core.Props; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.job.JobMeta; import org.pentaho.di.job.entries.pgpdecryptfiles.JobEntryPGPDecryptFiles; import org.pentaho.di.job.entry.JobEntryDialogInterface; import org.pentaho.di.job.entry.JobEntryInterface; import org.pentaho.di.repository.Repository; import org.pentaho.di.ui.core.gui.WindowProperty; import org.pentaho.di.ui.core.widget.ColumnInfo; import org.pentaho.di.ui.core.widget.TableView; import org.pentaho.di.ui.core.widget.TextVar; import org.pentaho.di.ui.job.dialog.JobDialog; import org.pentaho.di.ui.job.entry.JobEntryDialog; import org.pentaho.di.ui.trans.step.BaseStepDialog; /** * This dialog allows you to edit the Move Files job entry settings. * * @author Samatar Hassan * @since 20-02-2008 */ public class JobEntryPGPDecryptFilesDialog extends JobEntryDialog implements JobEntryDialogInterface { private static Class<?> PKG = JobEntryPGPDecryptFiles.class; // for i18n purposes, needed by Translator2!! 
$NON-NLS-1$ private static final String[] FILETYPES = new String[] { BaseMessages.getString(PKG, "JobPGPDecryptFiles.Filetype.All") }; private Label wlName; private Text wName; private FormData fdlName, fdName; private Label wlSourceFileFolder; private Button wbSourceFileFolder,wbDestinationFileFolder, wbSourceDirectory,wbDestinationDirectory; private TextVar wSourceFileFolder; private FormData fdlSourceFileFolder, fdbSourceFileFolder, fdSourceFileFolder,fdbDestinationFileFolder,fdbSourceDirectory,fdbDestinationDirectory; private Label wlIncludeSubfolders; private Button wIncludeSubfolders; private FormData fdlIncludeSubfolders, fdIncludeSubfolders; private Button wOK, wCancel; private Listener lsOK, lsCancel; private JobEntryPGPDecryptFiles jobEntry; private Shell shell; private SelectionAdapter lsDef; private boolean changed; private Label wlPrevious; private Button wPrevious; private FormData fdlPrevious, fdPrevious; private Label wlFields; private TableView wFields; private FormData fdlFields, fdFields; private Group wSettings; private FormData fdSettings; private Label wlDestinationFileFolder; private TextVar wDestinationFileFolder; private FormData fdlDestinationFileFolder, fdDestinationFileFolder; private Label wlGpgExe; private TextVar wGpgExe; private FormData fdlGpgExe, fdGpgExe; private Button wbbGpgExe; private FormData fdbbGpgExe; private Label wlWildcard; private TextVar wWildcard; private FormData fdlWildcard, fdWildcard; private Button wbdSourceFileFolder; // Delete private Button wbeSourceFileFolder; // Edit private Button wbaSourceFileFolder; // Add or change private CTabFolder wTabFolder; private Composite wGeneralComp,wAdvancedComp,wDestinationFileComp; private CTabItem wGeneralTab,wAdvancedTab,wDestinationFileTab; private FormData fdGeneralComp,fdAdvancedComp,fdDestinationFileComp; private FormData fdTabFolder; private Label wlCreateMoveToFolder; private Button wCreateMoveToFolder; private FormData fdlCreateMoveToFolder, fdCreateMoveToFolder; 
// Add File to result private Group wFileResult; private FormData fdFileResult; private Group wSuccessOn; private FormData fdSuccessOn; private Label wlAddFileToResult; private Button wAddFileToResult; private FormData fdlAddFileToResult, fdAddFileToResult; private Label wlCreateDestinationFolder; private Button wCreateDestinationFolder; private FormData fdlCreateDestinationFolder, fdCreateDestinationFolder; private Label wlDestinationIsAFile; private Button wDestinationIsAFile; private FormData fdlDestinationIsAFile, fdDestinationIsAFile; private FormData fdbeSourceFileFolder, fdbaSourceFileFolder, fdbdSourceFileFolder; private Label wlSuccessCondition; private CCombo wSuccessCondition; private FormData fdlSuccessCondition, fdSuccessCondition; private Label wlNrErrorsLessThan; private TextVar wNrErrorsLessThan; private FormData fdlNrErrorsLessThan, fdNrErrorsLessThan; private Group wDestinationFile; private FormData fdDestinationFile; private Group wMoveToGroup; private FormData fdMoveToGroup; private Label wlAddDate; private Button wAddDate; private FormData fdlAddDate, fdAddDate; private Label wlAddTime; private Button wAddTime; private FormData fdlAddTime, fdAddTime; private Label wlSpecifyFormat; private Button wSpecifyFormat; private FormData fdlSpecifyFormat, fdSpecifyFormat; private Label wlDateTimeFormat; private CCombo wDateTimeFormat; private FormData fdlDateTimeFormat, fdDateTimeFormat; private Label wlMovedDateTimeFormat; private CCombo wMovedDateTimeFormat; private FormData fdlMovedDateTimeFormat, fdMovedDateTimeFormat; private Label wlAddDateBeforeExtension; private Button wAddDateBeforeExtension; private FormData fdlAddDateBeforeExtension, fdAddDateBeforeExtension; private Label wlAddMovedDateBeforeExtension; private Button wAddMovedDateBeforeExtension; private FormData fdlAddMovedDateBeforeExtension, fdAddMovedDateBeforeExtension; private Label wlDoNotKeepFolderStructure; private Button wDoNotKeepFolderStructure; private FormData 
fdlDoNotKeepFolderStructure, fdDoNotKeepFolderStructure; private Label wlIfFileExists; private CCombo wIfFileExists; private FormData fdlIfFileExists, fdIfFileExists; private Label wlIfMovedFileExists; private CCombo wIfMovedFileExists; private FormData fdlIfMovedFileExists, fdIfMovedFileExists; private Button wbDestinationFolder; private Label wlDestinationFolder; private TextVar wDestinationFolder; private FormData fdlDestinationFolder, fdDestinationFolder,fdbDestinationFolder; private Label wlAddMovedDate; private Button wAddMovedDate; private FormData fdlAddMovedDate, fdAddMovedDate; private Label wlAddMovedTime; private Button wAddMovedTime; private FormData fdlAddMovedTime, fdAddMovedTime; private Label wlSpecifyMoveFormat; private Button wSpecifyMoveFormat; private FormData fdlSpecifyMoveFormat, fdSpecifyMoveFormat; public JobEntryPGPDecryptFilesDialog(Shell parent, JobEntryInterface jobEntryInt, Repository rep, JobMeta jobMeta) { super(parent, jobEntryInt, rep, jobMeta); jobEntry = (JobEntryPGPDecryptFiles) jobEntryInt; if (this.jobEntry.getName() == null) this.jobEntry.setName(BaseMessages.getString(PKG, "JobPGPDecryptFiles.Name.Default")); } public JobEntryInterface open() { Shell parent = getParent(); Display display = parent.getDisplay(); shell = new Shell(parent, props.getJobsDialogStyle()); props.setLook(shell); JobDialog.setShellImage(shell, jobEntry); ModifyListener lsMod = new ModifyListener() { public void modifyText(ModifyEvent e) { jobEntry.setChanged(); } }; changed = jobEntry.hasChanged(); FormLayout formLayout = new FormLayout (); formLayout.marginWidth = Const.FORM_MARGIN; formLayout.marginHeight = Const.FORM_MARGIN; shell.setLayout(formLayout); shell.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.Title")); int middle = props.getMiddlePct(); int margin = Const.MARGIN; // Filename line wlName=new Label(shell, SWT.RIGHT); wlName.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.Name.Label")); props.setLook(wlName); fdlName=new 
FormData(); fdlName.left = new FormAttachment(0, 0); fdlName.right= new FormAttachment(middle, -margin); fdlName.top = new FormAttachment(0, margin); wlName.setLayoutData(fdlName); wName=new Text(shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER); props.setLook(wName); wName.addModifyListener(lsMod); fdName=new FormData(); fdName.left = new FormAttachment(middle, 0); fdName.top = new FormAttachment(0, margin); fdName.right= new FormAttachment(100, 0); wName.setLayoutData(fdName); wTabFolder = new CTabFolder(shell, SWT.BORDER); props.setLook(wTabFolder, Props.WIDGET_STYLE_TAB); ////////////////////////// // START OF GENERAL TAB /// ////////////////////////// wGeneralTab=new CTabItem(wTabFolder, SWT.NONE); wGeneralTab.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.Tab.General.Label")); wGeneralComp = new Composite(wTabFolder, SWT.NONE); props.setLook(wGeneralComp); FormLayout generalLayout = new FormLayout(); generalLayout.marginWidth = 3; generalLayout.marginHeight = 3; wGeneralComp.setLayout(generalLayout); // SETTINGS grouping? // //////////////////////// // START OF SETTINGS GROUP // wSettings = new Group(wGeneralComp, SWT.SHADOW_NONE); props.setLook(wSettings); wSettings.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.Settings.Label")); FormLayout groupLayout = new FormLayout(); groupLayout.marginWidth = 10; groupLayout.marginHeight = 10; wSettings.setLayout(groupLayout); // GPG Program wlGpgExe = new Label(wSettings, SWT.RIGHT); wlGpgExe.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.GpgExe.Label")); props.setLook(wlGpgExe); fdlGpgExe = new FormData(); fdlGpgExe.left = new FormAttachment(0, 0); fdlGpgExe.top = new FormAttachment(wName, margin); fdlGpgExe.right = new FormAttachment(middle, -margin); wlGpgExe.setLayoutData(fdlGpgExe); // Browse Source files button ... 
wbbGpgExe=new Button(wSettings, SWT.PUSH| SWT.CENTER); props.setLook(wbbGpgExe); wbbGpgExe.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.BrowseFiles.Label")); fdbbGpgExe=new FormData(); fdbbGpgExe.right= new FormAttachment(100, -margin); fdbbGpgExe.top = new FormAttachment(wName, margin); wbbGpgExe.setLayoutData(fdbbGpgExe); wbbGpgExe.addSelectionListener ( new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { FileDialog dialog = new FileDialog(shell, SWT.OPEN); dialog.setFilterExtensions(new String[] {"*"}); if (wSourceFileFolder.getText()!=null) { dialog.setFileName(jobMeta.environmentSubstitute(wGpgExe.getText()) ); } dialog.setFilterNames(FILETYPES); if (dialog.open()!=null) { wGpgExe.setText(dialog.getFilterPath()+Const.FILE_SEPARATOR+dialog.getFileName()); } } } ); wGpgExe = new TextVar(jobMeta, wSettings, SWT.SINGLE | SWT.LEFT | SWT.BORDER); wGpgExe.setToolTipText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.GpgExe.Tooltip")); props.setLook(wGpgExe); wGpgExe.addModifyListener(lsMod); fdGpgExe = new FormData(); fdGpgExe.left = new FormAttachment(middle, 0); fdGpgExe.top = new FormAttachment(wName, margin); fdGpgExe.right= new FormAttachment(wbbGpgExe, -margin); wGpgExe.setLayoutData(fdGpgExe); wlIncludeSubfolders = new Label(wSettings, SWT.RIGHT); wlIncludeSubfolders.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.IncludeSubfolders.Label")); props.setLook(wlIncludeSubfolders); fdlIncludeSubfolders = new FormData(); fdlIncludeSubfolders.left = new FormAttachment(0, 0); fdlIncludeSubfolders.top = new FormAttachment(wGpgExe, margin); fdlIncludeSubfolders.right = new FormAttachment(middle, -margin); wlIncludeSubfolders.setLayoutData(fdlIncludeSubfolders); wIncludeSubfolders = new Button(wSettings, SWT.CHECK); props.setLook(wIncludeSubfolders); wIncludeSubfolders.setToolTipText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.IncludeSubfolders.Tooltip")); fdIncludeSubfolders = new FormData(); fdIncludeSubfolders.left = new 
FormAttachment(middle, 0); fdIncludeSubfolders.top = new FormAttachment(wGpgExe, margin); fdIncludeSubfolders.right = new FormAttachment(100, 0); wIncludeSubfolders.setLayoutData(fdIncludeSubfolders); wIncludeSubfolders.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { jobEntry.setChanged(); CheckIncludeSubFolders(); } }); // previous wlPrevious = new Label(wSettings, SWT.RIGHT); wlPrevious.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.Previous.Label")); props.setLook(wlPrevious); fdlPrevious = new FormData(); fdlPrevious.left = new FormAttachment(0, 0); fdlPrevious.top = new FormAttachment(wIncludeSubfolders, margin ); fdlPrevious.right = new FormAttachment(middle, -margin); wlPrevious.setLayoutData(fdlPrevious); wPrevious = new Button(wSettings, SWT.CHECK); props.setLook(wPrevious); wPrevious.setSelection(jobEntry.arg_from_previous); wPrevious.setToolTipText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.Previous.Tooltip")); fdPrevious = new FormData(); fdPrevious.left = new FormAttachment(middle, 0); fdPrevious.top = new FormAttachment(wIncludeSubfolders, margin ); fdPrevious.right = new FormAttachment(100, 0); wPrevious.setLayoutData(fdPrevious); wPrevious.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { RefreshArgFromPrevious(); } }); fdSettings = new FormData(); fdSettings.left = new FormAttachment(0, margin); fdSettings.top = new FormAttachment(wName, margin); fdSettings.right = new FormAttachment(100, -margin); wSettings.setLayoutData(fdSettings); // /////////////////////////////////////////////////////////// // / END OF SETTINGS GROUP // /////////////////////////////////////////////////////////// // SourceFileFolder line wlSourceFileFolder=new Label(wGeneralComp, SWT.RIGHT); wlSourceFileFolder.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.SourceFileFolder.Label")); props.setLook(wlSourceFileFolder); fdlSourceFileFolder=new FormData(); 
fdlSourceFileFolder.left = new FormAttachment(0, 0); fdlSourceFileFolder.top = new FormAttachment(wSettings, 2*margin); fdlSourceFileFolder.right= new FormAttachment(middle, -margin); wlSourceFileFolder.setLayoutData(fdlSourceFileFolder); // Browse Source folders button ... wbSourceDirectory=new Button(wGeneralComp, SWT.PUSH| SWT.CENTER); props.setLook(wbSourceDirectory); wbSourceDirectory.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.BrowseFolders.Label")); fdbSourceDirectory=new FormData(); fdbSourceDirectory.right= new FormAttachment(100, 0); fdbSourceDirectory.top = new FormAttachment(wSettings, margin); wbSourceDirectory.setLayoutData(fdbSourceDirectory); wbSourceDirectory.addSelectionListener ( new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { DirectoryDialog ddialog = new DirectoryDialog(shell, SWT.OPEN); if (wSourceFileFolder.getText()!=null) { ddialog.setFilterPath(jobMeta.environmentSubstitute(wSourceFileFolder.getText()) ); } // Calling open() will open and run the dialog. // It will return the selected directory, or // null if user cancels String dir = ddialog.open(); if (dir != null) { // Set the text box to the new selection wSourceFileFolder.setText(dir); } } } ); // Browse Source files button ... wbSourceFileFolder=new Button(wGeneralComp, SWT.PUSH| SWT.CENTER); props.setLook(wbSourceFileFolder); wbSourceFileFolder.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.BrowseFiles.Label")); fdbSourceFileFolder=new FormData(); fdbSourceFileFolder.right= new FormAttachment(wbSourceDirectory, -margin); fdbSourceFileFolder.top = new FormAttachment(wSettings, margin); wbSourceFileFolder.setLayoutData(fdbSourceFileFolder); // Browse Destination file add button ... 
wbaSourceFileFolder=new Button(wGeneralComp, SWT.PUSH| SWT.CENTER); props.setLook(wbaSourceFileFolder); wbaSourceFileFolder.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.FilenameAdd.Button")); fdbaSourceFileFolder=new FormData(); fdbaSourceFileFolder.right= new FormAttachment(wbSourceFileFolder, -margin); fdbaSourceFileFolder.top = new FormAttachment(wSettings, margin); wbaSourceFileFolder.setLayoutData(fdbaSourceFileFolder); wSourceFileFolder=new TextVar(jobMeta, wGeneralComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER); wSourceFileFolder.setToolTipText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.SourceFileFolder.Tooltip")); props.setLook(wSourceFileFolder); wSourceFileFolder.addModifyListener(lsMod); fdSourceFileFolder=new FormData(); fdSourceFileFolder.left = new FormAttachment(middle, 0); fdSourceFileFolder.top = new FormAttachment(wSettings, 2*margin); fdSourceFileFolder.right= new FormAttachment(wbSourceFileFolder, -55); wSourceFileFolder.setLayoutData(fdSourceFileFolder); // Whenever something changes, set the tooltip to the expanded version: wSourceFileFolder.addModifyListener(new ModifyListener() { public void modifyText(ModifyEvent e) { wSourceFileFolder.setToolTipText(jobMeta.environmentSubstitute(wSourceFileFolder.getText() ) ); } } ); wbSourceFileFolder.addSelectionListener ( new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { FileDialog dialog = new FileDialog(shell, SWT.OPEN); dialog.setFilterExtensions(new String[] {"*"}); if (wSourceFileFolder.getText()!=null) { dialog.setFileName(jobMeta.environmentSubstitute(wSourceFileFolder.getText()) ); } dialog.setFilterNames(FILETYPES); if (dialog.open()!=null) { wSourceFileFolder.setText(dialog.getFilterPath()+Const.FILE_SEPARATOR+dialog.getFileName()); } } } ); // Destination wlDestinationFileFolder = new Label(wGeneralComp, SWT.RIGHT); wlDestinationFileFolder.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.DestinationFileFolder.Label")); 
props.setLook(wlDestinationFileFolder); fdlDestinationFileFolder = new FormData(); fdlDestinationFileFolder.left = new FormAttachment(0, 0); fdlDestinationFileFolder.top = new FormAttachment(wSourceFileFolder, margin); fdlDestinationFileFolder.right = new FormAttachment(middle, -margin); wlDestinationFileFolder.setLayoutData(fdlDestinationFileFolder); // Browse Destination folders button ... wbDestinationDirectory=new Button(wGeneralComp, SWT.PUSH| SWT.CENTER); props.setLook(wbDestinationDirectory); wbDestinationDirectory.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.BrowseFolders.Label")); fdbDestinationDirectory=new FormData(); fdbDestinationDirectory.right= new FormAttachment(100, 0); fdbDestinationDirectory.top = new FormAttachment(wSourceFileFolder, margin); wbDestinationDirectory.setLayoutData(fdbDestinationDirectory); wbDestinationDirectory.addSelectionListener ( new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { DirectoryDialog ddialog = new DirectoryDialog(shell, SWT.OPEN); if (wDestinationFileFolder.getText()!=null) { ddialog.setFilterPath(jobMeta.environmentSubstitute(wDestinationFileFolder.getText()) ); } // Calling open() will open and run the dialog. // It will return the selected directory, or // null if user cancels String dir = ddialog.open(); if (dir != null) { // Set the text box to the new selection wDestinationFileFolder.setText(dir); } } } ); // Browse Destination file browse button ... 
wbDestinationFileFolder=new Button(wGeneralComp, SWT.PUSH| SWT.CENTER); props.setLook(wbDestinationFileFolder); wbDestinationFileFolder.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.BrowseFiles.Label")); fdbDestinationFileFolder=new FormData(); fdbDestinationFileFolder.right= new FormAttachment(wbDestinationDirectory, -margin); fdbDestinationFileFolder.top = new FormAttachment(wSourceFileFolder, margin); wbDestinationFileFolder.setLayoutData(fdbDestinationFileFolder); wDestinationFileFolder = new TextVar(jobMeta, wGeneralComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER); wDestinationFileFolder.setToolTipText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.DestinationFileFolder.Tooltip")); props.setLook(wDestinationFileFolder); wDestinationFileFolder.addModifyListener(lsMod); fdDestinationFileFolder = new FormData(); fdDestinationFileFolder.left = new FormAttachment(middle, 0); fdDestinationFileFolder.top = new FormAttachment(wSourceFileFolder, margin); fdDestinationFileFolder.right= new FormAttachment(wbSourceFileFolder, -55); wDestinationFileFolder.setLayoutData(fdDestinationFileFolder); wbDestinationFileFolder.addSelectionListener ( new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { FileDialog dialog = new FileDialog(shell, SWT.OPEN); dialog.setFilterExtensions(new String[] {"*"}); if (wDestinationFileFolder.getText()!=null) { dialog.setFileName(jobMeta.environmentSubstitute(wDestinationFileFolder.getText()) ); } dialog.setFilterNames(FILETYPES); if (dialog.open()!=null) { wDestinationFileFolder.setText(dialog.getFilterPath()+Const.FILE_SEPARATOR+dialog.getFileName()); } } } ); // Buttons to the right of the screen... 
wbdSourceFileFolder=new Button(wGeneralComp, SWT.PUSH| SWT.CENTER); props.setLook(wbdSourceFileFolder); wbdSourceFileFolder.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.FilenameDelete.Button")); wbdSourceFileFolder.setToolTipText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.FilenameDelete.Tooltip")); fdbdSourceFileFolder=new FormData(); fdbdSourceFileFolder.right = new FormAttachment(100, 0); fdbdSourceFileFolder.top = new FormAttachment (wDestinationFileFolder, 40); wbdSourceFileFolder.setLayoutData(fdbdSourceFileFolder); wbeSourceFileFolder=new Button(wGeneralComp, SWT.PUSH| SWT.CENTER); props.setLook(wbeSourceFileFolder); wbeSourceFileFolder.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.FilenameEdit.Button")); wbeSourceFileFolder.setToolTipText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.FilenameEdit.Tooltip")); fdbeSourceFileFolder=new FormData(); fdbeSourceFileFolder.right = new FormAttachment(100, 0); fdbeSourceFileFolder.left = new FormAttachment(wbdSourceFileFolder, 0, SWT.LEFT); fdbeSourceFileFolder.top = new FormAttachment (wbdSourceFileFolder, margin); wbeSourceFileFolder.setLayoutData(fdbeSourceFileFolder); // Wildcard wlWildcard = new Label(wGeneralComp, SWT.RIGHT); wlWildcard.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.Wildcard.Label")); props.setLook(wlWildcard); fdlWildcard = new FormData(); fdlWildcard.left = new FormAttachment(0, 0); fdlWildcard.top = new FormAttachment(wDestinationFileFolder, margin); fdlWildcard.right = new FormAttachment(middle, -margin); wlWildcard.setLayoutData(fdlWildcard); wWildcard = new TextVar(jobMeta, wGeneralComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER); wWildcard.setToolTipText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.Wildcard.Tooltip")); props.setLook(wWildcard); wWildcard.addModifyListener(lsMod); fdWildcard = new FormData(); fdWildcard.left = new FormAttachment(middle, 0); fdWildcard.top = new FormAttachment(wDestinationFileFolder, margin); fdWildcard.right= new 
FormAttachment(wbSourceFileFolder, -55); wWildcard.setLayoutData(fdWildcard); wlFields = new Label(wGeneralComp, SWT.NONE); wlFields.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.Fields.Label")); props.setLook(wlFields); fdlFields = new FormData(); fdlFields.left = new FormAttachment(0, 0); fdlFields.right= new FormAttachment(middle, -margin); fdlFields.top = new FormAttachment(wWildcard,margin); wlFields.setLayoutData(fdlFields); int rows = jobEntry.source_filefolder == null ? 1 : (jobEntry.source_filefolder.length == 0 ? 0 : jobEntry.source_filefolder.length); final int FieldsRows = rows; ColumnInfo[] colinf=new ColumnInfo[] { new ColumnInfo(BaseMessages.getString(PKG, "JobPGPDecryptFiles.Fields.SourceFileFolder.Label"), ColumnInfo.COLUMN_TYPE_TEXT, false), new ColumnInfo(BaseMessages.getString(PKG, "JobPGPDecryptFiles.Fields.Wildcard.Label"), ColumnInfo.COLUMN_TYPE_TEXT, false ), new ColumnInfo(BaseMessages.getString(PKG, "JobPGPDecryptFiles.Fields.PassPhrase.Label"), ColumnInfo.COLUMN_TYPE_TEXT, false), new ColumnInfo(BaseMessages.getString(PKG, "JobPGPDecryptFiles.Fields.DestinationFileFolder.Label"), ColumnInfo.COLUMN_TYPE_TEXT, false), }; colinf[0].setUsingVariables(true); colinf[0].setToolTip(BaseMessages.getString(PKG, "JobPGPDecryptFiles.Fields.SourceFileFolder.Tooltip")); colinf[1].setUsingVariables(true); colinf[1].setToolTip(BaseMessages.getString(PKG, "JobPGPDecryptFiles.Fields.Wildcard.Tooltip")); colinf[2].setUsingVariables(true); colinf[2].setToolTip(BaseMessages.getString(PKG, "JobPGPDecryptFiles.Fields.PassPhrase.Tooltip")); colinf[2].setPasswordField(true); colinf[3].setUsingVariables(true); colinf[3].setToolTip(BaseMessages.getString(PKG, "JobPGPDecryptFiles.Fields.DestinationFileFolder.Tooltip")); wFields = new TableView(jobMeta, wGeneralComp, SWT.BORDER | SWT.FULL_SELECTION | SWT.MULTI, colinf, FieldsRows, lsMod, props); fdFields = new FormData(); fdFields.left = new FormAttachment(0, 0); fdFields.top = new FormAttachment(wlFields, 
margin); fdFields.right = new FormAttachment(wbeSourceFileFolder, -margin); fdFields.bottom = new FormAttachment(100, -margin); wFields.setLayoutData(fdFields); RefreshArgFromPrevious(); // Add the file to the list of files... SelectionAdapter selA = new SelectionAdapter() { public void widgetSelected(SelectionEvent arg0) { wFields.add(new String[] { wSourceFileFolder.getText(), wWildcard.getText(), null, wDestinationFileFolder.getText() } ); wSourceFileFolder.setText(""); wDestinationFileFolder.setText(""); wWildcard.setText(""); wFields.removeEmptyRows(); wFields.setRowNums(); wFields.optWidth(true); } }; wbaSourceFileFolder.addSelectionListener(selA); wSourceFileFolder.addSelectionListener(selA); // Delete files from the list of files... wbdSourceFileFolder.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent arg0) { int idx[] = wFields.getSelectionIndices(); wFields.remove(idx); wFields.removeEmptyRows(); wFields.setRowNums(); } }); // Edit the selected file & remove from the list... 
// Edit the selected row: copy it back into the entry fields and drop it from the table.
wbeSourceFileFolder.addSelectionListener(new SelectionAdapter() {
    public void widgetSelected(SelectionEvent arg0) {
        int idx = wFields.getSelectionIndex();
        if (idx >= 0) {
            String string[] = wFields.getItem(idx);
            // FIX: the grid columns are [0]=source, [1]=wildcard, [2]=pass phrase,
            // [3]=destination (see the ColumnInfo array above). The original code
            // put the wildcard (string[1]) into the destination field and the pass
            // phrase (string[2]) into the wildcard field — a remnant of the
            // 3-column Move Files dialog this class was copied from (see Javadoc).
            wSourceFileFolder.setText(string[0]);
            wWildcard.setText(string[1]);
            wDestinationFileFolder.setText(string[3]);
            // NOTE(review): string[2] (the pass phrase) has no edit field to restore
            // into, so it is lost when a row is edited — confirm this is intended.
            wFields.remove(idx);
        }
        wFields.removeEmptyRows();
        wFields.setRowNums();
    }
});

fdGeneralComp = new FormData();
fdGeneralComp.left = new FormAttachment(0, 0);
fdGeneralComp.top = new FormAttachment(0, 0);
fdGeneralComp.right = new FormAttachment(100, 0);
fdGeneralComp.bottom = new FormAttachment(100, 0);
wGeneralComp.setLayoutData(fdGeneralComp);
wGeneralComp.layout();
wGeneralTab.setControl(wGeneralComp);
props.setLook(wGeneralComp);

/////////////////////////////////////////////////////////////
/// END OF GENERAL TAB
/////////////////////////////////////////////////////////////

//////////////////////////////////////
// START OF DESTINATION FILE TAB ///
/////////////////////////////////////

wDestinationFileTab = new CTabItem(wTabFolder, SWT.NONE);
wDestinationFileTab.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.DestinationFileTab.Label"));

FormLayout DestcontentLayout = new FormLayout();
DestcontentLayout.marginWidth = 3;
DestcontentLayout.marginHeight = 3;

wDestinationFileComp = new Composite(wTabFolder, SWT.NONE);
props.setLook(wDestinationFileComp);
wDestinationFileComp.setLayout(DestcontentLayout);

// DestinationFile grouping?
// //////////////////////// // START OF DestinationFile GROUP // wDestinationFile = new Group(wDestinationFileComp, SWT.SHADOW_NONE); props.setLook(wDestinationFile); wDestinationFile.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.GroupDestinationFile.Label")); FormLayout groupLayoutFile = new FormLayout(); groupLayoutFile.marginWidth = 10; groupLayoutFile.marginHeight = 10; wDestinationFile.setLayout(groupLayoutFile); // Create destination folder/parent folder wlCreateDestinationFolder = new Label(wDestinationFile, SWT.RIGHT); wlCreateDestinationFolder.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.CreateDestinationFolder.Label")); props.setLook(wlCreateDestinationFolder); fdlCreateDestinationFolder = new FormData(); fdlCreateDestinationFolder.left = new FormAttachment(0, 0); fdlCreateDestinationFolder.top = new FormAttachment(0, margin); fdlCreateDestinationFolder.right = new FormAttachment(middle, -margin); wlCreateDestinationFolder.setLayoutData(fdlCreateDestinationFolder); wCreateDestinationFolder = new Button(wDestinationFile, SWT.CHECK); props.setLook(wCreateDestinationFolder); wCreateDestinationFolder.setToolTipText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.CreateDestinationFolder.Tooltip")); fdCreateDestinationFolder = new FormData(); fdCreateDestinationFolder.left = new FormAttachment(middle, 0); fdCreateDestinationFolder.top = new FormAttachment(0, margin); fdCreateDestinationFolder.right = new FormAttachment(100, 0); wCreateDestinationFolder.setLayoutData(fdCreateDestinationFolder); wCreateDestinationFolder.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { jobEntry.setChanged(); } }); // Destination is a file? 
wlDestinationIsAFile = new Label(wDestinationFile, SWT.RIGHT); wlDestinationIsAFile.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.DestinationIsAFile.Label")); props.setLook(wlDestinationIsAFile); fdlDestinationIsAFile = new FormData(); fdlDestinationIsAFile.left = new FormAttachment(0, 0); fdlDestinationIsAFile.top = new FormAttachment(wCreateDestinationFolder, margin); fdlDestinationIsAFile.right = new FormAttachment(middle, -margin); wlDestinationIsAFile.setLayoutData(fdlDestinationIsAFile); wDestinationIsAFile = new Button(wDestinationFile, SWT.CHECK); props.setLook(wDestinationIsAFile); wDestinationIsAFile.setToolTipText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.DestinationIsAFile.Tooltip")); fdDestinationIsAFile = new FormData(); fdDestinationIsAFile.left = new FormAttachment(middle, 0); fdDestinationIsAFile.top = new FormAttachment(wCreateDestinationFolder, margin); fdDestinationIsAFile.right = new FormAttachment(100, 0); wDestinationIsAFile.setLayoutData(fdDestinationIsAFile); wDestinationIsAFile.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { jobEntry.setChanged(); } }); // Do not keep folder structure? 
wlDoNotKeepFolderStructure=new Label(wDestinationFile, SWT.RIGHT); wlDoNotKeepFolderStructure.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.DoNotKeepFolderStructure.Label")); props.setLook(wlDoNotKeepFolderStructure); fdlDoNotKeepFolderStructure=new FormData(); fdlDoNotKeepFolderStructure.left = new FormAttachment(0, 0); fdlDoNotKeepFolderStructure.top = new FormAttachment(wDestinationIsAFile, margin); fdlDoNotKeepFolderStructure.right= new FormAttachment(middle, -margin); wlDoNotKeepFolderStructure.setLayoutData(fdlDoNotKeepFolderStructure); wDoNotKeepFolderStructure=new Button(wDestinationFile, SWT.CHECK); props.setLook(wDoNotKeepFolderStructure); wDoNotKeepFolderStructure.setToolTipText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.DoNotKeepFolderStructure.Tooltip")); fdDoNotKeepFolderStructure=new FormData(); fdDoNotKeepFolderStructure.left = new FormAttachment(middle, 0); fdDoNotKeepFolderStructure.top = new FormAttachment(wDestinationIsAFile, margin); fdDoNotKeepFolderStructure.right= new FormAttachment(100, 0); wDoNotKeepFolderStructure.setLayoutData(fdDoNotKeepFolderStructure); wDoNotKeepFolderStructure.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { jobEntry.setChanged(); } } ); // Create multi-part file? 
wlAddDate=new Label(wDestinationFile, SWT.RIGHT); wlAddDate.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.AddDate.Label")); props.setLook(wlAddDate); fdlAddDate=new FormData(); fdlAddDate.left = new FormAttachment(0, 0); fdlAddDate.top = new FormAttachment(wDoNotKeepFolderStructure, margin); fdlAddDate.right= new FormAttachment(middle, -margin); wlAddDate.setLayoutData(fdlAddDate); wAddDate=new Button(wDestinationFile, SWT.CHECK); props.setLook(wAddDate); wAddDate.setToolTipText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.AddDate.Tooltip")); fdAddDate=new FormData(); fdAddDate.left = new FormAttachment(middle, 0); fdAddDate.top = new FormAttachment(wDoNotKeepFolderStructure, margin); fdAddDate.right= new FormAttachment(100, 0); wAddDate.setLayoutData(fdAddDate); wAddDate.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { jobEntry.setChanged(); setAddDateBeforeExtension(); } } ); // Create multi-part file? wlAddTime=new Label(wDestinationFile, SWT.RIGHT); wlAddTime.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.AddTime.Label")); props.setLook(wlAddTime); fdlAddTime=new FormData(); fdlAddTime.left = new FormAttachment(0, 0); fdlAddTime.top = new FormAttachment(wAddDate, margin); fdlAddTime.right= new FormAttachment(middle, -margin); wlAddTime.setLayoutData(fdlAddTime); wAddTime=new Button(wDestinationFile, SWT.CHECK); props.setLook(wAddTime); wAddTime.setToolTipText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.AddTime.Tooltip")); fdAddTime=new FormData(); fdAddTime.left = new FormAttachment(middle, 0); fdAddTime.top = new FormAttachment(wAddDate, margin); fdAddTime.right= new FormAttachment(100, 0); wAddTime.setLayoutData(fdAddTime); wAddTime.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { jobEntry.setChanged(); setAddDateBeforeExtension(); } } ); // Specify date time format? 
wlSpecifyFormat=new Label(wDestinationFile, SWT.RIGHT); wlSpecifyFormat.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.SpecifyFormat.Label")); props.setLook(wlSpecifyFormat); fdlSpecifyFormat=new FormData(); fdlSpecifyFormat.left = new FormAttachment(0, 0); fdlSpecifyFormat.top = new FormAttachment(wAddTime, margin); fdlSpecifyFormat.right= new FormAttachment(middle, -margin); wlSpecifyFormat.setLayoutData(fdlSpecifyFormat); wSpecifyFormat=new Button(wDestinationFile, SWT.CHECK); props.setLook(wSpecifyFormat); wSpecifyFormat.setToolTipText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.SpecifyFormat.Tooltip")); fdSpecifyFormat=new FormData(); fdSpecifyFormat.left = new FormAttachment(middle, 0); fdSpecifyFormat.top = new FormAttachment(wAddTime, margin); fdSpecifyFormat.right= new FormAttachment(100, 0); wSpecifyFormat.setLayoutData(fdSpecifyFormat); wSpecifyFormat.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { jobEntry.setChanged(); setDateTimeFormat(); setAddDateBeforeExtension(); } } ); // DateTimeFormat wlDateTimeFormat=new Label(wDestinationFile, SWT.RIGHT); wlDateTimeFormat.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.DateTimeFormat.Label")); props.setLook(wlDateTimeFormat); fdlDateTimeFormat=new FormData(); fdlDateTimeFormat.left = new FormAttachment(0, 0); fdlDateTimeFormat.top = new FormAttachment(wSpecifyFormat, margin); fdlDateTimeFormat.right= new FormAttachment(middle, -margin); wlDateTimeFormat.setLayoutData(fdlDateTimeFormat); wDateTimeFormat=new CCombo(wDestinationFile, SWT.BORDER | SWT.READ_ONLY); wDateTimeFormat.setEditable(true); props.setLook(wDateTimeFormat); wDateTimeFormat.addModifyListener(lsMod); fdDateTimeFormat=new FormData(); fdDateTimeFormat.left = new FormAttachment(middle, 0); fdDateTimeFormat.top = new FormAttachment(wSpecifyFormat, margin); fdDateTimeFormat.right= new FormAttachment(100, 0); wDateTimeFormat.setLayoutData(fdDateTimeFormat); // Prepare a list of 
possible DateTimeFormats... String dats[] = Const.getDateFormats(); for (int x=0;x<dats.length;x++) wDateTimeFormat.add(dats[x]); // Add Date before extension? wlAddDateBeforeExtension = new Label(wDestinationFile, SWT.RIGHT); wlAddDateBeforeExtension.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.AddDateBeforeExtension.Label")); props.setLook(wlAddDateBeforeExtension); fdlAddDateBeforeExtension = new FormData(); fdlAddDateBeforeExtension.left = new FormAttachment(0, 0); fdlAddDateBeforeExtension.top = new FormAttachment(wDateTimeFormat, margin); fdlAddDateBeforeExtension.right = new FormAttachment(middle, -margin); wlAddDateBeforeExtension.setLayoutData(fdlAddDateBeforeExtension); wAddDateBeforeExtension = new Button(wDestinationFile, SWT.CHECK); props.setLook(wAddDateBeforeExtension); wAddDateBeforeExtension.setToolTipText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.AddDateBeforeExtension.Tooltip")); fdAddDateBeforeExtension = new FormData(); fdAddDateBeforeExtension.left = new FormAttachment(middle, 0); fdAddDateBeforeExtension.top = new FormAttachment(wDateTimeFormat, margin); fdAddDateBeforeExtension.right = new FormAttachment(100, 0); wAddDateBeforeExtension.setLayoutData(fdAddDateBeforeExtension); wAddDateBeforeExtension.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { jobEntry.setChanged(); } }); // If File Exists wlIfFileExists = new Label(wDestinationFile, SWT.RIGHT); wlIfFileExists.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.IfFileExists.Label")); props.setLook(wlIfFileExists); fdlIfFileExists = new FormData(); fdlIfFileExists.left = new FormAttachment(0, 0); fdlIfFileExists.right = new FormAttachment(middle, 0); fdlIfFileExists.top = new FormAttachment(wAddDateBeforeExtension, margin); wlIfFileExists.setLayoutData(fdlIfFileExists); wIfFileExists = new CCombo(wDestinationFile, SWT.SINGLE | SWT.READ_ONLY | SWT.BORDER); wIfFileExists.add(BaseMessages.getString(PKG, 
"JobPGPDecryptFiles.Do_Nothing_IfFileExists.Label")); wIfFileExists.add(BaseMessages.getString(PKG, "JobPGPDecryptFiles.Overwrite_File_IfFileExists.Label")); wIfFileExists.add(BaseMessages.getString(PKG, "JobPGPDecryptFiles.Unique_Name_IfFileExists.Label")); wIfFileExists.add(BaseMessages.getString(PKG, "JobPGPDecryptFiles.Delete_Source_File_IfFileExists.Label")); wIfFileExists.add(BaseMessages.getString(PKG, "JobPGPDecryptFiles.Move_To_Folder_IfFileExists.Label")); wIfFileExists.add(BaseMessages.getString(PKG, "JobPGPDecryptFiles.Fail_IfFileExists.Label")); wIfFileExists.select(0); // +1: starts at -1 props.setLook(wIfFileExists); fdIfFileExists= new FormData(); fdIfFileExists.left = new FormAttachment(middle, 0); fdIfFileExists.top = new FormAttachment(wAddDateBeforeExtension, margin); fdIfFileExists.right = new FormAttachment(100, 0); wIfFileExists.setLayoutData(fdIfFileExists); wIfFileExists.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { activeDestinationFolder(); setMovedDateTimeFormat(); //setAddDateBeforeExtension(); setAddMovedDateBeforeExtension(); } }); fdDestinationFile = new FormData(); fdDestinationFile.left = new FormAttachment(0, margin); fdDestinationFile.top = new FormAttachment(wName, margin); fdDestinationFile.right = new FormAttachment(100, -margin); wDestinationFile.setLayoutData(fdDestinationFile); // /////////////////////////////////////////////////////////// // / END OF DestinationFile GROUP // /////////////////////////////////////////////////////////// // MoveTo grouping? 
// //////////////////////// // START OF MoveTo GROUP // wMoveToGroup = new Group(wDestinationFileComp, SWT.SHADOW_NONE); props.setLook(wMoveToGroup); wMoveToGroup.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.GroupMoveToGroup.Label")); FormLayout MovetoLayoutFile = new FormLayout(); MovetoLayoutFile.marginWidth = 10; MovetoLayoutFile.marginHeight = 10; wMoveToGroup.setLayout(MovetoLayoutFile); // DestinationFolder line wlDestinationFolder=new Label(wMoveToGroup, SWT.RIGHT); wlDestinationFolder.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.DestinationFolder.Label")); props.setLook(wlDestinationFolder); fdlDestinationFolder=new FormData(); fdlDestinationFolder.left = new FormAttachment(0, 0); fdlDestinationFolder.top = new FormAttachment(wDestinationFile, margin); fdlDestinationFolder.right= new FormAttachment(middle, -margin); wlDestinationFolder.setLayoutData(fdlDestinationFolder); wbDestinationFolder=new Button(wMoveToGroup, SWT.PUSH| SWT.CENTER); props.setLook(wbDestinationFolder); wbDestinationFolder.setText(BaseMessages.getString(PKG, "System.Button.Browse")); fdbDestinationFolder=new FormData(); fdbDestinationFolder.right= new FormAttachment(100, 0); fdbDestinationFolder.top = new FormAttachment(wDestinationFile, 0); wbDestinationFolder.setLayoutData(fdbDestinationFolder); wDestinationFolder=new TextVar(jobMeta, wMoveToGroup, SWT.SINGLE | SWT.LEFT | SWT.BORDER); props.setLook(wDestinationFolder); wDestinationFolder.addModifyListener(lsMod); fdDestinationFolder=new FormData(); fdDestinationFolder.left = new FormAttachment(middle, 0); fdDestinationFolder.top = new FormAttachment(wDestinationFile, margin); fdDestinationFolder.right= new FormAttachment(wbDestinationFolder, -margin); wDestinationFolder.setLayoutData(fdDestinationFolder); // Whenever something changes, set the tooltip to the expanded version: wDestinationFolder.addModifyListener(new ModifyListener() { public void modifyText(ModifyEvent e) { 
wDestinationFolder.setToolTipText(jobMeta.environmentSubstitute( wDestinationFolder.getText() ) ); } } ); wbDestinationFolder.addSelectionListener ( new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { DirectoryDialog dialog = new DirectoryDialog(shell, SWT.OPEN); if (wDestinationFolder.getText()!=null) { dialog.setFilterPath(jobMeta.environmentSubstitute(wDestinationFolder.getText()) ); } String dir=dialog.open(); if(dir!=null) { wDestinationFolder.setText(dir); } } } ); // Create destination folder/parent folder wlCreateMoveToFolder = new Label(wMoveToGroup, SWT.RIGHT); wlCreateMoveToFolder.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.CreateMoveToFolder.Label")); props.setLook(wlCreateMoveToFolder); fdlCreateMoveToFolder = new FormData(); fdlCreateMoveToFolder.left = new FormAttachment(0, 0); fdlCreateMoveToFolder.top = new FormAttachment(wDestinationFolder, margin); fdlCreateMoveToFolder.right = new FormAttachment(middle, -margin); wlCreateMoveToFolder.setLayoutData(fdlCreateMoveToFolder); wCreateMoveToFolder = new Button(wMoveToGroup, SWT.CHECK); props.setLook(wCreateMoveToFolder); wCreateMoveToFolder.setToolTipText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.CreateMoveToFolder.Tooltip")); fdCreateMoveToFolder = new FormData(); fdCreateMoveToFolder.left = new FormAttachment(middle, 0); fdCreateMoveToFolder.top = new FormAttachment(wDestinationFolder, margin); fdCreateMoveToFolder.right = new FormAttachment(100, 0); wCreateMoveToFolder.setLayoutData(fdCreateMoveToFolder); wCreateMoveToFolder.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { jobEntry.setChanged(); } }); // Create multi-part file? 
wlAddMovedDate=new Label(wMoveToGroup, SWT.RIGHT); wlAddMovedDate.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.AddMovedDate.Label")); props.setLook(wlAddMovedDate); fdlAddMovedDate=new FormData(); fdlAddMovedDate.left = new FormAttachment(0, 0); fdlAddMovedDate.top = new FormAttachment(wCreateMoveToFolder, margin); fdlAddMovedDate.right= new FormAttachment(middle, -margin); wlAddMovedDate.setLayoutData(fdlAddMovedDate); wAddMovedDate=new Button(wMoveToGroup, SWT.CHECK); props.setLook(wAddMovedDate); wAddMovedDate.setToolTipText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.AddMovedDate.Tooltip")); fdAddMovedDate=new FormData(); fdAddMovedDate.left = new FormAttachment(middle, 0); fdAddMovedDate.top = new FormAttachment(wCreateMoveToFolder, margin); fdAddMovedDate.right= new FormAttachment(100, 0); wAddMovedDate.setLayoutData(fdAddMovedDate); wAddMovedDate.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { jobEntry.setChanged(); setAddMovedDateBeforeExtension(); } } ); // Create multi-part file? 
wlAddMovedTime=new Label(wMoveToGroup, SWT.RIGHT); wlAddMovedTime.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.AddMovedTime.Label")); props.setLook(wlAddMovedTime); fdlAddMovedTime=new FormData(); fdlAddMovedTime.left = new FormAttachment(0, 0); fdlAddMovedTime.top = new FormAttachment(wAddMovedDate, margin); fdlAddMovedTime.right= new FormAttachment(middle, -margin); wlAddMovedTime.setLayoutData(fdlAddMovedTime); wAddMovedTime=new Button(wMoveToGroup, SWT.CHECK); props.setLook(wAddMovedTime); wAddMovedTime.setToolTipText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.AddMovedTime.Tooltip")); fdAddMovedTime=new FormData(); fdAddMovedTime.left = new FormAttachment(middle, 0); fdAddMovedTime.top = new FormAttachment(wAddMovedDate, margin); fdAddMovedTime.right= new FormAttachment(100, 0); wAddMovedTime.setLayoutData(fdAddMovedTime); wAddMovedTime.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { jobEntry.setChanged(); setAddMovedDateBeforeExtension(); } } ); // Specify date time format? 
wlSpecifyMoveFormat=new Label(wMoveToGroup, SWT.RIGHT); wlSpecifyMoveFormat.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.SpecifyMoveFormat.Label")); props.setLook(wlSpecifyMoveFormat); fdlSpecifyMoveFormat=new FormData(); fdlSpecifyMoveFormat.left = new FormAttachment(0, 0); fdlSpecifyMoveFormat.top = new FormAttachment(wAddMovedTime, margin); fdlSpecifyMoveFormat.right= new FormAttachment(middle, -margin); wlSpecifyMoveFormat.setLayoutData(fdlSpecifyMoveFormat); wSpecifyMoveFormat=new Button(wMoveToGroup, SWT.CHECK); props.setLook(wSpecifyMoveFormat); wSpecifyMoveFormat.setToolTipText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.SpecifyMoveFormat.Tooltip")); fdSpecifyMoveFormat=new FormData(); fdSpecifyMoveFormat.left = new FormAttachment(middle, 0); fdSpecifyMoveFormat.top = new FormAttachment(wAddMovedTime, margin); fdSpecifyMoveFormat.right= new FormAttachment(100, 0); wSpecifyMoveFormat.setLayoutData(fdSpecifyMoveFormat); wSpecifyMoveFormat.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { jobEntry.setChanged(); setMovedDateTimeFormat(); setAddMovedDateBeforeExtension(); } } ); // Moved DateTimeFormat wlMovedDateTimeFormat=new Label(wMoveToGroup, SWT.RIGHT); wlMovedDateTimeFormat.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.MovedDateTimeFormat.Label")); props.setLook(wlMovedDateTimeFormat); fdlMovedDateTimeFormat=new FormData(); fdlMovedDateTimeFormat.left = new FormAttachment(0, 0); fdlMovedDateTimeFormat.top = new FormAttachment(wSpecifyMoveFormat, margin); fdlMovedDateTimeFormat.right= new FormAttachment(middle, -margin); wlMovedDateTimeFormat.setLayoutData(fdlMovedDateTimeFormat); wMovedDateTimeFormat=new CCombo(wMoveToGroup, SWT.BORDER | SWT.READ_ONLY); wMovedDateTimeFormat.setEditable(true); props.setLook(wMovedDateTimeFormat); wMovedDateTimeFormat.addModifyListener(lsMod); fdMovedDateTimeFormat=new FormData(); fdMovedDateTimeFormat.left = new FormAttachment(middle, 0); 
fdMovedDateTimeFormat.top = new FormAttachment(wSpecifyMoveFormat, margin); fdMovedDateTimeFormat.right= new FormAttachment(100, 0); wMovedDateTimeFormat.setLayoutData(fdMovedDateTimeFormat); for (int x=0;x<dats.length;x++) wMovedDateTimeFormat.add(dats[x]); // Add Date before extension? wlAddMovedDateBeforeExtension = new Label(wMoveToGroup, SWT.RIGHT); wlAddMovedDateBeforeExtension.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.AddMovedDateBeforeExtension.Label")); props.setLook(wlAddMovedDateBeforeExtension); fdlAddMovedDateBeforeExtension = new FormData(); fdlAddMovedDateBeforeExtension.left = new FormAttachment(0, 0); fdlAddMovedDateBeforeExtension.top = new FormAttachment(wMovedDateTimeFormat, margin); fdlAddMovedDateBeforeExtension.right = new FormAttachment(middle, -margin); wlAddMovedDateBeforeExtension.setLayoutData(fdlAddMovedDateBeforeExtension); wAddMovedDateBeforeExtension = new Button(wMoveToGroup, SWT.CHECK); props.setLook(wAddMovedDateBeforeExtension); wAddMovedDateBeforeExtension.setToolTipText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.AddMovedDateBeforeExtension.Tooltip")); fdAddMovedDateBeforeExtension = new FormData(); fdAddMovedDateBeforeExtension.left = new FormAttachment(middle, 0); fdAddMovedDateBeforeExtension.top = new FormAttachment(wMovedDateTimeFormat, margin); fdAddMovedDateBeforeExtension.right = new FormAttachment(100, 0); wAddMovedDateBeforeExtension.setLayoutData(fdAddMovedDateBeforeExtension); wAddMovedDateBeforeExtension.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { jobEntry.setChanged(); } }); // If moved File Exists wlIfMovedFileExists = new Label(wMoveToGroup, SWT.RIGHT); wlIfMovedFileExists.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.IfMovedFileExists.Label")); props.setLook(wlIfMovedFileExists); fdlIfMovedFileExists = new FormData(); fdlIfMovedFileExists.left = new FormAttachment(0, 0); fdlIfMovedFileExists.right = new FormAttachment(middle, 0); 
fdlIfMovedFileExists.top = new FormAttachment(wAddMovedDateBeforeExtension, margin);
wlIfMovedFileExists.setLayoutData(fdlIfMovedFileExists);

// Combo: what to do when the moved file already exists in the target folder.
wIfMovedFileExists = new CCombo(wMoveToGroup, SWT.SINGLE | SWT.READ_ONLY | SWT.BORDER);
wIfMovedFileExists.add(BaseMessages.getString(PKG, "JobPGPDecryptFiles.Do_Nothing_IfMovedFileExists.Label"));
wIfMovedFileExists.add(BaseMessages.getString(PKG, "JobPGPDecryptFiles.Overwrite_Filename_IffMovedFileExists.Label"));
wIfMovedFileExists.add(BaseMessages.getString(PKG, "JobPGPDecryptFiles.UniqueName_IfMovedFileExists.Label"));
wIfMovedFileExists.add(BaseMessages.getString(PKG, "JobPGPDecryptFiles.Fail_IfMovedFileExists.Label"));
wIfMovedFileExists.select(0); // +1: starts at -1
props.setLook(wIfMovedFileExists);
fdIfMovedFileExists = new FormData();
fdIfMovedFileExists.left = new FormAttachment(middle, 0);
fdIfMovedFileExists.top = new FormAttachment(wAddMovedDateBeforeExtension, margin);
fdIfMovedFileExists.right = new FormAttachment(100, 0);
wIfMovedFileExists.setLayoutData(fdIfMovedFileExists);
// FIX: removed a byte-identical duplicate of the four fdIfMovedFileExists
// assignments and the second setLayoutData() call — pure dead code.

fdMoveToGroup = new FormData();
fdMoveToGroup.left = new FormAttachment(0, margin);
fdMoveToGroup.top = new FormAttachment(wDestinationFile, margin);
fdMoveToGroup.right = new FormAttachment(100, -margin);
wMoveToGroup.setLayoutData(fdMoveToGroup);
// ///////////////////////////////////////////////////////////
// / END OF MoveToGroup GROUP
// ///////////////////////////////////////////////////////////

fdDestinationFileComp = new FormData();
fdDestinationFileComp.left = new FormAttachment(0, 0);
fdDestinationFileComp.top = new FormAttachment(0, 0);
fdDestinationFileComp.right = new FormAttachment(100, 0);
fdDestinationFileComp.bottom= new
FormAttachment(100, 0); wDestinationFileComp.setLayoutData(wDestinationFileComp); wDestinationFileComp.layout(); wDestinationFileTab.setControl(wDestinationFileComp); ///////////////////////////////////////////////////////////// /// END OF DESTINATION FILETAB ///////////////////////////////////////////////////////////// ////////////////////////////////////// // START OF ADVANCED TAB /// ///////////////////////////////////// wAdvancedTab=new CTabItem(wTabFolder, SWT.NONE); wAdvancedTab.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.Tab.Advanced.Label")); FormLayout contentLayout = new FormLayout (); contentLayout.marginWidth = 3; contentLayout.marginHeight = 3; wAdvancedComp = new Composite(wTabFolder, SWT.NONE); props.setLook(wAdvancedComp); wAdvancedComp.setLayout(contentLayout); // SuccessOngrouping? // //////////////////////// // START OF SUCCESS ON GROUP/// // / wSuccessOn= new Group(wAdvancedComp, SWT.SHADOW_NONE); props.setLook(wSuccessOn); wSuccessOn.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.SuccessOn.Group.Label")); FormLayout successongroupLayout = new FormLayout(); successongroupLayout.marginWidth = 10; successongroupLayout.marginHeight = 10; wSuccessOn.setLayout(successongroupLayout); //Success Condition wlSuccessCondition = new Label(wSuccessOn, SWT.RIGHT); wlSuccessCondition.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.SuccessCondition.Label")); props.setLook(wlSuccessCondition); fdlSuccessCondition = new FormData(); fdlSuccessCondition.left = new FormAttachment(0, 0); fdlSuccessCondition.right = new FormAttachment(middle, 0); fdlSuccessCondition.top = new FormAttachment(0, margin); wlSuccessCondition.setLayoutData(fdlSuccessCondition); wSuccessCondition = new CCombo(wSuccessOn, SWT.SINGLE | SWT.READ_ONLY | SWT.BORDER); wSuccessCondition.add(BaseMessages.getString(PKG, "JobPGPDecryptFiles.SuccessWhenAllWorksFine.Label")); wSuccessCondition.add(BaseMessages.getString(PKG, "JobPGPDecryptFiles.SuccessWhenAtLeat.Label")); 
wSuccessCondition.add(BaseMessages.getString(PKG, "JobPGPDecryptFiles.SuccessWhenErrorsLessThan.Label")); wSuccessCondition.select(0); // +1: starts at -1 props.setLook(wSuccessCondition); fdSuccessCondition= new FormData(); fdSuccessCondition.left = new FormAttachment(middle, 0); fdSuccessCondition.top = new FormAttachment(0, margin); fdSuccessCondition.right = new FormAttachment(100, 0); wSuccessCondition.setLayoutData(fdSuccessCondition); wSuccessCondition.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { activeSuccessCondition(); } }); // Success when number of errors less than wlNrErrorsLessThan= new Label(wSuccessOn, SWT.RIGHT); wlNrErrorsLessThan.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.NrErrorsLessThan.Label")); props.setLook(wlNrErrorsLessThan); fdlNrErrorsLessThan= new FormData(); fdlNrErrorsLessThan.left = new FormAttachment(0, 0); fdlNrErrorsLessThan.top = new FormAttachment(wSuccessCondition, margin); fdlNrErrorsLessThan.right = new FormAttachment(middle, -margin); wlNrErrorsLessThan.setLayoutData(fdlNrErrorsLessThan); wNrErrorsLessThan= new TextVar(jobMeta, wSuccessOn, SWT.SINGLE | SWT.LEFT | SWT.BORDER, BaseMessages.getString(PKG, "JobPGPDecryptFiles.NrErrorsLessThan.Tooltip")); props.setLook(wNrErrorsLessThan); wNrErrorsLessThan.addModifyListener(lsMod); fdNrErrorsLessThan= new FormData(); fdNrErrorsLessThan.left = new FormAttachment(middle, 0); fdNrErrorsLessThan.top = new FormAttachment(wSuccessCondition, margin); fdNrErrorsLessThan.right = new FormAttachment(100, -margin); wNrErrorsLessThan.setLayoutData(fdNrErrorsLessThan); fdSuccessOn= new FormData(); fdSuccessOn.left = new FormAttachment(0, margin); fdSuccessOn.top = new FormAttachment(wDestinationFile, margin); fdSuccessOn.right = new FormAttachment(100, -margin); wSuccessOn.setLayoutData(fdSuccessOn); // /////////////////////////////////////////////////////////// // / END OF Success ON GROUP // 
/////////////////////////////////////////////////////////// // fileresult grouping? // //////////////////////// // START OF LOGGING GROUP/// // / wFileResult = new Group(wAdvancedComp, SWT.SHADOW_NONE); props.setLook(wFileResult); wFileResult.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.FileResult.Group.Label")); FormLayout fileresultgroupLayout = new FormLayout(); fileresultgroupLayout.marginWidth = 10; fileresultgroupLayout.marginHeight = 10; wFileResult.setLayout(fileresultgroupLayout); //Add file to result wlAddFileToResult = new Label(wFileResult, SWT.RIGHT); wlAddFileToResult.setText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.AddFileToResult.Label")); props.setLook(wlAddFileToResult); fdlAddFileToResult = new FormData(); fdlAddFileToResult.left = new FormAttachment(0, 0); fdlAddFileToResult.top = new FormAttachment(wSuccessOn, margin); fdlAddFileToResult.right = new FormAttachment(middle, -margin); wlAddFileToResult.setLayoutData(fdlAddFileToResult); wAddFileToResult = new Button(wFileResult, SWT.CHECK); props.setLook(wAddFileToResult); wAddFileToResult.setToolTipText(BaseMessages.getString(PKG, "JobPGPDecryptFiles.AddFileToResult.Tooltip")); fdAddFileToResult = new FormData(); fdAddFileToResult.left = new FormAttachment(middle, 0); fdAddFileToResult.top = new FormAttachment(wSuccessOn, margin); fdAddFileToResult.right = new FormAttachment(100, 0); wAddFileToResult.setLayoutData(fdAddFileToResult); wAddFileToResult.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { jobEntry.setChanged(); } }); fdFileResult = new FormData(); fdFileResult.left = new FormAttachment(0, margin); fdFileResult.top = new FormAttachment(wSuccessOn, margin); fdFileResult.right = new FormAttachment(100, -margin); wFileResult.setLayoutData(fdFileResult); // /////////////////////////////////////////////////////////// // / END OF FilesResult GROUP // /////////////////////////////////////////////////////////// fdAdvancedComp = new 
FormData(); fdAdvancedComp.left = new FormAttachment(0, 0); fdAdvancedComp.top = new FormAttachment(0, 0); fdAdvancedComp.right = new FormAttachment(100, 0); fdAdvancedComp.bottom= new FormAttachment(100, 0); wAdvancedComp.setLayoutData(wAdvancedComp); wAdvancedComp.layout(); wAdvancedTab.setControl(wAdvancedComp); ///////////////////////////////////////////////////////////// /// END OF ADVANCED TAB ///////////////////////////////////////////////////////////// fdTabFolder = new FormData(); fdTabFolder.left = new FormAttachment(0, 0); fdTabFolder.top = new FormAttachment(wName, margin); fdTabFolder.right = new FormAttachment(100, 0); fdTabFolder.bottom= new FormAttachment(100, -50); wTabFolder.setLayoutData(fdTabFolder); wOK = new Button(shell, SWT.PUSH); wOK.setText(BaseMessages.getString(PKG, "System.Button.OK")); wCancel = new Button(shell, SWT.PUSH); wCancel.setText(BaseMessages.getString(PKG, "System.Button.Cancel")); BaseStepDialog.positionBottomButtons(shell, new Button[] { wOK, wCancel }, margin, wTabFolder); // Add listeners lsCancel = new Listener() { public void handleEvent(Event e) { cancel(); } }; lsOK = new Listener() { public void handleEvent(Event e) { ok(); } }; wCancel.addListener(SWT.Selection, lsCancel); wOK.addListener (SWT.Selection, lsOK ); lsDef=new SelectionAdapter() { public void widgetDefaultSelected(SelectionEvent e) { ok(); } }; wName.addSelectionListener( lsDef ); wSourceFileFolder.addSelectionListener( lsDef ); // Detect X or ALT-F4 or something that kills this window... 
shell.addShellListener( new ShellAdapter() { public void shellClosed(ShellEvent e) { cancel(); } } );

// Populate the dialog from the job entry meta-data, then sync every
// enable/disable widget state before showing the shell.
getData();
CheckIncludeSubFolders();
activeSuccessCondition();
setDateTimeFormat();
// NOTE(review): activeSuccessCondition() was already called two lines above;
// this second call is redundant (harmless — the method is idempotent).
activeSuccessCondition();
activeDestinationFolder();
setMovedDateTimeFormat();
setAddDateBeforeExtension();
setAddMovedDateBeforeExtension();
wTabFolder.setSelection(0);
BaseStepDialog.setSize(shell);
shell.open();
// Standard SWT modal event loop: block until the shell is disposed.
while (!shell.isDisposed()) {
    if (!display.readAndDispatch()) display.sleep();
}
return jobEntry;
}

/**
 * Enables the "move to folder" group of widgets only while the
 * "If file exists" combo is on entry 4 ("move file"); greys them out otherwise.
 */
private void activeDestinationFolder() {
wbDestinationFolder.setEnabled(wIfFileExists.getSelectionIndex()==4);
wlDestinationFolder.setEnabled(wIfFileExists.getSelectionIndex()==4);
wDestinationFolder.setEnabled(wIfFileExists.getSelectionIndex()==4);
wlMovedDateTimeFormat.setEnabled(wIfFileExists.getSelectionIndex()==4);
wMovedDateTimeFormat.setEnabled(wIfFileExists.getSelectionIndex()==4);
wIfMovedFileExists.setEnabled(wIfFileExists.getSelectionIndex()==4);
wlIfMovedFileExists.setEnabled(wIfFileExists.getSelectionIndex()==4);
wlAddMovedDateBeforeExtension.setEnabled(wIfFileExists.getSelectionIndex()==4);
wAddMovedDateBeforeExtension.setEnabled(wIfFileExists.getSelectionIndex()==4);
wlAddMovedDate.setEnabled(wIfFileExists.getSelectionIndex()==4);
wAddMovedDate.setEnabled(wIfFileExists.getSelectionIndex()==4);
wlAddMovedTime.setEnabled(wIfFileExists.getSelectionIndex()==4);
wAddMovedTime.setEnabled(wIfFileExists.getSelectionIndex()==4);
wlSpecifyMoveFormat.setEnabled(wIfFileExists.getSelectionIndex()==4);
wSpecifyMoveFormat.setEnabled(wIfFileExists.getSelectionIndex()==4);
wlCreateMoveToFolder.setEnabled(wIfFileExists.getSelectionIndex()==4);
wCreateMoveToFolder.setEnabled(wIfFileExists.getSelectionIndex()==4);
}

/**
 * The "Nr errors less than" field only applies when the success condition
 * combo is not on index 0 ("success when all works fine").
 */
private void activeSuccessCondition() {
wlNrErrorsLessThan.setEnabled(wSuccessCondition.getSelectionIndex()!=0);
wNrErrorsLessThan.setEnabled(wSuccessCondition.getSelectionIndex()!=0);
}

/**
 * "Add date before extension" is only meaningful when a date, a time or an
 * explicit format is being appended to the destination file name; when none
 * of those is selected the checkbox is cleared and disabled.
 */
private void setAddDateBeforeExtension() {
wlAddDateBeforeExtension.setEnabled(wAddDate.getSelection()||wAddTime.getSelection()||wSpecifyFormat.getSelection() );
wAddDateBeforeExtension.setEnabled(wAddDate.getSelection()||wAddTime.getSelection()||wSpecifyFormat.getSelection() );
if(!wAddDate.getSelection()&& !wAddTime.getSelection()&& !wSpecifyFormat.getSelection())
    wAddDateBeforeExtension.setSelection(false);
}

/** Same rule as setAddDateBeforeExtension(), applied to the moved-file widgets. */
private void setAddMovedDateBeforeExtension() {
wlAddMovedDateBeforeExtension.setEnabled(wAddMovedDate.getSelection()||wAddMovedTime.getSelection()||wSpecifyMoveFormat.getSelection() );
wAddMovedDateBeforeExtension.setEnabled(wAddMovedDate.getSelection()||wAddMovedTime.getSelection()||wSpecifyMoveFormat.getSelection() );
if(!wAddMovedDate.getSelection()&& !wAddMovedTime.getSelection()&& !wSpecifyMoveFormat.getSelection())
    wAddMovedDateBeforeExtension.setSelection(false);
}

/**
 * An explicit date-time format is mutually exclusive with the separate
 * "add date"/"add time" toggles: selecting it clears and disables them.
 */
private void setDateTimeFormat() {
if(wSpecifyFormat.getSelection()) {
    wAddDate.setSelection(false);
    wAddTime.setSelection(false);
}
wDateTimeFormat.setEnabled(wSpecifyFormat.getSelection());
wlDateTimeFormat.setEnabled(wSpecifyFormat.getSelection());
wAddDate.setEnabled(!wSpecifyFormat.getSelection());
wlAddDate.setEnabled(!wSpecifyFormat.getSelection());
wAddTime.setEnabled(!wSpecifyFormat.getSelection());
wlAddTime.setEnabled(!wSpecifyFormat.getSelection());
}

/**
 * Moved-file variant of setDateTimeFormat(). NOTE(review): unlike the variant
 * above, the add-date/add-time toggles are left enabled here — the disabling
 * lines below were deliberately commented out.
 */
private void setMovedDateTimeFormat() {
if(wSpecifyMoveFormat.getSelection()) {
    wAddMovedDate.setSelection(false);
    wAddMovedTime.setSelection(false);
}
wlMovedDateTimeFormat.setEnabled(wSpecifyMoveFormat.getSelection());
wMovedDateTimeFormat.setEnabled(wSpecifyMoveFormat.getSelection());
//wAddMovedDate.setEnabled(!wSpecifyMoveFormat.getSelection());
//wlAddMovedDate.setEnabled(!wSpecifyMoveFormat.getSelection());
//wAddMovedTime.setEnabled(!wSpecifyMoveFormat.getSelection());
//wlAddMovedTime.setEnabled(!wSpecifyMoveFormat.getSelection());
}

/**
 * When arguments come from the previous job entry result, the manual
 * source/destination/wildcard widgets do not apply and are greyed out.
 */
private void RefreshArgFromPrevious() {
wlFields.setEnabled(!wPrevious.getSelection());
wFields.setEnabled(!wPrevious.getSelection());
wbdSourceFileFolder.setEnabled(!wPrevious.getSelection());
wbeSourceFileFolder.setEnabled(!wPrevious.getSelection());
wbSourceFileFolder.setEnabled(!wPrevious.getSelection());
wbaSourceFileFolder.setEnabled(!wPrevious.getSelection());
wbDestinationFileFolder.setEnabled(!wPrevious.getSelection());
wlDestinationFileFolder.setEnabled(!wPrevious.getSelection());
wDestinationFileFolder.setEnabled(!wPrevious.getSelection());
wlSourceFileFolder.setEnabled(!wPrevious.getSelection());
wSourceFileFolder.setEnabled(!wPrevious.getSelection());
wlWildcard.setEnabled(!wPrevious.getSelection());
wWildcard.setEnabled(!wPrevious.getSelection());
wbSourceDirectory.setEnabled(!wPrevious.getSelection());
wbDestinationDirectory.setEnabled(!wPrevious.getSelection());
}

/** Persists the window geometry into the properties, then disposes the shell. */
public void dispose() {
WindowProperty winprop = new WindowProperty(shell);
props.setScreen(winprop);
shell.dispose();
}

/**
 * "Do not keep folder structure" only makes sense while subfolders are
 * included; otherwise the checkbox is cleared and disabled.
 */
private void CheckIncludeSubFolders() {
wlDoNotKeepFolderStructure.setEnabled(wIncludeSubfolders.getSelection());
wDoNotKeepFolderStructure.setEnabled(wIncludeSubfolders.getSelection());
if(!wIncludeSubfolders.getSelection()) {
    wDoNotKeepFolderStructure.setSelection(false);
}
}

/**
 * Copy information from the meta-data input to the dialog fields.
*/
public void getData() {
if (jobEntry.getName() != null) wName.setText( jobEntry.getName() );
wName.selectAll();

// Fill the grid from the parallel meta-data arrays.
// NOTE(review): wildcard/passphrase/destination_filefolder are indexed by
// source_filefolder's length — assumes all four arrays have equal length
// (ok() below maintains that invariant when saving).
if (jobEntry.source_filefolder != null) {
for (int i = 0; i < jobEntry.source_filefolder.length; i++) {
TableItem ti = wFields.table.getItem(i);
if (jobEntry.source_filefolder[i] != null) ti.setText(1, jobEntry.source_filefolder[i]);
if (jobEntry.wildcard[i] != null) ti.setText(2, jobEntry.wildcard[i]);
if (jobEntry.passphrase[i] != null) ti.setText(3, jobEntry.passphrase[i]);
if (jobEntry.destination_filefolder[i] != null) ti.setText(4, jobEntry.destination_filefolder[i]);
}
wFields.setRowNums();
wFields.optWidth(true);
}
wPrevious.setSelection(jobEntry.arg_from_previous);
wIncludeSubfolders.setSelection(jobEntry.include_subfolders);
wDestinationIsAFile.setSelection(jobEntry.destination_is_a_file);
wCreateDestinationFolder.setSelection(jobEntry.create_destination_folder);
wAddFileToResult.setSelection(jobEntry.add_result_filesname);
wCreateMoveToFolder.setSelection(jobEntry.create_move_to_folder);

// Default error threshold is "10" when the meta-data carries none.
if (jobEntry.getNrErrorsLessThan()!= null)
    wNrErrorsLessThan.setText( jobEntry.getNrErrorsLessThan() );
else
    wNrErrorsLessThan.setText("10");

// Map the success-condition constant onto the combo index (0..2).
if(jobEntry.getSuccessCondition()!=null)
{
if(jobEntry.getSuccessCondition().equals(jobEntry.SUCCESS_IF_AT_LEAST_X_FILES_UN_ZIPPED))
    wSuccessCondition.select(1);
else if(jobEntry.getSuccessCondition().equals(jobEntry.SUCCESS_IF_ERRORS_LESS))
    wSuccessCondition.select(2);
else
    wSuccessCondition.select(0);
}else
    wSuccessCondition.select(0);

// Map the "if file exists" code onto the combo index (0..5);
// ok() performs the inverse mapping.
if(jobEntry.getIfFileExists()!=null)
{
if(jobEntry.getIfFileExists().equals("overwrite_file"))
    wIfFileExists.select(1);
else if(jobEntry.getIfFileExists().equals("unique_name"))
    wIfFileExists.select(2);
else if(jobEntry.getIfFileExists().equals("delete_file"))
    wIfFileExists.select(3);
else if(jobEntry.getIfFileExists().equals("move_file"))
    wIfFileExists.select(4);
else if(jobEntry.getIfFileExists().equals("fail"))
    wIfFileExists.select(5);
else
    wIfFileExists.select(0);
}else
    wIfFileExists.select(0);

if(jobEntry.getDestinationFolder()!=null)
    wDestinationFolder.setText(jobEntry.getDestinationFolder());

// Same mapping idea for the moved-file combo (0..3).
if(jobEntry.getIfMovedFileExists()!=null)
{
if(jobEntry.getIfMovedFileExists().equals("overwrite_file"))
    wIfMovedFileExists.select(1);
else if(jobEntry.getIfMovedFileExists().equals("unique_name"))
    wIfMovedFileExists.select(2);
else if(jobEntry.getIfMovedFileExists().equals("fail"))
    wIfMovedFileExists.select(3);
else
    wIfMovedFileExists.select(0);
}else
    wIfMovedFileExists.select(0);

wDoNotKeepFolderStructure.setSelection(jobEntry.isDoNotKeepFolderStructure());
wAddDateBeforeExtension.setSelection(jobEntry.isAddDateBeforeExtension());
wAddDate.setSelection(jobEntry.isAddDate());
wAddTime.setSelection(jobEntry.isAddTime());
wSpecifyFormat.setSelection(jobEntry.isSpecifyFormat());
if (jobEntry.getDateTimeFormat()!= null) wDateTimeFormat.setText( jobEntry.getDateTimeFormat() );
if (jobEntry.getGPGLocation()!= null) wGpgExe.setText( jobEntry.getGPGLocation() );
wAddMovedDate.setSelection(jobEntry.isAddMovedDate());
wAddMovedTime.setSelection(jobEntry.isAddMovedTime());
wSpecifyMoveFormat.setSelection(jobEntry.isSpecifyMoveFormat());
if (jobEntry.getMovedDateTimeFormat()!= null) wMovedDateTimeFormat.setText( jobEntry.getMovedDateTimeFormat() );
wAddMovedDateBeforeExtension.setSelection(jobEntry.isAddMovedDateBeforeExtension());
}

/** Discards edits: restores the original changed flag, signals "cancelled" via a null result. */
private void cancel() {
jobEntry.setChanged(changed);
jobEntry=null;
dispose();
}

/**
 * Validates the dialog (a name is required), copies every widget value back
 * into the job entry meta-data, then closes the dialog.
 */
private void ok() {
if(Const.isEmpty(wName.getText()))
{
MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR );
mb.setMessage("Please give this job entry a name!");
mb.setText("Enter a name");
mb.open();
return;
}
jobEntry.setName(wName.getText());
jobEntry.setIncludeSubfolders(wIncludeSubfolders.getSelection());
jobEntry.setArgFromPrevious(wPrevious.getSelection());
jobEntry.setAddresultfilesname(wAddFileToResult.getSelection());
jobEntry.setDestinationIsAFile(wDestinationIsAFile.getSelection());
jobEntry.setCreateDestinationFolder(wCreateDestinationFolder.getSelection());
jobEntry.setNrErrorsLessThan(wNrErrorsLessThan.getText());
jobEntry.setCreateMoveToFolder(wCreateMoveToFolder.getSelection());

// Inverse of the combo-index mappings performed in getData().
if(wSuccessCondition.getSelectionIndex()==1)
    jobEntry.setSuccessCondition(jobEntry.SUCCESS_IF_AT_LEAST_X_FILES_UN_ZIPPED);
else if(wSuccessCondition.getSelectionIndex()==2)
    jobEntry.setSuccessCondition(jobEntry.SUCCESS_IF_ERRORS_LESS);
else
    jobEntry.setSuccessCondition(jobEntry.SUCCESS_IF_NO_ERRORS);

if(wIfFileExists.getSelectionIndex()==1)
    jobEntry.setIfFileExists("overwrite_file");
else if(wIfFileExists.getSelectionIndex()==2)
    jobEntry.setIfFileExists("unique_name");
else if(wIfFileExists.getSelectionIndex()==3)
    jobEntry.setIfFileExists("delete_file");
else if(wIfFileExists.getSelectionIndex()==4)
    jobEntry.setIfFileExists("move_file");
else if(wIfFileExists.getSelectionIndex()==5)
    jobEntry.setIfFileExists("fail");
else
    jobEntry.setIfFileExists("do_nothing");

jobEntry.setDestinationFolder(wDestinationFolder.getText());
// NOTE(review): setter is named setGPGPLocation while the getter used in
// getData() is getGPGLocation — inconsistent naming in the job entry class.
jobEntry.setGPGPLocation(wGpgExe.getText());

if(wIfMovedFileExists.getSelectionIndex()==1)
    jobEntry.setIfMovedFileExists("overwrite_file");
else if(wIfMovedFileExists.getSelectionIndex()==2)
    jobEntry.setIfMovedFileExists("unique_name");
else if(wIfMovedFileExists.getSelectionIndex()==3)
    jobEntry.setIfMovedFileExists("fail");
else
    jobEntry.setIfMovedFileExists("do_nothing");

jobEntry.setDoNotKeepFolderStructure(wDoNotKeepFolderStructure.getSelection());
jobEntry.setAddDate(wAddDate.getSelection());
jobEntry.setAddTime(wAddTime.getSelection());
jobEntry.setSpecifyFormat(wSpecifyFormat.getSelection());
jobEntry.setDateTimeFormat(wDateTimeFormat.getText());
jobEntry.setAddDateBeforeExtension(wAddDateBeforeExtension.getSelection());
jobEntry.setAddMovedDate(wAddMovedDate.getSelection());
jobEntry.setAddMovedTime(wAddMovedTime.getSelection());
jobEntry.setSpecifyMoveFormat(wSpecifyMoveFormat.getSelection());
jobEntry.setMovedDateTimeFormat(wMovedDateTimeFormat.getText());
jobEntry.setAddMovedDateBeforeExtension(wAddMovedDateBeforeExtension.getSelection());

// Two passes over the grid: first count rows with a non-empty source so the
// four parallel arrays can be sized exactly, then copy the row values across.
int nritems = wFields.nrNonEmpty();
int nr = 0;
for (int i = 0; i < nritems; i++)
{
String arg = wFields.getNonEmpty(i).getText(1);
if (arg != null && arg.length() != 0)
    nr++;
}
jobEntry.source_filefolder = new String[nr];
jobEntry.passphrase = new String[nr];
jobEntry.destination_filefolder = new String[nr];
jobEntry.wildcard = new String[nr];
nr = 0;
for (int i = 0; i < nritems; i++)
{
String source = wFields.getNonEmpty(i).getText(1);
String wild = wFields.getNonEmpty(i).getText(2);
String passphrase = wFields.getNonEmpty(i).getText(3);
String dest = wFields.getNonEmpty(i).getText(4);
if (source != null && source.length() != 0)
{
jobEntry.source_filefolder[nr] = source;
jobEntry.wildcard[nr] = wild;
jobEntry.passphrase[nr] = passphrase;
jobEntry.destination_filefolder[nr] = dest;
nr++;
}
}
dispose();
}

/** Debug identifier: the fully-qualified class name. */
public String toString() {
return this.getClass().getName();
}

/** This job entry evaluates to a success/failure result. */
public boolean evaluates() {
return true;
}

/** Result depends on evaluation; the entry is not unconditional. */
public boolean isUnconditional() {
return false;
}
}
/**
 */
package CIM15.IEC61968.Metering;

import CIM15.IEC61970.Core.IdentifiedObject;

import org.eclipse.emf.common.notify.NotificationChain;

import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;

/**
 * <!-- begin-user-doc -->
 * A representation of the model object '<em><b>Register</b></em>'.
 * <!-- end-user-doc -->
 *
 * <p>
 * The following features are supported:
 * <ul>
 *   <li>{@link CIM15.IEC61968.Metering.Register#getReadingType <em>Reading Type</em>}</li>
 *   <li>{@link CIM15.IEC61968.Metering.Register#getLeftDigitCount <em>Left Digit Count</em>}</li>
 *   <li>{@link CIM15.IEC61968.Metering.Register#getRightDigitCount <em>Right Digit Count</em>}</li>
 *   <li>{@link CIM15.IEC61968.Metering.Register#getEndDeviceFunction <em>End Device Function</em>}</li>
 * </ul>
 * </p>
 *
 * @generated
 */
public class Register extends IdentifiedObject {
	/**
	 * The cached value of the '{@link #getReadingType() <em>Reading Type</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getReadingType()
	 * @generated
	 * @ordered
	 */
	protected ReadingType readingType;

	/**
	 * The default value of the '{@link #getLeftDigitCount() <em>Left Digit Count</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getLeftDigitCount()
	 * @generated
	 * @ordered
	 */
	protected static final int LEFT_DIGIT_COUNT_EDEFAULT = 0;

	/**
	 * The cached value of the '{@link #getLeftDigitCount() <em>Left Digit Count</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getLeftDigitCount()
	 * @generated
	 * @ordered
	 */
	protected int leftDigitCount = LEFT_DIGIT_COUNT_EDEFAULT;

	/**
	 * This is true if the Left Digit Count attribute has been set.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 * @ordered
	 */
	// EMF "ESet" flag: distinguishes an explicitly-set value from the default.
	protected boolean leftDigitCountESet;

	/**
	 * The default value of the '{@link #getRightDigitCount() <em>Right Digit Count</em>}' attribute.
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getRightDigitCount() * @generated * @ordered */ protected static final int RIGHT_DIGIT_COUNT_EDEFAULT = 0; /** * The cached value of the '{@link #getRightDigitCount() <em>Right Digit Count</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getRightDigitCount() * @generated * @ordered */ protected int rightDigitCount = RIGHT_DIGIT_COUNT_EDEFAULT; /** * This is true if the Right Digit Count attribute has been set. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated * @ordered */ protected boolean rightDigitCountESet; /** * The cached value of the '{@link #getEndDeviceFunction() <em>End Device Function</em>}' reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getEndDeviceFunction() * @generated * @ordered */ protected EndDeviceFunction endDeviceFunction; /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected Register() { super(); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override protected EClass eStaticClass() { return MeteringPackage.Literals.REGISTER; } /** * Returns the value of the '<em><b>Reading Type</b></em>' reference. * It is bidirectional and its opposite is '{@link CIM15.IEC61968.Metering.ReadingType#getRegister <em>Register</em>}'. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Reading Type</em>' reference isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Reading Type</em>' reference. 
* @see #setReadingType(ReadingType) * @see CIM15.IEC61968.Metering.ReadingType#getRegister * @generated */ public ReadingType getReadingType() { if (readingType != null && readingType.eIsProxy()) { InternalEObject oldReadingType = (InternalEObject)readingType; readingType = (ReadingType)eResolveProxy(oldReadingType); if (readingType != oldReadingType) { } } return readingType; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public ReadingType basicGetReadingType() { return readingType; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public NotificationChain basicSetReadingType(ReadingType newReadingType, NotificationChain msgs) { ReadingType oldReadingType = readingType; readingType = newReadingType; return msgs; } /** * Sets the value of the '{@link CIM15.IEC61968.Metering.Register#getReadingType <em>Reading Type</em>}' reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Reading Type</em>' reference. * @see #getReadingType() * @generated */ public void setReadingType(ReadingType newReadingType) { if (newReadingType != readingType) { NotificationChain msgs = null; if (readingType != null) msgs = ((InternalEObject)readingType).eInverseRemove(this, MeteringPackage.READING_TYPE__REGISTER, ReadingType.class, msgs); if (newReadingType != null) msgs = ((InternalEObject)newReadingType).eInverseAdd(this, MeteringPackage.READING_TYPE__REGISTER, ReadingType.class, msgs); msgs = basicSetReadingType(newReadingType, msgs); if (msgs != null) msgs.dispatch(); } } /** * Returns the value of the '<em><b>Left Digit Count</b></em>' attribute. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Left Digit Count</em>' attribute isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Left Digit Count</em>' attribute. 
* @see #isSetLeftDigitCount() * @see #unsetLeftDigitCount() * @see #setLeftDigitCount(int) * @generated */ public int getLeftDigitCount() { return leftDigitCount; } /** * Sets the value of the '{@link CIM15.IEC61968.Metering.Register#getLeftDigitCount <em>Left Digit Count</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Left Digit Count</em>' attribute. * @see #isSetLeftDigitCount() * @see #unsetLeftDigitCount() * @see #getLeftDigitCount() * @generated */ public void setLeftDigitCount(int newLeftDigitCount) { leftDigitCount = newLeftDigitCount; leftDigitCountESet = true; } /** * Unsets the value of the '{@link CIM15.IEC61968.Metering.Register#getLeftDigitCount <em>Left Digit Count</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #isSetLeftDigitCount() * @see #getLeftDigitCount() * @see #setLeftDigitCount(int) * @generated */ public void unsetLeftDigitCount() { leftDigitCount = LEFT_DIGIT_COUNT_EDEFAULT; leftDigitCountESet = false; } /** * Returns whether the value of the '{@link CIM15.IEC61968.Metering.Register#getLeftDigitCount <em>Left Digit Count</em>}' attribute is set. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return whether the value of the '<em>Left Digit Count</em>' attribute is set. * @see #unsetLeftDigitCount() * @see #getLeftDigitCount() * @see #setLeftDigitCount(int) * @generated */ public boolean isSetLeftDigitCount() { return leftDigitCountESet; } /** * Returns the value of the '<em><b>Right Digit Count</b></em>' attribute. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Right Digit Count</em>' attribute isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Right Digit Count</em>' attribute. 
* @see #isSetRightDigitCount() * @see #unsetRightDigitCount() * @see #setRightDigitCount(int) * @generated */ public int getRightDigitCount() { return rightDigitCount; } /** * Sets the value of the '{@link CIM15.IEC61968.Metering.Register#getRightDigitCount <em>Right Digit Count</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Right Digit Count</em>' attribute. * @see #isSetRightDigitCount() * @see #unsetRightDigitCount() * @see #getRightDigitCount() * @generated */ public void setRightDigitCount(int newRightDigitCount) { rightDigitCount = newRightDigitCount; rightDigitCountESet = true; } /** * Unsets the value of the '{@link CIM15.IEC61968.Metering.Register#getRightDigitCount <em>Right Digit Count</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #isSetRightDigitCount() * @see #getRightDigitCount() * @see #setRightDigitCount(int) * @generated */ public void unsetRightDigitCount() { rightDigitCount = RIGHT_DIGIT_COUNT_EDEFAULT; rightDigitCountESet = false; } /** * Returns whether the value of the '{@link CIM15.IEC61968.Metering.Register#getRightDigitCount <em>Right Digit Count</em>}' attribute is set. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return whether the value of the '<em>Right Digit Count</em>' attribute is set. * @see #unsetRightDigitCount() * @see #getRightDigitCount() * @see #setRightDigitCount(int) * @generated */ public boolean isSetRightDigitCount() { return rightDigitCountESet; } /** * Returns the value of the '<em><b>End Device Function</b></em>' reference. * It is bidirectional and its opposite is '{@link CIM15.IEC61968.Metering.EndDeviceFunction#getRegisters <em>Registers</em>}'. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>End Device Function</em>' reference isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>End Device Function</em>' reference. 
* @see #setEndDeviceFunction(EndDeviceFunction) * @see CIM15.IEC61968.Metering.EndDeviceFunction#getRegisters * @generated */ public EndDeviceFunction getEndDeviceFunction() { if (endDeviceFunction != null && endDeviceFunction.eIsProxy()) { InternalEObject oldEndDeviceFunction = (InternalEObject)endDeviceFunction; endDeviceFunction = (EndDeviceFunction)eResolveProxy(oldEndDeviceFunction); if (endDeviceFunction != oldEndDeviceFunction) { } } return endDeviceFunction; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public EndDeviceFunction basicGetEndDeviceFunction() { return endDeviceFunction; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public NotificationChain basicSetEndDeviceFunction(EndDeviceFunction newEndDeviceFunction, NotificationChain msgs) { EndDeviceFunction oldEndDeviceFunction = endDeviceFunction; endDeviceFunction = newEndDeviceFunction; return msgs; } /** * Sets the value of the '{@link CIM15.IEC61968.Metering.Register#getEndDeviceFunction <em>End Device Function</em>}' reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>End Device Function</em>' reference. 
* @see #getEndDeviceFunction() * @generated */ public void setEndDeviceFunction(EndDeviceFunction newEndDeviceFunction) { if (newEndDeviceFunction != endDeviceFunction) { NotificationChain msgs = null; if (endDeviceFunction != null) msgs = ((InternalEObject)endDeviceFunction).eInverseRemove(this, MeteringPackage.END_DEVICE_FUNCTION__REGISTERS, EndDeviceFunction.class, msgs); if (newEndDeviceFunction != null) msgs = ((InternalEObject)newEndDeviceFunction).eInverseAdd(this, MeteringPackage.END_DEVICE_FUNCTION__REGISTERS, EndDeviceFunction.class, msgs); msgs = basicSetEndDeviceFunction(newEndDeviceFunction, msgs); if (msgs != null) msgs.dispatch(); } } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public NotificationChain eInverseAdd(InternalEObject otherEnd, int featureID, NotificationChain msgs) { switch (featureID) { case MeteringPackage.REGISTER__READING_TYPE: if (readingType != null) msgs = ((InternalEObject)readingType).eInverseRemove(this, MeteringPackage.READING_TYPE__REGISTER, ReadingType.class, msgs); return basicSetReadingType((ReadingType)otherEnd, msgs); case MeteringPackage.REGISTER__END_DEVICE_FUNCTION: if (endDeviceFunction != null) msgs = ((InternalEObject)endDeviceFunction).eInverseRemove(this, MeteringPackage.END_DEVICE_FUNCTION__REGISTERS, EndDeviceFunction.class, msgs); return basicSetEndDeviceFunction((EndDeviceFunction)otherEnd, msgs); } return super.eInverseAdd(otherEnd, featureID, msgs); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) { switch (featureID) { case MeteringPackage.REGISTER__READING_TYPE: return basicSetReadingType(null, msgs); case MeteringPackage.REGISTER__END_DEVICE_FUNCTION: return basicSetEndDeviceFunction(null, msgs); } return super.eInverseRemove(otherEnd, featureID, msgs); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override 
public Object eGet(int featureID, boolean resolve, boolean coreType) { switch (featureID) { case MeteringPackage.REGISTER__READING_TYPE: if (resolve) return getReadingType(); return basicGetReadingType(); case MeteringPackage.REGISTER__LEFT_DIGIT_COUNT: return getLeftDigitCount(); case MeteringPackage.REGISTER__RIGHT_DIGIT_COUNT: return getRightDigitCount(); case MeteringPackage.REGISTER__END_DEVICE_FUNCTION: if (resolve) return getEndDeviceFunction(); return basicGetEndDeviceFunction(); } return super.eGet(featureID, resolve, coreType); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eSet(int featureID, Object newValue) { switch (featureID) { case MeteringPackage.REGISTER__READING_TYPE: setReadingType((ReadingType)newValue); return; case MeteringPackage.REGISTER__LEFT_DIGIT_COUNT: setLeftDigitCount((Integer)newValue); return; case MeteringPackage.REGISTER__RIGHT_DIGIT_COUNT: setRightDigitCount((Integer)newValue); return; case MeteringPackage.REGISTER__END_DEVICE_FUNCTION: setEndDeviceFunction((EndDeviceFunction)newValue); return; } super.eSet(featureID, newValue); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eUnset(int featureID) { switch (featureID) { case MeteringPackage.REGISTER__READING_TYPE: setReadingType((ReadingType)null); return; case MeteringPackage.REGISTER__LEFT_DIGIT_COUNT: unsetLeftDigitCount(); return; case MeteringPackage.REGISTER__RIGHT_DIGIT_COUNT: unsetRightDigitCount(); return; case MeteringPackage.REGISTER__END_DEVICE_FUNCTION: setEndDeviceFunction((EndDeviceFunction)null); return; } super.eUnset(featureID); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public boolean eIsSet(int featureID) { switch (featureID) { case MeteringPackage.REGISTER__READING_TYPE: return readingType != null; case MeteringPackage.REGISTER__LEFT_DIGIT_COUNT: return isSetLeftDigitCount(); case MeteringPackage.REGISTER__RIGHT_DIGIT_COUNT: 
return isSetRightDigitCount(); case MeteringPackage.REGISTER__END_DEVICE_FUNCTION: return endDeviceFunction != null; } return super.eIsSet(featureID); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public String toString() { if (eIsProxy()) return super.toString(); StringBuffer result = new StringBuffer(super.toString()); result.append(" (leftDigitCount: "); if (leftDigitCountESet) result.append(leftDigitCount); else result.append("<unset>"); result.append(", rightDigitCount: "); if (rightDigitCountESet) result.append(rightDigitCount); else result.append("<unset>"); result.append(')'); return result.toString(); } } // Register
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/recommender/v1beta1/recommender_service.proto
// NOTE(review): protoc-generated message class — any hand edit will be lost
// on the next codegen run; change the .proto and regenerate instead.

package com.google.cloud.recommender.v1beta1;

/**
 * Request to the `GetInsight` method.
 *
 * Protobuf type {@code google.cloud.recommender.v1beta1.GetInsightRequest}
 */
public final class GetInsightRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.recommender.v1beta1.GetInsightRequest)
    GetInsightRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use GetInsightRequest.newBuilder() to construct.
  private GetInsightRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private GetInsightRequest() {
    name_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new GetInsightRequest();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  /**
   * Wire-format parsing constructor: reads tags until EOF (tag 0), storing
   * field 1 (`name`, tag 10 = field 1 / wire type 2) and collecting any
   * unrecognized fields so round-tripping preserves them.
   */
  private GetInsightRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              java.lang.String s = input.readStringRequireUtf8();

              name_ = s;
              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      // Always freeze unknown fields, even on a parse failure.
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.recommender.v1beta1.RecommenderProto
        .internal_static_google_cloud_recommender_v1beta1_GetInsightRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.recommender.v1beta1.RecommenderProto
        .internal_static_google_cloud_recommender_v1beta1_GetInsightRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.recommender.v1beta1.GetInsightRequest.class,
            com.google.cloud.recommender.v1beta1.GetInsightRequest.Builder.class);
  }

  public static final int NAME_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; lazily converted and cached by the
  // accessors below (standard protobuf string-field representation).
  private volatile java.lang.Object name_;
  /**
   * Required. Name of the insight.
   *
   * <code>
   * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The name.
   */
  @java.lang.Override
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so the UTF-8 conversion happens once.
      name_ = s;
      return s;
    }
  }
  /**
   * Required. Name of the insight.
   *
   * <code>
   * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for name.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded ByteString for subsequent serializations.
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // -1 = not computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // proto3: empty strings are not written to the wire.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.recommender.v1beta1.GetInsightRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.recommender.v1beta1.GetInsightRequest other =
        (com.google.cloud.recommender.v1beta1.GetInsightRequest) obj;

    if (!getName().equals(other.getName())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // --- Static parse entry points for every supported input form. ---

  public static com.google.cloud.recommender.v1beta1.GetInsightRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.recommender.v1beta1.GetInsightRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.recommender.v1beta1.GetInsightRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.recommender.v1beta1.GetInsightRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.recommender.v1beta1.GetInsightRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.recommender.v1beta1.GetInsightRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.recommender.v1beta1.GetInsightRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.recommender.v1beta1.GetInsightRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.recommender.v1beta1.GetInsightRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.recommender.v1beta1.GetInsightRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.recommender.v1beta1.GetInsightRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.recommender.v1beta1.GetInsightRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.recommender.v1beta1.GetInsightRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * Request to the `GetInsight` method.
   *
   * Protobuf type {@code google.cloud.recommender.v1beta1.GetInsightRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.recommender.v1beta1.GetInsightRequest)
      com.google.cloud.recommender.v1beta1.GetInsightRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.recommender.v1beta1.RecommenderProto
          .internal_static_google_cloud_recommender_v1beta1_GetInsightRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.recommender.v1beta1.RecommenderProto
          .internal_static_google_cloud_recommender_v1beta1_GetInsightRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.recommender.v1beta1.GetInsightRequest.class,
              com.google.cloud.recommender.v1beta1.GetInsightRequest.Builder.class);
    }

    // Construct using com.google.cloud.recommender.v1beta1.GetInsightRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      name_ = "";

      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.recommender.v1beta1.RecommenderProto
          .internal_static_google_cloud_recommender_v1beta1_GetInsightRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.recommender.v1beta1.GetInsightRequest getDefaultInstanceForType() {
      return com.google.cloud.recommender.v1beta1.GetInsightRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.recommender.v1beta1.GetInsightRequest build() {
      com.google.cloud.recommender.v1beta1.GetInsightRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.recommender.v1beta1.GetInsightRequest buildPartial() {
      com.google.cloud.recommender.v1beta1.GetInsightRequest result =
          new com.google.cloud.recommender.v1beta1.GetInsightRequest(this);
      result.name_ = name_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.recommender.v1beta1.GetInsightRequest) {
        return mergeFrom((com.google.cloud.recommender.v1beta1.GetInsightRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.recommender.v1beta1.GetInsightRequest other) {
      if (other == com.google.cloud.recommender.v1beta1.GetInsightRequest.getDefaultInstance())
        return this;
      // proto3 merge semantics: only non-empty fields overwrite.
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.recommender.v1beta1.GetInsightRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Keep whatever was parsed before the failure, then rethrow.
        parsedMessage =
            (com.google.cloud.recommender.v1beta1.GetInsightRequest) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private java.lang.Object name_ = "";
    /**
     * Required. Name of the insight.
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The name.
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        name_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * Required. Name of the insight.
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for name.
     */
    public com.google.protobuf.ByteString getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * Required. Name of the insight.
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The name to set.
     * @return This builder for chaining.
     */
    public Builder setName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }

      name_ = value;
      onChanged();
      return this;
    }
    /**
     * Required. Name of the insight.
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearName() {
      name_ = getDefaultInstance().getName();
      onChanged();
      return this;
    }
    /**
     * Required. Name of the insight.
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for name to set.
     * @return This builder for chaining.
     */
    public Builder setNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);

      name_ = value;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.recommender.v1beta1.GetInsightRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.GetInsightRequest)
  private static final com.google.cloud.recommender.v1beta1.GetInsightRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.recommender.v1beta1.GetInsightRequest();
  }

  public static com.google.cloud.recommender.v1beta1.GetInsightRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<GetInsightRequest> PARSER =
      new com.google.protobuf.AbstractParser<GetInsightRequest>() {
        @java.lang.Override
        public GetInsightRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new GetInsightRequest(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<GetInsightRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<GetInsightRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.recommender.v1beta1.GetInsightRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// PathVisio,
// a tool for data visualization and analysis using Biological Pathways
// Copyright 2006-2011 BiGCaT Bioinformatics
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package org.pathvisio.plugins.project2008;

import java.io.File;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.pathvisio.data.DataDerby;
import org.pathvisio.data.DataException;
import org.pathvisio.data.Gdb;
import org.pathvisio.data.SimpleGdbFactory;
import org.pathvisio.debug.Logger;
import org.pathvisio.model.ConverterException;
import org.pathvisio.model.DataSource;
import org.pathvisio.model.Xref;
import org.pathvisio.model.XrefWithSymbol;
import org.pathvisio.util.FileUtils;
import org.pathvisio.util.PathwayParser;
import org.pathvisio.util.PathwayParser.ParseException;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.XMLReaderFactory;

/**
 * Computes which percentage of the genes known in the Ensembl database also
 * occur in the pathways published at wikipathways.org, and the pairwise gene
 * overlap between all pathway pairs.
 */
public class GeneCounter
{
	/**
	 * Entry point.
	 *
	 * @param args args[0] is the directory containing the gene databases
	 *             (e.g. "C:\\databases\\"), args[1] the directory containing
	 *             the pathway cache (e.g. "C:\pathways").
	 */
	public static void main(String[] args) throws DataException, ConverterException
	{
		String dbDir = null;
		File pwDir = null;
		try
		{
			// FIX: dropped the redundant 'new String(...)' wrapper.
			dbDir = args[0] + "Rn_39_34i.pgdb";
			// FIX: use File(parent, child) instead of concatenating a hard-coded
			// Windows '\\' separator, so this also works on Unix-like systems.
			pwDir = new File(args[1], "Rattus_norvegicus");
		}
		catch (ArrayIndexOutOfBoundsException e)
		{
			System.out.println("String[] args not given!");
			System.exit(0);
		}

		List<Set<Xref>> refPWarray = getSets(dbDir, pwDir);

		// The last entry of refPWarray is the union of all per-pathway sets; its
		// size is the number of distinct genes used on wikipathways.org.
		int numberOfGenesEN = getNumberOFGenesEN();
		int usedgenes = refPWarray.get(refPWarray.size() - 1).size();
		double percentageUsedGenes = round3((double) usedgenes / (double) numberOfGenesEN * 100.0);
		System.out.println("Percentage of used genes at http://www.wikipathways.org = " + percentageUsedGenes + "%");

		// Drop the union set again and compute the pairwise overlap matrix.
		// NOTE(review): the matrix result was never used here (the original bound
		// it to an unused local); callers that need it should use getOverlap().
		refPWarray.remove(refPWarray.size() - 1);
		getPercentage(refPWarray);
	}

	/**
	 * Builds, for every ".gpml" pathway file under pwDir (searched recursively),
	 * the set of Ensembl references it contains, and appends one extra set
	 * holding the union of all of them.
	 *
	 * @param dbDir path of the gene database used to normalize cross references
	 * @param pwDir directory containing the pathway files
	 * @return one Xref set per pathway, plus the union set as the LAST element
	 */
	public static List<Set<Xref>> getSets(String dbDir, File pwDir) throws DataException, ConverterException
	{
		List<File> filenames = FileUtils.getFiles(pwDir, "gpml", true);
		Set<Xref> totalS = new HashSet<Xref>();
		Gdb db = SimpleGdbFactory.createInstance(dbDir, new DataDerby(), 0);
		List<Set<Xref>> refPWarray = new ArrayList<Set<Xref>>();
		// FIX: enhanced for loop instead of manual index bookkeeping.
		for (File fileName : filenames)
		{
			Set<Xref> setOfRefPW = getRefPW(fileName, db);
			refPWarray.add(setOfRefPW);
			totalS.addAll(setOfRefPW);
		}
		refPWarray.add(totalS);
		return refPWarray;
	}

	/**
	 * Returns the total number of genes currently known in the Ensembl database
	 * (a hard-coded snapshot value).
	 * Method name — including the 'OF' capitalization — kept as-is for backward
	 * compatibility with existing callers.
	 */
	public static int getNumberOFGenesEN()
	{
		return 17738;
	}

	/**
	 * Returns (and prints) the percentage of genes known in the Ensembl database
	 * that are used in at least one pathway at wikipathways.org.
	 *
	 * @param dbDir path of the gene database
	 * @param pwDir directory containing the pathway files
	 * @return the percentage, rounded to three decimal places
	 */
	public static double getUsedGenes(String dbDir, File pwDir) throws DataException, ConverterException
	{
		// Total amount of known genes in the Ensembl Database.
		int numberOfGenesEN = getNumberOFGenesEN();
		List<Set<Xref>> refPWarray = getSets(dbDir, pwDir);
		// The union set appended by getSets() holds every distinct used gene.
		int usedgenes = refPWarray.get(refPWarray.size() - 1).size();
		double percentageUsedGenes = round3((double) usedgenes / (double) numberOfGenesEN * 100.0);
		System.out.println("Percentage of used genes at http://www.wikipathways.org = " + percentageUsedGenes + "%");
		return percentageUsedGenes;
	}

	/**
	 * Computes the pairwise gene overlap between all pathways under pwDir.
	 *
	 * @param dbDir path of the gene database
	 * @param pwDir directory containing the pathway files
	 * @return a matrix of overlap percentages; entry [j][k] is the percentage of
	 *         pathway j's genes that also occur in pathway k
	 */
	public static Double[][] getOverlap(String dbDir, File pwDir) throws DataException, ConverterException
	{
		List<Set<Xref>> refPWarray = getSets(dbDir, pwDir);
		// Drop the trailing union set; only per-pathway sets take part in the matrix.
		refPWarray.remove(refPWarray.size() - 1);
		return getPercentage(refPWarray);
	}

	/**
	 * Extracts every gene reference from one pathway file and normalizes it to
	 * Ensembl identifiers via the given gene database. Pathways that cannot be
	 * parsed or resolved are skipped (an empty/partial set is returned).
	 *
	 * @param filename the ".gpml" pathway file to read
	 * @param db the gene database used for cross-reference lookup
	 * @return the set of Ensembl Xrefs found in the pathway
	 */
	public static Set<Xref> getRefPW(File filename, Gdb db)
	{
		Set<Xref> s = new HashSet<Xref>();
		try
		{
			Logger.log.info("Reading pathway " + filename);
			XMLReader xmlReader = XMLReaderFactory.createXMLReader();
			PathwayParser p = new PathwayParser(filename, xmlReader);
			for (XrefWithSymbol gene : p.getGenes())
			{
				List<Xref> cRef = db.getCrossRefs(gene.asXref(), DataSource.ENSEMBL);
				s.addAll(cRef);
			}
		}
		catch (ParseException e)
		{
			// FIX: report WHICH pathway is skipped instead of a bare message.
			Logger.log.error("Ignoring pathway " + filename, e);
		}
		catch (DataException e)
		{
			Logger.log.error("Ignoring pathway " + filename, e);
		}
		catch (SAXException e)
		{
			Logger.log.error("Couldn't create XML reader", e);
		}
		return s;
	}

	/**
	 * Converts the raw pairwise overlap counts into percentages. Entry [j][k]
	 * is the percentage of pathway j's genes that also occur in pathway k,
	 * rounded to three decimal places.
	 *
	 * @param refPWarray one Xref set per pathway (no trailing union set)
	 * @return the percentage matrix
	 */
	public static Double[][] getPercentage(List<Set<Xref>> refPWarray)
	{
		int numberOfPathways = refPWarray.size();
		Double[][] overlap = new Double[numberOfPathways][];
		int[][] a = getOverlapMatrix(refPWarray);
		int[] numberOfGenes = getSizeVector(refPWarray);
		for (int j = 0; j < numberOfPathways; j++)
		{
			overlap[j] = new Double[numberOfPathways];
			for (int k = 0; k < numberOfPathways; k++)
			{
				if (numberOfGenes[j] == 0)
				{
					// FIX: an empty pathway previously produced NaN (0/0).
					overlap[j][k] = 0.0;
				}
				else
				{
					overlap[j][k] = round3((double) a[j][k] / (double) numberOfGenes[j] * 100.0);
				}
			}
		}
		return overlap;
	}

	/**
	 * Counts, for every pair of pathways, how many genes they share. The matrix
	 * is symmetric, and the diagonal holds each pathway's own gene count.
	 *
	 * @param refPWarray one Xref set per pathway
	 * @return the symmetric overlap-count matrix
	 */
	public static int[][] getOverlapMatrix(List<Set<Xref>> refPWarray)
	{
		int numberOfPathways = refPWarray.size();
		int[][] a = new int[numberOfPathways][];
		for (int j = 0; j < numberOfPathways; j++)
		{
			a[j] = new int[numberOfPathways];
		}
		for (int j = 0; j < numberOfPathways; j++)
		{
			// Only compute the lower triangle and mirror it (matrix is symmetric).
			for (int k = 0; k <= j; k++)
			{
				// FIX: Set.retainAll computes the intersection directly, replacing
				// the manual membership loop with its 'm == true' comparison.
				Set<Xref> shared = new HashSet<Xref>(refPWarray.get(j));
				shared.retainAll(refPWarray.get(k));
				a[j][k] = shared.size();
				a[k][j] = shared.size();
			}
		}
		return a;
	}

	/**
	 * Returns the number of genes stored in each pathway set.
	 *
	 * @param refPWarray one Xref set per pathway
	 * @return an array with the size of each set, in the same order
	 */
	public static int[] getSizeVector(List<Set<Xref>> refPWarray)
	{
		int numberOfPathways = refPWarray.size();
		int[] numberOfGenes = new int[numberOfPathways];
		for (int j = 0; j < numberOfPathways; j++)
		{
			numberOfGenes[j] = refPWarray.get(j).size();
		}
		return numberOfGenes;
	}

	/** Rounds a value to three decimal places (shared by all percentage math). */
	private static double round3(double value)
	{
		return (long) Math.round(value * 1000.0) / 1000.0;
	}
}
/** Copyright 2017 Andrea "Stock" Stocchero Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.pepstock.charba.client.utils.toast; import org.pepstock.charba.client.commons.AbstractNode; import org.pepstock.charba.client.commons.Key; import org.pepstock.charba.client.commons.NativeObject; import org.pepstock.charba.client.dom.elements.Img; import org.pepstock.charba.client.enums.ModifierKey; import org.pepstock.charba.client.items.Undefined; import org.pepstock.charba.client.utils.toast.enums.Align; import org.pepstock.charba.client.utils.toast.enums.DefaultProgressBarType; import org.pepstock.charba.client.utils.toast.enums.DefaultToastType; /** * Entity to expose the configuration of a toast in read only mode.<br> * It is the base for all toast options. * * @author Andrea "Stock" Stocchero * */ abstract class AbstractReadOnlyToastOptions extends AbstractNode implements IsDefaultToastOptions { /** * Name of properties of native object. */ enum Property implements Key { ALIGN("align"), AUTO_HIDE("autoHide"), BORDER_RADIUS("borderRadius"), HIDE_PROGRESS_BAR("hideProgressBar"), HIDE_SHADOW("hideShadow"), ICON("icon"), MODIFIER_KEY("modifierKey"), PROGRESS_BAR_HEIGHT("progressBarHeight"), PROGRESS_BAR_TYPE("progressBarType"), TIMEOUT("timeout"), TYPE("type"), // inner elements TITLE("title"), LABEL("label"), ACTION("action"); // name value of property private final String value; /** * Creates with the property value to use in the native object. 
* * @param value value of property name */ private Property(String value) { this.value = value; } /* * (non-Javadoc) * * @see org.pepstock.charba.client.commons.Key#value() */ @Override public String value() { return value; } } // defaults instance private final IsDefaultToastOptions defaultValues; // title instance private final Title title; // label instance private final Label label; // actions instance private final Action action; /** * Creates the configuration with native object instance to be wrapped. * * @param nativeObject native object instance to be wrapped. * @param defaultValues defaults instance */ AbstractReadOnlyToastOptions(NativeObject nativeObject, IsDefaultToastOptions defaultValues) { super(nativeObject); this.defaultValues = checkDefaultValuesArgument(defaultValues); // gets inner element this.title = new Title(this, Property.TITLE, this.defaultValues.getTitle(), getValue(Property.TITLE)); this.label = new Label(this, Property.LABEL, this.defaultValues.getLabel(), getValue(Property.LABEL)); this.action = new Action(this, Property.ACTION, this.defaultValues.getAction(), getValue(Property.ACTION)); } /** * Returns the default values. * * @return the default values */ final IsDefaultToastOptions getDefaultValues() { return defaultValues; } /** * Returns the title of the toast. * * @return the title of the toast */ @Override public IsDefaultContentElement getTitle() { return title; } /** * Returns the action of the toast. * * @return the action of the toast */ @Override public IsDefaultAction getAction() { return action; } /** * Returns the label of the toast. * * @return the label of the toast */ @Override public IsDefaultContentElement getLabel() { return label; } /** * Returns the type of the toast. 
* * @return the type of the toast */ @Override public final IsToastType getType() { // search for default IsToastType type = getValue(Property.TYPE, DefaultToastType.values(), null); // checks if not consistent if (type == null) { // searches in the map stored in the builder type = ToastTypeBuilder.get(getValue(Property.TYPE, Undefined.STRING)); // checks is still null // then returns the default if (type == null) { return DefaultToastType.DEFAULT; } } return type; } /** * Returns the type of the toast progress bar. * * @return the type of the toast progress bar */ @Override public final IsProgressBarType getProgressBarType() { // search for default IsProgressBarType type = getValue(Property.PROGRESS_BAR_TYPE, DefaultProgressBarType.values(), null); // checks if not consistent if (type == null) { // searches in the map stored in the builder type = ProgressBarTypeBuilder.get(getValue(Property.PROGRESS_BAR_TYPE, Undefined.STRING)); // checks is still null // then returns the default if (type == null) { return DefaultProgressBarType.DEFAULT; } } return type; } /** * Returns the height (in pixels) of the toast progress bar. * * @return the height (in pixels) of the toast progress bar */ @Override public final int getProgressBarHeight() { return getValue(Property.PROGRESS_BAR_HEIGHT, defaultValues.getProgressBarHeight()); } /** * Returns <code>true</code> whether to hide the progress bar. * * @return <code>true</code> whether to hide the progress bar */ @Override public final boolean isHideProgressBar() { return getValue(Property.HIDE_PROGRESS_BAR, defaultValues.isHideProgressBar()); } /** * Returns <code>true</code> whether to hide the shadow of toast. 
* * @return <code>true</code> whether to hide the shadow of toast */ @Override public final boolean isHideShadow() { return getValue(Property.HIDE_SHADOW, defaultValues.isHideShadow()); } /** * Returns whether to make the toast notification sticky, which means that the toast notification will never auto dismiss until clicked. * * @return whether to make the toast notification sticky, which means that the toast notification will never auto dismiss until clicked */ @Override public final boolean isAutoHide() { return getValue(Property.AUTO_HIDE, defaultValues.isAutoHide()); } /** * Returns how long the toast notification should last. * * @return how long the toast notification should last */ @Override public final int getTimeout() { return getValue(Property.TIMEOUT, defaultValues.getTimeout()); } /** * Returns the icon image set for toast. * * @return the icon image set for toast */ @Override public final Img getIcon() { return getValue(Property.ICON, defaultValues.getIcon()); } /** * Returns the border radius (in pixels). * * @return the border radius (in pixels). */ @Override public int getBorderRadius() { return getValue(Property.BORDER_RADIUS, defaultValues.getBorderRadius()); } /** * Returns the modifier key to close the toast by clicking on it. * * @return the modifier key to close the toast by clicking on it */ @Override public ModifierKey getModifierKey() { return getValue(Property.MODIFIER_KEY, ModifierKey.values(), defaultValues.getModifierKey()); } /** * Returns the alignment of the toast action. * * @return the alignment of the toast action */ @Override public final Align getAlign() { return getValue(Property.ALIGN, Align.values(), defaultValues.getAlign()); } /** * Returns the native object instance. * * @return the native object instance. */ final NativeObject nativeObject() { return getNativeObject(); } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.yarn.server.webproxy;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.ObjectInputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.net.InetAddress;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URLEncoder;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import javax.servlet.ServletException;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.core.UriBuilder;

import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.ApplicationNotFoundException;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.server.webproxy.AppReportFetcher.AppReportSource;
import org.apache.hadoop.yarn.server.webproxy.AppReportFetcher.FetchedAppReport;
import org.apache.hadoop.yarn.util.Apps;
import org.apache.hadoop.yarn.util.StringHelper;
import org.apache.hadoop.yarn.util.TrackingUriPlugin;
import org.apache.hadoop.yarn.webapp.MimeType;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
import org.apache.http.Header;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.client.params.ClientPNames;
import org.apache.http.client.params.CookiePolicy;
import org.apache.http.client.utils.URLEncodedUtils;
import org.apache.http.conn.params.ConnRoutePNames;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.DefaultHttpClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Servlet of the YARN web proxy: forwards HTTP requests to the tracking URL of
 * the application master, falling back to the RM or AHS app pages when no
 * tracking URL is available, and (in secure mode) interposing a warning page
 * before connecting a user to an AM owned by someone else.
 */
public class WebAppProxyServlet extends HttpServlet {
  private static final long serialVersionUID = 1L;
  private static final Logger LOG = LoggerFactory.getLogger(
      WebAppProxyServlet.class);
  // Request headers that are copied verbatim onto the proxied request.
  private static final Set<String> passThroughHeaders =
      new HashSet<>(Arrays.asList(
          "User-Agent",
          "Accept",
          "Accept-Encoding",
          "Accept-Language",
          "Accept-Charset",
          "Content-Type",
          "Origin",
          "Access-Control-Request-Method",
          "Access-Control-Request-Headers"));

  public static final String PROXY_USER_COOKIE_NAME = "proxy-user";

  // transient fields are rebuilt from a fresh YarnConfiguration in readObject()
  private transient List<TrackingUriPlugin> trackingUriPlugins;
  private final String rmAppPageUrlBase;
  private final String ahsAppPageUrlBase;
  private transient YarnConfiguration conf;

  /**
   * HTTP methods.
   */
  private enum HTTP { GET, POST, HEAD, PUT, DELETE };

  /**
   * Empty Hamlet class.
   */
  private static class _ implements Hamlet._ {
    //Empty
  }

  /**
   * Minimal Hamlet page used to render the warning page HTML.
   */
  private static class Page extends Hamlet {
    Page(PrintWriter out) {
      super(out, 0, false);
    }

    public HTML<WebAppProxyServlet._> html() {
      return new HTML<>("html", null, EnumSet.of(EOpt.ENDTAG));
    }
  }

  /**
   * Default constructor
   */
  public WebAppProxyServlet() {
    super();
    conf = new YarnConfiguration();
    this.trackingUriPlugins =
        conf.getInstances(YarnConfiguration.YARN_TRACKING_URL_GENERATOR,
            TrackingUriPlugin.class);
    // Pre-computed base URLs for the RM and AHS fallback redirects.
    this.rmAppPageUrlBase = StringHelper.pjoin(
        WebAppUtils.getResolvedRMWebAppURLWithScheme(conf), "cluster", "app");
    this.ahsAppPageUrlBase = StringHelper.pjoin(
        WebAppUtils.getHttpSchemePrefix(conf) + WebAppUtils
        .getAHSWebAppURLWithoutScheme(conf), "applicationhistory", "apps");
  }

  /**
   * Output 404 with appropriate message.
   * @param resp the http response.
   * @param message the message to include on the page.
   * @throws IOException on any error.
   */
  private static void notFound(HttpServletResponse resp, String message)
      throws IOException {
    ProxyUtils.notFound(resp, message);
  }

  /**
   * Warn the user that the link may not be safe!
   * @param resp the http response
   * @param link the link to point to
   * @param user the user that owns the link.
   * @throws IOException on any error.
   */
  private static void warnUserPage(HttpServletResponse resp, String link,
      String user, ApplicationId id) throws IOException {
    //Set the cookie when we warn which overrides the query parameter
    //This is so that if a user passes in the approved query parameter without
    //having first visited this page then this page will still be displayed
    resp.addCookie(makeCheckCookie(id, false));
    resp.setContentType(MimeType.HTML);
    Page p = new Page(resp.getWriter());
    p.html().
      h1("WARNING: The following page may not be safe!").
      h3().
      _("click ").a(link, "here").
      _(" to continue to an Application Master web interface owned by ", user).
      _().
    _();
  }

  /**
   * Download link and have it be the response.
   * @param req the http request
   * @param resp the http response
   * @param link the link to download
   * @param c the cookie to set if any
   * @param proxyHost the proxy host
   * @param method the http method
   * @throws IOException on any error.
   */
  private static void proxyLink(final HttpServletRequest req,
      final HttpServletResponse resp, final URI link, final Cookie c,
      final String proxyHost, final HTTP method) throws IOException {
    DefaultHttpClient client = new DefaultHttpClient();
    client
        .getParams()
        .setParameter(ClientPNames.COOKIE_POLICY,
            CookiePolicy.BROWSER_COMPATIBILITY)
        .setBooleanParameter(ClientPNames.ALLOW_CIRCULAR_REDIRECTS, true);
    // Make sure we send the request from the proxy address in the config
    // since that is what the AM filter checks against. IP aliasing or
    // similar could cause issues otherwise.
    InetAddress localAddress = InetAddress.getByName(proxyHost);
    if (LOG.isDebugEnabled()) {
      LOG.debug("local InetAddress for proxy host: {}", localAddress);
    }
    client.getParams()
        .setParameter(ConnRoutePNames.LOCAL_ADDRESS, localAddress);
    HttpRequestBase base = null;
    if (method.equals(HTTP.GET)) {
      base = new HttpGet(link);
    } else if (method.equals(HTTP.PUT)) {
      base = new HttpPut(link);
      // Copy the request body into the proxied PUT.
      StringBuilder sb = new StringBuilder();
      BufferedReader reader =
          new BufferedReader(
              new InputStreamReader(req.getInputStream(), "UTF-8"));
      String line;
      while ((line = reader.readLine()) != null) {
        sb.append(line);
      }
      ((HttpPut) base).setEntity(new StringEntity(sb.toString()));
    } else {
      // Only GET and PUT are proxied; everything else is rejected.
      resp.setStatus(HttpServletResponse.SC_METHOD_NOT_ALLOWED);
      return;
    }

    @SuppressWarnings("unchecked")
    Enumeration<String> names = req.getHeaderNames();
    // Forward only the whitelisted headers to the AM.
    while(names.hasMoreElements()) {
      String name = names.nextElement();
      if(passThroughHeaders.contains(name)) {
        String value = req.getHeader(name);
        if (LOG.isDebugEnabled()) {
          LOG.debug("REQ HEADER: {} : {}", name, value);
        }
        base.setHeader(name, value);
      }
    }

    String user = req.getRemoteUser();
    if (user != null && !user.isEmpty()) {
      // Tell the AM who the original caller is via a cookie.
      base.setHeader("Cookie",
          PROXY_USER_COOKIE_NAME + "=" + URLEncoder.encode(user, "ASCII"));
    }
    OutputStream out = resp.getOutputStream();
    try {
      HttpResponse httpResp = client.execute(base);
      resp.setStatus(httpResp.getStatusLine().getStatusCode());
      for (Header header : httpResp.getAllHeaders()) {
        resp.setHeader(header.getName(), header.getValue());
      }
      if (c != null) {
        resp.addCookie(c);
      }
      InputStream in = httpResp.getEntity().getContent();
      if (in != null) {
        // copyBytes(..., true) closes both streams when done.
        IOUtils.copyBytes(in, out, 4096, true);
      }
    } finally {
      base.releaseConnection();
    }
  }

  // Name of the per-application cookie recording that the user saw the warning.
  private static String getCheckCookieName(ApplicationId id){
    return "checked_"+id;
  }

  // Cookie scoped to the app's proxy path; value records the user's approval.
  private static Cookie makeCheckCookie(ApplicationId id, boolean isSet) {
    Cookie c = new Cookie(getCheckCookieName(id),String.valueOf(isSet));
    c.setPath(ProxyUriUtils.getPath(id));
    c.setMaxAge(60 * 60 * 2); //2 hours in seconds
    return c;
  }

  // Reads the security flag published by WebAppProxy in the servlet context.
  private boolean isSecurityEnabled() {
    Boolean b = (Boolean) getServletContext()
        .getAttribute(WebAppProxy.IS_SECURITY_ENABLED_ATTRIBUTE);
    return b != null ? b : false;
  }

  // Fetches the app report through the AppReportFetcher in the servlet context.
  private FetchedAppReport getApplicationReport(ApplicationId id)
      throws IOException, YarnException {
    return ((AppReportFetcher) getServletContext()
        .getAttribute(WebAppProxy.FETCHER_ATTRIBUTE)).getApplicationReport(id);
  }

  private String getProxyHost() throws IOException {
    return ((String) getServletContext()
        .getAttribute(WebAppProxy.PROXY_HOST_ATTRIBUTE));
  }

  @Override
  protected void doGet(HttpServletRequest req, HttpServletResponse resp)
      throws ServletException, IOException {
    methodAction(req, resp, HTTP.GET);
  }

  @Override
  protected final void doPut(final HttpServletRequest req,
      final HttpServletResponse resp) throws ServletException, IOException {
    methodAction(req, resp, HTTP.PUT);
  }

  /**
   * The action against the HTTP method.
   * @param req the HttpServletRequest
   * @param resp the HttpServletResponse
   * @param method the HTTP method
   * @throws ServletException
   * @throws IOException
   */
  private void methodAction(final HttpServletRequest req,
      final HttpServletResponse resp, final HTTP method)
      throws ServletException, IOException {
    try {
      String userApprovedParamS =
          req.getParameter(ProxyUriUtils.PROXY_APPROVAL_PARAM);
      boolean userWasWarned = false;
      boolean userApproved = Boolean.valueOf(userApprovedParamS);
      boolean securityEnabled = isSecurityEnabled();
      final String remoteUser = req.getRemoteUser();
      final String pathInfo = req.getPathInfo();

      // Expected path shape: /<appId>[/<rest-of-path>]
      String[] parts = null;
      if (pathInfo != null) {
        parts = pathInfo.split("/", 3);
      }
      if(parts == null || parts.length < 2) {
        LOG.warn("{} gave an invalid proxy path {}", remoteUser, pathInfo);
        notFound(resp, "Your path appears to be formatted incorrectly.");
        return;
      }
      //parts[0] is empty because path info always starts with a /
      String appId = parts[1];
      String rest = parts.length > 2 ? parts[2] : "";
      ApplicationId id = Apps.toAppID(appId);
      if(id == null) {
        LOG.warn("{} attempting to access {} that is invalid",
            remoteUser, appId);
        notFound(resp, appId + " appears to be formatted incorrectly.");
        return;
      }

      // In secure mode, the per-app "checked_" cookie tells us whether this
      // user already saw the warning page and whether they approved.
      if(securityEnabled) {
        String cookieName = getCheckCookieName(id);
        Cookie[] cookies = req.getCookies();
        if (cookies != null) {
          for (Cookie c : cookies) {
            if (cookieName.equals(c.getName())) {
              userWasWarned = true;
              userApproved = userApproved || Boolean.valueOf(c.getValue());
              break;
            }
          }
        }
      }
      boolean checkUser = securityEnabled && (!userWasWarned || !userApproved);

      FetchedAppReport fetchedAppReport = null;
      ApplicationReport applicationReport = null;
      try {
        fetchedAppReport = getApplicationReport(id);
        if (fetchedAppReport != null) {
          if (fetchedAppReport.getAppReportSource() != AppReportSource.RM &&
              fetchedAppReport.getAppReportSource() != AppReportSource.AHS) {
            throw new UnsupportedOperationException("Application report not "
                + "fetched from RM or history server.");
          }
          applicationReport = fetchedAppReport.getApplicationReport();
        }
      } catch (ApplicationNotFoundException e) {
        applicationReport = null;
      }
      if(applicationReport == null) {
        LOG.warn("{} attempting to access {} that was not found",
            remoteUser, id);
        // Unknown app: give the tracking URI plugins a chance to resolve it.
        URI toFetch = ProxyUriUtils
            .getUriFromTrackingPlugins(id, this.trackingUriPlugins);
        if (toFetch != null) {
          ProxyUtils.sendRedirect(req, resp, toFetch.toString());
          return;
        }
        notFound(resp, "Application " + appId + " could not be found " +
            "in RM or history server");
        return;
      }
      String original = applicationReport.getOriginalTrackingUrl();
      URI trackingUri;
      if (original == null || original.equals("N/A") || original.equals("")) {
        if (fetchedAppReport.getAppReportSource() == AppReportSource.RM) {
          // fallback to ResourceManager's app page if no tracking URI provided
          // and Application Report was fetched from RM
          LOG.debug("Original tracking url is '{}'. Redirecting to RM app page",
              original == null? "NULL" : original);
          ProxyUtils.sendRedirect(req, resp,
              StringHelper.pjoin(rmAppPageUrlBase, id.toString()));
        } else if (fetchedAppReport.getAppReportSource()
              == AppReportSource.AHS) {
          // fallback to Application History Server app page if the application
          // report was fetched from AHS
          LOG.debug("Original tracking url is '{}'. Redirecting to AHS app page"
              , original == null? "NULL" : original);
          ProxyUtils.sendRedirect(req, resp,
              StringHelper.pjoin(ahsAppPageUrlBase, id.toString()));
        }
        return;
      } else {
        // Normalize scheme-less tracking URLs using the configured prefix.
        if (ProxyUriUtils.getSchemeFromUrl(original).isEmpty()) {
          trackingUri = ProxyUriUtils.getUriFromAMUrl(
              WebAppUtils.getHttpSchemePrefix(conf), original);
        } else {
          trackingUri = new URI(original);
        }
      }

      String runningUser = applicationReport.getUser();
      if(checkUser && !runningUser.equals(remoteUser)) {
        // Secure mode and user is about to view someone else's AM: interpose
        // the warning page instead of proxying.
        LOG.info("Asking {} if they want to connect to the "
            + "app master GUI of {} owned by {}",
            remoteUser, appId, runningUser);
        warnUserPage(resp, ProxyUriUtils.getPathAndQuery(id, rest,
            req.getQueryString(), true), runningUser, id);
        return;
      }

      // Append the user-provided path and query parameter to the original
      // tracking url.
      List<NameValuePair> queryPairs =
          URLEncodedUtils.parse(req.getQueryString(), null);
      UriBuilder builder = UriBuilder.fromUri(trackingUri);
      for (NameValuePair pair : queryPairs) {
        builder.queryParam(pair.getName(), pair.getValue());
      }
      URI toFetch = builder.path(rest).build();

      LOG.info("{} is accessing unchecked {}"
          + " which is the app master GUI of {} owned by {}",
          remoteUser, toFetch, appId, runningUser);

      // Finished apps have no live AM; redirect to the final tracking URL.
      switch (applicationReport.getYarnApplicationState()) {
      case KILLED:
      case FINISHED:
      case FAILED:
        ProxyUtils.sendRedirect(req, resp, toFetch.toString());
        return;
      default:
        // fall out of the switch
      }
      Cookie c = null;
      if (userWasWarned && userApproved) {
        // Remember the approval so subsequent requests skip the warning page.
        c = makeCheckCookie(id, true);
      }
      proxyLink(req, resp, toFetch, c, getProxyHost(), method);
    } catch(URISyntaxException | YarnException e) {
      throw new IOException(e);
    }
  }

  /**
   * This method is used by Java object deserialization, to fill in the
   * transient {@link #trackingUriPlugins} field.
   * See {@link ObjectInputStream#defaultReadObject()}
   * <p>
   *   <I>Do not remove</I>
   * <p>
   * Yarn isn't currently serializing this class, but findbugs
   * complains in its absence.
   *
   *
   * @param input source
   * @throws IOException IO failure
   * @throws ClassNotFoundException classloader fun
   */
  private void readObject(ObjectInputStream input)
      throws IOException, ClassNotFoundException {
    input.defaultReadObject();
    conf = new YarnConfiguration();
    this.trackingUriPlugins =
        conf.getInstances(YarnConfiguration.YARN_TRACKING_URL_GENERATOR,
            TrackingUriPlugin.class);
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.query.continuous; import java.io.Externalizable; import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectOutput; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Random; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.TimeUnit; import javax.cache.configuration.Factory; import javax.cache.configuration.FactoryBuilder; import javax.cache.configuration.MutableCacheEntryListenerConfiguration; import javax.cache.event.CacheEntryCreatedListener; import javax.cache.event.CacheEntryEvent; import javax.cache.event.CacheEntryExpiredListener; import javax.cache.event.CacheEntryListener; import javax.cache.event.CacheEntryListenerException; import javax.cache.event.CacheEntryRemovedListener; import javax.cache.event.CacheEntryUpdatedListener; import javax.cache.event.EventType; import javax.cache.processor.EntryProcessor; import javax.cache.processor.MutableEntry; import 
org.apache.ignite.Ignite; import org.apache.ignite.IgniteCache; import org.apache.ignite.binary.BinaryObject; import org.apache.ignite.cache.CacheEntryEventSerializableFilter; import org.apache.ignite.cache.query.ContinuousQuery; import org.apache.ignite.cache.query.QueryCursor; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.internal.binary.BinaryMarshaller; import org.apache.ignite.internal.util.typedef.CI1; import org.apache.ignite.internal.util.typedef.G; import org.apache.ignite.internal.util.typedef.PA; import org.apache.ignite.internal.util.typedef.internal.S; import org.apache.ignite.lang.IgniteAsyncCallback; import org.apache.ignite.lang.IgniteInClosure; import org.apache.ignite.testframework.GridTestUtils; import org.apache.ignite.testframework.junits.IgniteCacheConfigVariationsAbstractTest; import org.apache.ignite.transactions.Transaction; import org.apache.ignite.transactions.TransactionConcurrency; import org.apache.ignite.transactions.TransactionIsolation; import org.jetbrains.annotations.Nullable; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.concurrent.TimeUnit.SECONDS; import static javax.cache.event.EventType.CREATED; import static javax.cache.event.EventType.REMOVED; import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL; import static org.apache.ignite.internal.processors.cache.query.continuous.CacheContinuousQueryVariationsTest.SerializableFilter.isAccepted; import static org.apache.ignite.testframework.junits.IgniteConfigVariationsAbstractTest.DataMode.EXTERNALIZABLE; import static org.apache.ignite.transactions.TransactionIsolation.READ_COMMITTED; import static org.apache.ignite.transactions.TransactionIsolation.REPEATABLE_READ; import static org.apache.ignite.transactions.TransactionIsolation.SERIALIZABLE; /** * */ public class CacheContinuousQueryVariationsTest extends 
IgniteCacheConfigVariationsAbstractTest { /** */ private static final int ITERATION_CNT = 20; /** */ private static final int KEYS = 50; /** */ private static final int VALS = 10; /** */ public static boolean singleNode = false; /** {@inheritDoc} */ @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception { IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName); cfg.setClientMode(igniteInstanceName.endsWith("0") && !singleNode); return cfg; } /** * @throws Exception If failed. */ public void testRandomOperationJCacheApiKeepBinary() throws Exception { testRandomOperation(true, false, false, false, true); } /** * @throws Exception If failed. */ public void testRandomOperationJCacheApiAsyncCallback() throws Exception { testRandomOperation(true, false, false, true, false); } /** * @throws Exception If failed. */ public void testRandomOperationJCacheApiWithFilter() throws Exception { testRandomOperation(true, false, true, false, false); } /** * @throws Exception If failed. */ public void testRandomOperationJCacheApiWithFilterAsyncCallback() throws Exception { testRandomOperation(true, false, true, true, false); } /** * @throws Exception If failed. */ public void testRandomOperationJCacheApiSyncWithFilter() throws Exception { testRandomOperation(true, true, true, false, false); } /** * @throws Exception If failed. */ public void testRandomOperation() throws Exception { testRandomOperation(true, true, false, false, false); } /** * @throws Exception If failed. */ public void testRandomOperationWithKeepBinary() throws Exception { testRandomOperation(true, true, false, false, true); } /** * @throws Exception If failed. */ public void testRandomOperationWithAsyncCallback() throws Exception { testRandomOperation(true, true, false, true, false); } /** * @throws Exception If failed. 
*/ public void testRandomOperationWithFilter() throws Exception { testRandomOperation(true, true, true, false, false); } /** * @throws Exception If failed. */ public void testRandomOperationWithFilterWithKeepBinary() throws Exception { testRandomOperation(true, true, true, false, true); } /** * @throws Exception If failed. */ public void testRandomOperationWithFilterAsyncCallback() throws Exception { testRandomOperation(true, true, true, true, false); } /** * @param jcacheApi Use JCache API. * @param syncNtf Use sync notification. * @param withFilter Use filter. * @param asyncCallback Filter is annotated IgniteAsyncCallback * @param keepBinary Keep binary. * @throws Exception If failed. */ private void testRandomOperation(final boolean jcacheApi, final boolean syncNtf, final boolean withFilter, final boolean asyncCallback, final boolean keepBinary) throws Exception { if (keepBinary && !(getConfiguration().getMarshaller() == null || getConfiguration().getMarshaller().getClass() == BinaryMarshaller.class)) return; runInAllDataModes(new TestRunnable() { @Override public void run() throws Exception { long seed = System.currentTimeMillis(); Random rnd = new Random(seed); log.info("Random seed: " + seed); // Register listener on all nodes. List<BlockingQueue<CacheEntryEvent<?, ?>>> evtsQueues = new ArrayList<>(); Collection<QueryCursor<?>> curs = new ArrayList<>(); Collection<MutableCacheEntryListenerConfiguration> lsnrCfgs = new ArrayList<>(); for (int idx = 0; idx < G.allGrids().size(); idx++) { final BlockingQueue<CacheEntryEvent<?, ?>> evtsQueue = new ArrayBlockingQueue<>(50_000); CI1<Iterable<CacheEntryEvent<?, ?>>> clsr = new CI1<Iterable<CacheEntryEvent<?, ?>>>() { @Override public void apply(Iterable<CacheEntryEvent<?, ?>> evts) { for (CacheEntryEvent<?, ?> evt : evts) evtsQueue.add(evt); } }; final CacheEntryUpdatedListener<Object, Object> lsnr = asyncCallback ? 
new AsyncLocalNonSerializableListener(clsr): new LocalNonSerializableListener(clsr); IgniteCache<Object, Object> jcache = keepBinary ? jcache(idx).withKeepBinary() : jcache(idx); if (jcacheApi) { MutableCacheEntryListenerConfiguration<Object, Object> lsnrCfg = new MutableCacheEntryListenerConfiguration<>( new Factory<CacheEntryListener<? super Object, ? super Object>>() { @Override public CacheEntryListener<? super Object, ? super Object> create() { return lsnr; } }, withFilter ? FactoryBuilder.factoryOf( asyncCallback ? new AsyncSerializableFilter(keepBinary, dataMode) : new SerializableFilter(keepBinary, dataMode)) : null, true, syncNtf ); jcache.registerCacheEntryListener(lsnrCfg); lsnrCfgs.add(lsnrCfg); evtsQueues.add(evtsQueue); } else { ContinuousQuery<Object, Object> qry = new ContinuousQuery<>(); qry.setLocalListener(lsnr); qry.setRemoteFilterFactory(withFilter ? FactoryBuilder.factoryOf( asyncCallback ? new AsyncSerializableFilter(keepBinary, dataMode) : new SerializableFilter(keepBinary, dataMode)) : null); curs.add(jcache.query(qry)); evtsQueues.add(evtsQueue); } } ConcurrentMap<Object, Object> expData = new ConcurrentHashMap<>(); try { for (int i = 0; i < ITERATION_CNT; i++) { if (i % 5 == 0) log.info("Iteration: " + i); for (int idx = 0; idx < G.allGrids().size(); idx++) randomUpdate(rnd, evtsQueues, expData, keepBinary ? jcache(idx).withKeepBinary() : jcache(idx), keepBinary, withFilter); } } catch (Exception e) { log.error("Got unexpected error: ", e); throw e; } finally { for (QueryCursor<?> cur : curs) cur.close(); for (int i = 0; i < G.allGrids().size(); i++) { for (MutableCacheEntryListenerConfiguration cfg : lsnrCfgs) jcache(i).deregisterCacheEntryListener(cfg); } } } }); } /** * @param rnd Random generator. * @param evtsQueues Events queue. * @param expData Expected cache data. * @param cache Cache. * @throws Exception If failed. 
*/ private void randomUpdate( Random rnd, List<BlockingQueue<CacheEntryEvent<?, ?>>> evtsQueues, ConcurrentMap<Object, Object> expData, IgniteCache<Object, Object> cache, boolean keepBinary, boolean withFilter ) throws Exception { Object key = key(rnd.nextInt(KEYS)); Object newVal = value(rnd.nextInt()); Object oldVal = expData.get(key); int op = rnd.nextInt(11); Ignite ignite = cache.unwrap(Ignite.class); Transaction tx = null; if (cache.getConfiguration(CacheConfiguration.class).getAtomicityMode() == TRANSACTIONAL && rnd.nextBoolean()) tx = ignite.transactions().txStart(txRandomConcurrency(rnd), txRandomIsolation(rnd)); try { // log.info("Random operation [key=" + key + ", op=" + op + ']'); switch (op) { case 0: { cache.put(key, newVal); if (tx != null) tx.commit(); waitAndCheckEvent(evtsQueues, key, newVal, oldVal, keepBinary, withFilter); expData.put(key, newVal); break; } case 1: { cache.getAndPut(key, newVal); if (tx != null) tx.commit(); waitAndCheckEvent(evtsQueues, key, newVal, oldVal, keepBinary, withFilter); expData.put(key, newVal); break; } case 2: { cache.remove(key); if (tx != null) tx.commit(); waitAndCheckEvent(evtsQueues, key, oldVal, oldVal, keepBinary, withFilter); expData.remove(key); break; } case 3: { cache.getAndRemove(key); if (tx != null) tx.commit(); waitAndCheckEvent(evtsQueues, key, oldVal, oldVal, keepBinary, withFilter); expData.remove(key); break; } case 4: { cache.invoke(key, new EntrySetValueProcessor(newVal, rnd.nextBoolean())); if (tx != null) tx.commit(); waitAndCheckEvent(evtsQueues, key, newVal, oldVal, keepBinary, withFilter); expData.put(key, newVal); break; } case 5: { cache.invoke(key, new EntrySetValueProcessor(null, rnd.nextBoolean())); if (tx != null) tx.commit(); waitAndCheckEvent(evtsQueues, key, oldVal, oldVal, keepBinary, withFilter); expData.remove(key); break; } case 6: { cache.putIfAbsent(key, newVal); if (tx != null) tx.commit(); if (oldVal == null) { waitAndCheckEvent(evtsQueues, key, newVal, null, keepBinary, 
withFilter); expData.put(key, newVal); } else checkNoEvent(evtsQueues); break; } case 7: { cache.getAndPutIfAbsent(key, newVal); if (tx != null) tx.commit(); if (oldVal == null) { waitAndCheckEvent(evtsQueues, key, newVal, null, keepBinary, withFilter); expData.put(key, newVal); } else checkNoEvent(evtsQueues); break; } case 8: { cache.replace(key, newVal); if (tx != null) tx.commit(); if (oldVal != null) { waitAndCheckEvent(evtsQueues, key, newVal, oldVal, keepBinary, withFilter); expData.put(key, newVal); } else checkNoEvent(evtsQueues); break; } case 9: { cache.getAndReplace(key, newVal); if (tx != null) tx.commit(); if (oldVal != null) { waitAndCheckEvent(evtsQueues, key, newVal, oldVal, keepBinary, withFilter); expData.put(key, newVal); } else checkNoEvent(evtsQueues); break; } case 10: { if (oldVal != null) { Object replaceVal = value(rnd.nextInt(VALS)); boolean success = replaceVal.equals(oldVal); if (success) { cache.replace(key, replaceVal, newVal); if (tx != null) tx.commit(); waitAndCheckEvent(evtsQueues, key, newVal, oldVal, keepBinary, withFilter); expData.put(key, newVal); } else { cache.replace(key, replaceVal, newVal); if (tx != null) tx.commit(); checkNoEvent(evtsQueues); } } else { cache.replace(key, value(rnd.nextInt(VALS)), newVal); if (tx != null) tx.commit(); checkNoEvent(evtsQueues); } break; } default: fail("Op:" + op); } } finally { if (tx != null) tx.close(); } } /** {@inheritDoc} */ @Override protected long getTestTimeout() { return TimeUnit.MINUTES.toMillis(5); } /** * @param rnd {@link Random}. * @return {@link TransactionConcurrency}. */ private TransactionConcurrency txRandomConcurrency(Random rnd) { return rnd.nextBoolean() ? TransactionConcurrency.OPTIMISTIC : TransactionConcurrency.PESSIMISTIC; } /** * @param rnd {@link Random}. * @return {@link TransactionIsolation}. 
*/ private TransactionIsolation txRandomIsolation(Random rnd) { int val = rnd.nextInt(3); if (val == 0) return READ_COMMITTED; else if (val == 1) return REPEATABLE_READ; else return SERIALIZABLE; } /** * @param evtsQueues Event queue. * @param key Key. * @param val Value. * @param oldVal Old value. * @param keepBinary Keep binary. * @param withFilter With filter. * @throws Exception If failed. */ private void waitAndCheckEvent(List<BlockingQueue<CacheEntryEvent<?, ?>>> evtsQueues, Object key, Object val, Object oldVal, boolean keepBinary, boolean withFilter) throws Exception { if (val == null && oldVal == null || (withFilter && val != null && !isAccepted(val, false, dataMode))) { checkNoEvent(evtsQueues); return; } for (BlockingQueue<CacheEntryEvent<?, ?>> evtsQueue : evtsQueues) { CacheEntryEvent<?, ?> evt = evtsQueue.poll(5, SECONDS); assertNotNull("Failed to wait for event [key=" + key + ", val=" + val + ", oldVal=" + oldVal + ']', evt); Object actKey = evt.getKey(); Object actVal = evt.getValue(); Object actOldVal = evt.getOldValue(); if (keepBinary) { actKey = checkAndGetObject(actKey); actVal = checkAndGetObject(actVal); actOldVal = checkAndGetObject(actOldVal); } assertEquals(key, actKey); assertEquals(val, actVal); assertEquals(oldVal, actOldVal); } } /** * @param obj Binary object. * @return Deserialize value. */ private Object checkAndGetObject(@Nullable Object obj) { if (obj != null) { assert obj instanceof BinaryObject || dataMode == EXTERNALIZABLE: obj; if (obj instanceof BinaryObject) obj = ((BinaryObject)obj).deserialize(); } return obj; } /** * @param evtsQueues Event queue. * @throws Exception If failed. */ private void checkNoEvent(List<BlockingQueue<CacheEntryEvent<?, ?>>> evtsQueues) throws Exception { for (BlockingQueue<CacheEntryEvent<?, ?>> evtsQueue : evtsQueues) { CacheEntryEvent<?, ?> evt = evtsQueue.poll(10, MILLISECONDS); assertNull(evt); } } /** * @throws Exception If failed. 
*/
public void testRemoveRemoveScenario() throws Exception {
    runInAllDataModes(new TestRunnable() {
        @Override public void run() throws Exception {
            IgniteCache<Object, Object> cache = jcache();

            ContinuousQuery<Object, Object> qry = new ContinuousQuery<>();

            // Collects every event delivered to the local listener; checked against the
            // expected sequence at the end of each iteration (continuation below).
            final List<CacheEntryEvent<?, ?>> evts = new CopyOnWriteArrayList<>();

            qry.setLocalListener(new CacheEntryUpdatedListener<Object, Object>() {
                @Override public void onUpdated(Iterable<CacheEntryEvent<?, ?>> events)
                    throws CacheEntryListenerException {
                    for (CacheEntryEvent<?, ?> e : events)
                        evts.add(e);
                }
            });

            Object key = key(1);

            try (QueryCursor qryCur = cache.query(qry)) {
                for (int i = 0; i < ITERATION_CNT; i++) {
                    log.info("Start iteration: " + i);

                    // Not events. (Skip-modify processor must not fire a listener.)
                    cache.invoke(key, new EntrySetValueProcessor(true));

                    // Get events. (CREATED then REMOVED expected.)
                    cache.put(key, value(1));
                    cache.remove(key);

                    // Not events. (Removes/no-ops on an absent key.)
                    cache.invoke(key, new EntrySetValueProcessor(null, false));
                    cache.invoke(key, new EntrySetValueProcessor(null, false));
                    cache.invoke(key, new EntrySetValueProcessor(true));
                    cache.remove(key);

                    // Get events.
                    cache.put(key, value(2));

                    // Not events.
                    cache.invoke(key, new EntrySetValueProcessor(true));

                    // Get events. (Processor removes the entry -> REMOVED.)
                    cache.invoke(key, new EntrySetValueProcessor(null, false));

                    // Not events. (Key already absent.)
                    cache.remove(key);

                    // Get events. (CREATED then UPDATED.)
                    cache.put(key, value(3));
                    cache.put(key, value(4));

                    // Not events. (putIfAbsent on a present key, remove with wrong value.)
                    cache.invoke(key, new EntrySetValueProcessor(true));
                    cache.putIfAbsent(key, value(5));
                    cache.putIfAbsent(key, value(5));
                    cache.putIfAbsent(key, value(5));
                    cache.invoke(key, new EntrySetValueProcessor(true));
                    cache.remove(key, value(5));

                    // Get events. (Conditional remove matches, then re-create.)
                    cache.remove(key, value(4));
                    cache.putIfAbsent(key, value(5));

                    // Not events. (Conditional replace with non-matching expected value.)
                    cache.replace(key, value(3), value(2));
                    cache.replace(key, value(3), value(2));
                    cache.replace(key, value(3), value(2));

                    // Get events.
cache.replace(key, value(5), value(6)); assert GridTestUtils.waitForCondition(new PA() { @Override public boolean apply() { return evts.size() == 9; } }, 5_000); checkEvent(evts.get(0), CREATED, value(1), null); checkEvent(evts.get(1), REMOVED, value(1), value(1)); checkEvent(evts.get(2), CREATED, value(2), null); checkEvent(evts.get(3), REMOVED, value(2), value(2)); checkEvent(evts.get(4), CREATED, value(3), null); checkEvent(evts.get(5), EventType.UPDATED, value(4), value(3)); checkEvent(evts.get(6), REMOVED, value(4), value(4)); checkEvent(evts.get(7), CREATED, value(5), null); checkEvent(evts.get(8), EventType.UPDATED, value(6), value(5)); cache.remove(key); cache.remove(key); //Wait when remove event will be added to evts while (evts.size() != 10) { Thread.sleep(100); } evts.clear(); log.info("Finish iteration: " + i); } } } }); } /** * @param event Event. * @param type Event type. * @param val Value. * @param oldVal Old value. */ private void checkEvent(CacheEntryEvent<?, ?> event, EventType type, Object val, Object oldVal) { assertEquals(event.getEventType(), type); assertEquals(event.getValue(), val); assertEquals(event.getOldValue(), oldVal); } /** * */ protected static class EntrySetValueProcessor implements EntryProcessor<Object, Object, Object> { /** */ private Object val; /** */ private boolean retOld; /** */ private boolean skipModify; /** * @param skipModify If {@code true} then entry will not be modified. */ public EntrySetValueProcessor(boolean skipModify) { this.skipModify = skipModify; } /** * @param val Value to set. * @param retOld Return old value flag. */ public EntrySetValueProcessor(Object val, boolean retOld) { this.val = val; this.retOld = retOld; } /** {@inheritDoc} */ @Override public Object process(MutableEntry<Object, Object> e, Object... args) { if (skipModify) return null; Object old = retOld ? 
e.getValue() : null; if (val != null) e.setValue(val); else e.remove(); return old; } /** {@inheritDoc} */ @Override public String toString() { return S.toString(EntrySetValueProcessor.class, this); } } /** * */ @IgniteAsyncCallback public static class AsyncSerializableFilter extends SerializableFilter { /** * */ public AsyncSerializableFilter() { // No-op. } /** * @param keepBinary Keep binary. * @param dataMode Data mode. */ public AsyncSerializableFilter(boolean keepBinary, DataMode dataMode) { super(keepBinary, dataMode); } } /** * */ public static class SerializableFilter implements CacheEntryEventSerializableFilter<Object, Object> { /** */ private boolean keepBinary; /** */ private DataMode dataMode; /** */ public SerializableFilter() { // No-op. } /** * @param keepBinary Keep binary. * @param dataMode Data mode. */ public SerializableFilter(boolean keepBinary, DataMode dataMode) { this.keepBinary = keepBinary; this.dataMode = dataMode; } /** {@inheritDoc} */ @Override public boolean evaluate(CacheEntryEvent<?, ?> event) throws CacheEntryListenerException { return isAccepted(event.getValue(), keepBinary, dataMode); } /** * @param val Value. * @param keepBinary Keep binary. * @param dataMode Data mode. * @return {@code True} if value is even. */ public static boolean isAccepted(Object val, boolean keepBinary, DataMode dataMode) { if (val != null) { int val0 = 0; if (val instanceof TestObject) { assert !keepBinary || dataMode == EXTERNALIZABLE : val; val0 = valueOf(val); } else if (val instanceof BinaryObject) { assert keepBinary : val; val0 = ((BinaryObject)val).field("val"); } else fail("Unexpected object: " + val); return val0 % 2 == 0; } return true; } } /** * */ @IgniteAsyncCallback public static class AsyncLocalNonSerializableListener extends LocalNonSerializableListener { /** * @param clsr Closure. 
*/
AsyncLocalNonSerializableListener(IgniteInClosure<Iterable<CacheEntryEvent<?, ?>>> clsr) {
    super(clsr);
}

/**
 * Required public no-arg constructor.
 */
public AsyncLocalNonSerializableListener() {
    // No-op.
}
}

/**
 * Local listener for all entry event types that delegates every notification to a closure.
 * Deliberately non-serializable: both {@code Externalizable} methods throw, so the test
 * fails loudly if the listener is ever marshaled instead of staying node-local.
 */
public static class LocalNonSerializableListener implements
    CacheEntryUpdatedListener<Object, Object>,
    CacheEntryCreatedListener<Object, Object>,
    CacheEntryExpiredListener<Object, Object>,
    CacheEntryRemovedListener<Object, Object>,
    Externalizable {
    /** Closure all received events are forwarded to. */
    IgniteInClosure<Iterable<CacheEntryEvent<?, ?>>> clsr;

    /**
     * @param clsr Closure.
     */
    LocalNonSerializableListener(IgniteInClosure<Iterable<CacheEntryEvent<?, ?>>> clsr) {
        this.clsr = clsr;
    }

    /** Required public no-arg constructor. */
    public LocalNonSerializableListener() {
        // No-op.
    }

    /** {@inheritDoc} */
    @Override public void onCreated(Iterable<CacheEntryEvent<?, ?>> evts) throws CacheEntryListenerException {
        onEvents(evts);
    }

    /** {@inheritDoc} */
    @Override public void onExpired(Iterable<CacheEntryEvent<?, ?>> evts) throws CacheEntryListenerException {
        onEvents(evts);
    }

    /** {@inheritDoc} */
    @Override public void onRemoved(Iterable<CacheEntryEvent<?, ?>> evts) throws CacheEntryListenerException {
        onEvents(evts);
    }

    /** {@inheritDoc} */
    @Override public void onUpdated(Iterable<CacheEntryEvent<?, ?>> evts) throws CacheEntryListenerException {
        onEvents(evts);
    }

    /**
     * Forwards events to the configured closure.
     *
     * @param evts Events.
     */
    private void onEvents(Iterable<CacheEntryEvent<?, ?>> evts) {
        clsr.apply(evts);
    }

    /** {@inheritDoc} */
    @Override public void writeExternal(ObjectOutput out) throws IOException {
        throw new UnsupportedOperationException("Failed. Listener should not be marshaled.");
    }

    /** {@inheritDoc} */
    @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
        throw new UnsupportedOperationException("Failed. Listener should not be unmarshaled.");
    }
}
}
/** * <copyright> * </copyright> * * $Id$ */ package net.opengis.gml.impl; import java.math.BigInteger; import java.util.Collection; import java.util.List; import net.opengis.gml.CoordType; import net.opengis.gml.CoordinatesType; import net.opengis.gml.DirectPositionType; import net.opengis.gml.EnvelopeType; import net.opengis.gml.GmlPackage; import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.common.notify.NotificationChain; import org.eclipse.emf.common.util.EList; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.InternalEObject; import org.eclipse.emf.ecore.impl.ENotificationImpl; import org.eclipse.emf.ecore.impl.EObjectImpl; import org.eclipse.emf.ecore.util.EObjectContainmentEList; import org.eclipse.emf.ecore.util.InternalEList; /** * <!-- begin-user-doc --> * An implementation of the model object '<em><b>Envelope Type</b></em>'. * <!-- end-user-doc --> * <p> * The following features are implemented: * <ul> * <li>{@link net.opengis.gml.impl.EnvelopeTypeImpl#getLowerCorner <em>Lower Corner</em>}</li> * <li>{@link net.opengis.gml.impl.EnvelopeTypeImpl#getUpperCorner <em>Upper Corner</em>}</li> * <li>{@link net.opengis.gml.impl.EnvelopeTypeImpl#getCoord <em>Coord</em>}</li> * <li>{@link net.opengis.gml.impl.EnvelopeTypeImpl#getPos <em>Pos</em>}</li> * <li>{@link net.opengis.gml.impl.EnvelopeTypeImpl#getCoordinates <em>Coordinates</em>}</li> * <li>{@link net.opengis.gml.impl.EnvelopeTypeImpl#getAxisLabels <em>Axis Labels</em>}</li> * <li>{@link net.opengis.gml.impl.EnvelopeTypeImpl#getSrsDimension <em>Srs Dimension</em>}</li> * <li>{@link net.opengis.gml.impl.EnvelopeTypeImpl#getSrsName <em>Srs Name</em>}</li> * <li>{@link net.opengis.gml.impl.EnvelopeTypeImpl#getUomLabels <em>Uom Labels</em>}</li> * </ul> * </p> * * @generated */ public class EnvelopeTypeImpl extends EObjectImpl implements EnvelopeType { /** * The cached value of the '{@link #getLowerCorner() <em>Lower Corner</em>}' containment reference. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getLowerCorner() * @generated * @ordered */ protected DirectPositionType lowerCorner; /** * The cached value of the '{@link #getUpperCorner() <em>Upper Corner</em>}' containment reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getUpperCorner() * @generated * @ordered */ protected DirectPositionType upperCorner; /** * The cached value of the '{@link #getCoord() <em>Coord</em>}' containment reference list. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getCoord() * @generated * @ordered */ protected EList<CoordType> coord; /** * The cached value of the '{@link #getPos() <em>Pos</em>}' containment reference list. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getPos() * @generated * @ordered */ protected EList<DirectPositionType> pos; /** * The cached value of the '{@link #getCoordinates() <em>Coordinates</em>}' containment reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getCoordinates() * @generated * @ordered */ protected CoordinatesType coordinates; /** * The default value of the '{@link #getAxisLabels() <em>Axis Labels</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getAxisLabels() * @generated * @ordered */ protected static final List<String> AXIS_LABELS_EDEFAULT = null; /** * The cached value of the '{@link #getAxisLabels() <em>Axis Labels</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getAxisLabels() * @generated * @ordered */ protected List<String> axisLabels = AXIS_LABELS_EDEFAULT; /** * The default value of the '{@link #getSrsDimension() <em>Srs Dimension</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getSrsDimension() * @generated * @ordered */ protected static final BigInteger SRS_DIMENSION_EDEFAULT = null; /** * The cached value of the '{@link #getSrsDimension() <em>Srs Dimension</em>}' attribute. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getSrsDimension() * @generated * @ordered */ protected BigInteger srsDimension = SRS_DIMENSION_EDEFAULT; /** * The default value of the '{@link #getSrsName() <em>Srs Name</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getSrsName() * @generated * @ordered */ protected static final String SRS_NAME_EDEFAULT = null; /** * The cached value of the '{@link #getSrsName() <em>Srs Name</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getSrsName() * @generated * @ordered */ protected String srsName = SRS_NAME_EDEFAULT; /** * The default value of the '{@link #getUomLabels() <em>Uom Labels</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getUomLabels() * @generated * @ordered */ protected static final List<String> UOM_LABELS_EDEFAULT = null; /** * The cached value of the '{@link #getUomLabels() <em>Uom Labels</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getUomLabels() * @generated * @ordered */ protected List<String> uomLabels = UOM_LABELS_EDEFAULT; /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected EnvelopeTypeImpl() { super(); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override protected EClass eStaticClass() { return GmlPackage.eINSTANCE.getEnvelopeType(); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public DirectPositionType getLowerCorner() { return lowerCorner; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public NotificationChain basicSetLowerCorner(DirectPositionType newLowerCorner, NotificationChain msgs) { DirectPositionType oldLowerCorner = lowerCorner; lowerCorner = newLowerCorner; if (eNotificationRequired()) { ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, GmlPackage.ENVELOPE_TYPE__LOWER_CORNER, oldLowerCorner, newLowerCorner); if (msgs == null) msgs = 
notification; else msgs.add(notification); } return msgs; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setLowerCorner(DirectPositionType newLowerCorner) { if (newLowerCorner != lowerCorner) { NotificationChain msgs = null; if (lowerCorner != null) msgs = ((InternalEObject)lowerCorner).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - GmlPackage.ENVELOPE_TYPE__LOWER_CORNER, null, msgs); if (newLowerCorner != null) msgs = ((InternalEObject)newLowerCorner).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - GmlPackage.ENVELOPE_TYPE__LOWER_CORNER, null, msgs); msgs = basicSetLowerCorner(newLowerCorner, msgs); if (msgs != null) msgs.dispatch(); } else if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, GmlPackage.ENVELOPE_TYPE__LOWER_CORNER, newLowerCorner, newLowerCorner)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public DirectPositionType getUpperCorner() { return upperCorner; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public NotificationChain basicSetUpperCorner(DirectPositionType newUpperCorner, NotificationChain msgs) { DirectPositionType oldUpperCorner = upperCorner; upperCorner = newUpperCorner; if (eNotificationRequired()) { ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, GmlPackage.ENVELOPE_TYPE__UPPER_CORNER, oldUpperCorner, newUpperCorner); if (msgs == null) msgs = notification; else msgs.add(notification); } return msgs; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setUpperCorner(DirectPositionType newUpperCorner) { if (newUpperCorner != upperCorner) { NotificationChain msgs = null; if (upperCorner != null) msgs = ((InternalEObject)upperCorner).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - GmlPackage.ENVELOPE_TYPE__UPPER_CORNER, null, msgs); if (newUpperCorner != null) msgs = ((InternalEObject)newUpperCorner).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - 
GmlPackage.ENVELOPE_TYPE__UPPER_CORNER, null, msgs);
            msgs = basicSetUpperCorner(newUpperCorner, msgs);
            if (msgs != null) msgs.dispatch();
        }
        else if (eNotificationRequired())
            // Value unchanged: fire a "touch" SET notification (old == new) so listeners still see the set.
            eNotify(new ENotificationImpl(this, Notification.SET, GmlPackage.ENVELOPE_TYPE__UPPER_CORNER, newUpperCorner, newUpperCorner));
    }

    /**
     * Returns the containment list of {@code coord} elements, creating it lazily on first access.
     * NOTE: generated EMF accessor — do not hand-edit; changes are lost on regeneration.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EList<CoordType> getCoord() {
        if (coord == null) {
            coord = new EObjectContainmentEList<CoordType>(CoordType.class, this, GmlPackage.ENVELOPE_TYPE__COORD);
        }
        return coord;
    }

    /**
     * Returns the containment list of {@code pos} (direct position) elements, creating it lazily.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EList<DirectPositionType> getPos() {
        if (pos == null) {
            pos = new EObjectContainmentEList<DirectPositionType>(DirectPositionType.class, this, GmlPackage.ENVELOPE_TYPE__POS);
        }
        return pos;
    }

    /**
     * Returns the {@code coordinates} containment reference (may be null).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public CoordinatesType getCoordinates() {
        return coordinates;
    }

    /**
     * Stores the new {@code coordinates} value and queues a SET notification on the chain
     * without dispatching it; callers dispatch the returned chain themselves.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public NotificationChain basicSetCoordinates(CoordinatesType newCoordinates, NotificationChain msgs) {
        CoordinatesType oldCoordinates = coordinates;
        coordinates = newCoordinates;
        if (eNotificationRequired()) {
            ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, GmlPackage.ENVELOPE_TYPE__COORDINATES, oldCoordinates, newCoordinates);
            if (msgs == null) msgs = notification; else msgs.add(notification);
        }
        return msgs;
    }

    /**
     * Public setter for {@code coordinates}: detaches the old contained object, attaches the
     * new one (inverse add/remove), then dispatches the accumulated notification chain.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setCoordinates(CoordinatesType newCoordinates) {
        if (newCoordinates != coordinates) {
            NotificationChain msgs = null;
            if (coordinates != null)
                msgs = ((InternalEObject)coordinates).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - GmlPackage.ENVELOPE_TYPE__COORDINATES, null, msgs);
            if (newCoordinates != null)
                msgs = ((InternalEObject)newCoordinates).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - GmlPackage.ENVELOPE_TYPE__COORDINATES, null, msgs);
            msgs = basicSetCoordinates(newCoordinates, msgs);
            if (msgs != null) msgs.dispatch();
        }
        else if (eNotificationRequired())
            // Same instance re-set: touch notification only.
            eNotify(new ENotificationImpl(this, Notification.SET, GmlPackage.ENVELOPE_TYPE__COORDINATES, newCoordinates, newCoordinates));
    }

    /**
     * Returns the {@code axisLabels} attribute (list of axis label strings).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public List<String> getAxisLabels() {
        return axisLabels;
    }

    /**
     * Sets {@code axisLabels} and notifies listeners with old and new values.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setAxisLabels(List<String> newAxisLabels) {
        List<String> oldAxisLabels = axisLabels;
        axisLabels = newAxisLabels;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, GmlPackage.ENVELOPE_TYPE__AXIS_LABELS, oldAxisLabels, axisLabels));
    }

    /**
     * Returns the {@code srsDimension} attribute (coordinate dimension of the CRS).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public BigInteger getSrsDimension() {
        return srsDimension;
    }

    /**
     * Sets {@code srsDimension} and notifies listeners.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setSrsDimension(BigInteger newSrsDimension) {
        BigInteger oldSrsDimension = srsDimension;
        srsDimension = newSrsDimension;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, GmlPackage.ENVELOPE_TYPE__SRS_DIMENSION, oldSrsDimension, srsDimension));
    }

    /**
     * Returns the {@code srsName} attribute (CRS reference string).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public String getSrsName() {
        return srsName;
    }

    /**
     * Sets {@code srsName} and notifies listeners.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setSrsName(String newSrsName) {
        String oldSrsName = srsName;
        srsName = newSrsName;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, GmlPackage.ENVELOPE_TYPE__SRS_NAME, oldSrsName, srsName));
    }

    /**
     * Returns the {@code uomLabels} attribute (unit-of-measure label strings).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public List<String> getUomLabels() {
        return uomLabels;
    }

    /**
     * Sets {@code uomLabels} and notifies listeners.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setUomLabels(List<String> newUomLabels) {
        List<String> oldUomLabels = uomLabels;
        uomLabels = newUomLabels;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, GmlPackage.ENVELOPE_TYPE__UOM_LABELS, oldUomLabels, uomLabels));
    }

    /**
     * Reflective inverse-remove: detaches a contained object for the given feature ID.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
        switch (featureID) {
            case GmlPackage.ENVELOPE_TYPE__LOWER_CORNER:
                return basicSetLowerCorner(null, msgs);
            case GmlPackage.ENVELOPE_TYPE__UPPER_CORNER:
                return basicSetUpperCorner(null, msgs);
            case GmlPackage.ENVELOPE_TYPE__COORD:
                return ((InternalEList<?>)getCoord()).basicRemove(otherEnd, msgs);
            case GmlPackage.ENVELOPE_TYPE__POS:
                return ((InternalEList<?>)getPos()).basicRemove(otherEnd, msgs);
            case GmlPackage.ENVELOPE_TYPE__COORDINATES:
                return basicSetCoordinates(null, msgs);
        }
        return super.eInverseRemove(otherEnd, featureID, msgs);
    }

    /**
     * Reflective getter: routes a feature ID to the corresponding typed accessor.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case GmlPackage.ENVELOPE_TYPE__LOWER_CORNER:
                return getLowerCorner();
            case GmlPackage.ENVELOPE_TYPE__UPPER_CORNER:
                return getUpperCorner();
            case GmlPackage.ENVELOPE_TYPE__COORD:
                return getCoord();
            case GmlPackage.ENVELOPE_TYPE__POS:
                return getPos();
            case GmlPackage.ENVELOPE_TYPE__COORDINATES:
                return getCoordinates();
            case GmlPackage.ENVELOPE_TYPE__AXIS_LABELS:
                return getAxisLabels();
            case GmlPackage.ENVELOPE_TYPE__SRS_DIMENSION:
                return getSrsDimension();
            case GmlPackage.ENVELOPE_TYPE__SRS_NAME:
                return getSrsName();
            case GmlPackage.ENVELOPE_TYPE__UOM_LABELS:
                return getUomLabels();
        }
        return super.eGet(featureID, resolve, coreType);
    }

    /**
     * Reflective setter: list features are cleared then bulk-added; scalars delegate to setters.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case GmlPackage.ENVELOPE_TYPE__LOWER_CORNER:
                setLowerCorner((DirectPositionType)newValue);
                return;
            case GmlPackage.ENVELOPE_TYPE__UPPER_CORNER:
                setUpperCorner((DirectPositionType)newValue);
                return;
            case GmlPackage.ENVELOPE_TYPE__COORD:
                getCoord().clear();
                getCoord().addAll((Collection<? extends CoordType>)newValue);
                return;
            case GmlPackage.ENVELOPE_TYPE__POS:
                getPos().clear();
                getPos().addAll((Collection<? extends DirectPositionType>)newValue);
                return;
            case GmlPackage.ENVELOPE_TYPE__COORDINATES:
                setCoordinates((CoordinatesType)newValue);
                return;
            case GmlPackage.ENVELOPE_TYPE__AXIS_LABELS:
                setAxisLabels((List<String>)newValue);
                return;
            case GmlPackage.ENVELOPE_TYPE__SRS_DIMENSION:
                setSrsDimension((BigInteger)newValue);
                return;
            case GmlPackage.ENVELOPE_TYPE__SRS_NAME:
                setSrsName((String)newValue);
                return;
            case GmlPackage.ENVELOPE_TYPE__UOM_LABELS:
                setUomLabels((List<String>)newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }

    /**
     * Reflective unset: references go to null, attributes back to their EDEFAULT constants.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case GmlPackage.ENVELOPE_TYPE__LOWER_CORNER:
                setLowerCorner((DirectPositionType)null);
                return;
            case GmlPackage.ENVELOPE_TYPE__UPPER_CORNER:
                setUpperCorner((DirectPositionType)null);
                return;
            case GmlPackage.ENVELOPE_TYPE__COORD:
                getCoord().clear();
                return;
            case GmlPackage.ENVELOPE_TYPE__POS:
                getPos().clear();
                return;
            case GmlPackage.ENVELOPE_TYPE__COORDINATES:
                setCoordinates((CoordinatesType)null);
                return;
            case GmlPackage.ENVELOPE_TYPE__AXIS_LABELS:
                setAxisLabels(AXIS_LABELS_EDEFAULT);
                return;
            case GmlPackage.ENVELOPE_TYPE__SRS_DIMENSION:
                setSrsDimension(SRS_DIMENSION_EDEFAULT);
                return;
            case GmlPackage.ENVELOPE_TYPE__SRS_NAME:
                setSrsName(SRS_NAME_EDEFAULT);
                return;
            case GmlPackage.ENVELOPE_TYPE__UOM_LABELS:
                setUomLabels(UOM_LABELS_EDEFAULT);
                return;
        }
        super.eUnset(featureID);
    }

    /**
     * Reflective "is set" check: lists are set when non-null and non-empty; attributes when
     * they differ from their EDEFAULT constants.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case GmlPackage.ENVELOPE_TYPE__LOWER_CORNER:
                return lowerCorner != null;
            case GmlPackage.ENVELOPE_TYPE__UPPER_CORNER:
                return upperCorner != null;
            case GmlPackage.ENVELOPE_TYPE__COORD:
                return coord != null && !coord.isEmpty();
            case GmlPackage.ENVELOPE_TYPE__POS:
                return pos != null && !pos.isEmpty();
            case GmlPackage.ENVELOPE_TYPE__COORDINATES:
                return coordinates != null;
            case GmlPackage.ENVELOPE_TYPE__AXIS_LABELS:
                return AXIS_LABELS_EDEFAULT == null ? axisLabels != null : !AXIS_LABELS_EDEFAULT.equals(axisLabels);
            case GmlPackage.ENVELOPE_TYPE__SRS_DIMENSION:
                return SRS_DIMENSION_EDEFAULT == null ? srsDimension != null : !SRS_DIMENSION_EDEFAULT.equals(srsDimension);
            case GmlPackage.ENVELOPE_TYPE__SRS_NAME:
                return SRS_NAME_EDEFAULT == null ? srsName != null : !SRS_NAME_EDEFAULT.equals(srsName);
            case GmlPackage.ENVELOPE_TYPE__UOM_LABELS:
                return UOM_LABELS_EDEFAULT == null ? uomLabels != null : !UOM_LABELS_EDEFAULT.equals(uomLabels);
        }
        return super.eIsSet(featureID);
    }

    /**
     * Debug string listing the simple attributes (containment references are omitted).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String toString() {
        if (eIsProxy()) return super.toString();
        StringBuffer result = new StringBuffer(super.toString());
        result.append(" (axisLabels: ");
        result.append(axisLabels);
        result.append(", srsDimension: ");
        result.append(srsDimension);
        result.append(", srsName: ");
        result.append(srsName);
        result.append(", uomLabels: ");
        result.append(uomLabels);
        result.append(')');
        return result.toString();
    }

} //EnvelopeTypeImpl
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
 * Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.dynamodbv2.model;

import java.io.Serializable;
import java.util.Objects;

/**
 * <p>
 * Represents attributes that are copied (projected) from the table into an
 * index. These are in addition to the primary key attributes and index key
 * attributes, which are automatically projected.
 * </p>
 */
public class Projection implements Serializable, Cloneable {

    /**
     * The set of attributes that are projected into the index:
     * <ul>
     * <li><code>KEYS_ONLY</code> - Only the index and primary keys are projected.</li>
     * <li><code>INCLUDE</code> - Only the attributes listed in <i>NonKeyAttributes</i> are projected.</li>
     * <li><code>ALL</code> - All of the table attributes are projected.</li>
     * </ul>
     */
    private String projectionType;

    /**
     * The non-key attribute names projected into the index. For local secondary
     * indexes, the total count of <i>NonKeyAttributes</i> summed across all local
     * secondary indexes must not exceed 20; projecting the same attribute into two
     * indexes counts it twice.
     */
    private java.util.List<String> nonKeyAttributes;

    /**
     * Sets the projection type (<code>KEYS_ONLY</code>, <code>INCLUDE</code> or <code>ALL</code>).
     *
     * @param projectionType The set of attributes that are projected into the index.
     * @see ProjectionType
     */
    public void setProjectionType(String projectionType) {
        this.projectionType = projectionType;
    }

    /**
     * Returns the projection type (<code>KEYS_ONLY</code>, <code>INCLUDE</code> or <code>ALL</code>).
     *
     * @return The set of attributes that are projected into the index.
     * @see ProjectionType
     */
    public String getProjectionType() {
        return this.projectionType;
    }

    /**
     * Fluent variant of {@link #setProjectionType(String)}.
     *
     * @param projectionType The set of attributes that are projected into the index.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ProjectionType
     */
    public Projection withProjectionType(String projectionType) {
        setProjectionType(projectionType);
        return this;
    }

    /**
     * Sets the projection type from the typed enum. Stores the enum's string form;
     * a null argument throws {@link NullPointerException} (matches historical behavior).
     *
     * @param projectionType The set of attributes that are projected into the index.
     * @see ProjectionType
     */
    public void setProjectionType(ProjectionType projectionType) {
        this.projectionType = projectionType.toString();
    }

    /**
     * Fluent variant of {@link #setProjectionType(ProjectionType)}.
     *
     * @param projectionType The set of attributes that are projected into the index.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ProjectionType
     */
    public Projection withProjectionType(ProjectionType projectionType) {
        setProjectionType(projectionType);
        return this;
    }

    /**
     * Returns the live internal list of projected non-key attribute names, or null if unset.
     *
     * @return Represents the non-key attribute names which will be projected into the index.
     */
    public java.util.List<String> getNonKeyAttributes() {
        return nonKeyAttributes;
    }

    /**
     * Sets the projected non-key attribute names. A defensive copy of the supplied
     * collection is taken; passing null clears the list.
     *
     * @param nonKeyAttributes Represents the non-key attribute names which will be
     *        projected into the index.
     */
    public void setNonKeyAttributes(java.util.Collection<String> nonKeyAttributes) {
        if (nonKeyAttributes == null) {
            this.nonKeyAttributes = null;
            return;
        }
        this.nonKeyAttributes = new java.util.ArrayList<String>(nonKeyAttributes);
    }

    /**
     * Appends the given names to the existing list (creating it if absent).
     * Use {@link #setNonKeyAttributes(java.util.Collection)} or
     * {@link #withNonKeyAttributes(java.util.Collection)} to replace the existing values.
     *
     * @param nonKeyAttributes Represents the non-key attribute names which will be
     *        projected into the index.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Projection withNonKeyAttributes(String... nonKeyAttributes) {
        if (this.nonKeyAttributes == null) {
            // Presize to the incoming count; elements are added below.
            setNonKeyAttributes(new java.util.ArrayList<String>(nonKeyAttributes.length));
        }
        for (String ele : nonKeyAttributes) {
            this.nonKeyAttributes.add(ele);
        }
        return this;
    }

    /**
     * Fluent variant of {@link #setNonKeyAttributes(java.util.Collection)} — note this
     * REPLACES the current list rather than appending.
     *
     * @param nonKeyAttributes Represents the non-key attribute names which will be
     *        projected into the index.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Projection withNonKeyAttributes(java.util.Collection<String> nonKeyAttributes) {
        setNonKeyAttributes(nonKeyAttributes);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging. Format is kept byte-compatible with previous releases.
     *
     * @return A string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getProjectionType() != null)
            sb.append("ProjectionType: " + getProjectionType() + ",");
        if (getNonKeyAttributes() != null)
            sb.append("NonKeyAttributes: " + getNonKeyAttributes());
        sb.append("}");
        return sb.toString();
    }

    /**
     * Two projections are equal when both fields are (null-safely) equal.
     * Uses {@link Objects#equals(Object, Object)} instead of the previous
     * hand-rolled XOR/"== false" chains — identical semantics, clearer code.
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof Projection))
            return false;
        Projection other = (Projection) obj;
        return Objects.equals(this.getProjectionType(), other.getProjectionType())
                && Objects.equals(this.getNonKeyAttributes(), other.getNonKeyAttributes());
    }

    /**
     * Hash code consistent with {@link #equals(Object)}. {@link Objects#hash}
     * reproduces the previous prime-31 accumulation exactly.
     */
    @Override
    public int hashCode() {
        return Objects.hash(getProjectionType(), getNonKeyAttributes());
    }

    /**
     * Shallow clone via {@link Object#clone()}; Cloneable is implemented so this
     * can never actually throw.
     */
    @Override
    public Projection clone() {
        try {
            return (Projection) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jetbrains.idea.svn.dialogs;

import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.keymap.KeymapManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.IconLoader;
import com.intellij.openapi.vcs.changes.VcsDirtyScopeManager;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileManager;
import com.intellij.openapi.wm.ToolWindowManager;
import com.intellij.ui.PopupHandler;
import com.intellij.util.containers.HashMap;
import org.jetbrains.idea.svn.SvnVcs;
import org.tmatesoft.svn.core.SVNException;
import org.tmatesoft.svn.core.SVNProperty;
import org.tmatesoft.svn.core.SVNPropertyValue;
import org.tmatesoft.svn.core.SVNURL;
import org.tmatesoft.svn.core.wc.ISVNPropertyHandler;
import org.tmatesoft.svn.core.wc.SVNPropertyData;
import org.tmatesoft.svn.core.wc.SVNRevision;
import org.tmatesoft.svn.core.wc.SVNWCClient;

import javax.swing.*;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import javax.swing.table.DefaultTableCellRenderer;
import javax.swing.table.DefaultTableModel;
import java.awt.*;
import java.io.File;
import java.util.Map;
import java.util.TreeMap;

/**
 * Tool-window panel showing the SVN properties of a single working-copy file:
 * a name/value table on top and a read-only text area with the full value of
 * the selected property below. Toolbar/popup actions add, edit and delete
 * properties via SVNKit's {@code SVNWCClient}.
 *
 * Created by IntelliJ IDEA.
 * User: alex
 * Date: Jun 20, 2006
 * Time: 4:39:46 PM
 */
public class PropertiesComponent extends JPanel {
  // Tool-window ID under which this panel is registered.
  public static final String ID = "SVN Properties";
  private JTable myTable;
  private JTextArea myTextArea;
  // When true, the shown file follows the editor/project-view selection.
  private boolean myIsFollowSelection;
  // Currently displayed working-copy file (null until setFile() is first called).
  private File myFile;
  private SvnVcs myVcs;
  private JSplitPane mySplitPane;
  private static final String CONTEXT_ID = "context";
  private final CloseAction myCloseAction = new CloseAction();
  private final RefreshAction myRefreshAction = new RefreshAction();
  private ActionGroup myPopupActionGroup;

  public PropertiesComponent() {
    // register toolwindow and add listener to the selection.
    myIsFollowSelection = true;
    init();
  }

  /**
   * Builds the UI: table + detail text area in a vertical split pane, a vertical
   * toolbar on the west side, a context popup on the table, and keyboard shortcuts
   * for close/refresh.
   */
  public void init() {
    setLayout(new BorderLayout());
    myTable = new JTable();
    myTextArea = new JTextArea(0, 0);
    myTextArea.setEditable(false);
    JScrollPane scrollPane = new JScrollPane(myTable);
    mySplitPane = new JSplitPane(JSplitPane.VERTICAL_SPLIT, true, scrollPane, new JScrollPane(myTextArea));
    add(mySplitPane, BorderLayout.CENTER);
    add(createToolbar(), BorderLayout.WEST);
    // Read-only model: property editing goes through the actions, not inline cell edits.
    final DefaultTableModel model = new DefaultTableModel(createTableModel(new HashMap<String, String>()), new Object[]{"Name", "Value"}) {
      public boolean isCellEditable(final int row, final int column) {
        return false;
      }
    };
    myTable.setModel(model);
    myTable.setShowVerticalLines(true);
    myTable.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
    // Mirror the selected row's value (column 1) into the detail text area.
    myTable.getSelectionModel().addListSelectionListener(new ListSelectionListener() {
      public void valueChanged(ListSelectionEvent e) {
        int index = myTable.getSelectedRow();
        if (index >= 0) {
          Object value = myTable.getValueAt(index, 1);
          if (value instanceof String) {
            myTextArea.setText(((String) value));
          }
          else {
            myTextArea.setText("");
          }
        }
        else {
          myTextArea.setText("");
        }
      }
    });
    myPopupActionGroup = createPopup();
    PopupHandler.installPopupHandler(myTable, myPopupActionGroup, ActionPlaces.UNKNOWN, ActionManager.getInstance());
    PopupHandler.installPopupHandler(scrollPane, myPopupActionGroup, ActionPlaces.UNKNOWN, ActionManager.getInstance());
    final Shortcut[] shortcuts = KeymapManager.getInstance().getActiveKeymap().getShortcuts(IdeActions.ACTION_CLOSE_ACTIVE_TAB);
    myCloseAction.registerCustomShortcutSet(new CustomShortcutSet(shortcuts), this);
    myRefreshAction.registerCustomShortcutSet(CommonShortcuts.getRerun(), this);
  }

  /**
   * Loads and displays the SVN properties of {@code file}. Properties are read
   * with WORKING revision; on SVNException the table is simply shown empty
   * (errors are swallowed by design here). Multi-line values are truncated with
   * " [...]" in the table cell; the full value remains available in the text area.
   */
  public void setFile(SvnVcs vcs, File file) {
    final Map<String, String> props = new TreeMap<String, String>();
    boolean firstTime = myFile == null;
    if (file != null) {
      myFile = file;
      myVcs = vcs;
      try {
        vcs.createWCClient().doGetProperty(file, null, SVNRevision.UNDEFINED, SVNRevision.WORKING, false, new ISVNPropertyHandler() {
          public void handleProperty(File path, SVNPropertyData property) throws SVNException {
            final SVNPropertyValue value = property.getValue();
            if (value != null) {
              props.put(property.getName(), value.getString());
            }
          }
          public void handleProperty(SVNURL url, SVNPropertyData property) throws SVNException {
          }
          public void handleProperty(long revision, SVNPropertyData property) throws SVNException {
          }
        });
      }
      catch (SVNException e) {
        // Best-effort: show an empty property table when the read fails.
        props.clear();
      }
    }
    DefaultTableModel model = (DefaultTableModel) myTable.getModel();
    model.setDataVector(createTableModel(props), new Object[] {"Name", "Value"});
    myTable.getColumnModel().setColumnSelectionAllowed(false);
    // Renderer cuts the value at the first CR or LF so multi-line values stay one row high.
    myTable.getColumnModel().getColumn(1).setCellRenderer(new DefaultTableCellRenderer() {
      protected void setValue(Object value) {
        if (value != null) {
          if (value.toString().indexOf('\r') >= 0) {
            value = value.toString().substring(0, value.toString().indexOf('\r')) + " [...]";
          }
          if (value.toString().indexOf('\n') >= 0) {
            value = value.toString().substring(0, value.toString().indexOf('\n')) + " [...]";
          }
        }
        super.setValue(value);
      }
    });
    if (firstTime) {
      mySplitPane.setDividerLocation(.5);
    }
    if (myTable.getRowCount() > 0) {
      myTable.getSelectionModel().setSelectionInterval(0, 0);
    }
  }

  /** Converts the name->value map into the Object[rows][2] shape DefaultTableModel expects. */
  private static Object[][] createTableModel(Map<String, String> model) {
    Object[][] result = new Object[model.size()][2];
    int index = 0;
    for (final String name : model.keySet()) {
      String value = model.get(name);
      if (value == null) {
        value = "";
      }
      result[index][0] = name;
      result[index][1] = value;
      index++;
    }
    return result;
  }

  /** Builds the vertical west-side toolbar (includes follow-selection, refresh and close). */
  private JComponent createToolbar() {
    DefaultActionGroup group = new DefaultActionGroup();
    group.add(new AddPropertyAction());
    group.add(new EditPropertyAction());
    group.add(new DeletePropertyAction());
    group.addSeparator();
    group.add(new SetKeywordsAction());
    group.addSeparator();
    group.add(new FollowSelectionAction());
    group.add(myRefreshAction);
    group.add(myCloseAction);
    return ActionManager.getInstance().createActionToolbar("", group, false).getComponent();
  }

  /** Builds the table's context-menu group (no follow-selection/close entries). */
  private DefaultActionGroup createPopup() {
    DefaultActionGroup group = new DefaultActionGroup();
    group.add(new AddPropertyAction());
    group.add(new EditPropertyAction());
    group.add(new DeletePropertyAction());
    group.addSeparator();
    group.add(new SetKeywordsAction());
    group.addSeparator();
    group.add(myRefreshAction);
    return group;
  }

  /** Returns the name (column 0) of the selected property row, or null when nothing is selected. */
  private String getSelectedPropertyName() {
    int row = myTable.getSelectedRow();
    if (row < 0) {
      return null;
    }
    return (String) myTable.getValueAt(row, 0);
  }

  /**
   * Marks the displayed file dirty in the VCS change tracker so its status is
   * re-evaluated after a property change; directories can be marked recursively.
   */
  private void updateFileStatus(boolean recursive) {
    if (myFile != null && myVcs != null) {
      String url = "file://" + myFile.getPath().replace(File.separatorChar, '/');
      VirtualFile file = VirtualFileManager.getInstance().findFileByUrl(url);
      if (file != null) {
        if (recursive && file.isDirectory()) {
          VcsDirtyScopeManager.getInstance(myVcs.getProject()).dirDirtyRecursively(file, true);
        }
        else {
          VcsDirtyScopeManager.getInstance(myVcs.getProject()).fileDirty(file);
        }
      }
    }
  }

  /** Unregisters the tool window. Static: uses only the event's project, not panel state. */
  private static class CloseAction extends AnAction {
    public void update(AnActionEvent e) {
      e.getPresentation().setText("Close");
      e.getPresentation().setDescription("Close this tool window");
      e.getPresentation().setIcon(IconLoader.getIcon("/actions/cancel.png"));
    }

    public void actionPerformed(AnActionEvent e) {
      // NOTE(review): p may be null if the event has no project in its context — TODO confirm.
      Project p = e.getData(PlatformDataKeys.PROJECT);
      ToolWindowManager.getInstance(p).unregisterToolWindow(ID);
    }
  }

  /** Re-reads the properties of the current file from the working copy. */
  private class RefreshAction extends AnAction {
    public void update(AnActionEvent e) {
      e.getPresentation().setText("Refresh");
      e.getPresentation().setDescription("Reload properties");
      e.getPresentation().setIcon(IconLoader.getIcon("/actions/sync.png"));
      e.getPresentation().setEnabled(myFile != null);
    }

    public void actionPerformed(AnActionEvent e) {
      setFile(myVcs, myFile);
      updateFileStatus(false);
    }
  }

  /** Opens a dialog for editing the svn:keywords property; files only. */
  private class SetKeywordsAction extends AnAction {
    public void update(AnActionEvent e) {
      e.getPresentation().setText("Edit Keywords");
      e.getPresentation().setDescription("Manage svn:keywords property");
      if (!CONTEXT_ID.equals(e.getPlace())) {
        e.getPresentation().setIcon(IconLoader.getIcon("/actions/properties.png"));
      }
      e.getPresentation().setEnabled(myFile != null && myFile.isFile());
    }

    public void actionPerformed(AnActionEvent e) {
      Project project = PlatformDataKeys.PROJECT.getData(e.getDataContext());
      SVNWCClient wcClient = myVcs.createWCClient();
      SVNPropertyData propValue = null;
      try {
        propValue = wcClient.doGetProperty(myFile, SVNProperty.KEYWORDS, SVNRevision.UNDEFINED, SVNRevision.WORKING);
      }
      catch (SVNException e1) {
        // show error message
      }
      SetKeywordsDialog dialog = new SetKeywordsDialog(project, propValue != null ? SVNPropertyValue.getPropertyAsString(propValue.getValue()) : null);
      dialog.show();
      if (dialog.isOK()) {
        String value = dialog.getKeywords();
        try {
          wcClient.doSetProperty(myFile, SVNProperty.KEYWORDS, SVNPropertyValue.create(value), false, false, null);
        }
        catch (SVNException err) {
          // show error message
        }
      }
      setFile(myVcs, myFile);
      updateFileStatus(false);
    }
  }

  /** Deletes the selected property (force=true) and reloads the table. */
  private class DeletePropertyAction extends AnAction {
    public void update(AnActionEvent e) {
      e.getPresentation().setText("Delete Property");
      e.getPresentation().setDescription("Delete selected property");
      if (!CONTEXT_ID.equals(e.getPlace())) {
        e.getPresentation().setIcon(IconLoader.getIcon("/general/remove.png"));
      }
      e.getPresentation().setEnabled(myFile != null && getSelectedPropertyName() != null);
    }

    public void actionPerformed(AnActionEvent e) {
      try {
        // null value + force=true performs the property deletion.
        myVcs.createWCClient().doSetProperty(myFile, getSelectedPropertyName(), null, true, false, null);
      }
      catch (SVNException error) {
        // show error message.
      }
      setFile(myVcs, myFile);
      updateFileStatus(false);
    }
  }

  /** Opens the set-property dialog with an empty name to add a new property. */
  private class AddPropertyAction extends AnAction {
    public void update(AnActionEvent e) {
      e.getPresentation().setText("Add Property");
      e.getPresentation().setDescription("Add new property");
      if (!CONTEXT_ID.equals(e.getPlace())) {
        e.getPresentation().setIcon(IconLoader.getIcon("/general/add.png"));
      }
      e.getPresentation().setEnabled(myFile != null);
    }

    public void actionPerformed(AnActionEvent e) {
      Project project = PlatformDataKeys.PROJECT.getData(e.getDataContext());
      SetPropertyDialog dialog = new SetPropertyDialog(project, new File[] {myFile}, null, myFile.isDirectory());
      dialog.show();
      boolean recursive = false;
      if (dialog.isOK()) {
        String name = dialog.getPropertyName();
        String value = dialog.getPropertyValue();
        recursive = dialog.isRecursive();
        SVNWCClient wcClient = myVcs.createWCClient();
        try {
          wcClient.doSetProperty(myFile, name, SVNPropertyValue.create(value), false, recursive, null);
        }
        catch (SVNException err) {
          // show error message
        }
      }
      setFile(myVcs, myFile);
      updateFileStatus(recursive);
    }
  }

  /** Opens the set-property dialog pre-filled with the selected property's name. */
  private class EditPropertyAction extends AnAction {
    public void update(AnActionEvent e) {
      e.getPresentation().setText("Edit Property");
      e.getPresentation().setDescription("Edit selected property value");
      if (!CONTEXT_ID.equals(e.getPlace())) {
        e.getPresentation().setIcon(IconLoader.getIcon("/actions/editSource.png"));
      }
      e.getPresentation().setEnabled(myFile != null && getSelectedPropertyName() != null);
    }

    public void actionPerformed(AnActionEvent e) {
      Project project = PlatformDataKeys.PROJECT.getData(e.getDataContext());
      SetPropertyDialog dialog = new SetPropertyDialog(project, new File[] {myFile}, getSelectedPropertyName(), myFile.isDirectory());
      dialog.show();
      boolean recursive = false;
      if (dialog.isOK()) {
        String name = dialog.getPropertyName();
        String value = dialog.getPropertyValue();
        recursive = dialog.isRecursive();
        SVNWCClient wcClient = myVcs.createWCClient();
        try {
          wcClient.doSetProperty(myFile, name, SVNPropertyValue.create(value), false, recursive, null);
        }
        catch (SVNException err) {
          // show error message
        }
      }
      setFile(myVcs, myFile);
      updateFileStatus(recursive);
    }
  }

  /** Toggle that makes the panel track the current file selection in the IDE. */
  private class FollowSelectionAction extends ToggleAction {
    public boolean isSelected(AnActionEvent e) {
      return myIsFollowSelection;
    }

    public void setSelected(AnActionEvent e, boolean state) {
      if (state && !myIsFollowSelection) {
        updateSelection(e);
      }
      myIsFollowSelection = state;
    }

    public void update(final AnActionEvent e) {
      super.update(e);
      e.getPresentation().setIcon(IconLoader.getIcon("/general/autoscrollFromSource.png"));
      e.getPresentation().setText("Follow Selection");
      e.getPresentation().setDescription("Follow Selection");
      // change file
      if (myIsFollowSelection) {
        updateSelection(e);
      }
    }

    /** Switches the displayed file to the one selected in the event's context, if different. */
    private void updateSelection(AnActionEvent e) {
      if (myVcs == null) {
        return;
      }
      VirtualFile vf = PlatformDataKeys.VIRTUAL_FILE.getData(e.getDataContext());
      if (vf != null) {
        File f = new File(vf.getPath());
        if (!f.equals(myFile)) {
          setFile(myVcs, f);
          // NOTE(review): p may be null when the context has no project — TODO confirm.
          Project p = PlatformDataKeys.PROJECT.getData(e.getDataContext());
          ToolWindowManager.getInstance(p).getToolWindow(ID).setTitle(f.getName());
        }
      }
    }
  }
}
package ch.pontius.nio.smb;

import jcifs.CIFSContext;
import jcifs.CIFSException;
import jcifs.Config;
import jcifs.Configuration;
import jcifs.context.SingletonContext;
import jcifs.smb.NtlmPasswordAuthenticator;
import jcifs.smb.SmbException;
import jcifs.smb.SmbFile;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URLEncoder;
import java.nio.channels.SeekableByteChannel;
import java.nio.file.*;
import java.nio.file.attribute.BasicFileAttributeView;
import java.nio.file.attribute.BasicFileAttributes;
import java.nio.file.attribute.FileAttribute;
import java.nio.file.attribute.FileAttributeView;
import java.nio.file.spi.FileSystemProvider;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

/**
 * This class acts as a service-provider class for SMB/CIFS based file systems. Internally, it uses jCIFS to provide
 * all the file access functionality.
 *
 * <p>A {@link FileSystemProvider} instance may be shared between threads, so the file-system cache and the lazily
 * created {@link CIFSContext} are accessed in a thread-safe manner.</p>
 *
 * @author Ralph Gasser
 * @version 1.1
 * @since 1.0
 */
public final class SMBFileSystemProvider extends FileSystemProvider {

    /** Internal {@link Logger} instance used by {@link SMBFileSystemProvider}. */
    private final static Logger LOGGER = LoggerFactory.getLogger(SMBFileSystemProvider.class);

    /** Local cache of {@link SMBFileSystem} instances, keyed by canonical authority string. */
    final Map<String, SMBFileSystem> fileSystemCache;

    /**
     * Lazily initialised, shared {@link CIFSContext}. Declared volatile and written only inside the
     * synchronized {@link #contextFromMap(Map)} so concurrent callers observe a fully built context
     * (the original unsynchronized check-then-act could build two contexts under contention).
     */
    private volatile CIFSContext context;

    /** Default constructor for {@link SMBFileSystemProvider}. */
    public SMBFileSystemProvider() {
        this.fileSystemCache = new ConcurrentHashMap<>();
    }

    /**
     * Returns the default scheme for {@link SMBFileSystemProvider}.
     *
     * @return URI Scheme - 'smb'
     */
    @Override
    public String getScheme() {
        return SMBFileSystem.SMB_SCHEME;
    }

    /**
     * Creates a new {@link SMBFileSystem} instance for the provided URI. {@link SMBFileSystem} instances are cached based
     * on the authority part of the URI (i.e. URI's with the same authority share the same {@link SMBFileSystem} instance).
     *
     * Credentials for connecting with the SMB/CIFS server can be provided in several ways:
     *
     * <ol>
     *      <li>Encode in the URI, e.g. smb://WORKGROUP;admin:1234@192.168.1.10 </li>
     *      <li>Provide in the env Map. To do so, you have to set the keys 'workgroup', 'username' and 'password'. </li>
     *      <li>Provide in the jCIFS config. See jCIFS documentation for more information. </li>
     * </ol>
     *
     * The above options will be considered according to precedence. That is, if the credentials are encoded in the URI those provided in
     * the env map or the jCIFS config will be ignored.
     *
     * @param uri URI for which to create {@link SMBFileSystem}
     * @param env Map containing configuration parameters.
     * @return Newly created {@link SMBFileSystem} instance
     *
     * @throws FileSystemAlreadyExistsException If an instance of {@link SMBFileSystem} already exists for provided URI.
     * @throws IllegalArgumentException If provided URI is not an SMB URI.
     */
    @Override
    public SMBFileSystem newFileSystem(URI uri, Map<String, ?> env) {
        if (!uri.getScheme().equals(SMBFileSystem.SMB_SCHEME)) throw new IllegalArgumentException("The provided URI is not an SMB URI.");

        /* Constructs a canonical authority string, taking all possible ways to provide credentials into consideration. */
        try {
            final CIFSContext context = this.contextFromMap(env);
            final String authority = this.constructAuthority(uri, context);

            /* Atomically registers the new SMBFileSystem; putIfAbsent avoids the
             * containsKey/put race that could silently overwrite a concurrently created instance. */
            final SMBFileSystem system = new SMBFileSystem(this, authority);
            if (this.fileSystemCache.putIfAbsent(authority, system) != null) {
                throw new FileSystemAlreadyExistsException("Filesystem for the provided server 'smb://" + authority + "' does already exist.");
            }
            return system;
        } catch (UnsupportedEncodingException e) {
            throw new IllegalArgumentException("Failed to URL encode the username and/or password in provided URI.", e);
        }
    }

    /**
     * Retrieves a {@link SMBFileSystem} instance for the provided URI from fileSystemCache and returns it. {@link SMBFileSystem} instances
     * are cached based on the authority part of the URI (i.e. URI's with the same authority share the same {@link SMBFileSystem} instance).
     *
     * @param uri URI for which to fetch {@link SMBFileSystem}
     * @return {@link SMBFileSystem} instance
     *
     * @throws FileSystemNotFoundException If no instance of {@link SMBFileSystem} could be retrieved from fileSystemCache.
     * @throws IllegalArgumentException If provided URI is not an SMB URI.
     */
    @Override
    public SMBFileSystem getFileSystem(URI uri) {
        if (!uri.getScheme().equals(SMBFileSystem.SMB_SCHEME)) throw new IllegalArgumentException("The provided URI is not an SMB URI.");

        /* Constructs a canonical authority string, taking all possible ways to provide credentials into consideration. */
        try {
            final CIFSContext context = this.contextFromMap(null);
            final String authority = this.constructAuthority(uri, context);

            /* Single get() instead of containsKey()+get() — one atomic lookup, no race window. */
            final SMBFileSystem system = this.fileSystemCache.get(authority);
            if (system == null) {
                throw new FileSystemNotFoundException("No filesystem for the provided server 'smb://" + uri.getAuthority() + "' could be found.");
            }
            return system;
        } catch (UnsupportedEncodingException e) {
            throw new IllegalArgumentException("Failed to URL encode the username and/or password in provided URI.", e);
        }
    }

    /**
     * Converts the provided URI to an {@link SMBPath} instance and returns it. Automatically links the {@link SMBPath}
     * with the {@link SMBFileSystem} associated with its authority, creating the file system on demand.
     *
     * @param uri The URI from which to create the {@link SMBPath}
     * @return Newly created {@link SMBPath}.
     * @throws IllegalArgumentException If URI is not an SMB URI.
     */
    @Override
    public SMBPath getPath(URI uri) {
        if (!uri.getScheme().equals(SMBFileSystem.SMB_SCHEME)) throw new IllegalArgumentException("The provided URI is not an SMB URI.");

        /* Constructs a canonical authority string, taking all possible ways to provide credentials into consideration. */
        try {
            final CIFSContext context = this.contextFromMap(null);
            final String authority = this.constructAuthority(uri, context);

            /* Reuse a cached file system when present; otherwise create one. If another thread wins
             * the creation race, fall back to the instance it registered instead of failing. */
            final SMBFileSystem cached = this.fileSystemCache.get(authority);
            if (cached != null) {
                return new SMBPath(cached, uri);
            }
            try {
                return new SMBPath(this.newFileSystem(uri, new HashMap<>()), uri);
            } catch (FileSystemAlreadyExistsException race) {
                return new SMBPath(this.getFileSystem(uri), uri);
            }
        } catch (UnsupportedEncodingException e) {
            throw new IllegalArgumentException("Failed to URL encode the username and/or password in provided URI.", e);
        }
    }

    /**
     * Creates and returns a new {@link SeekableSMBByteChannel} instance.
     *
     * @param path The {@link SMBPath} for which a byte channel should be opened.
     * @param options A set of {@link StandardOpenOption}s.
     * @param attrs An optional list of file attributes to set when creating the file (currently ignored).
     * @return An instance of {@link SeekableSMBByteChannel}.
     *
     * @throws IllegalArgumentException If provided path is not an {@link SMBPath} instance.
     * @throws IOException If an I/O error occurs
     * @throws UnsupportedOperationException If an unsupported open option is specified (DSYNC, SYNC, SPARSE or DELETE_ON_CLOSE)
     */
    @Override
    public SeekableByteChannel newByteChannel(Path path, Set<? extends OpenOption> options, FileAttribute<?>... attrs) throws IOException {
        /* Convert path and instantiate SmbFile. */
        final SMBPath smbPath = SMBPath.fromPath(path);
        final SmbFile file = smbPath.getSmbFile();

        /* Determines how the SeekableByteChannel should be setup. */
        boolean write = false;
        boolean create = false;
        boolean create_new = false;
        boolean append = false;
        boolean truncate = false;

        for (OpenOption option : options) {
            if (option.equals(StandardOpenOption.WRITE)) {
                write = true;
            } else if (option.equals(StandardOpenOption.CREATE)) {
                create = true;
            } else if (option.equals(StandardOpenOption.CREATE_NEW)) {
                create_new = true;
            } else if (option.equals(StandardOpenOption.APPEND)) {
                append = true;
            } else if (option.equals(StandardOpenOption.TRUNCATE_EXISTING)) {
                truncate = true;
            } else if (option.equals(StandardOpenOption.DSYNC) || option.equals(StandardOpenOption.SYNC) || option.equals(StandardOpenOption.SPARSE) || option.equals(StandardOpenOption.DELETE_ON_CLOSE)) {
                throw new UnsupportedOperationException("SMBFileSystemProvider does not support the option options SYNC, DSYNC, SPARSE or DELETE_ON_CLOSE");
            }
        }

        /* Returns a new SeekableSMBByteChannel object. */
        return new SeekableSMBByteChannel(file, write, create, create_new, truncate, append);
    }

    /**
     * Creates and returns a new {@link SMBDirectoryStream} for the specified path.
     *
     * @param dir The {@link SMBPath} for which to create a new DirectoryStream.
     * @param filter An optional filter that should be applied to filter entries in the stream.
     * @return An instance of {@link SMBDirectoryStream}.
     *
     * @throws IllegalArgumentException If provided path is not an {@link SMBPath} instance.
     * @throws NotDirectoryException If provided {@link SMBPath} does not point to a directory
     * @throws IOException If an I/O error occurs
     */
    @Override
    public DirectoryStream<Path> newDirectoryStream(Path dir, DirectoryStream.Filter<? super Path> filter) throws IOException {
        return new SMBDirectoryStream(SMBPath.fromPath(dir), filter);
    }

    /**
     * Creates a directory under the provided {@link SMBPath}
     *
     * @param dir {@link SMBPath} to folder that should be created.
     *
     * @throws IllegalArgumentException If provided path is not an {@link SMBPath} instance.
     * @throws FileAlreadyExistsException if a directory could not otherwise be created because a file of
     *         that name already exists <i>(optional specific exception)</i>
     * @throws IOException If creating the folder fails for some reason.
     */
    @Override
    public void createDirectory(Path dir, FileAttribute<?>... attrs) throws IOException {
        try (SmbFile smbFile = SMBPath.fromPath(dir).getSmbFile()) {
            smbFile.mkdir();
        } catch (SmbException e) {
            SMBExceptionUtil.rethrowAsNIOException(e, dir);
        }
    }

    /**
     * Deletes the file under the provided {@link SMBPath}
     *
     * @param path {@link SMBPath} to file that should be deleted.
     *
     * @throws IllegalArgumentException If provided path is not an {@link SMBPath} instance.
     * @throws NoSuchFileException if the file does not exist <i>(optional specific exception)</i>
     * @throws IOException If deleting the file fails for some reason.
     */
    @Override
    public void delete(Path path) throws IOException {
        try (SmbFile smbFile = SMBPath.fromPath(path).getSmbFile()) {
            smbFile.delete();
        } catch (SmbException e) {
            SMBExceptionUtil.rethrowAsNIOException(e, path);
        }
    }

    /**
     * Copies the file under provided source {@link SMBPath} to the destination {@link SMBPath}.
     * Some CopyOptions are ignored!
     *
     * @param source Source {@link SMBPath}
     * @param target Destination {@link SMBPath}
     * @param options CopyOptions
     *
     * @throws IllegalArgumentException If provided paths are not {@link SMBPath} instances.
     * @throws NoSuchFileException if the file does not exist <i>(optional specific exception)</i>
     * @throws FileAlreadyExistsException if the target file exists but cannot be replaced because the
     *         {@code REPLACE_EXISTING} option is not specified <i>(optional specific exception)</i>
     * @throws IOException If copying fails for some reason.
     */
    @Override
    public void copy(Path source, Path target, CopyOption... options) throws IOException {
        boolean replaceExisting = false;
        boolean copyAttributes = false;
        for (CopyOption opt : options) {
            if (opt == StandardCopyOption.REPLACE_EXISTING) {
                replaceExisting = true;
            } else if (opt == StandardCopyOption.COPY_ATTRIBUTES) {
                copyAttributes = true;
            }
        }
        if (copyAttributes) {
            LOGGER.debug("Setting file attributes is currently not supported by SMBFileSystemProvider.");
        }
        try (SmbFile fromFile = SMBPath.fromPath(source).getSmbFile();
             SmbFile toFile = SMBPath.fromPath(target).getSmbFile()) {
            if (!replaceExisting && toFile.exists()) {
                throw new FileAlreadyExistsException(toFile.toString(), null, "The specified SMB resource does already exist.");
            }
            fromFile.copyTo(toFile);
        } catch (SmbException e) {
            SMBExceptionUtil.rethrowAsNIOException(e, source, target);
        }
    }

    /**
     * Moves the file under the provided source {@link SMBPath} to the destination {@link SMBPath}.
     * Some CopyOptions are ignored!
     *
     * @param source Source {@link SMBPath}
     * @param target Destination {@link SMBPath}
     * @param options CopyOptions
     *
     * @throws IllegalArgumentException If provided paths are not {@link SMBPath} instances.
     * @throws NoSuchFileException if the file does not exist <i>(optional specific exception)</i>
     * @throws FileAlreadyExistsException if the target file exists but cannot be replaced because the
     *         {@code REPLACE_EXISTING} option is not specified <i>(optional specific exception)</i>
     * @throws IOException If moving fails for some reason.
     */
    @Override
    public void move(Path source, Path target, CopyOption... options) throws IOException {
        boolean replaceExisting = false;
        for (CopyOption opt : options) {
            if (opt == StandardCopyOption.REPLACE_EXISTING) {
                replaceExisting = true;
            }
        }
        try (SmbFile fromFile = SMBPath.fromPath(source).getSmbFile();
             SmbFile toFile = SMBPath.fromPath(target).getSmbFile()) {
            fromFile.renameTo(toFile, replaceExisting);
        } catch (SmbException e) {
            SMBExceptionUtil.rethrowAsNIOException(e, source, target);
        }
    }

    /**
     * Returns true, if the resources specified by the two {@link SMBPath} instance are the same.
     *
     * @param path1 First {@link SMBPath}
     * @param path2 Second {@link SMBPath}
     * @return True if the two paths point to the same resource.
     *
     * @throws IllegalArgumentException If provided paths are not {@link SMBPath} instances.
     * @throws IOException If an I/O error occurs.
     */
    @Override
    public boolean isSameFile(Path path1, Path path2) throws IOException {
        /* try-with-resources: the original leaked both SmbFile handles. */
        try (SmbFile smbFile1 = SMBPath.fromPath(path1).getSmbFile();
             SmbFile smbFile2 = SMBPath.fromPath(path2).getSmbFile()) {
            return smbFile1.equals(smbFile2);
        }
    }

    /**
     * Returns true, if the resource specified by the provided {@link SMBPath} instance is hidden.
     *
     * @param path {@link SMBPath} that should be checked.
     * @return True if the resource under {@link SMBPath} is hidden.
     *
     * @throws IllegalArgumentException If provided paths are not {@link SMBPath} instances.
     * @throws IOException If an I/O error occurs.
     */
    @Override
    public boolean isHidden(Path path) throws IOException {
        try (SmbFile smbFile = SMBPath.fromPath(path).getSmbFile()) {
            return smbFile.isHidden();
        } catch (SmbException e) {
            SMBExceptionUtil.rethrowAsNIOException(e, path);
            return false;
        }
    }

    /**
     * Checks access to file under the provided {@link SMBPath}.
     *
     * @param path {@link SMBPath} for which access should be checked.
     * @param modes AccessModes that should be checked. Only READ and WRITE are supported.
     *
     * @throws NoSuchFileException If file or folder specified by {@link SMBPath} does not exist.
     * @throws AccessDeniedException If requested access cannot be provided for file or folder under {@link SMBPath}.
     * @throws IllegalArgumentException If provided path is not a {@link SMBPath} instance.
     * @throws IOException If checking access fails for some reason.
     */
    @Override
    public void checkAccess(Path path, AccessMode... modes) throws IOException {
        /* try-with-resources: the original leaked the SmbFile handle. */
        try (SmbFile smbFile = SMBPath.fromPath(path).getSmbFile()) {
            /* First check if file exists. */
            if (!smbFile.exists()) throw new NoSuchFileException("The specified SMB resource does not exist.");

            /* Determine which attributes to check. */
            boolean checkRead = false;
            boolean checkWrite = false;
            for (AccessMode mode : modes) {
                if (mode.equals(AccessMode.READ)) checkRead = true;
                if (mode.equals(AccessMode.WRITE)) checkWrite = true;
            }

            /* Perform necessary checks. */
            if (checkRead && !smbFile.canRead()) throw new AccessDeniedException("The specified SMB resource is not readable.");
            if (checkWrite && !smbFile.canWrite()) throw new AccessDeniedException("The specified SMB resource is not writable.");
        }
    }

    /**
     * Reads the file attributes view of the file under the provided {@link SMBPath} and returns it. LinkOption will be ignored as
     * the SMB filesystem does not support symlinks.
     *
     * @param path {@link SMBPath} for which attributes view should be created.
     * @param type Class of the attributes view. Must be either {@link BasicFileAttributeView} or {@link SMBFileAttributeView}
     * @param options LinkOptions; will be ignored.
     * @param <V> Type of the class that's being returned.
     * @return {@link SMBFileAttributeView} or null if the requested view type is unsupported.
     *
     * @throws IllegalArgumentException If provided paths is not a {@link SMBPath} instance.
     */
    @Override
    @SuppressWarnings("unchecked")
    public <V extends FileAttributeView> V getFileAttributeView(Path path, Class<V> type, LinkOption... options) {
        if (type == BasicFileAttributeView.class || type == SMBFileAttributeView.class) {
            return (V)(new SMBFileAttributeView(SMBPath.fromPath(path)));
        } else {
            return null;
        }
    }

    /**
     * Reads the file attributes of the file under the provided {@link SMBPath} and returns it. LinkOption will be ignored as
     * the SMB filesystem does not support symlinks.
     *
     * @param path {@link SMBPath} for which attributes should be read.
     * @param type Class of the attribute. Must be either {@link BasicFileAttributes} or {@link SMBFileAttributes}
     * @param options LinkOptions; will be ignored.
     * @param <A> Type of the class that's being returned.
     * @return {@link SMBFileAttributes} or null if the requested attributes type is unsupported.
     *
     * @throws IllegalArgumentException If provided paths is not a {@link SMBPath} instance.
     * @throws IOException If reading attributes fails for some reason.
     */
    @Override
    @SuppressWarnings("unchecked")
    public <A extends BasicFileAttributes> A readAttributes(Path path, Class<A> type, LinkOption... options) throws IOException {
        if (type == BasicFileAttributes.class || type == SMBFileAttributes.class) {
            return (A)(new SMBFileAttributes(SMBPath.fromPath(path).getSmbFile()));
        } else {
            return null;
        }
    }

    @Override
    public Map<String, Object> readAttributes(Path path, String attributes, LinkOption... options) {
        /* NOTE(review): returning null violates the FileSystemProvider contract (should throw
         * UnsupportedOperationException); kept as-is to avoid changing behavior for existing callers. */
        return null;
    }

    @Override
    public void setAttribute(Path path, String attribute, Object value, LinkOption... options) {
        throw new UnsupportedOperationException("Setting file attributes is currently not supported by SMBFileSystemProvider.");
    }

    @Override
    public FileStore getFileStore(Path path) {
        throw new UnsupportedOperationException("Access to FileStore is currently not supported by SMBFileSystemProvider.");
    }

    /**
     * Converts an environment map to a {@link CIFSContext}, creating and caching the context on first use.
     *
     * <p>Synchronized to make the lazy initialisation of {@link #context} atomic. Note that the env map only
     * influences the context the first time this method runs; later calls return the cached context regardless
     * of the map passed in. Also note that system properties are applied after the env entries and therefore
     * take precedence for the 'smb-nio.useNtlmPasswordAuthenticator' flag.</p>
     *
     * @param env The environment map to convert. May be null.
     */
    private synchronized CIFSContext contextFromMap(Map<String, ?> env) {
        if (this.context == null) {
            final Properties properties = new Properties();
            if (env != null) {
                for (Map.Entry<String, ?> e : env.entrySet()) {
                    properties.put(e.getKey(), e.getValue());
                }
            }
            CIFSContext singletonContext = SingletonContext.getInstance();
            properties.putAll(System.getProperties());
            if (Config.getBoolean(properties, "smb-nio.useNtlmPasswordAuthenticator", false)) {
                Configuration config = singletonContext.getConfig();
                singletonContext = singletonContext.withCredentials(new NtlmPasswordAuthenticator(config.getDefaultDomain(), config.getDefaultUsername(), config.getDefaultPassword()));
            }
            this.context = singletonContext;
        }
        return this.context;
    }

    /**
     * This method is used internally to construct a canonical authority string based on the provided URI and the various ways
     * credentials can be provided. The following options are considered in preceding order:
     *
     * <ol>
     *      <li>Encoded in the URI, e.g. smb://WORKGROUP;admin:1234@192.168.1.10 </li>
     *      <li>In the env Map. To do so, you have to set the keys 'workgroup', 'username' and 'password'. </li>
     *      <li>In the jCIFS config. See jCIFS documentation for more information. </li>
     * </ol>
     *
     * @param uri The URI for which to construct an authority string.
     * @param context The {@link CIFSContext} used by this {@link SMBFileSystemProvider}. Can be null!
     * @return A canonical authority string.
     */
    private String constructAuthority(URI uri, CIFSContext context) throws UnsupportedEncodingException {
        /* The authority string. */
        String authority;

        /* Check if URI encodes credentials; if so they win over the context's defaults. */
        if (uri.getAuthority().contains(SMBFileSystem.CREDENTIALS_SEPARATOR)) {
            authority = uri.getAuthority();
        } else {
            final StringBuilder builder = new StringBuilder();
            if (context != null) {
                if (context.getConfig().getDefaultDomain() != null) {
                    builder.append(context.getConfig().getDefaultDomain());
                    builder.append(";");
                }
                if (context.getConfig().getDefaultUsername() != null) {
                    builder.append(URLEncoder.encode(context.getConfig().getDefaultUsername(), "UTF-8"));
                    if (context.getConfig().getDefaultPassword() != null) {
                        builder.append(":");
                        builder.append(URLEncoder.encode(context.getConfig().getDefaultPassword(), "UTF-8"));
                    }
                }
            }
            if (builder.length() > 0) {
                builder.append(SMBFileSystem.CREDENTIALS_SEPARATOR).append(uri.getAuthority());
                authority = builder.toString();
            } else {
                authority = uri.getAuthority();
            }
        }
        return authority;
    }

    /** Returns the shared {@link CIFSContext}; null until the first file system / path lookup initialises it. */
    public CIFSContext getContext() {
        return context;
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.bucket.terms;

import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.search.aggregations.InternalOrder;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.KeyComparable;

import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Objects;

/**
 * Base class for the internal (shard-level / reduce-time) representation of a terms aggregation result.
 * Holds the bucket ordering, size limits and serialization logic shared by all concrete terms types.
 *
 * NOTE: the stream read/write order in this class and in {@link Bucket} is a wire-format contract —
 * the constructors reading from {@link StreamInput} must mirror the write methods field-for-field.
 */
public abstract class InternalTerms<A extends InternalTerms<A, B>, B extends InternalTerms.Bucket<B>> extends AbstractInternalTerms<A, B> implements Terms {

    protected static final ParseField DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME = new ParseField("doc_count_error_upper_bound");
    protected static final ParseField SUM_OF_OTHER_DOC_COUNTS = new ParseField("sum_other_doc_count");

    /** A single bucket of a terms aggregation: a doc count, an error bound and the sub-aggregations. */
    public abstract static class Bucket<B extends Bucket<B>> extends AbstractTermsBucket implements Terms.Bucket, KeyComparable<B> {
        /**
         * Reads a bucket. Should be a constructor reference.
         */
        @FunctionalInterface
        public interface Reader<B extends Bucket<B>> {
            B read(StreamInput in, DocValueFormat format, boolean showDocCountError) throws IOException;
        }

        // Ordinal used during collection; not serialized.
        long bucketOrd;

        protected long docCount;
        protected long docCountError;
        protected InternalAggregations aggregations;
        // showDocCountError and format are copied from the parent aggregation for
        // serialization/rendering; they are deliberately excluded from equals/hashCode.
        protected final boolean showDocCountError;
        protected final DocValueFormat format;

        protected Bucket(long docCount, InternalAggregations aggregations, boolean showDocCountError, long docCountError,
                DocValueFormat formatter) {
            this.showDocCountError = showDocCountError;
            this.format = formatter;
            this.docCount = docCount;
            this.aggregations = aggregations;
            this.docCountError = docCountError;
        }

        /**
         * Read from a stream.
         * Must mirror {@link #writeTo(StreamOutput)}: vlong doc count, optional long error
         * (only when showDocCountError), then the sub-aggregations.
         */
        protected Bucket(StreamInput in, DocValueFormat formatter, boolean showDocCountError) throws IOException {
            this.showDocCountError = showDocCountError;
            this.format = formatter;
            docCount = in.readVLong();
            // -1 is the sentinel for "no error computed" when the flag is off.
            docCountError = -1;
            if (showDocCountError) {
                docCountError = in.readLong();
            }
            aggregations = InternalAggregations.readFrom(in);
        }

        @Override
        public final void writeTo(StreamOutput out) throws IOException {
            out.writeVLong(getDocCount());
            if (showDocCountError) {
                out.writeLong(docCountError);
            }
            aggregations.writeTo(out);
            // Subclasses append the term key itself (long / double / bytes).
            writeTermTo(out);
        }

        protected abstract void writeTermTo(StreamOutput out) throws IOException;

        @Override
        public long getDocCount() {
            return docCount;
        }

        @Override
        public long getDocCountError() {
            // Reading the error is only legal when it was requested/tracked.
            if (!showDocCountError) {
                throw new IllegalStateException("show_terms_doc_count_error is false");
            }
            return docCountError;
        }

        @Override
        protected void setDocCountError(long docCountError) {
            this.docCountError = docCountError;
        }

        @Override
        protected void updateDocCountError(long docCountErrorDiff) {
            this.docCountError += docCountErrorDiff;
        }

        @Override
        protected boolean getShowDocCountError() {
            return showDocCountError;
        }

        @Override
        public Aggregations getAggregations() {
            return aggregations;
        }

        @Override
        public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            keyToXContent(builder);
            builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount());
            if (showDocCountError) {
                builder.field(InternalTerms.DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME.getPreferredName(), getDocCountError());
            }
            aggregations.toXContentInternal(builder, params);
            builder.endObject();
            return builder;
        }

        protected abstract XContentBuilder keyToXContent(XContentBuilder builder) throws IOException;

        @Override
        public boolean equals(Object obj) {
            if (obj == null || getClass() != obj.getClass()) {
                return false;
            }
            Bucket<?> that = (Bucket<?>) obj;
            // No need to take format and showDocCountError, they are attributes
            // of the parent terms aggregation object that are only copied here
            // for serialization purposes
            return Objects.equals(docCount, that.docCount)
                    && Objects.equals(docCountError, that.docCountError)
                    && Objects.equals(aggregations, that.aggregations);
        }

        @Override
        public int hashCode() {
            return Objects.hash(getClass(), docCount, docCountError, aggregations);
        }
    }

    // reduceOrder: order used when merging shard results; order: order of the final response.
    protected final BucketOrder reduceOrder;
    protected final BucketOrder order;
    protected final int requiredSize;
    protected final long minDocCount;

    /**
     * Creates a new {@link InternalTerms}
     * @param name The name of the aggregation
     * @param reduceOrder The {@link BucketOrder} that should be used to merge shard results.
     * @param order The {@link BucketOrder} that should be used to sort the final reduce.
     * @param requiredSize The number of top buckets.
     * @param minDocCount The minimum number of documents allowed per bucket.
     * @param metadata The metadata associated with the aggregation.
     */
    protected InternalTerms(String name,
                            BucketOrder reduceOrder,
                            BucketOrder order,
                            int requiredSize,
                            long minDocCount,
                            Map<String, Object> metadata) {
        super(name, metadata);
        this.reduceOrder = reduceOrder;
        this.order = order;
        this.requiredSize = requiredSize;
        this.minDocCount = minDocCount;
    }

    /**
     * Read from a stream.
     * Pre-7.10 streams carry a single order, which serves as both reduce order and
     * final order; 7.10+ streams carry them separately (mirrors {@link #doWriteTo}).
     */
    protected InternalTerms(StreamInput in) throws IOException {
        super(in);
        reduceOrder = InternalOrder.Streams.readOrder(in);
        if (in.getVersion().onOrAfter(Version.V_7_10_0)) {
            order = InternalOrder.Streams.readOrder(in);
        } else {
            order = reduceOrder;
        }
        requiredSize = readSize(in);
        minDocCount = in.readVLong();
    }

    @Override
    protected final void doWriteTo(StreamOutput out) throws IOException {
        // 7.10+ peers expect both orders; older peers expect only one, which they
        // read as the reduce order — so only 'order' is written for them.
        if (out.getVersion().onOrAfter(Version.V_7_10_0)) {
            reduceOrder.writeTo(out);
        }
        order.writeTo(out);
        writeSize(requiredSize, out);
        out.writeVLong(minDocCount);
        writeTermTypeInfoTo(out);
    }

    protected abstract void writeTermTypeInfoTo(StreamOutput out) throws IOException;

    @Override
    public abstract List<B> getBuckets();

    @Override
    public abstract B getBucketByKey(String term);

    @Override
    protected BucketOrder getReduceOrder() {
        return reduceOrder;
    }

    @Override
    protected BucketOrder getOrder() {
        return order;
    }

    @Override
    protected long getMinDocCount() {
        return minDocCount;
    }

    @Override
    protected int getRequiredSize() {
        return requiredSize;
    }

    protected abstract void setDocCountError(long docCountError);

    protected abstract int getShardSize();

    protected abstract A create(String name, List<B> buckets, BucketOrder reduceOrder, long docCountError, long otherDocCount);

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (obj == null || getClass() != obj.getClass()) return false;
        if (super.equals(obj) == false) return false;

        InternalTerms<?,?> that = (InternalTerms<?,?>) obj;
        return Objects.equals(minDocCount, that.minDocCount)
                && Objects.equals(reduceOrder, that.reduceOrder)
                && Objects.equals(order, that.order)
                && Objects.equals(requiredSize, that.requiredSize);
    }

    @Override
    public int hashCode() {
        return Objects.hash(super.hashCode(), minDocCount, reduceOrder, order, requiredSize);
    }

    /** Renders the fields shared by all terms responses: error bound, other-doc count and the bucket array. */
    protected static XContentBuilder doXContentCommon(XContentBuilder builder,
                                                      Params params,
                                                      long docCountError,
                                                      long otherDocCount,
                                                      List<? extends Bucket<?>> buckets) throws IOException {
        builder.field(DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME.getPreferredName(), docCountError);
        builder.field(SUM_OF_OTHER_DOC_COUNTS.getPreferredName(), otherDocCount);
        builder.startArray(CommonFields.BUCKETS.getPreferredName());
        for (Bucket<?> bucket : buckets) {
            bucket.toXContent(builder, params);
        }
        builder.endArray();
        return builder;
    }
}
package org.keycloak.testsuite;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.LinkedList;
import java.util.List;

import org.infinispan.AdvancedCache;
import org.infinispan.Cache;
import org.infinispan.context.Flag;
import org.jboss.logging.Logger;
import org.keycloak.connections.infinispan.InfinispanConnectionProvider;
import org.keycloak.models.ClientModel;
import org.keycloak.models.ClientSessionModel;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.KeycloakSessionFactory;
import org.keycloak.models.KeycloakSessionTask;
import org.keycloak.models.RealmModel;
import org.keycloak.models.UserModel;
import org.keycloak.models.UserSessionModel;
import org.keycloak.models.UserSessionProvider;
import org.keycloak.models.UserSessionProviderFactory;
import org.keycloak.models.session.UserSessionPersisterProvider;
import org.keycloak.models.sessions.infinispan.entities.SessionEntity;
import org.keycloak.models.sessions.infinispan.entities.UserSessionEntity;
import org.keycloak.models.utils.KeycloakModelUtils;
import org.keycloak.common.util.Time;

/**
 * Interactive STDIN console for inspecting and exercising the Infinispan session caches.
 *
 * HOWTO USE THIS:
 *
 * 1) Run KeycloakServer with system properties (assuming mongo up and running on localhost):
 * -Dkeycloak.realm.provider=mongo -Dkeycloak.user.provider=mongo -Dkeycloak.userSessionPersister.provider=mongo -Dkeycloak.connectionsMongo.db=keycloak -Dkeycloak.connectionsInfinispan.clustered=true -Dresources -DstartInfinispanCLI
 *
 * 2) Write command on STDIN to persist 50000 userSessions to mongo: persistSessions 50000
 *
 * 3) Run command "clear" to ensure infinispan cache is cleared. Doublecheck with command "size" is 0
 *
 * 4) Write command to load sessions from persistent storage - 100 sessions per worker transaction: loadPersistentSessions 100
 *
 * See the progress in log. Finally run command "size" to ensure size is 100001 (50000 userSessions + 50000 clientSessions + 1 initializationState item)
 *
 * 5) Alternative to step 3+4 - Kill the server after step 2 and start two KeycloakServer in parallel on ports 8081 and 8082 . See the progress in logs of loading persistent sessions to infinispan.
 * Kill the coordinator (usually 8081 node) during startup and see the node 8082 became coordinator and took ownership of loading persistent sessions. After node 8082 fully started, the size of infinispan is again 100001
 *
 * @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
 */
public class InfinispanCLI {

    private static final Logger log = Logger.getLogger(InfinispanCLI.class);

    private final KeycloakSessionFactory sessionFactory;

    public InfinispanCLI(KeycloakServer server) {
        this.sessionFactory = server.getSessionFactory();
    }

    /**
     * Read commands from STDIN until EOF or the "exit" command, dispatching each one
     * to {@link #runTask(String, Cache)} inside its own Keycloak transaction.
     *
     * WARNING: Stdin blocking operation
     *
     * @throws IOException on STDIN read failure
     */
    public void start() throws IOException {
        log.info("Starting infinispan CLI. Exit with 'exit'");
        BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));
        String line;
        try {
            while ((line = reader.readLine()) != null) {
                log.info("Command: " + line);
                if (line.equals("exit")) {
                    return;
                }

                final String finalLine = line;

                // Each command runs in its own transaction so provider/model lookups
                // stay valid for the whole command.
                KeycloakModelUtils.runJobInTransaction(sessionFactory, new KeycloakSessionTask() {

                    @Override
                    public void run(KeycloakSession session) {
                        InfinispanConnectionProvider provider = session.getProvider(InfinispanConnectionProvider.class);
                        Cache<String, SessionEntity> ispnCache = provider.getCache(InfinispanConnectionProvider.OFFLINE_SESSION_CACHE_NAME);
                        runTask(finalLine, ispnCache);
                    }

                });
            }
        } finally {
            log.info("Exit infinispan CLI");
            reader.close();
        }
    }

    /**
     * Parse and execute a single space-separated command against the given cache.
     * Supported commands: put, get, remove, clear, size, list, getLocal,
     * persistSessions &lt;count&gt;, loadPersistentSessions &lt;sessionsPerSegment&gt;.
     * Unknown commands are silently ignored; runtime failures (bad args, parse
     * errors) are logged rather than propagated so the console keeps running.
     */
    private void runTask(String line, Cache<String, SessionEntity> cache) {
        try {
            String[] splits = line.split(" ");

            if (splits[0].equals("put")) {
                UserSessionEntity userSession = new UserSessionEntity();
                String id = splits[1];
                userSession.setId(id);
                userSession.setRealm(splits[2]);
                userSession.setLastSessionRefresh(Time.currentTime());
                cache.put(id, userSession);

            } else if (splits[0].equals("get")) {
                String id = splits[1];
                UserSessionEntity userSession = (UserSessionEntity) cache.get(id);
                printSession(id, userSession);

            } else if (splits[0].equals("remove")) {
                String id = splits[1];
                cache.remove(id);

            } else if (splits[0].equals("clear")) {
                cache.clear();
                log.info("Cache cleared");

            } else if (splits[0].equals("size")) {
                log.info("Size: " + cache.size());

            } else if (splits[0].equals("list")) {
                // Only user sessions are printed; other SessionEntity subtypes are skipped.
                for (String id : cache.keySet()) {
                    SessionEntity entity = cache.get(id);
                    if (!(entity instanceof UserSessionEntity)) {
                        continue;
                    }
                    UserSessionEntity userSession = (UserSessionEntity) cache.get(id);
                    log.info("list: key=" + id + ", value=" + toString(userSession));
                }

            } else if (splits[0].equals("getLocal")) {
                // CACHE_MODE_LOCAL reads only this node's copy, bypassing remote lookups.
                String id = splits[1];
                cache = ((AdvancedCache) cache).withFlags(Flag.CACHE_MODE_LOCAL);
                UserSessionEntity userSession = (UserSessionEntity) cache.get(id);
                printSession(id, userSession);

            } else if (splits[0].equals("persistSessions")) {
                final int count = Integer.parseInt(splits[1]);
                final List<String> userSessionIds = new LinkedList<>();
                final List<String> clientSessionIds = new LinkedList<>();

                // Create sessions in separate transaction first
                KeycloakModelUtils.runJobInTransaction(sessionFactory, new KeycloakSessionTask() {

                    @Override
                    public void run(KeycloakSession session) {
                        RealmModel realm = session.realms().getRealmByName("master");
                        UserModel john = session.users().getUserByUsername("admin", realm);
                        ClientModel testApp = realm.getClientByClientId("security-admin-console");
                        for (int i=0 ; i<count ; i++) {
                            UserSessionModel userSession = session.sessions().createUserSession(realm, john, "john-doh@localhost", "127.0.0.2", "form", true, null, null);
                            ClientSessionModel clientSession = session.sessions().createClientSession(realm, testApp);
                            clientSession.setUserSession(userSession);
                            clientSession.setRedirectUri("http://redirect");
                            clientSession.setNote("foo", "bar-" + i);
                            userSessionIds.add(userSession.getId());
                            clientSessionIds.add(clientSession.getId());
                        }
                    }

                });
                log.info("Sessions created in infinispan storage");

                // Persist them now
                KeycloakModelUtils.runJobInTransaction(sessionFactory, new KeycloakSessionTask() {

                    @Override
                    public void run(KeycloakSession session) {
                        RealmModel realm = session.realms().getRealmByName("master");
                        UserSessionPersisterProvider persister = session.getProvider(UserSessionPersisterProvider.class);

                        for (String userSessionId : userSessionIds) {
                            UserSessionModel userSession = session.sessions().getUserSession(realm, userSessionId);
                            persister.createUserSession(userSession, true);
                        }
                        log.info("userSessions persisted");

                        for (String clientSessionId : clientSessionIds) {
                            ClientSessionModel clientSession = session.sessions().getClientSession(realm, clientSessionId);
                            persister.createClientSession(clientSession, true);
                        }
                        log.info("clientSessions persisted");
                    }

                });

                // Report the total count from persistent storage (separate transaction so the read is fresh)
                KeycloakModelUtils.runJobInTransaction(sessionFactory, new KeycloakSessionTask() {

                    @Override
                    public void run(KeycloakSession session) {
                        RealmModel realm = session.realms().getRealmByName("master");
                        UserSessionPersisterProvider persister = session.getProvider(UserSessionPersisterProvider.class);
                        log.info(count + " sessions persisted. Total number of sessions: " + persister.getUserSessionsCount(true));
                    }

                });

            } else if (splits[0].equals("loadPersistentSessions")) {
                int sessionsPerSegment = Integer.parseInt(splits[1]);
                UserSessionProviderFactory sessionProviderFactory = (UserSessionProviderFactory) sessionFactory.getProviderFactory(UserSessionProvider.class);
                sessionProviderFactory.loadPersistentSessions(sessionFactory, 10, sessionsPerSegment);
                log.info("All persistent sessions loaded successfully");
            }

        } catch (RuntimeException e) {
            log.error("Error occured during command. ", e);
        }
    }

    // Log a single looked-up session, or a not-found message when it is absent.
    private void printSession(String id, UserSessionEntity userSession) {
        if (userSession == null) {
            log.info("Not found session with Id: " + id);
        } else {
            log.info("Found session. ID: " + toString(userSession));
        }
    }

    // Human-readable one-line summary of a user session for console output.
    private String toString(UserSessionEntity userSession) {
        return "ID: " + userSession.getId() + ", realm: " + userSession.getRealm() + ", lastAccessTime: " +
                Time.toDate(userSession.getLastSessionRefresh()) + ", clientSessions: " + userSession.getClientSessions().size();
    }

}
/*

   Derby - Class org.apache.derby.iapi.types.SQLDouble

   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to you under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

 */

package org.apache.derby.iapi.types;

import org.apache.derby.iapi.reference.SQLState;

import org.apache.derby.iapi.services.io.ArrayInputStream;

import org.apache.derby.iapi.services.io.StoredFormatIds;
import org.apache.derby.iapi.services.io.Storable;

import org.apache.derby.iapi.services.sanity.SanityManager;

import org.apache.derby.iapi.error.StandardException;

import org.apache.derby.iapi.types.BooleanDataValue;
import org.apache.derby.iapi.types.DataValueDescriptor;
import org.apache.derby.iapi.types.NumberDataValue;
import org.apache.derby.iapi.types.TypeId;

import org.apache.derby.iapi.services.cache.ClassSize;

import org.apache.derby.iapi.types.NumberDataType;
import org.apache.derby.iapi.types.SQLBoolean;

import java.io.ObjectOutput;
import java.io.ObjectInput;
import java.io.IOException;

import java.sql.ResultSet;
import java.sql.PreparedStatement;
import java.sql.SQLException;

/**
 * SQLDouble satisfies the DataValueDescriptor
 * interfaces (i.e., OrderableDataType). It implements a double column,
 * e.g. for * storing a column value; it can be specified
 * when constructed to not allow nulls. Nullability cannot be changed
 * after construction, as it affects the storage size and mechanism.
 * <p>
 * Because OrderableDataType is a subtype of DataType,
 * SQLDouble can play a role in either a DataType/Row
 * or a OrderableDataType/Row, interchangeably.
 * <p>
 * We assume the store has a flag for nullness of the value,
 * and simply return a 0-length array for the stored form
 * when the value is null.
 * <p>
 * PERFORMANCE: There are likely alot of performance improvements
 * possible for this implementation -- it new's Double
 * more than it probably wants to.
 * <p>
 * This is modeled after SQLInteger.
 * <p>
 * We don't let doubles get constructed with NaN or Infinity values, and
 * check for those values where they can occur on operations, so the
 * set* operations do not check for them coming in.
 *
 */
public final class SQLDouble extends NumberDataType
{
	/*
	 * DataValueDescriptor interface
	 * (mostly implemented in DataType)
	 */

    // JDBC is lax in what it permits and what it
	// returns, so we are similarly lax
	// @see DataValueDescriptor

	/**
	 * Narrow the double to an int, rejecting values outside the INTEGER range.
	 *
	 * @exception StandardException thrown on failure to convert
	 */
	public int	getInt() throws StandardException
	{
	    // REMIND: do we want to check for truncation?
		if ((value > (((double) Integer.MAX_VALUE) + 1.0d)) || (value < (((double) Integer.MIN_VALUE) - 1.0d)))
			throw StandardException.newException(SQLState.LANG_OUTSIDE_RANGE_FOR_DATATYPE, "INTEGER");
		return (int)value;
	}

	/**
	 * Narrow the double to a byte, rejecting values outside the TINYINT range.
	 *
	 * @exception StandardException thrown on failure to convert
	 */
	public byte	getByte() throws StandardException
	{
		if ((value > (((double) Byte.MAX_VALUE) + 1.0d)) || (value < (((double) Byte.MIN_VALUE) - 1.0d)))
			throw StandardException.newException(SQLState.LANG_OUTSIDE_RANGE_FOR_DATATYPE, "TINYINT");
		return (byte) value;
	}

	/**
	 * Narrow the double to a short, rejecting values outside the SMALLINT range.
	 *
	 * @exception StandardException thrown on failure to convert
	 */
	public short	getShort() throws StandardException
	{
		if ((value > (((double) Short.MAX_VALUE) + 1.0d)) || (value < (((double) Short.MIN_VALUE) - 1.0d)))
			throw StandardException.newException(SQLState.LANG_OUTSIDE_RANGE_FOR_DATATYPE, "SMALLINT");
		return (short) value;
	}

	/**
	 * Narrow the double to a long, rejecting values outside the BIGINT range.
	 *
	 * @exception StandardException thrown on failure to convert
	 */
	public long	getLong() throws StandardException
	{
		if ((value > (((double) Long.MAX_VALUE) + 1.0d)) || (value < (((double) Long.MIN_VALUE) - 1.0d)))
			throw StandardException.newException(SQLState.LANG_OUTSIDE_RANGE_FOR_DATATYPE, "BIGINT");
		return (long) value;
	}

	/**
	 * Narrow the double to a float; rejects values whose float form is infinite.
	 *
	 * @exception StandardException thrown on failure to convert
	 */
	public float	getFloat() throws StandardException
	{
		if (Float.isInfinite((float)value))
			throw StandardException.newException(SQLState.LANG_OUTSIDE_RANGE_FOR_DATATYPE, TypeId.REAL_NAME);
		return (float) value;
	}

	public double	getDouble()
	{
		/* This value is bogus if the SQLDouble is null */
		return value;
	}

	/**
	 * DOUBLE implementation. Convert to a BigDecimal using getString.
	 */
	public int typeToBigDecimal()
	{
		return java.sql.Types.CHAR;
	}

	// for lack of a specification: getDouble()==0 gives true
	// independent of the NULL flag
	public boolean	getBoolean()
	{
		return (value != 0);
	}

	public String	getString()
	{
		if (isNull())
			return null;
		else
			return Double.toString(value);
	}

	public Object	getObject()
	{
		// REMIND: could create one Double and reuse it?
		if (isNull())
			return null;
		else
			return new Double(value);
	}

	/**
	 * Set the value from a correctly typed Double object.
	 * @throws StandardException
	 */
	void setObject(Object theValue) throws StandardException
	{
		setValue(((Double) theValue).doubleValue());
	}

	// Copy the double out of another descriptor (normalization happens in setValue).
	protected void setFrom(DataValueDescriptor theValue) throws StandardException {

		setValue(theValue.getDouble());
	}

	public int	getLength()
	{
		return DOUBLE_LENGTH;
	}

	// this is for DataType's error generator
	public String getTypeName()
	{
		return TypeId.DOUBLE_NAME;
	}

	/*
	 * Storable interface, implies Externalizable, TypedFormat
	 */

	/**
		Return my format identifier.

		@see org.apache.derby.iapi.services.io.TypedFormat#getTypeFormatId
	*/
	public int getTypeFormatId() {
		return StoredFormatIds.SQL_DOUBLE_ID;
	}

	/*
	 * see if the double value is null.
	 */
	/** @see Storable#isNull */
	public boolean isNull()
	{
		return isnull;
	}

	public void writeExternal(ObjectOutput out) throws IOException {

		// never called when value is null
		if (SanityManager.DEBUG)
			SanityManager.ASSERT(! isNull());

		out.writeDouble(value);
	}

	/** @see java.io.Externalizable#readExternal */
	public void readExternal(ObjectInput in) throws IOException {

		value = in.readDouble();
		isnull = false;
	}

	/** @see java.io.Externalizable#readExternal */
	public void readExternalFromArray(ArrayInputStream in) throws IOException {

		value = in.readDouble();
		isnull = false;
	}

	/**
	 * @see Storable#restoreToNull
	 *
	 */
	public void restoreToNull()
	{
		value = 0;
		isnull = true;
	}

	/**
	 * Three-way comparison with another descriptor; callers guarantee
	 * neither side is SQL NULL.
	 *
	 * @exception StandardException		Thrown on error
	 */
	protected int typeCompare(DataValueDescriptor arg) throws StandardException
	{
		/* neither are null, get the value */

		double thisValue = this.getDouble();

		double otherValue = arg.getDouble();

		if (thisValue == otherValue)
			return 0;
		else if (thisValue > otherValue)
			return 1;
		else
			return -1;
	}

	/*
	 * DataValueDescriptor interface
	 */

	/** @see DataValueDescriptor#getClone */
	public DataValueDescriptor getClone()
	{
		try
		{
			return new SQLDouble(value, isnull);
		}
		catch (StandardException se)
		{
			// Cannot happen: 'value' was already normalized, so the
			// normalizing private constructor cannot reject it.
			if (SanityManager.DEBUG)
				SanityManager.THROWASSERT( "error on clone, " + " value = " + value + " isnull = " + isnull, se);
			return null;
		}
	}

	/**
	 * @see DataValueDescriptor#getNewNull
	 */
	public DataValueDescriptor getNewNull()
	{
		return new SQLDouble();
	}

	/**
	 * @see DataValueDescriptor#setValueFromResultSet
	 *
	 * @exception StandardException		Thrown on error
	 * @exception SQLException		Thrown on error
	 */
	public void setValueFromResultSet(ResultSet resultSet, int colNumber,
									  boolean isNullable)
		throws StandardException, SQLException
	{
			double dv = resultSet.getDouble(colNumber);
			isnull = (isNullable && resultSet.wasNull());
			if (isnull)
				value = 0;
			else
				value = NumberDataType.normalizeDOUBLE(dv);
	}

	/**
		Set the value into a PreparedStatement.

		@exception SQLException Error setting value in PreparedStatement
	*/
	public final void setInto(PreparedStatement ps, int position) throws SQLException {

		if (isNull()) {
			ps.setNull(position, java.sql.Types.DOUBLE);
			return;
		}

		ps.setDouble(position, value);
	}

	/**
		Set this value into a ResultSet for a subsequent ResultSet.insertRow
		or ResultSet.updateRow. This method will only be called for non-null values.

		@exception SQLException thrown by the ResultSet object
		@exception StandardException thrown by me accessing my value.
	*/
	public final void setInto(ResultSet rs, int position) throws SQLException, StandardException {
		rs.updateDouble(position, value);
	}

	/*
	 * class interface
	 */

	/*
	 * constructors
	 */

	/** no-arg constructor, required by Formattable */
	// This constructor also gets used when we are
	// allocating space for a double.
	public SQLDouble()
	{
		isnull = true;
	}

	public SQLDouble(double val) throws StandardException
	{
		value = NumberDataType.normalizeDOUBLE(val);
	}

	public SQLDouble(Double obj) throws StandardException {
		// Assignment inside the condition: records nullness and branches on it.
		if (isnull = (obj == null))
			;
		else
			value = NumberDataType.normalizeDOUBLE(obj.doubleValue());
	}

	// Used by getClone to copy value and null flag together.
	private SQLDouble(double val, boolean startsnull) throws StandardException
	{
		value = NumberDataType.normalizeDOUBLE(val); // maybe only do if !startsnull
		isnull = startsnull;
	}

	/**
		@exception StandardException throws NumberFormatException
			when the String format is not recognized.
	 */
	public void setValue(String theValue) throws StandardException
	{
		if (theValue == null)
		{
			value = 0;
			isnull = true;
		}
		else
		{
		    double doubleValue = 0;
			try {
		        // ??? jsk: rounding???
				doubleValue = Double.valueOf(theValue.trim()).doubleValue();
			} catch (NumberFormatException nfe) {
				throw invalidFormat();
			}
			value = NumberDataType.normalizeDOUBLE(doubleValue);
			isnull = false;
		}
	}

	/**
	 * @exception StandardException on NaN or Infinite double
	 */
	public void setValue(double theValue) throws StandardException
	{
		value = NumberDataType.normalizeDOUBLE(theValue);
		isnull = false;
	}

	/**
	 * @exception StandardException on NaN or Infinite float
	 */
	public void setValue(float theValue) throws StandardException
	{
		value = NumberDataType.normalizeDOUBLE(theValue);
		isnull = false;
	}

	public void setValue(long theValue)
	{
		value = theValue; // no check needed
		isnull = false;
	}

	public void setValue(int theValue)
	{
		value = theValue; // no check needed
		isnull = false;
	}

	public void setValue(Number theValue) throws StandardException
	{
		if (objectNull(theValue))
			return;

		if (SanityManager.ASSERT)
		{
			if (!(theValue instanceof java.lang.Double))
				SanityManager.THROWASSERT("SQLDouble.setValue(Number) passed a " + theValue.getClass());
		}

		setValue(theValue.doubleValue());
	}

	/**
		Called for an application setting this value using a BigDecimal
	*/
	public void setBigDecimal(Number bigDecimal) throws StandardException
	{
		if (objectNull(bigDecimal))
			return;

		// Note BigDecimal.doubleValue() handles the case where
		// its value is outside the range of a double. It returns
		// infinity values which should throw an exception in setValue(double).
		setValue(bigDecimal.doubleValue());
	}

	/**
	 * @see NumberDataValue#setValue
	 *
	 */
	public void setValue(boolean theValue)
	{
		value = theValue?1:0;
		isnull = false;
	}

	/*
	 * DataValueDescriptor interface
	 */

	/** @see DataValueDescriptor#typePrecedence */
	public int typePrecedence()
	{
		return TypeId.DOUBLE_PRECEDENCE;
	}

	/*
	** SQL Operators
	*/

	/**
	 * The = operator as called from the language module, as opposed to
	 * the storage module.
	 *
	 * @param left			The value on the left side of the =
	 * @param right			The value on the right side of the =
	 *						is not.
	 *
	 * @return	A SQL boolean value telling whether the two parameters are equal
	 *
	 * @exception StandardException		Thrown on error
	 */
	public BooleanDataValue equals(DataValueDescriptor left,
							DataValueDescriptor right)
			throws StandardException
	{
		return SQLBoolean.truthValue(left,
									 right,
									 left.getDouble() == right.getDouble());
	}

	/**
	 * The <> operator as called from the language module, as opposed to
	 * the storage module.
	 *
	 * @param left			The value on the left side of the <>
	 * @param right			The value on the right side of the <>
	 *						is not.
	 *
	 * @return	A SQL boolean value telling whether the two parameters
	 *			are not equal
	 *
	 * @exception StandardException		Thrown on error
	 */
	public BooleanDataValue notEquals(DataValueDescriptor left,
							DataValueDescriptor right)
			throws StandardException
	{
		return SQLBoolean.truthValue(left,
									 right,
									 left.getDouble() != right.getDouble());
	}

	/**
	 * The < operator as called from the language module, as opposed to
	 * the storage module.
	 *
	 * @param left			The value on the left side of the <
	 * @param right			The value on the right side of the <
	 *
	 * @return	A SQL boolean value telling whether the first operand is less
	 *			than the second operand
	 *
	 * @exception StandardException		Thrown on error
	 */
	public BooleanDataValue lessThan(DataValueDescriptor left,
							DataValueDescriptor right)
			throws StandardException
	{
		return SQLBoolean.truthValue(left,
									 right,
									 left.getDouble() < right.getDouble());
	}

	/**
	 * The > operator as called from the language module, as opposed to
	 * the storage module.
	 *
	 * @param left			The value on the left side of the >
	 * @param right			The value on the right side of the >
	 *
	 * @return	A SQL boolean value telling whether the first operand is greater
	 *			than the second operand
	 *
	 * @exception StandardException		Thrown on error
	 */
	public BooleanDataValue greaterThan(DataValueDescriptor left,
							DataValueDescriptor right)
			throws StandardException
	{
		return SQLBoolean.truthValue(left,
									 right,
									 left.getDouble() > right.getDouble());
	}

	/**
	 * The <= operator as called from the language module, as opposed to
	 * the storage module.
	 *
	 * @param left			The value on the left side of the <=
	 * @param right			The value on the right side of the <=
	 *
	 * @return	A SQL boolean value telling whether the first operand is less
	 *			than or equal to the second operand
	 *
	 * @exception StandardException		Thrown on error
	 */
	public BooleanDataValue lessOrEquals(DataValueDescriptor left,
							DataValueDescriptor right)
			throws StandardException
	{
		return SQLBoolean.truthValue(left,
									 right,
									 left.getDouble() <= right.getDouble());
	}

	/**
	 * The >= operator as called from the language module, as opposed to
	 * the storage module.
	 *
	 * @param left			The value on the left side of the >=
	 * @param right			The value on the right side of the >=
	 *
	 * @return	A SQL boolean value telling whether the first operand is greater
	 *			than or equal to the second operand
	 *
	 * @exception StandardException		Thrown on error
	 */
	public BooleanDataValue greaterOrEquals(DataValueDescriptor left,
							DataValueDescriptor right)
			throws StandardException
	{
		return SQLBoolean.truthValue(left,
									 right,
									 left.getDouble() >= right.getDouble());
	}

	/**
	 * This method implements the + operator for "double + double".
	 *
	 * @param addend1	One of the addends
	 * @param addend2	The other addend
	 * @param result	The result of a previous call to this method, null
	 *					if not called yet
	 *
	 * @return	A SQLDouble containing the result of the addition
	 *
	 * @exception StandardException		Thrown on error
	 */
	public NumberDataValue plus(NumberDataValue addend1,
							NumberDataValue addend2,
							NumberDataValue result)
				throws StandardException
	{
		if (result == null)
		{
			result = new SQLDouble();
		}

		if (addend1.isNull() || addend2.isNull())
		{
			result.setToNull();
			return result;
		}

		double tmpresult = addend1.getDouble() + addend2.getDouble();
        // No need to check underflow (result rounded to 0.0),
        // since the difference between two non-equal valid DB2 DOUBLE values is always non-zero in java.lang.Double precision.
		result.setValue(tmpresult);

		return result;
	}

	/**
	 * This method implements the - operator for "double - double".
	 *
	 * @param left	The value to be subtracted from
	 * @param right	The value to be subtracted
	 * @param result	The result of a previous call to this method, null
	 *					if not called yet
	 *
	 * @return	A SQLDouble containing the result of the subtraction
	 *
	 * @exception StandardException		Thrown on error
	 */
	public NumberDataValue minus(NumberDataValue left,
							NumberDataValue right,
							NumberDataValue result)
				throws StandardException
	{
		if (result == null)
		{
			result = new SQLDouble();
		}

		if (left.isNull() || right.isNull())
		{
			result.setToNull();
			return result;
		}

		double tmpresult = left.getDouble() - right.getDouble();
        // No need to check underflow (result rounded to 0.0),
        // since no difference between two valid DB2 DOUBLE values can be rounded off to 0.0 in java.lang.Double
		result.setValue(tmpresult);
		return result;
	}

	/**
	 * This method implements the * operator for "double * double".
	 *
	 * @param left	The first value to be multiplied
	 * @param right	The second value to be multiplied
	 * @param result	The result of a previous call to this method, null
	 *					if not called yet
	 *
	 * @return	A SQLDouble containing the result of the multiplication
	 *
	 * @exception StandardException		Thrown on error
	 */
	public NumberDataValue times(NumberDataValue left,
							NumberDataValue right,
							NumberDataValue result)
				throws StandardException
	{
		if (result == null)
		{
			result = new SQLDouble();
		}

		if (left.isNull() || right.isNull())
		{
			result.setToNull();
			return result;
		}

		double leftValue = left.getDouble();
		double rightValue = right.getDouble();
		double tempResult = leftValue * rightValue;
        // check underflow (result rounded to 0.0)
        if ( (tempResult == 0.0) && ( (leftValue != 0.0) && (rightValue != 0.0) ) ) {
            throw StandardException.newException(SQLState.LANG_OUTSIDE_RANGE_FOR_DATATYPE, TypeId.DOUBLE_NAME);
        }

		result.setValue(tempResult);

		return result;
	}

	/**
	 * This method implements the / operator for "double / double".
	 *
	 * @param dividend	The numerator
	 * @param divisor	The denominator
	 * @param result	The result of a previous call to this method, null
	 *					if not called yet
	 *
	 * @return	A SQLDouble containing the result of the division
	 *
	 * @exception StandardException		Thrown on error
	 */
	public NumberDataValue divide(NumberDataValue dividend,
							 NumberDataValue divisor,
							 NumberDataValue result)
				throws StandardException
	{
		if (result == null)
		{
			result = new SQLDouble();
		}

		if (dividend.isNull() || divisor.isNull())
		{
			result.setToNull();
			return result;
		}

		/*
		** For double division, we can't catch divide by zero with Double.NaN;
		** So we check the divisor before the division.
		*/

		double divisorValue = divisor.getDouble();

		if (divisorValue == 0.0e0D)
		{
			throw StandardException.newException(SQLState.LANG_DIVIDE_BY_ZERO);
		}

		double dividendValue = dividend.getDouble();
		double divideResult = dividendValue / divisorValue;

		if (Double.isNaN(divideResult))
		{
			throw StandardException.newException(SQLState.LANG_DIVIDE_BY_ZERO);
		}

        // check underflow (result rounded to 0.0d)
        if ((divideResult == 0.0d) && (dividendValue != 0.0d)) {
            throw StandardException.newException(SQLState.LANG_OUTSIDE_RANGE_FOR_DATATYPE, TypeId.DOUBLE_NAME);
        }

		result.setValue(divideResult);

		return result;
	}

	/**
	 * This method implements the unary minus operator for double.
	 *
	 * @param result	The result of a previous call to this method, null
	 *					if not called yet
	 *
	 * @return	A SQLDouble containing the result of the division
	 *
	 * @exception StandardException		Thrown on error
	 */
	public NumberDataValue minus(NumberDataValue result)
									throws StandardException
	{
		double		minusResult;

		if (result == null)
		{
			result = new SQLDouble();
		}

		if (this.isNull())
		{
			result.setToNull();
			return result;
		}

		/*
		** Doubles are assumed to be symmetric -- that is, their
		** smallest negative value is representable as a positive
		** value, and vice-versa.
		*/
		minusResult = -(this.getDouble());
		result.setValue(minusResult);

		return result;
	}

	/**
	 * This method implements the isNegative method.
	 *
	 * @return  A boolean.  If this.value is negative, return true.
	 *          For positive values or null, return false.
	 */
	protected boolean isNegative()
	{
		return !isNull() && (value < 0.0d);
	}

	/*
	 * String display of value
	 */
	public String toString()
	{
		if (isNull())
			return "NULL";
		else
			return Double.toString(value);
	}

	/*
	 * Hash code
	 */
	public int hashCode()
	{
		long longVal = (long) value;
		double doubleLongVal = (double) longVal;

		/*
		** NOTE: This is coded to work around a bug in Visual Cafe 3.0.
		** If longVal is compared directly to value on that platform
		** with the JIT enabled, the values will not always compare
		** as equal even when they should be equal. This happens with
		** the value Long.MAX_VALUE, for example.
		**
		** Assigning the long value back to a double and then doing
		** the comparison works around the bug.
		**
		** This fixes Cloudscape bug number 1757.
		**
		**		-	Jeff Lichtman
		*/
		if (doubleLongVal != value)
        {
			longVal = Double.doubleToLongBits(value);
		}

		return (int) (longVal ^ (longVal >> 32));
	}

	/*
	 * useful constants...
	 */
	// NOTE(review): DataOutput.writeDouble emits exactly 8 bytes, not 32;
	// verify whether this constant is meant as bits or a legacy length
	// before relying on the comment below.
	static final int DOUBLE_LENGTH		= 32; // must match the number of bytes written by DataOutput.writeDouble()

    private static final int BASE_MEMORY_USAGE = ClassSize.estimateBaseFromCatalog( SQLDouble.class);

    public int estimateMemoryUsage()
    {
        return BASE_MEMORY_USAGE;
    }

	/*
	 * object state
	 */
	private double	value;
	private boolean	isnull;
}
/* * Copyright 2013 Chris Banes * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package uk.co.senab.photup; import android.content.Context; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import de.greenrobot.event.EventBus; import uk.co.senab.photup.events.PhotoSelectionAddedEvent; import uk.co.senab.photup.events.PhotoSelectionRemovedEvent; import uk.co.senab.photup.events.UploadsModifiedEvent; import uk.co.senab.photup.model.Account; import uk.co.senab.photup.model.FbUser; import uk.co.senab.photup.model.PhotoUpload; import uk.co.senab.photup.model.Place; import uk.co.senab.photup.model.UploadQuality; import uk.co.senab.photup.util.PhotoUploadDatabaseHelper; public class PhotoUploadController { public static PhotoUploadController getFromContext(Context context) { return PhotupApplication.getApplication(context).getPhotoUploadController(); } private static List<PhotoUpload> checkListForInvalid(final Context context, final List<PhotoUpload> uploads) { ArrayList<PhotoUpload> toBeRemoved = null; for (PhotoUpload upload : uploads) { if (!upload.isValid(context)) { if (null == toBeRemoved) { toBeRemoved = new ArrayList<PhotoUpload>(); } toBeRemoved.add(upload); } } if (null != toBeRemoved) { uploads.removeAll(toBeRemoved); // Delete from Database if (Flags.ENABLE_DB_PERSISTENCE) { PhotoUploadDatabaseHelper.deleteFromDatabase(context, toBeRemoved); } } return toBeRemoved; } private final Context 
mContext; private final ArrayList<PhotoUpload> mSelectedPhotoList; private final ArrayList<PhotoUpload> mUploadingList; PhotoUploadController(Context context) { mContext = context; mSelectedPhotoList = new ArrayList<PhotoUpload>(); mUploadingList = new ArrayList<PhotoUpload>(); populateFromDatabase(); } public synchronized boolean addSelection(final PhotoUpload selection) { boolean result = false; if (!mSelectedPhotoList.contains(selection)) { selection.setUploadState(PhotoUpload.STATE_SELECTED); mSelectedPhotoList.add(selection); // Save to Database if (Flags.ENABLE_DB_PERSISTENCE) { PhotoUploadDatabaseHelper.saveToDatabase(mContext, selection); } postEvent(new PhotoSelectionAddedEvent(selection)); result = true; } // Remove it from Upload list if it's there if (mUploadingList.contains(selection)) { mUploadingList.remove(selection); postEvent(new UploadsModifiedEvent()); } return result; } public synchronized void addSelections(List<PhotoUpload> selections) { final HashSet<PhotoUpload> currentSelectionsSet = new HashSet<PhotoUpload>( mSelectedPhotoList); final HashSet<PhotoUpload> currentUploadSet = new HashSet<PhotoUpload>(mUploadingList); boolean listModified = false; for (final PhotoUpload selection : selections) { if (!currentSelectionsSet.contains(selection)) { // Remove it from Upload list if it's there if (currentUploadSet.contains(selection)) { mUploadingList.remove(selection); } selection.setUploadState(PhotoUpload.STATE_SELECTED); mSelectedPhotoList.add(selection); listModified = true; } } if (listModified) { // Save to Database if (Flags.ENABLE_DB_PERSISTENCE) { PhotoUploadDatabaseHelper.saveToDatabase(mContext, mSelectedPhotoList, true); } postEvent(new PhotoSelectionAddedEvent(selections)); } } public boolean addUpload(PhotoUpload selection) { if (null != selection && selection.isValid(mContext)) { synchronized (this) { if (!mUploadingList.contains(selection)) { selection.setUploadState(PhotoUpload.STATE_UPLOAD_WAITING); // Save to Database if 
(Flags.ENABLE_DB_PERSISTENCE) { PhotoUploadDatabaseHelper.saveToDatabase(mContext, selection); } mUploadingList.add(selection); mSelectedPhotoList.remove(selection); postEvent(new UploadsModifiedEvent()); return true; } } } return false; } public synchronized void addUploadsFromSelected(final Account account, final String targetId, final UploadQuality quality, final Place place) { // Check The Selected List to make sure they're all valid checkSelectedForInvalid(false); for (PhotoUpload upload : mSelectedPhotoList) { upload.setUploadParams(account, targetId, quality); upload.setUploadState(PhotoUpload.STATE_UPLOAD_WAITING); if (null != place) { upload.setPlace(place); } } // Update Database if (Flags.ENABLE_DB_PERSISTENCE) { PhotoUploadDatabaseHelper.saveToDatabase(mContext, mSelectedPhotoList, true); } ArrayList<PhotoUpload> eventResult = new ArrayList<PhotoUpload>(mSelectedPhotoList); mUploadingList.addAll(mSelectedPhotoList); mSelectedPhotoList.clear(); postEvent(new PhotoSelectionRemovedEvent(eventResult)); postEvent(new UploadsModifiedEvent()); } public synchronized void clearSelected() { if (!mSelectedPhotoList.isEmpty()) { // Delete from Database if (Flags.ENABLE_DB_PERSISTENCE) { PhotoUploadDatabaseHelper.deleteAllSelected(mContext); } // Reset States (as may still be in cache) for (PhotoUpload upload : mSelectedPhotoList) { upload.setUploadState(PhotoUpload.STATE_NONE); } ArrayList<PhotoUpload> eventResult = new ArrayList<PhotoUpload>(mSelectedPhotoList); // Clear from memory mSelectedPhotoList.clear(); postEvent(new PhotoSelectionRemovedEvent(eventResult)); } } public synchronized int getActiveUploadsCount() { int count = 0; for (PhotoUpload upload : mUploadingList) { if (upload.getUploadState() != PhotoUpload.STATE_UPLOAD_COMPLETED) { count++; } } return count; } public synchronized PhotoUpload getNextUpload() { for (PhotoUpload selection : mUploadingList) { if (selection.getUploadState() == PhotoUpload.STATE_UPLOAD_WAITING) { return selection; } } return 
null; } public synchronized List<PhotoUpload> getSelected() { checkSelectedForInvalid(true); return new ArrayList<PhotoUpload>(mSelectedPhotoList); } public synchronized int getSelectedCount() { return mSelectedPhotoList.size(); } public synchronized List<PhotoUpload> getUploadingUploads() { return new ArrayList<PhotoUpload>(mUploadingList); } public synchronized int getUploadsCount() { return mUploadingList.size(); } public synchronized boolean hasSelections() { return !mSelectedPhotoList.isEmpty(); } public synchronized boolean hasSelectionsWithPlace() { for (PhotoUpload selection : mSelectedPhotoList) { if (selection.hasPlace()) { return true; } } return false; } public synchronized boolean hasUploads() { return !mUploadingList.isEmpty(); } public synchronized boolean hasWaitingUploads() { for (PhotoUpload upload : mUploadingList) { if (upload.getUploadState() == PhotoUpload.STATE_UPLOAD_WAITING) { return true; } } return false; } public synchronized boolean isOnUploadList(PhotoUpload selection) { return mUploadingList.contains(selection); } public synchronized boolean isSelected(PhotoUpload selection) { return mSelectedPhotoList.contains(selection); } public synchronized boolean moveFailedToSelected() { boolean result = false; final Iterator<PhotoUpload> iterator = mUploadingList.iterator(); PhotoUpload upload; while (iterator.hasNext()) { upload = iterator.next(); if (upload.getUploadState() == PhotoUpload.STATE_UPLOAD_ERROR) { // Reset State and add to selection list upload.setUploadState(PhotoUpload.STATE_SELECTED); mSelectedPhotoList.add(upload); postEvent(new PhotoSelectionAddedEvent(upload)); // Remove from Uploading list iterator.remove(); result = true; } } if (result) { // Update Database, but don't force update if (Flags.ENABLE_DB_PERSISTENCE) { PhotoUploadDatabaseHelper.saveToDatabase(mContext, mSelectedPhotoList, false); } postEvent(new UploadsModifiedEvent()); } return result; } public boolean removeSelection(final PhotoUpload selection) { boolean 
removed = false; synchronized (this) { removed = mSelectedPhotoList.remove(selection); } if (removed) { // Delete from Database if (Flags.ENABLE_DB_PERSISTENCE) { PhotoUploadDatabaseHelper.deleteFromDatabase(mContext, selection); } // Reset State (as may still be in cache) selection.setUploadState(PhotoUpload.STATE_NONE); postEvent(new PhotoSelectionRemovedEvent(selection)); } return removed; } public void removeUpload(final PhotoUpload selection) { boolean removed = false; synchronized (this) { removed = mUploadingList.remove(selection); } if (removed) { // Delete from Database if (Flags.ENABLE_DB_PERSISTENCE) { PhotoUploadDatabaseHelper.deleteFromDatabase(mContext, selection); } // Reset State (as may still be in cache) selection.setUploadState(PhotoUpload.STATE_NONE); postEvent(new UploadsModifiedEvent()); } } public void reset() { // Clear the cache PhotoUpload.clearCache(); synchronized (this) { // Clear the internal lists mSelectedPhotoList.clear(); mUploadingList.clear(); } // Finally delete the database mContext.deleteDatabase(DatabaseHelper.DATABASE_NAME); } public synchronized void updateDatabase() { if (Flags.ENABLE_DB_PERSISTENCE) { PhotoUploadDatabaseHelper.saveToDatabase(mContext, mSelectedPhotoList, false); PhotoUploadDatabaseHelper.saveToDatabase(mContext, mUploadingList, false); } } void populateDatabaseItemsFromAccounts(HashMap<String, Account> accounts) { if (!mSelectedPhotoList.isEmpty()) { for (PhotoUpload upload : mSelectedPhotoList) { upload.populateFromAccounts(accounts); } } if (!mUploadingList.isEmpty()) { for (PhotoUpload upload : mUploadingList) { upload.populateFromAccounts(accounts); } } } void populateDatabaseItemsFromFriends(HashMap<String, FbUser> friends) { if (!mSelectedPhotoList.isEmpty()) { for (PhotoUpload upload : mSelectedPhotoList) { upload.populateFromFriends(friends); } } if (!mUploadingList.isEmpty()) { for (PhotoUpload upload : mUploadingList) { upload.populateFromFriends(friends); } } } void populateFromDatabase() { if 
(Flags.ENABLE_DB_PERSISTENCE) { final List<PhotoUpload> selectedFromDb = PhotoUploadDatabaseHelper .getSelected(mContext); if (null != selectedFromDb) { mSelectedPhotoList.addAll(selectedFromDb); checkSelectedForInvalid(false); PhotoUpload.populateCache(selectedFromDb); } final List<PhotoUpload> uploadsFromDb = PhotoUploadDatabaseHelper.getUploads(mContext); if (null != uploadsFromDb) { mUploadingList.addAll(uploadsFromDb); checkUploadsForInvalid(false); PhotoUpload.populateCache(uploadsFromDb); } } } private void checkSelectedForInvalid(final boolean sendEvent) { if (!mSelectedPhotoList.isEmpty()) { List<PhotoUpload> removedUploads = checkListForInvalid(mContext, mSelectedPhotoList); // Delete from Database if (Flags.ENABLE_DB_PERSISTENCE) { PhotoUploadDatabaseHelper.deleteAllSelected(mContext); } if (sendEvent && null != removedUploads) { postEvent(new PhotoSelectionRemovedEvent(removedUploads)); } } } private void checkUploadsForInvalid(final boolean sendEvent) { if (!mUploadingList.isEmpty()) { List<PhotoUpload> removedUploads = checkListForInvalid(mContext, mUploadingList); // Delete from Database if (Flags.ENABLE_DB_PERSISTENCE) { PhotoUploadDatabaseHelper.deleteAllSelected(mContext); } if (sendEvent && null != removedUploads) { postEvent(new UploadsModifiedEvent()); } } } private void postEvent(Object event) { EventBus.getDefault().post(event); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.yarn.server.resourcemanager.applicationsmanager;

import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.yarn.MockApps;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.LogAggregationStatus;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.ReservationId;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.api.protocolrecords.LogAggregationReport;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppMetrics;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
import org.apache.hadoop.yarn.util.Records;

import com.google.common.collect.Lists;

/**
 * Test-support factory for mock {@link RMApp} instances. {@link ApplicationBase}
 * is a deliberately hostile base implementation (every method throws) so tests
 * fail loudly if code under test touches a method the mock did not stub;
 * {@link #newApplication(int)} overrides just the getters the tests exercise
 * with deterministic values derived from the index {@code i}.
 */
@InterfaceAudience.Private
public abstract class MockAsm extends MockApps {

  /**
   * Base mock: implements {@link RMApp} with every method throwing
   * {@link UnsupportedOperationException}, except the two small accessors at the
   * bottom. Subclass (usually anonymously) and override only what a test needs.
   */
  public static class ApplicationBase implements RMApp {
    // AM resource request returned by getAMResourceRequest(); tests may set it.
    ResourceRequest amReq;

    @Override
    public String getUser() {
      throw new UnsupportedOperationException("Not supported yet.");
    }
    @Override
    public ApplicationSubmissionContext getApplicationSubmissionContext() {
      throw new UnsupportedOperationException("Not supported yet.");
    }
    @Override
    public String getName() {
      throw new UnsupportedOperationException("Not supported yet.");
    }
    @Override
    public String getQueue() {
      throw new UnsupportedOperationException("Not supported yet.");
    }
    @Override
    public long getStartTime() {
      throw new UnsupportedOperationException("Not supported yet.");
    }
    @Override
    public long getSubmitTime() {
      throw new UnsupportedOperationException("Not supported yet.");
    }
    @Override
    public long getFinishTime() {
      throw new UnsupportedOperationException("Not supported yet.");
    }
    @Override
    public StringBuilder getDiagnostics() {
      throw new UnsupportedOperationException("Not supported yet.");
    }
    @Override
    public String getCollectorAddr() {
      throw new UnsupportedOperationException("Not supported yet.");
    }
    @Override
    public void setCollectorAddr(String collectorAddr) {
      throw new UnsupportedOperationException("Not supported yet.");
    }
    @Override
    public void removeCollectorAddr() {
      throw new UnsupportedOperationException("Not supported yet.");
    }
    @Override
    public ApplicationId getApplicationId() {
      throw new UnsupportedOperationException("Not supported yet.");
    }
    @Override
    public RMAppAttempt getCurrentAppAttempt() {
      throw new UnsupportedOperationException("Not supported yet.");
    }
    @Override
    public Map<ApplicationAttemptId, RMAppAttempt> getAppAttempts() {
      throw new UnsupportedOperationException("Not supported yet.");
    }
    @Override
    public float getProgress() {
      throw new UnsupportedOperationException("Not supported yet.");
    }
    @Override
    public RMAppAttempt getRMAppAttempt(ApplicationAttemptId appAttemptId) {
      throw new UnsupportedOperationException("Not supported yet.");
    }
    @Override
    public RMAppState getState() {
      throw new UnsupportedOperationException("Not supported yet.");
    }
    @Override
    public String getTrackingUrl() {
      throw new UnsupportedOperationException("Not supported yet.");
    }
    @Override
    public String getOriginalTrackingUrl() {
      throw new UnsupportedOperationException("Not supported yet.");
    }
    @Override
    public int getMaxAppAttempts() {
      throw new UnsupportedOperationException("Not supported yet.");
    }
    @Override
    public ApplicationReport createAndGetApplicationReport(
        String clientUserName, boolean allowAccess) {
      throw new UnsupportedOperationException("Not supported yet.");
    }
    @Override
    public void handle(RMAppEvent event) {
      throw new UnsupportedOperationException("Not supported yet.");
    }
    @Override
    public FinalApplicationStatus getFinalApplicationStatus() {
      throw new UnsupportedOperationException("Not supported yet.");
    }
    @Override
    public int pullRMNodeUpdates(Collection<RMNode> updatedNodes) {
      throw new UnsupportedOperationException("Not supported yet.");
    }
    @Override
    public String getApplicationType() {
      throw new UnsupportedOperationException("Not supported yet.");
    }
    @Override
    public Set<String> getApplicationTags() {
      throw new UnsupportedOperationException("Not supported yet.");
    }
    @Override
    public void setQueue(String name) {
      throw new UnsupportedOperationException("Not supported yet.");
    }
    @Override
    public boolean isAppFinalStateStored() {
      throw new UnsupportedOperationException("Not supported yet.");
    }
    @Override
    public YarnApplicationState createApplicationState() {
      throw new UnsupportedOperationException("Not supported yet.");
    }
    @Override
    public Set<NodeId> getRanNodes() {
      throw new UnsupportedOperationException("Not supported yet.");
    }

    // Real (non-throwing) stub: empty metrics so report-building code works.
    @Override
    public RMAppMetrics getRMAppMetrics() {
      return new RMAppMetrics(Resource.newInstance(0, 0), 0, 0, 0, 0);
    }
    @Override
    public ReservationId getReservationId() {
      throw new UnsupportedOperationException("Not supported yet.");
    }

    // Real (non-throwing) stub: returns whatever the test assigned to amReq.
    @Override
    public ResourceRequest getAMResourceRequest() {
      return this.amReq;
    }
    @Override
    public Map<NodeId, LogAggregationReport> getLogAggregationReportsForApp() {
      throw new UnsupportedOperationException("Not supported yet.");
    }
    @Override
    public LogAggregationStatus getLogAggregationStatusForAppReport() {
      throw new UnsupportedOperationException("Not supported yet.");
    }
  }

  /**
   * Builds one mock application whose user/name/queue/times/state are
   * deterministic functions of {@code i} (state cycles through all
   * {@link YarnApplicationState} values). Note {@link #getProgress()} is the one
   * non-deterministic field: it returns {@code Math.random()} on each call.
   */
  public static RMApp newApplication(int i) {
    final ApplicationAttemptId appAttemptId =
        ApplicationAttemptId.newInstance(newAppID(i), 0);
    final Container masterContainer = Records.newRecord(Container.class);
    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 0);
    masterContainer.setId(containerId);
    masterContainer.setNodeHttpAddress("node:port");
    // Deterministic per-index fixture values (helpers inherited from MockApps).
    final String user = newUserName();
    final String name = newAppName();
    final String queue = newQueue();
    final long start = 123456 + i * 1000;
    final long finish = 234567 + i * 1000;
    final String type = YarnConfiguration.DEFAULT_APPLICATION_TYPE;
    YarnApplicationState[] allStates = YarnApplicationState.values();
    final YarnApplicationState state = allStates[i % allStates.length];
    final int maxAppAttempts = i % 1000;
    return new ApplicationBase() {
      @Override
      public ApplicationId getApplicationId() {
        return appAttemptId.getApplicationId();
      }
      @Override
      public String getUser() {
        return user;
      }
      @Override
      public String getName() {
        return name;
      }
      @Override
      public String getApplicationType() {
        return type;
      }
      @Override
      public String getQueue() {
        return queue;
      }
      @Override
      public long getStartTime() {
        return start;
      }
      @Override
      public long getFinishTime() {
        return finish;
      }
      @Override
      public String getTrackingUrl() {
        return null;
      }
      @Override
      public YarnApplicationState createApplicationState() {
        return state;
      }
      @Override
      public StringBuilder getDiagnostics() {
        return new StringBuilder();
      }
      @Override
      public float getProgress() {
        // Intentionally random: exercises progress rendering with varied values.
        return (float) Math.random();
      }
      @Override
      public FinalApplicationStatus getFinalApplicationStatus() {
        return FinalApplicationStatus.UNDEFINED;
      }
      @Override
      public RMAppAttempt getCurrentAppAttempt() {
        return null;
      }
      @Override
      public int getMaxAppAttempts() {
        return maxAppAttempts;
      }
      @Override
      public Set<String> getApplicationTags() {
        return null;
      }
      @Override
      public ApplicationReport createAndGetApplicationReport(
          String clientUserName, boolean allowAccess) {
        // Zeroed usage report plus the fixture values above.
        ApplicationResourceUsageReport usageReport =
            ApplicationResourceUsageReport.newInstance(0, 0, null, null, null, 0, 0);
        ApplicationReport report = ApplicationReport.newInstance(
            getApplicationId(), appAttemptId, getUser(), getQueue(),
            getName(), null, 0, null, null, getDiagnostics().toString(),
            getTrackingUrl(), getStartTime(), getFinishTime(),
            getFinalApplicationStatus(), usageReport, null, getProgress(),
            type, null);
        return report;
      }
    };
  }

  /** @return {@code n} mock applications built by {@link #newApplication(int)}. */
  public static List<RMApp> newApplications(int n) {
    List<RMApp> list = Lists.newArrayList();
    for (int i = 0; i < n; ++i) {
      list.add(newApplication(i));
    }
    return list;
  }
}
/* * ConnectBot: simple, powerful, open-source SSH client for Android * Copyright 2007 Kenny Root, Jeffrey Sharkey * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.connectbot; import java.util.List; import org.connectbot.bean.HostBean; import org.connectbot.data.HostStorage; import org.connectbot.service.OnHostStatusChangedListener; import org.connectbot.service.TerminalBridge; import org.connectbot.service.TerminalManager; import org.connectbot.transport.TransportFactory; import org.connectbot.util.HostDatabase; import org.connectbot.util.PreferenceConstants; import android.app.Activity; import android.app.AlertDialog; import android.app.Dialog; import android.content.ComponentName; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.Intent.ShortcutIconResource; import android.content.ServiceConnection; import android.content.SharedPreferences; import android.content.SharedPreferences.Editor; import android.net.Uri; import android.os.Build; import android.os.Bundle; import android.os.IBinder; import android.preference.PreferenceManager; import android.support.annotation.StyleRes; import android.support.annotation.VisibleForTesting; import android.support.design.widget.FloatingActionButton; import android.support.v4.app.DialogFragment; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.text.format.DateUtils; import 
android.util.Log; import android.view.ContextMenu; import android.view.KeyEvent; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuItem; import android.view.MenuItem.OnMenuItemClickListener; import android.view.View; import android.view.View.OnKeyListener; import android.view.ViewGroup; import android.widget.AdapterView; import android.widget.ArrayAdapter; import android.widget.Button; import android.widget.ImageView; import android.widget.Spinner; import android.widget.TextView; public class HostListActivity extends AppCompatListActivity implements OnHostStatusChangedListener { public final static String TAG = "CB.HostListActivity"; public static final String DISCONNECT_ACTION = "org.connectbot.action.DISCONNECT"; public final static int REQUEST_EDIT = 1; protected TerminalManager bound = null; private HostStorage hostdb; private List<HostBean> hosts; protected LayoutInflater inflater = null; protected boolean sortedByColor = false; private MenuItem sortcolor; private MenuItem sortlast; private SharedPreferences prefs = null; protected boolean makingShortcut = false; private boolean waitingForDisconnectAll = false; /** * Whether to close the activity when disconnectAll is called. True if this activity was * only brought to the foreground via the notification button to disconnect all hosts. 
*/ private boolean closeOnDisconnectAll = true; private ServiceConnection connection = new ServiceConnection() { public void onServiceConnected(ComponentName className, IBinder service) { bound = ((TerminalManager.TerminalBinder) service).getService(); // update our listview binder to find the service HostListActivity.this.updateList(); bound.registerOnHostStatusChangedListener(HostListActivity.this); if (waitingForDisconnectAll) { disconnectAll(); } } public void onServiceDisconnected(ComponentName className) { bound.unregisterOnHostStatusChangedListener(HostListActivity.this); bound = null; HostListActivity.this.updateList(); } }; @Override public void onStart() { super.onStart(); // start the terminal manager service this.bindService(new Intent(this, TerminalManager.class), connection, Context.BIND_AUTO_CREATE); hostdb = HostDatabase.get(this); } @Override public void onStop() { super.onStop(); this.unbindService(connection); hostdb = null; closeOnDisconnectAll = true; } @Override public void onResume() { super.onResume(); // Must disconnectAll before setting closeOnDisconnectAll to know whether to keep the // activity open after disconnecting. if ((getIntent().getFlags() & Intent.FLAG_ACTIVITY_LAUNCHED_FROM_HISTORY) == 0 && DISCONNECT_ACTION.equals(getIntent().getAction())) { Log.d(TAG, "Got disconnect all request"); disconnectAll(); } // Still close on disconnect if waiting for a disconnect. 
closeOnDisconnectAll = waitingForDisconnectAll && closeOnDisconnectAll; } @Override protected void onNewIntent(Intent intent) { super.onNewIntent(intent); setIntent(intent); } @Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { if (requestCode == REQUEST_EDIT) { this.updateList(); } } @Override public void onCreate(Bundle icicle) { super.onCreate(icicle); setContentView(R.layout.act_hostlist); setTitle(R.string.title_hosts_list); mListView = (RecyclerView) findViewById(R.id.list); mListView.setHasFixedSize(true); mListView.setLayoutManager(new LinearLayoutManager(this)); mListView.addItemDecoration(new ListItemDecoration(this)); mEmptyView = findViewById(R.id.empty); this.prefs = PreferenceManager.getDefaultSharedPreferences(this); // detect HTC Dream and apply special preferences if (Build.MANUFACTURER.equals("HTC") && Build.DEVICE.equals("dream")) { SharedPreferences.Editor editor = prefs.edit(); boolean doCommit = false; if (!prefs.contains(PreferenceConstants.SHIFT_FKEYS) && !prefs.contains(PreferenceConstants.CTRL_FKEYS)) { editor.putBoolean(PreferenceConstants.SHIFT_FKEYS, true); editor.putBoolean(PreferenceConstants.CTRL_FKEYS, true); doCommit = true; } if (!prefs.contains(PreferenceConstants.STICKY_MODIFIERS)) { editor.putString(PreferenceConstants.STICKY_MODIFIERS, PreferenceConstants.YES); doCommit = true; } if (!prefs.contains(PreferenceConstants.KEYMODE)) { editor.putString(PreferenceConstants.KEYMODE, PreferenceConstants.KEYMODE_RIGHT); doCommit = true; } if (doCommit) { editor.commit(); } } this.makingShortcut = Intent.ACTION_CREATE_SHORTCUT.equals(getIntent().getAction()) || Intent.ACTION_PICK.equals(getIntent().getAction()); // connect with hosts database and populate list this.hostdb = HostDatabase.get(this); this.sortedByColor = prefs.getBoolean(PreferenceConstants.SORT_BY_COLOR, false); this.registerForContextMenu(mListView); FloatingActionButton addHostButton = (FloatingActionButton) 
findViewById(R.id.add_host_button); addHostButton.setVisibility(makingShortcut ? View.GONE : View.VISIBLE); addHostButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Intent intent = EditHostActivity.createIntentForNewHost(HostListActivity.this); startActivityForResult(intent, REQUEST_EDIT); } public void onNothingSelected(AdapterView<?> arg0) {} }); this.inflater = LayoutInflater.from(this); } @Override public boolean onPrepareOptionsMenu(Menu menu) { super.onPrepareOptionsMenu(menu); // don't offer menus when creating shortcut if (makingShortcut) return true; sortcolor.setVisible(!sortedByColor); sortlast.setVisible(sortedByColor); return true; } @Override public boolean onCreateOptionsMenu(Menu menu) { super.onCreateOptionsMenu(menu); // don't offer menus when creating shortcut if (makingShortcut) return true; // add host, ssh keys, about sortcolor = menu.add(R.string.list_menu_sortcolor); sortcolor.setIcon(android.R.drawable.ic_menu_share); sortcolor.setOnMenuItemClickListener(new OnMenuItemClickListener() { public boolean onMenuItemClick(MenuItem item) { sortedByColor = true; updateList(); return true; } }); sortlast = menu.add(R.string.list_menu_sortname); sortlast.setIcon(android.R.drawable.ic_menu_share); sortlast.setOnMenuItemClickListener(new OnMenuItemClickListener() { public boolean onMenuItemClick(MenuItem item) { sortedByColor = false; updateList(); return true; } }); MenuItem keys = menu.add(R.string.list_menu_pubkeys); keys.setIcon(android.R.drawable.ic_lock_lock); keys.setIntent(new Intent(HostListActivity.this, PubkeyListActivity.class)); MenuItem colors = menu.add(R.string.title_colors); colors.setIcon(android.R.drawable.ic_menu_slideshow); colors.setIntent(new Intent(HostListActivity.this, ColorsActivity.class)); MenuItem settings = menu.add(R.string.list_menu_settings); settings.setIcon(android.R.drawable.ic_menu_preferences); settings.setIntent(new Intent(HostListActivity.this, 
SettingsActivity.class)); MenuItem help = menu.add(R.string.title_help); help.setIcon(android.R.drawable.ic_menu_help); help.setIntent(new Intent(HostListActivity.this, HelpActivity.class)); return true; } /** * Disconnects all active connections and closes the activity if appropriate. */ private void disconnectAll() { if (bound == null) { waitingForDisconnectAll = true; return; } new AlertDialog.Builder(HostListActivity.this) .setMessage(getString(R.string.disconnect_all_message)) .setPositiveButton(R.string.disconnect_all_pos, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { bound.disconnectAll(true, false); waitingForDisconnectAll = false; // Clear the intent so that the activity can be relaunched without closing. // TODO(jlklein): Find a better way to do this. setIntent(new Intent()); if (closeOnDisconnectAll) { finish(); } } }) .setNegativeButton(R.string.disconnect_all_neg, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { waitingForDisconnectAll = false; // Clear the intent so that the activity can be relaunched without closing. // TODO(jlklein): Find a better way to do this. 
setIntent(new Intent()); } }).create().show(); } /** * @return */ private boolean startConsoleActivity(Uri uri) { HostBean host = TransportFactory.findHost(hostdb, uri); if (host == null) { host = TransportFactory.getTransport(uri.getScheme()).createHost(uri); host.setColor(HostDatabase.COLOR_GRAY); host.setPubkeyId(HostDatabase.PUBKEYID_ANY); hostdb.saveHost(host); } Intent intent = new Intent(HostListActivity.this, ConsoleActivity.class); intent.setData(uri); startActivity(intent); return true; } protected void updateList() { if (prefs.getBoolean(PreferenceConstants.SORT_BY_COLOR, false) != sortedByColor) { Editor edit = prefs.edit(); edit.putBoolean(PreferenceConstants.SORT_BY_COLOR, sortedByColor); edit.commit(); } if (hostdb == null) hostdb = HostDatabase.get(this); hosts = hostdb.getHosts(sortedByColor); // Don't lose hosts that are connected via shortcuts but not in the database. if (bound != null) { for (TerminalBridge bridge : bound.getBridges()) { if (!hosts.contains(bridge.host)) hosts.add(0, bridge.host); } } mAdapter = new HostAdapter(this, hosts, bound); mListView.setAdapter(mAdapter); adjustViewVisibility(); } @Override public void onHostStatusChanged() { updateList(); } private class HostViewHolder extends ItemViewHolder { public final ImageView icon; public final TextView nickname; public final TextView caption; public HostBean host; public HostViewHolder(View v) { super(v); icon = (ImageView) v.findViewById(android.R.id.icon); nickname = (TextView) v.findViewById(android.R.id.text1); caption = (TextView) v.findViewById(android.R.id.text2); } @Override public void onClick(View v) { // launch off to console details Uri uri = host.getUri(); Intent contents = new Intent(Intent.ACTION_VIEW, uri); contents.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP); if (makingShortcut) { // create shortcut if requested ShortcutIconResource icon = Intent.ShortcutIconResource.fromContext( HostListActivity.this, R.drawable.icon); Intent intent = new Intent(); 
intent.putExtra(Intent.EXTRA_SHORTCUT_INTENT, contents); intent.putExtra(Intent.EXTRA_SHORTCUT_NAME, host.getNickname()); intent.putExtra(Intent.EXTRA_SHORTCUT_ICON_RESOURCE, icon); setResult(RESULT_OK, intent); finish(); } else { // otherwise just launch activity to show this host contents.setClass(HostListActivity.this, ConsoleActivity.class); HostListActivity.this.startActivity(contents); } } @Override public void onCreateContextMenu(ContextMenu menu, View v, ContextMenu.ContextMenuInfo menuInfo) { menu.setHeaderTitle(host.getNickname()); // edit, disconnect, delete MenuItem connect = menu.add(R.string.list_host_disconnect); final TerminalBridge bridge = (bound == null) ? null : bound.getConnectedBridge(host); connect.setEnabled(bridge != null); connect.setOnMenuItemClickListener(new OnMenuItemClickListener() { public boolean onMenuItemClick(MenuItem item) { bridge.dispatchDisconnect(true); return true; } }); MenuItem edit = menu.add(R.string.list_host_edit); edit.setOnMenuItemClickListener(new OnMenuItemClickListener() { public boolean onMenuItemClick(MenuItem item) { Intent intent = EditHostActivity.createIntentForExistingHost( HostListActivity.this, host.getId()); HostListActivity.this.startActivityForResult(intent, REQUEST_EDIT); return true; } }); MenuItem portForwards = menu.add(R.string.list_host_portforwards); portForwards.setOnMenuItemClickListener(new OnMenuItemClickListener() { public boolean onMenuItemClick(MenuItem item) { Intent intent = new Intent(HostListActivity.this, PortForwardListActivity.class); intent.putExtra(Intent.EXTRA_TITLE, host.getId()); HostListActivity.this.startActivityForResult(intent, REQUEST_EDIT); return true; } }); if (!TransportFactory.canForwardPorts(host.getProtocol())) portForwards.setEnabled(false); MenuItem delete = menu.add(R.string.list_host_delete); delete.setOnMenuItemClickListener(new OnMenuItemClickListener() { public boolean onMenuItemClick(MenuItem item) { // prompt user to make sure they really want this new 
AlertDialog.Builder(HostListActivity.this) .setMessage(getString(R.string.delete_message, host.getNickname())) .setPositiveButton(R.string.delete_pos, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { // make sure we disconnect if (bridge != null) bridge.dispatchDisconnect(true); hostdb.deleteHost(host); updateList(); } }) .setNegativeButton(R.string.delete_neg, null).create().show(); return true; } }); } } @VisibleForTesting private class HostAdapter extends ItemAdapter { private final List<HostBean> hosts; private final TerminalManager manager; public final static int STATE_UNKNOWN = 1, STATE_CONNECTED = 2, STATE_DISCONNECTED = 3; public HostAdapter(Context context, List<HostBean> hosts, TerminalManager manager) { super(context); this.hosts = hosts; this.manager = manager; } /** * Check if we're connected to a terminal with the given host. */ private int getConnectedState(HostBean host) { // always disconnected if we don't have backend service if (this.manager == null || host == null) { return STATE_UNKNOWN; } if (manager.getConnectedBridge(host) != null) { return STATE_CONNECTED; } if (manager.disconnected.contains(host)) { return STATE_DISCONNECTED; } return STATE_UNKNOWN; } @Override public HostViewHolder onCreateViewHolder(ViewGroup parent, int viewType) { View v = LayoutInflater.from(parent.getContext()) .inflate(R.layout.item_host, parent, false); HostViewHolder vh = new HostViewHolder(v); return vh; } @Override public void onBindViewHolder(ItemViewHolder holder, int position) { HostViewHolder hostHolder = (HostViewHolder) holder; HostBean host = hosts.get(position); hostHolder.host = host; if (host == null) { // Well, something bad happened. We can't continue. 
Log.e("HostAdapter", "Host bean is null!"); hostHolder.nickname.setText("Error during lookup"); } else { hostHolder.nickname.setText(host.getNickname()); } switch (this.getConnectedState(host)) { case STATE_UNKNOWN: hostHolder.icon.setImageState(new int[] { }, true); break; case STATE_CONNECTED: hostHolder.icon.setImageState(new int[] { android.R.attr.state_checked }, true); break; case STATE_DISCONNECTED: hostHolder.icon.setImageState(new int[] { android.R.attr.state_expanded }, true); break; } @StyleRes final int chosenStyleFirstLine; @StyleRes final int chosenStyleSecondLine; if (HostDatabase.COLOR_RED.equals(host.getColor())) { chosenStyleFirstLine = R.style.ListItemFirstLineText_Red; chosenStyleSecondLine = R.style.ListItemSecondLineText_Red; } else if (HostDatabase.COLOR_GREEN.equals(host.getColor())) { chosenStyleFirstLine = R.style.ListItemFirstLineText_Green; chosenStyleSecondLine = R.style.ListItemSecondLineText_Green; } else if (HostDatabase.COLOR_BLUE.equals(host.getColor())) { chosenStyleFirstLine = R.style.ListItemFirstLineText_Blue; chosenStyleSecondLine = R.style.ListItemSecondLineText_Blue; } else { chosenStyleFirstLine = R.style.ListItemFirstLineText; chosenStyleSecondLine = R.style.ListItemSecondLineText; } hostHolder.nickname.setTextAppearance(context, chosenStyleFirstLine); hostHolder.caption.setTextAppearance(context, chosenStyleSecondLine); CharSequence nice = context.getString(R.string.bind_never); if (host.getLastConnect() > 0) { nice = DateUtils.getRelativeTimeSpanString(host.getLastConnect() * 1000); } hostHolder.caption.setText(nice); } @Override public long getItemId(int position) { return hosts.get(position).getId(); } @Override public int getItemCount() { return hosts.size(); } } public static class AddHostDialogFragment extends DialogFragment { private TextView mAddressField; private Spinner mSpinner; HostListActivity mListener; @Override public void onAttach(Activity activity) { super.onAttach(activity); mListener = 
(HostListActivity) activity; } @Override public Dialog onCreateDialog(Bundle savedInstanceState) { LayoutInflater inflater = getActivity().getLayoutInflater(); AlertDialog.Builder builder = new AlertDialog.Builder(getActivity()); View addHostDialog = inflater.inflate(R.layout.dia_add_host, null); builder.setView(addHostDialog) .setPositiveButton(R.string.button_add, null) .setNegativeButton(R.string.button_cancel, null); AlertDialog dialog = builder.create(); mAddressField = (TextView) addHostDialog.findViewById(R.id.front_quickconnect); mAddressField.setOnKeyListener(new OnKeyListener() { public boolean onKey(View v, int keyCode, KeyEvent event) { if (event.getAction() == KeyEvent.ACTION_UP) return false; if (keyCode != KeyEvent.KEYCODE_ENTER) return false; processNewUriEntered(); return true; } }); mSpinner = (Spinner) addHostDialog.findViewById(R.id.transport_selection); ArrayAdapter<String> transportSelection = new ArrayAdapter<String>(getActivity(), android.R.layout.simple_spinner_item, TransportFactory.getTransportNames()); transportSelection.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item); mSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() { public void onItemSelected(AdapterView<?> arg0, View view, int position, long id) { String formatHint = TransportFactory.getFormatHint( (String) mSpinner.getSelectedItem(), getActivity()); mAddressField.setHint(formatHint); mAddressField.setError(null); mAddressField.requestFocus(); } public void onNothingSelected(AdapterView<?> arg0) { } }); mSpinner.setAdapter(transportSelection); return dialog; } @Override public void onResume() { super.onResume(); final AlertDialog alertDialog = (AlertDialog) getDialog(); Button addButton = alertDialog.getButton(AlertDialog.BUTTON_POSITIVE); addButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { processNewUriEntered(); } }); } /** * Processes the URI that has been entered. 
If it is a valid URI, adds that host * and starts ConsoleActivity; otherwise, shows an error in the address field. */ private void processNewUriEntered() { Uri uri = TransportFactory.getUri((String) mSpinner .getSelectedItem(), mAddressField.getText().toString()); if (uri == null) { mAddressField.setError(getString(R.string.list_format_error, TransportFactory.getFormatHint( (String) mSpinner.getSelectedItem(), getActivity()))); mAddressField.requestFocus(); return; } mListener.startConsoleActivity(uri); getDialog().dismiss(); } } }
package com.stanfy.enroscar.net.operation;

import android.content.ContentResolver;
import android.content.Context;
import android.content.res.AssetFileDescriptor;
import android.graphics.Bitmap;
import android.net.Uri;
import android.support.v4.content.Loader;
import android.util.Log;

import com.stanfy.enroscar.beans.BeansManager;
import com.stanfy.enroscar.content.loader.ResponseData;
import com.stanfy.enroscar.rest.EntityTypeToken;
import com.stanfy.enroscar.rest.RemoteServerApiConfiguration;
import com.stanfy.enroscar.net.operation.executor.RequestExecutor;
import com.stanfy.enroscar.rest.Utils;
import com.stanfy.enroscar.rest.loader.RequestBuilderLoader;
import com.stanfy.enroscar.rest.request.binary.AssetFdBinaryData;
import com.stanfy.enroscar.rest.request.binary.BitmapBinaryData;
import com.stanfy.enroscar.rest.request.binary.ContentUriBinaryData;
import com.stanfy.enroscar.rest.request.binary.EmptyBinaryData;

import java.io.File;
import java.lang.reflect.Type;
import java.net.URI;
import java.net.URISyntaxException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;

/**
 * Base class for request builders. Subclasses configure a {@link RequestDescription}
 * (URL, parameters, headers, binary payloads) and either hand it to a
 * {@link RequestExecutor} via {@link #execute()} or expose it as a loader via
 * {@link #getLoader()}.
 * <p>
 * NOTE(review): this class keeps mutable builder state ({@code result}, {@code dateFormat})
 * without synchronization — presumably a builder instance is confined to a single thread;
 * confirm before sharing instances.
 * @param &lt;MT&gt; model type
 * @author Roman Mazur - Stanfy (http://www.stanfy.com)
 */
public abstract class BaseRequestBuilder<MT> implements RequestBuilder<MT> {

  /** Invalid request identifier. */
  public static final int INVALID_REQUEST_IDENTIFIER = -1;

  /** Logging tag. */
  private static final String TAG = "RequestBuilder";

  /**
   * Date format used by {@link #formatDate(Date)} and {@link #parseDate(String)}.
   * NOTE(review): this initializer calls the overridable {@link #getDateTimeFormat()}
   * during construction; an override must not rely on subclass fields (they are not
   * initialized yet at this point). Also, {@link SimpleDateFormat} is not thread-safe.
   */
  private final SimpleDateFormat dateFormat = new SimpleDateFormat(getDateTimeFormat(), Locale.US);

  /** Configuration bean resolved from the beans container. */
  private final RemoteServerApiConfiguration config;

  /** Result object: the request description being built. */
  private final RequestDescription result;

  /** Application context (never an activity context — see constructor). */
  private final Context context;

  /** Class of the expected model, derived from this class's generic parameter. */
  private final EntityTypeToken expectedModelType;

  /** Performer that will actually run the request; may stay null (see {@link #execute()}). */
  private RequestExecutor executor;

  /**
   * @param context any context; only its application context is retained to avoid leaks
   * @throws IllegalStateException if the {@link RemoteServerApiConfiguration} bean is missing
   */
  public BaseRequestBuilder(final Context context) {
    this.config = BeansManager.get(context).getContainer().getBean(RemoteServerApiConfiguration.BEAN_NAME, RemoteServerApiConfiguration.class);
    if (this.config == null) {
      throw new IllegalStateException("RemoteServerApiConfiguration bean is not added to the container");
    }
    this.context = context.getApplicationContext();
    this.result = config.createRequestDescription();
    // Seed a non-null parameters group so add/remove parameter calls never NPE.
    result.simpleParameters = new ParametersGroup();
    result.simpleParameters.name = "stub";
    result.contentLanguage = Locale.getDefault().getLanguage();
    // Resolve MT from this concrete subclass's generic signature.
    this.expectedModelType = EntityTypeToken.fromClassParameter(getClass());
    result.modelType = this.expectedModelType;
  }

  /**
   * Override this method in order to provide custom dates format for
   * {@link #parseDate(String)} and {@link #formatDate(Date)}.
   * Called from a field initializer during construction — do not depend on subclass state.
   * @return date format pattern
   */
  protected String getDateTimeFormat() {
    return "yyyy-MM-dd HH:mm:ss Z";
  }

  /**
   * @param d date instance (may be null)
   * @return formatted string (see {@link #getDateTimeFormat()}), or null if {@code d} is null
   */
  protected String formatDate(final Date d) {
    return d != null ? dateFormat.format(d) : null;
  }

  /**
   * @param d date string (may be null)
   * @return date instance parsed with {@link #getDateTimeFormat()} format,
   *         or null on null input or parse failure (failure is logged, not thrown)
   */
  protected Date parseDate(final String d) {
    if (d == null) { return null; }
    try {
      return dateFormat.parse(d);
    } catch (final ParseException e) {
      Log.e(TAG, "Cannot parse date " + d, e);
      return null;
    }
  }

  /**
   * Overrides the model type inferred from the class parameter.
   * @param type type token describing the data type for the request
   */
  protected void setModelType(final Type type) {
    result.modelType = EntityTypeToken.fromEntityType(type);
  }

  /**
   * @param executor performer used by {@link #execute()}
   * @return this for chaining
   */
  @Override
  public BaseRequestBuilder<MT> setExecutor(final RequestExecutor executor) {
    this.executor = executor;
    return this;
  }

  /** @param url URL to set */
  protected void setTargetUrl(final String url) {
    result.url = url;
  }

  /**
   * @param operationType operation type
   * @see OperationType
   */
  protected void setRequestOperationType(final int operationType) {
    result.operationType = operationType;
  }

  /** @param name cache manager bean name */
  protected void setRequestCacheName(final String name) {
    result.cacheName = name;
  }

  /** @param name content handler name */
  protected void setRequestContentHandler(final String name) {
    result.contentHandler = name;
  }

  /**
   * Set network stats tag. Converts string tag to integer one.
   * @param tag string tag
   */
  protected void setConvertedTrafficStatsTag(final String tag) {
    result.statsTag = Utils.getTrafficStatsTag(tag);
    if (config.isDebugRest()) {
      Log.d(TAG, "TrafficStats tag <" + tag + ">=" + Integer.toHexString(result.statsTag));
    }
  }

  /**
   * Setup binary content from the local file. Parameter name will be equal to
   * {@link RequestDescription#BINARY_NAME_DEFAULT}.
   * @param data content URI
   * @param contentType content MIME-type
   */
  protected void addBinaryContent(final Uri data, final String contentType) {
    addBinaryContent(null, data, contentType);
  }

  /**
   * Setup binary content with the local file. For {@code file://} URIs the content name
   * is derived from the file name; otherwise the default binary name is used.
   * @param name parameter name
   * @param data content URI
   * @param contentType content MIME-type
   */
  protected void addBinaryContent(final String name, final Uri data, final String contentType) {
    String contentName = RequestDescription.BINARY_NAME_DEFAULT;
    if (ContentResolver.SCHEME_FILE.equals(data.getScheme())) {
      try {
        contentName = new File(new URI(data.toString())).getName();
      } catch (final URISyntaxException e) {
        // Fall back to the default name; the payload is still attached below.
        Log.e(TAG, "Bad file URI: " + data, e);
      }
    }
    addBinaryContent(name, contentName, data, contentType);
  }

  /**
   * Setup binary content with the local file.
   * @param name parameter name
   * @param contentName content name
   * @param data content URI
   * @param contentType content MIME-type
   */
  protected void addBinaryContent(final String name, final String contentName, final Uri data, final String contentType) {
    final ContentUriBinaryData bdata = new ContentUriBinaryData();
    bdata.setName(name);
    bdata.setContentUri(data, contentName);
    bdata.setContentType(contentType);
    result.addBinaryData(bdata);
  }

  /**
   * Setup binary content with the bitmap.
   * @param name parameter name
   * @param bitmap bitmap object
   * @param fileName file name
   */
  protected void addBitmap(final String name, final Bitmap bitmap, final String fileName) {
    final BitmapBinaryData bdata = new BitmapBinaryData();
    bdata.setName(name);
    bdata.setContentName(fileName);
    bdata.setBitmap(bitmap);
    result.addBinaryData(bdata);
  }

  /**
   * Setup binary content with the file descriptor.
   * @param name parameter name
   * @param fd file descriptor
   * @param contentType content MIME-type
   * @param fileName file name
   */
  protected void addFileDescriptor(final String name, final AssetFileDescriptor fd, final String contentType, final String fileName) {
    final AssetFdBinaryData bdata = new AssetFdBinaryData();
    bdata.setFileDescriptor(fileName, fd);
    bdata.setName(name);
    bdata.setContentType(contentType);
    result.addBinaryData(bdata);
  }

  /** @param name name for empty binary type */
  protected void addEmptyBinary(final String name) {
    final EmptyBinaryData bdata = new EmptyBinaryData();
    bdata.setName(name);
    result.addBinaryData(bdata);
  }

  /**
   * @param name parameter name
   * @param value parameter value
   * @return added parameter instance
   */
  protected ParameterValue addSimpleParameter(final String name, final long value) {
    return addSimpleParameter(name, String.valueOf(value));
  }

  /**
   * @param name parameter name
   * @param value parameter value
   * @return added parameter instance
   */
  protected ParameterValue addSimpleParameter(final String name, final int value) {
    return addSimpleParameter(name, String.valueOf(value));
  }

  /**
   * @param name parameter name
   * @param value parameter value, serialized as "1" (true) or "0" (false)
   * @return added parameter instance
   */
  protected ParameterValue addSimpleParameter(final String name, final boolean value) {
    return addSimpleParameter(name, value ? "1" : "0");
  }

  /**
   * @param name parameter name
   * @param value parameter value
   * @return added parameter instance
   */
  protected ParameterValue addSimpleParameter(final String name, final String value) {
    return result.simpleParameters.addSimpleParameter(name, value);
  }

  /** @param p parameter to add to the request description */
  protected void addParameter(final Parameter p) {
    result.simpleParameters.addParameter(p);
  }

  /**
   * Remove parameter with the specified name from request description.
   * Only the first match (in iteration order) is removed.
   * @param name parameter name
   * @return removed parameter instance, null if no parameter was found
   * @throws IllegalArgumentException if {@code name} is null
   */
  protected Parameter removeParameter(final String name) {
    if (name == null) { throw new IllegalArgumentException("Parameter name cannot be null"); }
    final Iterator<Parameter> iter = result.simpleParameters.getChildren().iterator();
    while (iter.hasNext()) {
      final Parameter p = iter.next();
      if (name.equals(p.name)) {
        // Iterator.remove avoids ConcurrentModificationException during traversal.
        iter.remove();
        return p;
      }
    }
    return null;
  }

  /**
   * Add header to request description.
   * @param name header name
   * @param value header value
   * @return this for chaining
   */
  protected BaseRequestBuilder<MT> addHeader(final String name, final String value) {
    result.addHeader(name, value);
    return this;
  }

  /**
   * Remove header.
   * @param name header name
   */
  protected void removeHeader(final String name) {
    result.removeHeader(name);
  }

  /** @param contentAnalyzer bean name of {@link com.stanfy.enroscar.rest.response.ContentAnalyzer} instance */
  protected void defineContentAnalyzer(final String contentAnalyzer) {
    result.setContentAnalyzer(contentAnalyzer);
  }

  /** @return request description object */
  protected RequestDescription getResult() {
    return result;
  }

  /** @return the context */
  @Override
  public Context getContext() {
    return context;
  }

  /**
   * Clear the builder: simple parameters, binary data, content type, and headers.
   * The URL, model type, and other description fields are NOT reset.
   */
  public void clear() {
    final RequestDescription result = this.result;
    result.simpleParameters.children.clear();
    result.clearBinaryData();
    result.contentType = null;
    result.clearHeaders();
  }

  /**
   * @param value whether the request may run in parallel with others
   * @return this for chaining
   */
  public BaseRequestBuilder<?> setParallel(final boolean value) {
    result.parallelMode = value;
    return this;
  }

  /**
   * Route the request to a named serial task queue (implies non-parallel mode).
   * @param taskQueue task queue name
   * @return this for chaining
   */
  public BaseRequestBuilder<?> setTaskQueueName(final String taskQueue) {
    result.parallelMode = false;
    result.taskQueueName = taskQueue;
    return this;
  }

  /** @return type token of the expected model */
  @Override
  public EntityTypeToken getExpectedModelType() {
    return expectedModelType;
  }

  /**
   * Validate the description, fill in configured defaults (content handler, cache),
   * and hand it to the executor.
   * @throws IllegalStateException if the URL, model type, or content handler is missing
   */
  @Override
  public void execute() {
    if (result.url == null) {
      throw new IllegalStateException("URL is not specified!");
    }
    if (result.modelType == null) {
      throw new IllegalStateException("Model is not specified!");
    }
    if (result.contentHandler == null) {
      result.contentHandler = config.getDefaultContentHandlerName();
    }
    if (result.contentHandler == null) {
      throw new IllegalStateException("Content handler is not specified");
    }
    if (result.cacheName == null) {
      result.cacheName = config.getDefaultCacheBeanName();
    }
    result.setCanceled(false);
    if (executor != null) {
      executor.performRequest(result);
    } else {
      // No executor configured: log and drop rather than throw.
      Log.w(TAG, "Don't know how to perform operation " + result.getUrl());
    }
  }

  /**
   * Create an appropriate loader instance.
   * Basic usage:
   * <pre>
   *   public Loader onCreateLoader(int id, Bundle args) {
   *     return new RequestBuilder(this)
   *       .addParam("aaa", "bbb")
   *       .getLoader();
   *   }
   * </pre>
   * @return loader instance that uses this request builder
   */
  @Override
  public Loader<ResponseData<MT>> getLoader() {
    return new RequestBuilderLoader<MT>(this);
  }

  /**
   * @param <T> list element type
   * @param <LT> list type
   * @return list request builder wrapper instance
   */
  protected <T, LT extends List<T>> ListRequestBuilderWrapper<LT, T> createLoadMoreListWrapper() {
    return new ListRequestBuilderWrapper<LT, T>(this) { };
  }

  /**
   * Wrap this builder for offset/limit paging.
   * @param offset offset parameter name (null keeps the wrapper's default)
   * @param limit limit parameter name (null keeps the wrapper's default)
   * @return list request builder wrapper
   */
  public <T, LT extends List<T>> ListRequestBuilderWrapper<LT, T> asLoadMoreList(final String offset, final String limit) {
    final ListRequestBuilderWrapper<LT, T> wrapper = createLoadMoreListWrapper();
    if (offset != null) {
      wrapper.setOffsetParamName(offset);
    }
    if (limit != null) {
      wrapper.setLimitParamName(limit);
    }
    return wrapper;
  }

  /** @return list request builder wrapper using the default offset/limit parameter names */
  public <T, LT extends List<T>> ListRequestBuilderWrapper<LT, T> asLoadMoreList() {
    return asLoadMoreList(ListRequestBuilderWrapper.PARAM_OFFSET, ListRequestBuilderWrapper.PARAM_LIMIT);
  }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.druid.segment.incremental;

import com.google.common.base.Supplier;
import org.apache.druid.data.input.InputRow;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.io.Closer;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.java.util.common.parsers.ParseException;
import org.apache.druid.query.aggregation.Aggregator;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.dimension.DimensionSpec;
import org.apache.druid.segment.ColumnSelectorFactory;
import org.apache.druid.segment.ColumnValueSelector;
import org.apache.druid.segment.DimensionSelector;
import org.apache.druid.segment.column.ColumnCapabilities;

import javax.annotation.Nullable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;

/**
 * Incremental index that keeps all aggregator state on the JVM heap.
 * Row aggregation is mostly lock-free: rows are keyed in {@link #facts},
 * aggregator arrays live in {@link #aggregators}, and races between
 * concurrent inserts of the same key are resolved via {@code putIfAbsent}
 * (see {@link #addToFacts}).
 */
public class OnheapIncrementalIndex extends IncrementalIndex<Aggregator>
{
  private static final Logger log = new Logger(OnheapIncrementalIndex.class);

  /**
   * overhead per {@link ConcurrentHashMap.Node} or {@link java.util.concurrent.ConcurrentSkipListMap.Node} object
   */
  private static final int ROUGH_OVERHEAD_PER_MAP_ENTRY = Long.BYTES * 5 + Integer.BYTES;

  // Row index -> aggregator array for that row. Shared across writer threads.
  private final ConcurrentHashMap<Integer, Aggregator[]> aggregators = new ConcurrentHashMap<>();
  private final FactsHolder facts;
  // Monotonic source of row indices; never reused, even when an insert loses a race.
  private final AtomicInteger indexIncrement = new AtomicInteger(0);
  private final long maxBytesPerRowForAggregators;
  protected final int maxRowCount;
  protected final long maxBytesInMemory;

  // Per-metric cached column selector factories; built once in initAggs.
  @Nullable
  private volatile Map<String, ColumnSelectorFactory> selectors;

  @Nullable
  private String outOfRowsReason = null;

  OnheapIncrementalIndex(
      IncrementalIndexSchema incrementalIndexSchema,
      boolean deserializeComplexMetrics,
      boolean reportParseExceptions,
      boolean concurrentEventAdd,
      boolean sortFacts,
      int maxRowCount,
      long maxBytesInMemory
  )
  {
    super(incrementalIndexSchema, deserializeComplexMetrics, reportParseExceptions, concurrentEventAdd);
    this.maxRowCount = maxRowCount;
    // 0 means "no byte limit"; negative values also disable the size check (see canAppendRow).
    this.maxBytesInMemory = maxBytesInMemory == 0 ? Long.MAX_VALUE : maxBytesInMemory;
    this.facts = incrementalIndexSchema.isRollup()
                 ? new RollupFactsHolder(sortFacts, dimsComparator(), getDimensions())
                 : new PlainFactsHolder(sortFacts, dimsComparator());
    maxBytesPerRowForAggregators = getMaxBytesPerRowForAggregators(incrementalIndexSchema);
  }

  /**
   * Gives estimated max size per aggregator. It is assumed that every aggregator will have enough overhead for its own
   * object header and for a pointer to a selector. We are adding a overhead-factor for each object as additional 16
   * bytes.
   * These 16 bytes or 128 bits is the object metadata for 64-bit JVM process and consists of:
   * <ul>
   * <li>Class pointer which describes the object type: 64 bits
   * <li>Flags which describe state of the object including hashcode: 64 bits
   * <ul/>
   * total size estimation consists of:
   * <ul>
   * <li> metrics length : Integer.BYTES * len
   * <li> maxAggregatorIntermediateSize : getMaxIntermediateSize per aggregator + overhead-factor(16 bytes)
   * </ul>
   *
   * @param incrementalIndexSchema
   *
   * @return long max aggregator size in bytes
   */
  private static long getMaxBytesPerRowForAggregators(IncrementalIndexSchema incrementalIndexSchema)
  {
    long maxAggregatorIntermediateSize = ((long) Integer.BYTES) * incrementalIndexSchema.getMetrics().length;
    maxAggregatorIntermediateSize += Arrays.stream(incrementalIndexSchema.getMetrics())
                                           .mapToLong(aggregator -> aggregator.getMaxIntermediateSizeWithNulls()
                                                                    + Long.BYTES * 2L)
                                           .sum();
    return maxAggregatorIntermediateSize;
  }

  @Override
  public FactsHolder getFacts()
  {
    return facts;
  }

  /**
   * Builds the per-metric selector factory map and the (initially empty) aggregator array template.
   */
  @Override
  protected Aggregator[] initAggs(
      final AggregatorFactory[] metrics,
      final Supplier<InputRow> rowSupplier,
      final boolean deserializeComplexMetrics,
      final boolean concurrentEventAdd
  )
  {
    selectors = new HashMap<>();
    for (AggregatorFactory agg : metrics) {
      selectors.put(
          agg.getName(),
          new CachingColumnSelectorFactory(
              makeColumnSelectorFactory(agg, rowSupplier, deserializeComplexMetrics),
              concurrentEventAdd
          )
      );
    }
    return new Aggregator[metrics.length];
  }

  /**
   * Aggregates {@code row} into the facts table. If the key already exists, the existing
   * aggregators are updated; otherwise new aggregators are created and inserted with
   * {@code putIfAbsent}, and a lost race falls back to aggregating into the winner's
   * aggregators (the loser's array is removed).
   *
   * @throws IndexSizeExceededException when row-count or byte limits are hit for a new key
   *                                    and {@code skipMaxRowsInMemoryCheck} is false
   */
  @Override
  protected AddToFactsResult addToFacts(
      InputRow row,
      IncrementalIndexRow key,
      ThreadLocal<InputRow> rowContainer,
      Supplier<InputRow> rowSupplier,
      boolean skipMaxRowsInMemoryCheck
  ) throws IndexSizeExceededException
  {
    List<String> parseExceptionMessages;
    final int priorIndex = facts.getPriorIndex(key);

    Aggregator[] aggs;
    final AggregatorFactory[] metrics = getMetrics();
    final AtomicInteger numEntries = getNumEntries();
    final AtomicLong sizeInBytes = getBytesInMemory();
    if (IncrementalIndexRow.EMPTY_ROW_INDEX != priorIndex) {
      // Key already present: aggregate into the existing row.
      aggs = concurrentGet(priorIndex);
      parseExceptionMessages = doAggregate(metrics, aggs, rowContainer, row);
    } else {
      aggs = new Aggregator[metrics.length];
      factorizeAggs(metrics, aggs, rowContainer, row);
      parseExceptionMessages = doAggregate(metrics, aggs, rowContainer, row);

      final int rowIndex = indexIncrement.getAndIncrement();
      concurrentSet(rowIndex, aggs);

      // Last ditch sanity checks
      if ((numEntries.get() >= maxRowCount || sizeInBytes.get() >= maxBytesInMemory)
          && facts.getPriorIndex(key) == IncrementalIndexRow.EMPTY_ROW_INDEX
          && !skipMaxRowsInMemoryCheck) {
        throw new IndexSizeExceededException(
            "Maximum number of rows [%d] or max size in bytes [%d] reached",
            maxRowCount,
            maxBytesInMemory
        );
      }
      final int prev = facts.putIfAbsent(key, rowIndex);
      if (IncrementalIndexRow.EMPTY_ROW_INDEX == prev) {
        numEntries.incrementAndGet();
        long estimatedRowSize = estimateRowSizeInBytes(key, maxBytesPerRowForAggregators);
        sizeInBytes.addAndGet(estimatedRowSize);
      } else {
        // We lost a race
        aggs = concurrentGet(prev);
        parseExceptionMessages = doAggregate(metrics, aggs, rowContainer, row);
        // Free up the misfire
        concurrentRemove(rowIndex);
        // This is expected to occur ~80% of the time in the worst scenarios
      }
    }

    return new AddToFactsResult(numEntries.get(), sizeInBytes.get(), parseExceptionMessages);
  }

  /**
   * Gives an estimated size of row in bytes, it accounts for:
   * <ul>
   * <li> overhead per Map Entry
   * <li> TimeAndDims key size
   * <li> aggregator size
   * </ul>
   *
   * @param key                          TimeAndDims key
   * @param maxBytesPerRowForAggregators max size per aggregator
   *
   * @return estimated size of row
   */
  private long estimateRowSizeInBytes(IncrementalIndexRow key, long maxBytesPerRowForAggregators)
  {
    return ROUGH_OVERHEAD_PER_MAP_ENTRY + key.estimateBytesInMemory() + maxBytesPerRowForAggregators;
  }

  @Override
  public int getLastRowIndex()
  {
    return indexIncrement.get() - 1;
  }

  /**
   * Creates aggregator instances for a brand-new row. The input row is published to the
   * aggregators through {@code rowContainer} for the duration of factorization.
   */
  private void factorizeAggs(
      AggregatorFactory[] metrics,
      Aggregator[] aggs,
      ThreadLocal<InputRow> rowContainer,
      InputRow row
  )
  {
    rowContainer.set(row);
    for (int i = 0; i < metrics.length; i++) {
      final AggregatorFactory agg = metrics[i];
      aggs[i] = agg.factorize(selectors.get(agg.getName()));
    }
    rowContainer.set(null);
  }

  /**
   * Feeds {@code row} into every aggregator. Each aggregator is locked individually
   * because multiple threads may aggregate into the same row concurrently.
   *
   * @return parse-error messages collected from aggregators (never null)
   */
  private List<String> doAggregate(
      AggregatorFactory[] metrics,
      Aggregator[] aggs,
      ThreadLocal<InputRow> rowContainer,
      InputRow row
  )
  {
    List<String> parseExceptionMessages = new ArrayList<>();
    rowContainer.set(row);

    for (int i = 0; i < aggs.length; i++) {
      final Aggregator agg = aggs[i];
      synchronized (agg) {
        try {
          agg.aggregate();
        }
        catch (ParseException e) {
          // "aggregate" can throw ParseExceptions if a selector expects something but gets something else.
          log.debug(e, "Encountered parse error, skipping aggregator[%s].", metrics[i].getName());
          parseExceptionMessages.add(e.getMessage());
        }
      }
    }

    rowContainer.set(null);
    return parseExceptionMessages;
  }

  // Closes every aggregator, deferring errors via Closer so all get a chance to close.
  private void closeAggregators()
  {
    Closer closer = Closer.create();
    for (Aggregator[] aggs : aggregators.values()) {
      for (Aggregator agg : aggs) {
        closer.register(agg);
      }
    }

    try {
      closer.close();
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  protected Aggregator[] concurrentGet(int offset)
  {
    // All get operations should be fine
    return aggregators.get(offset);
  }

  protected void concurrentSet(int offset, Aggregator[] value)
  {
    aggregators.put(offset, value);
  }

  protected void concurrentRemove(int offset)
  {
    aggregators.remove(offset);
  }

  /**
   * @return whether another row fits under both the row-count and the byte limits;
   *         also records the human-readable refusal reason in {@link #outOfRowsReason}
   */
  @Override
  public boolean canAppendRow()
  {
    final boolean countCheck = size() < maxRowCount;
    // if maxBytesInMemory = -1, then ignore sizeCheck
    final boolean sizeCheck = maxBytesInMemory <= 0 || getBytesInMemory().get() < maxBytesInMemory;
    final boolean canAdd = countCheck && sizeCheck;
    if (!countCheck && !sizeCheck) {
      outOfRowsReason = StringUtils.format(
          "Maximum number of rows [%d] and maximum size in bytes [%d] reached",
          maxRowCount,
          maxBytesInMemory
      );
    } else {
      if (!countCheck) {
        outOfRowsReason = StringUtils.format("Maximum number of rows [%d] reached", maxRowCount);
      } else if (!sizeCheck) {
        outOfRowsReason = StringUtils.format("Maximum size in bytes [%d] reached", maxBytesInMemory);
      }
    }

    return canAdd;
  }

  @Override
  public String getOutOfRowsReason()
  {
    return outOfRowsReason;
  }

  @Override
  protected Aggregator[] getAggsForRow(int rowOffset)
  {
    return concurrentGet(rowOffset);
  }

  @Override
  protected Object getAggVal(Aggregator agg, int rowOffset, int aggPosition)
  {
    return agg.get();
  }

  @Override
  public float getMetricFloatValue(int rowOffset, int aggOffset)
  {
    return concurrentGet(rowOffset)[aggOffset].getFloat();
  }

  @Override
  public long getMetricLongValue(int rowOffset, int aggOffset)
  {
    return concurrentGet(rowOffset)[aggOffset].getLong();
  }

  @Override
  public Object getMetricObjectValue(int rowOffset, int aggOffset)
  {
    return concurrentGet(rowOffset)[aggOffset].get();
  }

  @Override
  protected double getMetricDoubleValue(int rowOffset, int aggOffset)
  {
    return concurrentGet(rowOffset)[aggOffset].getDouble();
  }

  @Override
  public boolean isNull(int rowOffset, int aggOffset)
  {
    return concurrentGet(rowOffset)[aggOffset].isNull();
  }

  /**
   * Clear out maps to allow GC
   * NOTE: This is NOT thread-safe with add... so make sure all the adding is DONE before closing
   */
  @Override
  public void close()
  {
    super.close();
    closeAggregators();
    aggregators.clear();
    facts.clear();
    if (selectors != null) {
      selectors.clear();
    }
  }

  /**
   * Caches references to selector objects for each column instead of creating a new object each time in order to save
   * heap space. In general the selectorFactory need not to thread-safe. If required, set concurrentEventAdd to true to
   * use concurrent hash map instead of vanilla hash map for thread-safe operations.
   */
  static class CachingColumnSelectorFactory implements ColumnSelectorFactory
  {
    private final Map<String, ColumnValueSelector<?>> columnSelectorMap;
    private final ColumnSelectorFactory delegate;

    public CachingColumnSelectorFactory(ColumnSelectorFactory delegate, boolean concurrentEventAdd)
    {
      this.delegate = delegate;

      if (concurrentEventAdd) {
        columnSelectorMap = new ConcurrentHashMap<>();
      } else {
        columnSelectorMap = new HashMap<>();
      }
    }

    @Override
    public DimensionSelector makeDimensionSelector(DimensionSpec dimensionSpec)
    {
      // Dimension selectors are not cached; only column value selectors are.
      return delegate.makeDimensionSelector(dimensionSpec);
    }

    @Override
    public ColumnValueSelector<?> makeColumnValueSelector(String columnName)
    {
      ColumnValueSelector existing = columnSelectorMap.get(columnName);
      if (existing != null) {
        return existing;
      }

      // We cannot use columnSelectorMap.computeIfAbsent(columnName, delegate::makeColumnValueSelector)
      // here since makeColumnValueSelector may modify the columnSelectorMap itself through
      // virtual column references, triggering a ConcurrentModificationException in JDK 9 and above.
      ColumnValueSelector<?> columnValueSelector = delegate.makeColumnValueSelector(columnName);
      existing = columnSelectorMap.putIfAbsent(columnName, columnValueSelector);
      return existing != null ? existing : columnValueSelector;
    }

    @Nullable
    @Override
    public ColumnCapabilities getColumnCapabilities(String columnName)
    {
      return delegate.getColumnCapabilities(columnName);
    }
  }
}
/* * Copyright 2017, Google Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.language.v1beta2; import com.google.api.core.BetaApi; import com.google.api.gax.grpc.ChannelAndExecutor; import com.google.api.gax.grpc.ClientContext; import com.google.api.gax.grpc.UnaryCallable; import com.google.auth.Credentials; import com.google.cloud.language.v1beta2.AnnotateTextRequest.Features; import io.grpc.ManagedChannel; import java.io.Closeable; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.concurrent.ScheduledExecutorService; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND SERVICE /** * Service Description: Provides text analysis operations such as sentiment analysis and entity * recognition. * * <p>This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * * <pre> * <code> * try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) { * Document document = Document.newBuilder().build(); * AnalyzeSentimentResponse response = languageServiceClient.analyzeSentiment(document); * } * </code> * </pre> * * <p>Note: close() needs to be called on the languageServiceClient object to clean up resources * such as threads. In the example above, try-with-resources is used, which automatically calls * close(). 
* * <p>The surface of this class includes several types of Java methods for each of the API's * methods: * * <ol> * <li> A "flattened" method. With this type of method, the fields of the request type have been * converted into function parameters. It may be the case that not all fields are available as * parameters, and not every API method will have a flattened method entry point. * <li> A "request object" method. This type of method only takes one parameter, a request object, * which must be constructed before the call. Not every API method will have a request object * method. * <li> A "callable" method. This type of method takes no parameters and returns an immutable API * callable object, which can be used to initiate calls to the service. * </ol> * * <p>See the individual methods for example code. * * <p>Many parameters require resource names to be formatted in a particular way. To assist with * these names, this class includes a format method for each type of name, and additionally a parse * method to extract the individual identifiers contained within names that are returned. * * <p>This class can be customized by passing in a custom instance of LanguageServiceSettings to * create(). 
For example: * * <pre> * <code> * LanguageServiceSettings languageServiceSettings = * LanguageServiceSettings.defaultBuilder() * .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials)) * .build(); * LanguageServiceClient languageServiceClient = * LanguageServiceClient.create(languageServiceSettings); * </code> * </pre> */ @Generated("by GAPIC") @BetaApi public class LanguageServiceClient implements AutoCloseable { private final LanguageServiceSettings settings; private final ScheduledExecutorService executor; private final ManagedChannel channel; private final List<AutoCloseable> closeables = new ArrayList<>(); private final UnaryCallable<AnalyzeSentimentRequest, AnalyzeSentimentResponse> analyzeSentimentCallable; private final UnaryCallable<AnalyzeEntitiesRequest, AnalyzeEntitiesResponse> analyzeEntitiesCallable; private final UnaryCallable<AnalyzeEntitySentimentRequest, AnalyzeEntitySentimentResponse> analyzeEntitySentimentCallable; private final UnaryCallable<AnalyzeSyntaxRequest, AnalyzeSyntaxResponse> analyzeSyntaxCallable; private final UnaryCallable<AnnotateTextRequest, AnnotateTextResponse> annotateTextCallable; /** Constructs an instance of LanguageServiceClient with default settings. */ public static final LanguageServiceClient create() throws IOException { return create(LanguageServiceSettings.defaultBuilder().build()); } /** * Constructs an instance of LanguageServiceClient, using the given settings. The channels are * created based on the settings passed in, or defaults for any settings that are not set. */ public static final LanguageServiceClient create(LanguageServiceSettings settings) throws IOException { return new LanguageServiceClient(settings); } /** * Constructs an instance of LanguageServiceClient, using the given settings. This is protected so * that it easy to make a subclass, but otherwise, the static factory methods should be preferred. 
*/ protected LanguageServiceClient(LanguageServiceSettings settings) throws IOException { this.settings = settings; ChannelAndExecutor channelAndExecutor = settings.getChannelAndExecutor(); this.executor = channelAndExecutor.getExecutor(); this.channel = channelAndExecutor.getChannel(); Credentials credentials = settings.getCredentialsProvider().getCredentials(); ClientContext clientContext = ClientContext.newBuilder() .setExecutor(this.executor) .setChannel(this.channel) .setCredentials(credentials) .build(); this.analyzeSentimentCallable = UnaryCallable.create(settings.analyzeSentimentSettings(), clientContext); this.analyzeEntitiesCallable = UnaryCallable.create(settings.analyzeEntitiesSettings(), clientContext); this.analyzeEntitySentimentCallable = UnaryCallable.create(settings.analyzeEntitySentimentSettings(), clientContext); this.analyzeSyntaxCallable = UnaryCallable.create(settings.analyzeSyntaxSettings(), clientContext); this.annotateTextCallable = UnaryCallable.create(settings.annotateTextSettings(), clientContext); if (settings.getChannelProvider().shouldAutoClose()) { closeables.add( new Closeable() { @Override public void close() throws IOException { channel.shutdown(); } }); } if (settings.getExecutorProvider().shouldAutoClose()) { closeables.add( new Closeable() { @Override public void close() throws IOException { executor.shutdown(); } }); } } public final LanguageServiceSettings getSettings() { return settings; } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Analyzes the sentiment of the provided text. * * <p>Sample code: * * <pre><code> * try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) { * Document document = Document.newBuilder().build(); * AnalyzeSentimentResponse response = languageServiceClient.analyzeSentiment(document); * } * </code></pre> * * @param document Input document. 
* @throws com.google.api.gax.grpc.ApiException if the remote call fails */ public final AnalyzeSentimentResponse analyzeSentiment(Document document) { AnalyzeSentimentRequest request = AnalyzeSentimentRequest.newBuilder().setDocument(document).build(); return analyzeSentiment(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Analyzes the sentiment of the provided text. * * <p>Sample code: * * <pre><code> * try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) { * Document document = Document.newBuilder().build(); * AnalyzeSentimentRequest request = AnalyzeSentimentRequest.newBuilder() * .setDocument(document) * .build(); * AnalyzeSentimentResponse response = languageServiceClient.analyzeSentiment(request); * } * </code></pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.grpc.ApiException if the remote call fails */ private final AnalyzeSentimentResponse analyzeSentiment(AnalyzeSentimentRequest request) { return analyzeSentimentCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Analyzes the sentiment of the provided text. 
* * <p>Sample code: * * <pre><code> * try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) { * Document document = Document.newBuilder().build(); * AnalyzeSentimentRequest request = AnalyzeSentimentRequest.newBuilder() * .setDocument(document) * .build(); * ApiFuture&lt;AnalyzeSentimentResponse&gt; future = languageServiceClient.analyzeSentimentCallable().futureCall(request); * // Do something * AnalyzeSentimentResponse response = future.get(); * } * </code></pre> */ public final UnaryCallable<AnalyzeSentimentRequest, AnalyzeSentimentResponse> analyzeSentimentCallable() { return analyzeSentimentCallable; } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Finds named entities (currently proper names and common nouns) in the text along with entity * types, salience, mentions for each entity, and other properties. * * <p>Sample code: * * <pre><code> * try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) { * Document document = Document.newBuilder().build(); * EncodingType encodingType = EncodingType.NONE; * AnalyzeEntitiesResponse response = languageServiceClient.analyzeEntities(document, encodingType); * } * </code></pre> * * @param document Input document. * @param encodingType The encoding type used by the API to calculate offsets. * @throws com.google.api.gax.grpc.ApiException if the remote call fails */ public final AnalyzeEntitiesResponse analyzeEntities( Document document, EncodingType encodingType) { AnalyzeEntitiesRequest request = AnalyzeEntitiesRequest.newBuilder() .setDocument(document) .setEncodingType(encodingType) .build(); return analyzeEntities(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Finds named entities (currently proper names and common nouns) in the text along with entity * types, salience, mentions for each entity, and other properties. 
* * <p>Sample code: * * <pre><code> * try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) { * Document document = Document.newBuilder().build(); * EncodingType encodingType = EncodingType.NONE; * AnalyzeEntitiesRequest request = AnalyzeEntitiesRequest.newBuilder() * .setDocument(document) * .setEncodingType(encodingType) * .build(); * AnalyzeEntitiesResponse response = languageServiceClient.analyzeEntities(request); * } * </code></pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.grpc.ApiException if the remote call fails */ public final AnalyzeEntitiesResponse analyzeEntities(AnalyzeEntitiesRequest request) { return analyzeEntitiesCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Finds named entities (currently proper names and common nouns) in the text along with entity * types, salience, mentions for each entity, and other properties. * * <p>Sample code: * * <pre><code> * try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) { * Document document = Document.newBuilder().build(); * EncodingType encodingType = EncodingType.NONE; * AnalyzeEntitiesRequest request = AnalyzeEntitiesRequest.newBuilder() * .setDocument(document) * .setEncodingType(encodingType) * .build(); * ApiFuture&lt;AnalyzeEntitiesResponse&gt; future = languageServiceClient.analyzeEntitiesCallable().futureCall(request); * // Do something * AnalyzeEntitiesResponse response = future.get(); * } * </code></pre> */ public final UnaryCallable<AnalyzeEntitiesRequest, AnalyzeEntitiesResponse> analyzeEntitiesCallable() { return analyzeEntitiesCallable; } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Finds entities, similar to * [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] in the text * and analyzes sentiment associated with each entity and its mentions. 
* * <p>Sample code: * * <pre><code> * try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) { * Document document = Document.newBuilder().build(); * EncodingType encodingType = EncodingType.NONE; * AnalyzeEntitySentimentResponse response = languageServiceClient.analyzeEntitySentiment(document, encodingType); * } * </code></pre> * * @param document Input document. * @param encodingType The encoding type used by the API to calculate offsets. * @throws com.google.api.gax.grpc.ApiException if the remote call fails */ public final AnalyzeEntitySentimentResponse analyzeEntitySentiment( Document document, EncodingType encodingType) { AnalyzeEntitySentimentRequest request = AnalyzeEntitySentimentRequest.newBuilder() .setDocument(document) .setEncodingType(encodingType) .build(); return analyzeEntitySentiment(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Finds entities, similar to * [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] in the text * and analyzes sentiment associated with each entity and its mentions. * * <p>Sample code: * * <pre><code> * try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) { * Document document = Document.newBuilder().build(); * EncodingType encodingType = EncodingType.NONE; * AnalyzeEntitySentimentRequest request = AnalyzeEntitySentimentRequest.newBuilder() * .setDocument(document) * .setEncodingType(encodingType) * .build(); * AnalyzeEntitySentimentResponse response = languageServiceClient.analyzeEntitySentiment(request); * } * </code></pre> * * @param request The request object containing all of the parameters for the API call. 
* @throws com.google.api.gax.grpc.ApiException if the remote call fails */ public final AnalyzeEntitySentimentResponse analyzeEntitySentiment( AnalyzeEntitySentimentRequest request) { return analyzeEntitySentimentCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Finds entities, similar to * [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] in the text * and analyzes sentiment associated with each entity and its mentions. * * <p>Sample code: * * <pre><code> * try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) { * Document document = Document.newBuilder().build(); * EncodingType encodingType = EncodingType.NONE; * AnalyzeEntitySentimentRequest request = AnalyzeEntitySentimentRequest.newBuilder() * .setDocument(document) * .setEncodingType(encodingType) * .build(); * ApiFuture&lt;AnalyzeEntitySentimentResponse&gt; future = languageServiceClient.analyzeEntitySentimentCallable().futureCall(request); * // Do something * AnalyzeEntitySentimentResponse response = future.get(); * } * </code></pre> */ public final UnaryCallable<AnalyzeEntitySentimentRequest, AnalyzeEntitySentimentResponse> analyzeEntitySentimentCallable() { return analyzeEntitySentimentCallable; } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Analyzes the syntax of the text and provides sentence boundaries and tokenization along with * part of speech tags, dependency trees, and other properties. * * <p>Sample code: * * <pre><code> * try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) { * Document document = Document.newBuilder().build(); * EncodingType encodingType = EncodingType.NONE; * AnalyzeSyntaxResponse response = languageServiceClient.analyzeSyntax(document, encodingType); * } * </code></pre> * * @param document Input document. * @param encodingType The encoding type used by the API to calculate offsets. 
* @throws com.google.api.gax.grpc.ApiException if the remote call fails */ public final AnalyzeSyntaxResponse analyzeSyntax(Document document, EncodingType encodingType) { AnalyzeSyntaxRequest request = AnalyzeSyntaxRequest.newBuilder() .setDocument(document) .setEncodingType(encodingType) .build(); return analyzeSyntax(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Analyzes the syntax of the text and provides sentence boundaries and tokenization along with * part of speech tags, dependency trees, and other properties. * * <p>Sample code: * * <pre><code> * try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) { * Document document = Document.newBuilder().build(); * EncodingType encodingType = EncodingType.NONE; * AnalyzeSyntaxRequest request = AnalyzeSyntaxRequest.newBuilder() * .setDocument(document) * .setEncodingType(encodingType) * .build(); * AnalyzeSyntaxResponse response = languageServiceClient.analyzeSyntax(request); * } * </code></pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.grpc.ApiException if the remote call fails */ public final AnalyzeSyntaxResponse analyzeSyntax(AnalyzeSyntaxRequest request) { return analyzeSyntaxCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * Analyzes the syntax of the text and provides sentence boundaries and tokenization along with * part of speech tags, dependency trees, and other properties. 
* * <p>Sample code: * * <pre><code> * try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) { * Document document = Document.newBuilder().build(); * EncodingType encodingType = EncodingType.NONE; * AnalyzeSyntaxRequest request = AnalyzeSyntaxRequest.newBuilder() * .setDocument(document) * .setEncodingType(encodingType) * .build(); * ApiFuture&lt;AnalyzeSyntaxResponse&gt; future = languageServiceClient.analyzeSyntaxCallable().futureCall(request); * // Do something * AnalyzeSyntaxResponse response = future.get(); * } * </code></pre> */ public final UnaryCallable<AnalyzeSyntaxRequest, AnalyzeSyntaxResponse> analyzeSyntaxCallable() { return analyzeSyntaxCallable; } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * A convenience method that provides all syntax, sentiment, and entity features in one call. * * <p>Sample code: * * <pre><code> * try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) { * Document document = Document.newBuilder().build(); * AnnotateTextRequest.Features features = AnnotateTextRequest.Features.newBuilder().build(); * EncodingType encodingType = EncodingType.NONE; * AnnotateTextResponse response = languageServiceClient.annotateText(document, features, encodingType); * } * </code></pre> * * @param document Input document. * @param features The enabled features. * @param encodingType The encoding type used by the API to calculate offsets. * @throws com.google.api.gax.grpc.ApiException if the remote call fails */ public final AnnotateTextResponse annotateText( Document document, AnnotateTextRequest.Features features, EncodingType encodingType) { AnnotateTextRequest request = AnnotateTextRequest.newBuilder() .setDocument(document) .setFeatures(features) .setEncodingType(encodingType) .build(); return annotateText(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * A convenience method that provides all syntax, sentiment, and entity features in one call. 
* * <p>Sample code: * * <pre><code> * try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) { * Document document = Document.newBuilder().build(); * AnnotateTextRequest.Features features = AnnotateTextRequest.Features.newBuilder().build(); * EncodingType encodingType = EncodingType.NONE; * AnnotateTextRequest request = AnnotateTextRequest.newBuilder() * .setDocument(document) * .setFeatures(features) * .setEncodingType(encodingType) * .build(); * AnnotateTextResponse response = languageServiceClient.annotateText(request); * } * </code></pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.grpc.ApiException if the remote call fails */ public final AnnotateTextResponse annotateText(AnnotateTextRequest request) { return annotateTextCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD /** * A convenience method that provides all syntax, sentiment, and entity features in one call. * * <p>Sample code: * * <pre><code> * try (LanguageServiceClient languageServiceClient = LanguageServiceClient.create()) { * Document document = Document.newBuilder().build(); * AnnotateTextRequest.Features features = AnnotateTextRequest.Features.newBuilder().build(); * EncodingType encodingType = EncodingType.NONE; * AnnotateTextRequest request = AnnotateTextRequest.newBuilder() * .setDocument(document) * .setFeatures(features) * .setEncodingType(encodingType) * .build(); * ApiFuture&lt;AnnotateTextResponse&gt; future = languageServiceClient.annotateTextCallable().futureCall(request); * // Do something * AnnotateTextResponse response = future.get(); * } * </code></pre> */ public final UnaryCallable<AnnotateTextRequest, AnnotateTextResponse> annotateTextCallable() { return annotateTextCallable; } /** * Initiates an orderly shutdown in which preexisting calls continue but new calls are immediately * cancelled. 
*/ @Override public final void close() throws Exception { for (AutoCloseable closeable : closeables) { closeable.close(); } } }
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.jetbrains.python.documentation.docstrings; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.text.StringUtil; import com.intellij.util.containers.ContainerUtil; import com.jetbrains.python.PyNames; import com.jetbrains.python.psi.PyIndentUtil; import com.jetbrains.python.psi.StructuredDocString; import com.jetbrains.python.toolbox.Substring; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.*; import java.util.regex.Pattern; /** * Common base class for docstring styles supported by Napoleon Sphinx extension. 
*
 * @author Mikhail Golubev
 * @see <a href="http://sphinxcontrib-napoleon.readthedocs.org/en/latest/index.html">Napoleon</a>
 */
public abstract class SectionBasedDocString extends DocStringLineParser implements StructuredDocString {

  /**
   * Frequently used section types
   */
  @NonNls public static final String RETURNS_SECTION = "returns";
  @NonNls public static final String RAISES_SECTION = "raises";
  @NonNls public static final String KEYWORD_ARGUMENTS_SECTION = "keyword arguments";
  @NonNls public static final String PARAMETERS_SECTION = "parameters";
  @NonNls public static final String ATTRIBUTES_SECTION = "attributes";
  @NonNls public static final String METHODS_SECTION = "methods";
  @NonNls public static final String OTHER_PARAMETERS_SECTION = "other parameters";
  @NonNls public static final String YIELDS_SECTION = "yields";

  // dumb heuristic - consecutive words
  private static final Pattern PLAIN_TEXT = Pattern.compile("\\w+(\\s+\\w+){2}");

  // Maps every recognized section title (lower-cased) to its canonical name.
  protected static final Map<String, String> SECTION_ALIASES =
    ImmutableMap.<String, String>builder()
      .put("arguments", PARAMETERS_SECTION)
      .put("args", PARAMETERS_SECTION)
      .put("parameters", PARAMETERS_SECTION)
      .put("keyword args", KEYWORD_ARGUMENTS_SECTION)
      .put("keyword arguments", KEYWORD_ARGUMENTS_SECTION)
      .put("other parameters", OTHER_PARAMETERS_SECTION)
      .put("attributes", ATTRIBUTES_SECTION)
      .put("methods", METHODS_SECTION)
      .put("note", "notes")
      .put("notes", "notes")
      .put("example", "examples")
      .put("examples", "examples")
      .put("return", RETURNS_SECTION)
      .put("returns", RETURNS_SECTION)
      .put("yield", YIELDS_SECTION)
      // Consistency fix: was the string literal "yields"; use the constant like
      // the "yield" alias above (same runtime value, single point of definition).
      .put("yields", YIELDS_SECTION)
      .put("raises", RAISES_SECTION)
      .put("references", "references")
      .put("see also", "see also")
      .put("warning", "warnings")
      .put("warns", "warnings")
      .put("warnings", "warnings")
      .build();

  private static final Pattern SPHINX_REFERENCE_RE = Pattern.compile("(:\\w+:\\S+:`.+?`|:\\S+:`.+?`|`.+?`)");

  // "final" added: this publicly shared view of the alias keys must never be
  // reassigned (it was a mutable public static field before).
  public static final Set<String> SECTION_NAMES = SECTION_ALIASES.keySet();

  private static final
ImmutableSet<String> SECTIONS_WITH_NAME_AND_OPTIONAL_TYPE = ImmutableSet.of(ATTRIBUTES_SECTION, PARAMETERS_SECTION, KEYWORD_ARGUMENTS_SECTION, OTHER_PARAMETERS_SECTION); private static final ImmutableSet<String> SECTIONS_WITH_TYPE_AND_OPTIONAL_NAME = ImmutableSet.of(RETURNS_SECTION, YIELDS_SECTION); private static final ImmutableSet<String> SECTIONS_WITH_TYPE = ImmutableSet.of(RAISES_SECTION); private static final ImmutableSet<String> SECTIONS_WITH_NAME = ImmutableSet.of(METHODS_SECTION); @Nullable public static String getNormalizedSectionTitle(@NotNull @NonNls String title) { return SECTION_ALIASES.get(title.toLowerCase()); } public static boolean isValidSectionTitle(@NotNull @NonNls String title) { return StringUtil.isCapitalized(title) && getNormalizedSectionTitle(title) != null; } private final Substring mySummary; private final List<Section> mySections = new ArrayList<>(); private final List<Substring> myOtherContent = new ArrayList<>(); protected SectionBasedDocString(@NotNull Substring text) { super(text); List<Substring> summary = Collections.emptyList(); int startLine = consumeEmptyLines(parseHeader(0)); int lineNum = startLine; while (lineNum < getLineCount()) { final Pair<Section, Integer> parsedSection = parseSection(lineNum); if (parsedSection.getFirst() != null) { mySections.add(parsedSection.getFirst()); lineNum = parsedSection.getSecond(); } else if (lineNum == startLine) { final Pair<List<Substring>, Integer> parsedSummary = parseSummary(lineNum); summary = parsedSummary.getFirst(); lineNum = parsedSummary.getSecond(); } else { myOtherContent.add(getLine(lineNum)); lineNum++; } lineNum = consumeEmptyLines(lineNum); } //noinspection ConstantConditions mySummary = summary.isEmpty() ? 
null : summary.get(0).union(summary.get(summary.size() - 1)).trim(); } @NotNull private Pair<List<Substring>, Integer> parseSummary(int lineNum) { final List<Substring> result = new ArrayList<>(); while (!(isEmptyOrDoesNotExist(lineNum) || isBlockEnd(lineNum))) { result.add(getLine(lineNum)); lineNum++; } return Pair.create(result, lineNum); } /** * Used to parse e.g. optional function signature at the beginning of NumPy-style docstring * * @return first line from which to star parsing remaining sections */ protected int parseHeader(int startLine) { return startLine; } @NotNull protected Pair<Section, Integer> parseSection(int sectionStartLine) { final Pair<Substring, Integer> parsedHeader = parseSectionHeader(sectionStartLine); if (parsedHeader.getFirst() == null) { return Pair.create(null, sectionStartLine); } final String normalized = getNormalizedSectionTitle(parsedHeader.getFirst().toString()); if (normalized == null) { return Pair.create(null, sectionStartLine); } final List<SectionField> fields = new ArrayList<>(); final int sectionIndent = getLineIndentSize(sectionStartLine); int lineNum = consumeEmptyLines(parsedHeader.getSecond()); while (!isSectionBreak(lineNum, sectionIndent)) { if (!isEmpty(lineNum)) { final Pair<SectionField, Integer> parsedField = parseSectionField(lineNum, normalized, sectionIndent); if (parsedField.getFirst() != null) { fields.add(parsedField.getFirst()); lineNum = parsedField.getSecond(); continue; } else { myOtherContent.add(getLine(lineNum)); } } lineNum++; } return Pair.create(new Section(parsedHeader.getFirst(), fields), lineNum); } @NotNull protected Pair<SectionField, Integer> parseSectionField(int lineNum, @NotNull String normalizedSectionTitle, int sectionIndent) { if (SECTIONS_WITH_NAME_AND_OPTIONAL_TYPE.contains(normalizedSectionTitle)) { return parseSectionField(lineNum, sectionIndent, true, false); } if (SECTIONS_WITH_TYPE_AND_OPTIONAL_NAME.contains(normalizedSectionTitle)) { return parseSectionField(lineNum, 
sectionIndent, true, true); } if (SECTIONS_WITH_NAME.contains(normalizedSectionTitle)) { return parseSectionField(lineNum, sectionIndent, false, false); } if (SECTIONS_WITH_TYPE.contains(normalizedSectionTitle)) { return parseSectionField(lineNum, sectionIndent, false, true); } return parseGenericField(lineNum, sectionIndent); } protected abstract Pair<SectionField, Integer> parseSectionField(int lineNum, int sectionIndent, boolean mayHaveType, boolean preferType); @NotNull protected Pair<SectionField, Integer> parseGenericField(int lineNum, int sectionIndent) { final Pair<List<Substring>, Integer> pair = parseIndentedBlock(lineNum, getSectionIndentationThreshold(sectionIndent)); final Substring firstLine = ContainerUtil.getFirstItem(pair.getFirst()); final Substring lastLine = ContainerUtil.getLastItem(pair.getFirst()); if (firstLine != null && lastLine != null) { return Pair.create(new SectionField((Substring)null, null, firstLine.union(lastLine).trim()), pair.getSecond()); } return Pair.create(null, pair.getSecond()); } @NotNull protected abstract Pair<Substring, Integer> parseSectionHeader(int lineNum); protected boolean isSectionStart(int lineNum) { final Pair<Substring, Integer> pair = parseSectionHeader(lineNum); return pair.getFirst() != null; } protected boolean isSectionBreak(int lineNum, int curSectionIndent) { return lineNum >= getLineCount() || // note that field may have the same indent as its containing section (!isEmpty(lineNum) && getLineIndentSize(lineNum) <= getSectionIndentationThreshold(curSectionIndent)) || isSectionStart(lineNum); } /** * Consumes all lines that are indented more than {@code blockIndent} and don't contain start of a new section. * Trailing empty lines (e.g. due to indentation of closing triple quotes) are omitted in result. 
* * @param blockIndent indentation threshold, block ends with a line that has greater indentation */ @NotNull protected Pair<List<Substring>, Integer> parseIndentedBlock(int lineNum, int blockIndent) { final int blockEnd = consumeIndentedBlock(lineNum, blockIndent); return Pair.create(myLines.subList(lineNum, blockEnd), blockEnd); } /** * Inside section any indentation that is equal or smaller to returned one signals about section break. * It's safe to return negative value, because it's used only for comparisons. * * @see #isSectionBreak(int, int) * @see #parseGenericField(int, int) */ protected int getSectionIndentationThreshold(int sectionIndent) { return sectionIndent; } @Override protected boolean isBlockEnd(int lineNum) { return isSectionStart(lineNum); } protected boolean isValidType(@NotNull String type) { return !type.isEmpty() && !PLAIN_TEXT.matcher(type).find(); } protected boolean isValidName(@NotNull String name) { return PyNames.isIdentifierString(name); } /** * Properly partitions line by first colon taking into account possible Sphinx references inside * <p/> * <h3>Example</h3> * <pre><code> * runtime (:class:`Runtime`): Use it to access the environment. * </code></pre> */ @NotNull protected static List<Substring> splitByFirstColon(@NotNull Substring line) { final List<Substring> parts = line.split(SPHINX_REFERENCE_RE); if (parts.size() > 1) { for (Substring part : parts) { final int i = part.indexOf(":"); if (i >= 0) { final Substring beforeColon = new Substring(line.getSuperString(), line.getStartOffset(), part.getStartOffset() + i); final Substring afterColon = new Substring(line.getSuperString(), part.getStartOffset() + i + 1, line.getEndOffset()); return Arrays.asList(beforeColon, afterColon); } } return Collections.singletonList(line); } return line.split(":", 1); } @NotNull public List<Section> getSections() { return Collections.unmodifiableList(mySections); } @Override public String getSummary() { return mySummary != null ? 
mySummary.concatTrimmedLines("\n") : ""; } @NotNull @Override public String getDescription() { return ""; } @NotNull @Override public List<String> getParameters() { return ContainerUtil.map(getParameterSubstrings(), substring -> substring.toString()); } @NotNull @Override public List<Substring> getParameterSubstrings() { final List<Substring> result = new ArrayList<>(); for (SectionField field : getParameterFields()) { ContainerUtil.addAllNotNull(result, field.getNamesAsSubstrings()); } return result; } @Nullable @Override public String getParamType(@Nullable String paramName) { final Substring sub = getParamTypeSubstring(paramName); return sub != null ? sub.toString() : null; } @Nullable @Override public Substring getParamTypeSubstring(@Nullable String paramName) { if (paramName != null) { final SectionField field = getFirstFieldForParameter(paramName); if (field != null) { return field.getTypeAsSubstring(); } } return null; } @Nullable @Override public String getParamDescription(@Nullable String paramName) { if (paramName != null) { final SectionField field = getFirstFieldForParameter(paramName); if (field != null) { return field.getDescription(); } } return null; } @Nullable public SectionField getFirstFieldForParameter(@NotNull final String name) { return ContainerUtil.find(getParameterFields(), field -> field.getNames().contains(name)); } @NotNull public List<SectionField> getParameterFields() { final List<SectionField> result = new ArrayList<>(); for (Section section : getParameterSections()) { result.addAll(section.getFields()); } return result; } @NotNull public List<Section> getParameterSections() { return getSectionsWithNormalizedTitle(PARAMETERS_SECTION); } @NotNull @Override public List<String> getKeywordArguments() { final List<String> result = new ArrayList<>(); for (SectionField field : getKeywordArgumentFields()) { result.addAll(field.getNames()); } return result; } @NotNull @Override public List<Substring> getKeywordArgumentSubstrings() { final 
List<Substring> result = new ArrayList<>();
    for (SectionField field : getKeywordArgumentFields()) {
      // Bug fix: the names were previously passed as the *target* collection of
      // ContainerUtil.addAllNotNull(...) with no elements to add, so "result"
      // was never populated and this method always returned an empty list.
      // The corrected call mirrors getParameterSubstrings() above.
      ContainerUtil.addAllNotNull(result, field.getNamesAsSubstrings());
    }
    return result;
  }

  @Nullable
  @Override
  public String getKeywordArgumentDescription(@Nullable String paramName) {
    if (paramName != null) {
      final SectionField argument = getFirstFieldForKeywordArgument(paramName);
      if (argument != null) {
        return argument.getDescription();
      }
    }
    return null;
  }

  /** Collects the fields of every "Keyword Arguments" section in this docstring. */
  @NotNull
  public List<SectionField> getKeywordArgumentFields() {
    final List<SectionField> result = new ArrayList<>();
    for (Section section : getSectionsWithNormalizedTitle(KEYWORD_ARGUMENTS_SECTION)) {
      result.addAll(section.getFields());
    }
    return result;
  }

  @Nullable
  private SectionField getFirstFieldForKeywordArgument(@NotNull final String name) {
    return ContainerUtil.find(getKeywordArgumentFields(), field -> field.getNames().contains(name));
  }

  @Nullable
  @Override
  public String getReturnType() {
    final Substring sub = getReturnTypeSubstring();
    return sub != null ? sub.toString() : null;
  }

  @Nullable
  @Override
  public Substring getReturnTypeSubstring() {
    final SectionField field = getFirstReturnField();
    return field != null ? field.getTypeAsSubstring() : null;
  }

  @Nullable
  @Override
  public String getReturnDescription() {
    final SectionField field = getFirstReturnField();
    return field != null ?
field.getDescription() : null; } @NotNull public List<SectionField> getReturnFields() { final List<SectionField> result = new ArrayList<>(); for (Section section : getSectionsWithNormalizedTitle(RETURNS_SECTION)) { result.addAll(section.getFields()); } return result; } @Nullable private SectionField getFirstReturnField() { return ContainerUtil.getFirstItem(getReturnFields()); } @NotNull @Override public List<String> getRaisedExceptions() { return ContainerUtil.mapNotNull(getExceptionFields(), field -> StringUtil.nullize(field.getType())); } @Nullable @Override public String getRaisedExceptionDescription(@Nullable String exceptionName) { if (exceptionName != null) { final SectionField exception = getFirstFieldForException(exceptionName); if (exception != null) { return exception.getDescription(); } } return null; } @NotNull public List<SectionField> getExceptionFields() { final List<SectionField> result = new ArrayList<>(); for (Section section : getSectionsWithNormalizedTitle(RAISES_SECTION)) { result.addAll(section.getFields()); } return result; } @Nullable private SectionField getFirstFieldForException(@NotNull final String exceptionType) { return ContainerUtil.find(getExceptionFields(), field -> exceptionType.equals(field.getType())); } @NotNull public List<SectionField> getAttributeFields() { final List<SectionField> result = new ArrayList<>(); for (Section section : getSectionsWithNormalizedTitle(ATTRIBUTES_SECTION)) { result.addAll(section.getFields()); } return result; } @NotNull public List<Section> getSectionsWithNormalizedTitle(@NotNull final String title) { return ContainerUtil.mapNotNull(mySections, section -> section.getNormalizedTitle().equals(getNormalizedSectionTitle(title)) ? 
section : null); } @Nullable public Section getFirstSectionWithNormalizedTitle(@NotNull String title) { return ContainerUtil.getFirstItem(getSectionsWithNormalizedTitle(title)); } @Nullable @Override public String getAttributeDescription() { return null; } @NotNull protected static Substring cleanUpName(@NotNull Substring name) { int firstNotStar = 0; while (firstNotStar < name.length() && name.charAt(firstNotStar) == '*') { firstNotStar++; } return name.substring(firstNotStar).trimLeft(); } public static class Section { private final Substring myTitle; private final List<SectionField> myFields; public Section(@NotNull Substring title, @NotNull List<SectionField> fields) { myTitle = title; myFields = new ArrayList<>(fields); } @NotNull public Substring getTitleAsSubstring() { return myTitle; } @NotNull public String getTitle() { return myTitle.toString(); } @NotNull public String getNormalizedTitle() { //noinspection ConstantConditions return getNormalizedSectionTitle(getTitle()); } @NotNull public List<SectionField> getFields() { return Collections.unmodifiableList(myFields); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Section section = (Section)o; if (!myTitle.equals(section.myTitle)) return false; if (!myFields.equals(section.myFields)) return false; return true; } @Override public int hashCode() { int result = myTitle.hashCode(); result = 31 * result + myFields.hashCode(); return result; } } public static class SectionField { private final List<Substring> myNames; private final Substring myType; private final Substring myDescription; public SectionField(@Nullable Substring name, @Nullable Substring type, @Nullable Substring description) { this(name == null ? 
Collections.<Substring>emptyList() : Collections.singletonList(name), type, description); } public SectionField(@NotNull List<Substring> names, @Nullable Substring type, @Nullable Substring description) { myNames = names; myType = type; myDescription = description; } @Nullable public String getName() { return myNames.isEmpty() ? null : myNames.get(0).toString(); } @Nullable public Substring getNameAsSubstring() { return myNames.isEmpty() ? null : myNames.get(0); } @NotNull public List<Substring> getNamesAsSubstrings() { return myNames; } @NotNull public List<String> getNames() { return ContainerUtil.map(myNames, substring -> substring.toString()); } @Nullable public String getType() { return myType == null ? null : myType.toString(); } @Nullable public Substring getTypeAsSubstring() { return myType; } @Nullable public String getDescription() { return myDescription == null ? null : PyIndentUtil.removeCommonIndent(myDescription.getValue(), true); } @Nullable public Substring getDescriptionAsSubstring() { return myDescription; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; SectionField field = (SectionField)o; if (myNames != null ? !myNames.equals(field.myNames) : field.myNames != null) return false; if (myType != null ? !myType.equals(field.myType) : field.myType != null) return false; if (myDescription != null ? !myDescription.equals(field.myDescription) : field.myDescription != null) return false; return true; } @Override public int hashCode() { int result = myNames != null ? myNames.hashCode() : 0; result = 31 * result + (myType != null ? myType.hashCode() : 0); result = 31 * result + (myDescription != null ? myDescription.hashCode() : 0); return result; } } }
/* * Copyright 2012-2020 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.devtools.filewatch; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.time.Duration; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; import java.util.Set; import java.util.UUID; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.io.TempDir; import org.springframework.boot.devtools.filewatch.ChangedFile.Type; import org.springframework.util.FileCopyUtils; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; import static org.assertj.core.api.Assertions.assertThatIllegalStateException; import static org.mockito.Mockito.mock; /** * Tests for {@link FileSystemWatcher}. 
* * @author Phillip Webb */ class FileSystemWatcherTests { private FileSystemWatcher watcher; private List<Set<ChangedFiles>> changes = Collections.synchronizedList(new ArrayList<>()); @TempDir File tempDir; @BeforeEach void setup() { setupWatcher(20, 10); } @Test void pollIntervalMustBePositive() { assertThatIllegalArgumentException() .isThrownBy(() -> new FileSystemWatcher(true, Duration.ofMillis(0), Duration.ofMillis(1))) .withMessageContaining("PollInterval must be positive"); } @Test void quietPeriodMustBePositive() { assertThatIllegalArgumentException() .isThrownBy(() -> new FileSystemWatcher(true, Duration.ofMillis(1), Duration.ofMillis(0))) .withMessageContaining("QuietPeriod must be positive"); } @Test void pollIntervalMustBeGreaterThanQuietPeriod() { assertThatIllegalArgumentException() .isThrownBy(() -> new FileSystemWatcher(true, Duration.ofMillis(1), Duration.ofMillis(1))) .withMessageContaining("PollInterval must be greater than QuietPeriod"); } @Test void listenerMustNotBeNull() { assertThatIllegalArgumentException().isThrownBy(() -> this.watcher.addListener(null)) .withMessageContaining("FileChangeListener must not be null"); } @Test void cannotAddListenerToStartedListener() { this.watcher.start(); assertThatIllegalStateException().isThrownBy(() -> this.watcher.addListener(mock(FileChangeListener.class))) .withMessageContaining("FileSystemWatcher already started"); } @Test void sourceDirectoryMustNotBeNull() { assertThatIllegalArgumentException().isThrownBy(() -> this.watcher.addSourceDirectory(null)) .withMessageContaining("Directory must not be null"); } @Test void sourceDirectoryMustNotBeAFile() throws IOException { File file = new File(this.tempDir, "file"); assertThat(file.createNewFile()).isTrue(); assertThat(file.isFile()).isTrue(); assertThatIllegalArgumentException().isThrownBy(() -> this.watcher.addSourceDirectory(file)) .withMessageContaining("Directory '" + file + "' must not be a file"); } @Test void 
cannotAddSourceDirectoryToStartedListener() throws Exception { this.watcher.start(); assertThatIllegalStateException().isThrownBy(() -> this.watcher.addSourceDirectory(this.tempDir)) .withMessageContaining("FileSystemWatcher already started"); } @Test void addFile() throws Exception { File directory = startWithNewDirectory(); File file = touch(new File(directory, "test.txt")); this.watcher.stopAfter(1); ChangedFiles changedFiles = getSingleChangedFiles(); ChangedFile expected = new ChangedFile(directory, file, Type.ADD); assertThat(changedFiles.getFiles()).contains(expected); } @Test void addNestedFile() throws Exception { File directory = startWithNewDirectory(); File file = touch(new File(new File(directory, "sub"), "text.txt")); this.watcher.stopAfter(1); ChangedFiles changedFiles = getSingleChangedFiles(); ChangedFile expected = new ChangedFile(directory, file, Type.ADD); assertThat(changedFiles.getFiles()).contains(expected); } @Test void createSourceDirectoryAndAddFile() throws IOException { File directory = new File(this.tempDir, "does/not/exist"); assertThat(directory.exists()).isFalse(); this.watcher.addSourceDirectory(directory); this.watcher.start(); directory.mkdirs(); File file = touch(new File(directory, "text.txt")); this.watcher.stopAfter(1); ChangedFiles changedFiles = getSingleChangedFiles(); ChangedFile expected = new ChangedFile(directory, file, Type.ADD); assertThat(changedFiles.getFiles()).contains(expected); } @Test void waitsForPollingInterval() throws Exception { setupWatcher(10, 1); File directory = startWithNewDirectory(); touch(new File(directory, "test1.txt")); while (this.changes.size() != 1) { Thread.sleep(10); } touch(new File(directory, "test2.txt")); this.watcher.stopAfter(1); assertThat(this.changes.size()).isEqualTo(2); } @Test void waitsForQuietPeriod() throws Exception { setupWatcher(300, 200); File directory = startWithNewDirectory(); for (int i = 0; i < 100; i++) { touch(new File(directory, i + "test.txt")); Thread.sleep(10); 
} this.watcher.stopAfter(1); ChangedFiles changedFiles = getSingleChangedFiles(); assertThat(changedFiles.getFiles()).hasSize(100); } @Test void withExistingFiles() throws Exception { File directory = new File(this.tempDir, UUID.randomUUID().toString()); directory.mkdir(); touch(new File(directory, "test.txt")); this.watcher.addSourceDirectory(directory); this.watcher.start(); File file = touch(new File(directory, "test2.txt")); this.watcher.stopAfter(1); ChangedFiles changedFiles = getSingleChangedFiles(); ChangedFile expected = new ChangedFile(directory, file, Type.ADD); assertThat(changedFiles.getFiles()).contains(expected); } @Test void multipleSources() throws Exception { File directory1 = new File(this.tempDir, UUID.randomUUID().toString()); directory1.mkdir(); File directory2 = new File(this.tempDir, UUID.randomUUID().toString()); directory2.mkdir(); this.watcher.addSourceDirectory(directory1); this.watcher.addSourceDirectory(directory2); this.watcher.start(); File file1 = touch(new File(directory1, "test.txt")); File file2 = touch(new File(directory2, "test.txt")); this.watcher.stopAfter(1); Set<ChangedFiles> change = getSingleOnChange(); assertThat(change.size()).isEqualTo(2); for (ChangedFiles changedFiles : change) { if (changedFiles.getSourceDirectory().equals(directory1)) { ChangedFile file = new ChangedFile(directory1, file1, Type.ADD); assertThat(changedFiles.getFiles()).containsOnly(file); } else { ChangedFile file = new ChangedFile(directory2, file2, Type.ADD); assertThat(changedFiles.getFiles()).containsOnly(file); } } } @Test void multipleListeners() throws Exception { File directory = new File(this.tempDir, UUID.randomUUID().toString()); directory.mkdir(); final Set<ChangedFiles> listener2Changes = new LinkedHashSet<>(); this.watcher.addSourceDirectory(directory); this.watcher.addListener(listener2Changes::addAll); this.watcher.start(); File file = touch(new File(directory, "test.txt")); this.watcher.stopAfter(1); ChangedFiles changedFiles = 
getSingleChangedFiles(); ChangedFile expected = new ChangedFile(directory, file, Type.ADD); assertThat(changedFiles.getFiles()).contains(expected); assertThat(listener2Changes).isEqualTo(this.changes.get(0)); } @Test void modifyDeleteAndAdd() throws Exception { File directory = new File(this.tempDir, UUID.randomUUID().toString()); directory.mkdir(); File modify = touch(new File(directory, "modify.txt")); File delete = touch(new File(directory, "delete.txt")); this.watcher.addSourceDirectory(directory); this.watcher.start(); FileCopyUtils.copy("abc".getBytes(), modify); delete.delete(); File add = touch(new File(directory, "add.txt")); this.watcher.stopAfter(1); ChangedFiles changedFiles = getSingleChangedFiles(); Set<ChangedFile> actual = changedFiles.getFiles(); Set<ChangedFile> expected = new HashSet<>(); expected.add(new ChangedFile(directory, modify, Type.MODIFY)); expected.add(new ChangedFile(directory, delete, Type.DELETE)); expected.add(new ChangedFile(directory, add, Type.ADD)); assertThat(actual).isEqualTo(expected); } @Test void withTriggerFilter() throws Exception { File directory = new File(this.tempDir, UUID.randomUUID().toString()); directory.mkdir(); File file = touch(new File(directory, "file.txt")); File trigger = touch(new File(directory, "trigger.txt")); this.watcher.addSourceDirectory(directory); this.watcher.setTriggerFilter((candidate) -> candidate.getName().equals("trigger.txt")); this.watcher.start(); FileCopyUtils.copy("abc".getBytes(), file); Thread.sleep(100); assertThat(this.changes).isEmpty(); FileCopyUtils.copy("abc".getBytes(), trigger); this.watcher.stopAfter(1); ChangedFiles changedFiles = getSingleChangedFiles(); Set<ChangedFile> actual = changedFiles.getFiles(); Set<ChangedFile> expected = new HashSet<>(); expected.add(new ChangedFile(directory, file, Type.MODIFY)); assertThat(actual).isEqualTo(expected); } @Test void withSnapshotRepository() throws Exception { SnapshotStateRepository repository = new 
TestSnapshotStateRepository(); setupWatcher(20, 10, repository); File directory = new File(this.tempDir, UUID.randomUUID().toString()); directory.mkdir(); File file = touch(new File(directory, "file.txt")); this.watcher.addSourceDirectory(directory); this.watcher.start(); file.delete(); this.watcher.stopAfter(1); this.changes.clear(); File recreate = touch(new File(directory, "file.txt")); setupWatcher(20, 10, repository); this.watcher.addSourceDirectory(directory); this.watcher.start(); this.watcher.stopAfter(1); ChangedFiles changedFiles = getSingleChangedFiles(); Set<ChangedFile> actual = changedFiles.getFiles(); Set<ChangedFile> expected = new HashSet<>(); expected.add(new ChangedFile(directory, recreate, Type.ADD)); assertThat(actual).isEqualTo(expected); } private void setupWatcher(long pollingInterval, long quietPeriod) { setupWatcher(pollingInterval, quietPeriod, null); } private void setupWatcher(long pollingInterval, long quietPeriod, SnapshotStateRepository snapshotStateRepository) { this.watcher = new FileSystemWatcher(false, Duration.ofMillis(pollingInterval), Duration.ofMillis(quietPeriod), snapshotStateRepository); this.watcher.addListener((changeSet) -> FileSystemWatcherTests.this.changes.add(changeSet)); } private File startWithNewDirectory() throws IOException { File directory = new File(this.tempDir, UUID.randomUUID().toString()); directory.mkdir(); this.watcher.addSourceDirectory(directory); this.watcher.start(); return directory; } private ChangedFiles getSingleChangedFiles() { Set<ChangedFiles> singleChange = getSingleOnChange(); assertThat(singleChange).hasSize(1); return singleChange.iterator().next(); } private Set<ChangedFiles> getSingleOnChange() { assertThat(this.changes).hasSize(1); return this.changes.get(0); } private File touch(File file) throws IOException { file.getParentFile().mkdirs(); FileOutputStream fileOutputStream = new FileOutputStream(file); fileOutputStream.close(); return file; } private static class 
TestSnapshotStateRepository implements SnapshotStateRepository {

		// Last state handed to save(); returned verbatim by restore() so a newly
		// created watcher can pick up where the previous instance left off.
		private Object state;

		@Override
		public void save(Object state) {
			this.state = state;
		}

		@Override
		public Object restore() {
			return this.state;
		}

	}

}
/** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.coprocessor; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import java.io.IOException; import java.util.Collections; import java.util.List; import java.util.NavigableSet; import java.util.concurrent.CountDownLatch; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.IsolationLevel; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.filter.FilterBase; import 
org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.regionserver.HStore; import org.apache.hadoop.hbase.regionserver.InternalScanner; import org.apache.hadoop.hbase.regionserver.KeyValueScanner; import org.apache.hadoop.hbase.regionserver.Region; import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost; import org.apache.hadoop.hbase.regionserver.RegionServerServices; import org.apache.hadoop.hbase.regionserver.ScanType; import org.apache.hadoop.hbase.regionserver.Store; import org.apache.hadoop.hbase.regionserver.StoreScanner; import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext; import org.apache.hadoop.hbase.regionserver.compactions.CompactionThroughputController; import org.apache.hadoop.hbase.wal.WAL; import org.apache.hadoop.hbase.testclassification.CoprocessorTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Test; import org.junit.experimental.categories.Category; @Category({CoprocessorTests.class, MediumTests.class}) public class TestRegionObserverScannerOpenHook { private static HBaseTestingUtility UTIL = new HBaseTestingUtility(); static final Path DIR = UTIL.getDataTestDir(); public static class NoDataFilter extends FilterBase { @Override public ReturnCode filterKeyValue(Cell ignored) throws IOException { return ReturnCode.SKIP; } @Override public boolean filterAllRemaining() throws IOException { return true; } @Override public boolean filterRow() throws IOException { return true; } } /** * Do the same logic as the {@link BaseRegionObserver}. Needed since {@link BaseRegionObserver} is * an abstract class. */ public static class EmptyRegionObsever extends BaseRegionObserver { } /** * Don't return any data from a scan by creating a custom {@link StoreScanner}. 
*/ public static class NoDataFromScan extends BaseRegionObserver { @Override public KeyValueScanner preStoreScannerOpen(ObserverContext<RegionCoprocessorEnvironment> c, Store store, Scan scan, NavigableSet<byte[]> targetCols, KeyValueScanner s) throws IOException { scan.setFilter(new NoDataFilter()); return new StoreScanner(store, store.getScanInfo(), scan, targetCols, ((HStore)store).getHRegion().getReadpoint(IsolationLevel.READ_COMMITTED)); } } /** * Don't allow any data in a flush by creating a custom {@link StoreScanner}. */ public static class NoDataFromFlush extends BaseRegionObserver { @Override public InternalScanner preFlushScannerOpen(ObserverContext<RegionCoprocessorEnvironment> c, Store store, KeyValueScanner memstoreScanner, InternalScanner s) throws IOException { Scan scan = new Scan(); scan.setFilter(new NoDataFilter()); return new StoreScanner(store, store.getScanInfo(), scan, Collections.singletonList(memstoreScanner), ScanType.COMPACT_RETAIN_DELETES, store.getSmallestReadPoint(), HConstants.OLDEST_TIMESTAMP); } } /** * Don't allow any data to be written out in the compaction by creating a custom * {@link StoreScanner}. */ public static class NoDataFromCompaction extends BaseRegionObserver { @Override public InternalScanner preCompactScannerOpen(ObserverContext<RegionCoprocessorEnvironment> c, Store store, List<? extends KeyValueScanner> scanners, ScanType scanType, long earliestPutTs, InternalScanner s) throws IOException { Scan scan = new Scan(); scan.setFilter(new NoDataFilter()); return new StoreScanner(store, store.getScanInfo(), scan, scanners, ScanType.COMPACT_RETAIN_DELETES, store.getSmallestReadPoint(), HConstants.OLDEST_TIMESTAMP); } } Region initHRegion(byte[] tableName, String callingMethod, Configuration conf, byte[]... 
families) throws IOException { HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName)); for (byte[] family : families) { htd.addFamily(new HColumnDescriptor(family)); } HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false); Path path = new Path(DIR + callingMethod); WAL wal = HBaseTestingUtility.createWal(conf, path, info); HRegion r = HRegion.createHRegion(info, path, conf, htd, wal); // this following piece is a hack. currently a coprocessorHost // is secretly loaded at OpenRegionHandler. we don't really // start a region server here, so just manually create cphost // and set it to region. RegionCoprocessorHost host = new RegionCoprocessorHost(r, null, conf); r.setCoprocessorHost(host); return r; } @Test public void testRegionObserverScanTimeStacking() throws Exception { byte[] ROW = Bytes.toBytes("testRow"); byte[] TABLE = Bytes.toBytes(getClass().getName()); byte[] A = Bytes.toBytes("A"); byte[][] FAMILIES = new byte[][] { A }; Configuration conf = HBaseConfiguration.create(); Region region = initHRegion(TABLE, getClass().getName(), conf, FAMILIES); RegionCoprocessorHost h = region.getCoprocessorHost(); h.load(NoDataFromScan.class, Coprocessor.PRIORITY_HIGHEST, conf); h.load(EmptyRegionObsever.class, Coprocessor.PRIORITY_USER, conf); Put put = new Put(ROW); put.add(A, A, A); region.put(put); Get get = new Get(ROW); Result r = region.get(get); assertNull( "Got an unexpected number of rows - no data should be returned with the NoDataFromScan coprocessor. 
Found: " + r, r.listCells()); HBaseTestingUtility.closeRegionAndWAL(region); } @Test public void testRegionObserverFlushTimeStacking() throws Exception { byte[] ROW = Bytes.toBytes("testRow"); byte[] TABLE = Bytes.toBytes(getClass().getName()); byte[] A = Bytes.toBytes("A"); byte[][] FAMILIES = new byte[][] { A }; Configuration conf = HBaseConfiguration.create(); Region region = initHRegion(TABLE, getClass().getName(), conf, FAMILIES); RegionCoprocessorHost h = region.getCoprocessorHost(); h.load(NoDataFromFlush.class, Coprocessor.PRIORITY_HIGHEST, conf); h.load(EmptyRegionObsever.class, Coprocessor.PRIORITY_USER, conf); // put a row and flush it to disk Put put = new Put(ROW); put.add(A, A, A); region.put(put); region.flush(true); Get get = new Get(ROW); Result r = region.get(get); assertNull( "Got an unexpected number of rows - no data should be returned with the NoDataFromScan coprocessor. Found: " + r, r.listCells()); HBaseTestingUtility.closeRegionAndWAL(region); } /* * Custom HRegion which uses CountDownLatch to signal the completion of compaction */ public static class CompactionCompletionNotifyingRegion extends HRegion { private static volatile CountDownLatch compactionStateChangeLatch = null; @SuppressWarnings("deprecation") public CompactionCompletionNotifyingRegion(Path tableDir, WAL log, FileSystem fs, Configuration confParam, HRegionInfo info, HTableDescriptor htd, RegionServerServices rsServices) { super(tableDir, log, fs, confParam, info, htd, rsServices); } public CountDownLatch getCompactionStateChangeLatch() { if (compactionStateChangeLatch == null) compactionStateChangeLatch = new CountDownLatch(1); return compactionStateChangeLatch; } @Override public boolean compact(CompactionContext compaction, Store store, CompactionThroughputController throughputController) throws IOException { boolean ret = super.compact(compaction, store, throughputController); if (ret) compactionStateChangeLatch.countDown(); return ret; } } /** * Unfortunately, the 
easiest way to test this is to spin up a mini-cluster since we want to do * the usual compaction mechanism on the region, rather than going through the backdoor to the * region */ @Test public void testRegionObserverCompactionTimeStacking() throws Exception { // setup a mini cluster so we can do a real compaction on a region Configuration conf = UTIL.getConfiguration(); conf.setClass(HConstants.REGION_IMPL, CompactionCompletionNotifyingRegion.class, HRegion.class); conf.setInt("hbase.hstore.compaction.min", 2); UTIL.startMiniCluster(); String tableName = "testRegionObserverCompactionTimeStacking"; byte[] ROW = Bytes.toBytes("testRow"); byte[] A = Bytes.toBytes("A"); HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(tableName)); desc.addFamily(new HColumnDescriptor(A)); desc.addCoprocessor(EmptyRegionObsever.class.getName(), null, Coprocessor.PRIORITY_USER, null); desc.addCoprocessor(NoDataFromCompaction.class.getName(), null, Coprocessor.PRIORITY_HIGHEST, null); Admin admin = UTIL.getHBaseAdmin(); admin.createTable(desc); Table table = UTIL.getConnection().getTable(desc.getTableName()); // put a row and flush it to disk Put put = new Put(ROW); put.add(A, A, A); table.put(put); HRegionServer rs = UTIL.getRSForFirstRegionInTable(desc.getTableName()); List<Region> regions = rs.getOnlineRegions(desc.getTableName()); assertEquals("More than 1 region serving test table with 1 row", 1, regions.size()); Region region = regions.get(0); admin.flushRegion(region.getRegionInfo().getRegionName()); CountDownLatch latch = ((CompactionCompletionNotifyingRegion)region) .getCompactionStateChangeLatch(); // put another row and flush that too put = new Put(Bytes.toBytes("anotherrow")); put.add(A, A, A); table.put(put); admin.flushRegion(region.getRegionInfo().getRegionName()); // run a compaction, which normally would should get rid of the data // wait for the compaction checker to complete latch.await(); // check both rows to ensure that they aren't there Get get = new 
Get(ROW);
    Result r = table.get(get);
    assertNull(
        "Got an unexpected number of rows - no data should be returned with the NoDataFromScan coprocessor. Found: " + r,
        r.listCells());
    // The second row was flushed after the first; the compaction-time coprocessor
    // should have filtered it out of the compacted store file as well.
    get = new Get(Bytes.toBytes("anotherrow"));
    r = table.get(get);
    assertNull(
        "Got an unexpected number of rows - no data should be returned with the NoDataFromScan coprocessor Found: " + r,
        r.listCells());
    table.close();
    UTIL.shutdownMiniCluster();
  }
}
/* * $Header: /home/cvs/jakarta-tomcat-4.0/webapps/admin/WEB-INF/classes/org/apache/webapp/admin/resources/SetUpDataSourceAction.java,v 1.10 2002/08/16 20:17:07 amyroh Exp $ * $Revision: 1.10 $ * $Date: 2002/08/16 20:17:07 $ * * ==================================================================== * * The Apache Software License, Version 1.1 * * Copyright (c) 2002 The Apache Software Foundation. All rights * reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * * 3. The end-user documentation included with the redistribution, if * any, must include the following acknowlegement: * "This product includes software developed by the * Apache Software Foundation (http://www.apache.org/)." * Alternately, this acknowlegement may appear in the software itself, * if and wherever such third-party acknowlegements normally appear. * * 4. The names "The Jakarta Project", "Struts", and "Apache Software * Foundation" must not be used to endorse or promote products derived * from this software without prior written permission. For written * permission, please contact apache@apache.org. * * 5. Products derived from this software may not be called "Apache" * nor may "Apache" appear in their names without prior written * permission of the Apache Group. * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. 
IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. * */ package org.apache.webapp.admin.resources; import java.io.IOException; import java.net.URLDecoder; import java.util.Iterator; import java.util.Locale; import javax.management.Attribute; import javax.management.MBeanServer; import javax.management.MBeanServerFactory; import javax.management.QueryExp; import javax.management.Query; import javax.management.ObjectInstance; import javax.management.ObjectName; import javax.management.JMException; import javax.management.MBeanAttributeInfo; import javax.management.MBeanOperationInfo; import javax.management.MBeanInfo; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import org.apache.struts.action.Action; import org.apache.struts.action.ActionErrors; import org.apache.struts.action.ActionForm; import org.apache.struts.action.ActionForward; import org.apache.struts.action.ActionMapping; import org.apache.struts.util.MessageResources; import org.apache.webapp.admin.ApplicationServlet; /** * <p>Implementation of <strong>Action</strong> that sets up and stashes * a 
<code>DataSourceForm</code> bean in request scope. The form bean will have
 * a null <code>objectName</code> property if this form represents a DataSource
 * being added, or a non-null value for an existing DataSource.</p>
 *
 * @author Manveen Kaur
 * @version $Revision: 1.10 $ $Date: 2002/08/16 20:17:07 $
 * @since 4.1
 */

public final class SetUpDataSourceAction extends Action {


    // ----------------------------------------------------- Instance Variables

    /**
     * The MBeanServer we will be interacting with.
     * Lazily initialized on the first request (see perform()).
     */
    private MBeanServer mserver = null;

    /**
     * The MessageResources we will be retrieving messages from.
     * Lazily initialized on the first request (see perform()).
     */
    private MessageResources resources = null;


    // --------------------------------------------------------- Public Methods

    /**
     * Process the specified HTTP request, and create the corresponding HTTP
     * response (or forward to another web component that will create it).
     * Return an <code>ActionForward</code> instance describing where and how
     * control should be forwarded, or <code>null</code> if the response has
     * already been completed.
     *
     * @param mapping The ActionMapping used to select this instance
     * @param form The optional ActionForm bean for this request (if any)
     * @param request The HTTP request we are processing
     * @param response The HTTP response we are creating
     *
     * @exception IOException if an input/output error occurs
     * @exception ServletException if a servlet exception occurs
     */
    public ActionForward perform(ActionMapping mapping,
                                 ActionForm form,
                                 HttpServletRequest request,
                                 HttpServletResponse response)
        throws IOException, ServletException {

        // Look up the components we will be using as needed
        if (mserver == null) {
            mserver = ((ApplicationServlet) getServlet()).getServer();
        }
        if (resources == null) {
            resources = getServlet().getResources();
        }
        HttpSession session = request.getSession();
        Locale locale = (Locale) session.getAttribute(Action.LOCALE_KEY);

        // Set up the form bean based on the creating or editing state.
        // A null "objectName" parameter means we are creating a new
        // DataSource; a non-null value identifies an existing one to edit.
        String objectName = request.getParameter("objectName");
        String resourcetype = request.getParameter("resourcetype");
        String path = request.getParameter("path");
        String host = request.getParameter("host");
        String service = request.getParameter("service");

        DataSourceForm dataSourceForm = new DataSourceForm();
        dataSourceForm.setResourcetype(resourcetype);
        dataSourceForm.setPath(path);
        dataSourceForm.setHost(host);
        dataSourceForm.setService(service);
        dataSourceForm.setType(ResourceUtils.DATASOURCE_CLASS);

        if (objectName == null) {

            // Creating a new DataSource: seed default connection pool sizes.
            // (setType() was already called unconditionally above; the former
            // duplicate call here has been removed.)
            dataSourceForm.setNodeLabel
                (resources.getMessage(locale,
                                      "resources.actions.datasrc.create"));
            dataSourceForm.setObjectName(null);
            dataSourceForm.setActive("4");
            dataSourceForm.setIdle("2");
            dataSourceForm.setWait("5000");

        } else {

            // Editing an existing DataSource: populate the form from the
            // corresponding MBean's attributes.
            dataSourceForm.setNodeLabel
                (resources.getMessage(locale,
                                      "resources.actions.datasrc.edit"));
            dataSourceForm.setObjectName(objectName);

            // "attribute" tracks which attribute we are fetching so the
            // error message below can name the one that failed.
            String attribute = null;
            try {
                ObjectName oname = new ObjectName(objectName);
                attribute = "name";
                dataSourceForm.setJndiName
                    ((String) mserver.getAttribute(oname, attribute));
                attribute = "url";
                dataSourceForm.setUrl
                    ((String) mserver.getAttribute(oname, attribute));
                attribute = "driverClassName";
                dataSourceForm.setDriverClass
                    ((String) mserver.getAttribute(oname, attribute));
                attribute = "username";
                dataSourceForm.setUsername
                    ((String) mserver.getAttribute(oname, attribute));
                attribute = "password";
                dataSourceForm.setPassword
                    ((String) mserver.getAttribute(oname, attribute));
                // The pool-tuning attributes are optional on the MBean, so
                // each lookup falls back to a display default if it fails.
                try {
                    attribute = "maxActive";
                    dataSourceForm.setActive
                        ((String) mserver.getAttribute(oname, attribute));
                } catch (Exception e) {
                    // if maxActive not defined, display default value
                    dataSourceForm.setActive("4");
                }
                try {
                    attribute = "maxIdle";
                    dataSourceForm.setIdle
                        ((String) mserver.getAttribute(oname, attribute));
                } catch (Exception e) {
                    // if maxIdle not defined, display default value
                    dataSourceForm.setIdle("2");
                }
                try {
                    attribute = "maxWait";
                    dataSourceForm.setWait
                        ((String) mserver.getAttribute(oname, attribute));
                } catch (Exception e) {
                    // if maxWait not defined, display default value
                    dataSourceForm.setWait("5000");
                }
                try {
                    attribute = "validationQuery";
                    dataSourceForm.setQuery
                        ((String) mserver.getAttribute(oname, attribute));
                } catch (Exception e) {
                    // don't display anything
                }
            } catch (Exception e) {
                // Log the failure naming the offending attribute, report an
                // internal server error, and abort this request.
                getServlet().log
                    (resources.getMessage(locale,
                                          "users.error.attribute.get",
                                          attribute), e);
                response.sendError
                    (HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                     resources.getMessage
                         (locale, "users.error.attribute.get", attribute));
                return (null);
            }

        }

        // Stash the form bean and forward to the display page
        saveToken(request);
        request.setAttribute("dataSourceForm", dataSourceForm);
        return (mapping.findForward("DataSource"));

    }

}
/*
 * Copyright 2015 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.drools.compiler.integrationtests;

import org.drools.compiler.Address;
import org.drools.compiler.CommonTestMethodBase;
import org.drools.compiler.Person;
import org.junit.Test;
import org.kie.api.KieBase;
import org.kie.api.runtime.KieSession;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * Integration tests for the null-safe dereferencing operator ({@code !.})
 * in DRL constraints: bindings, comparisons (both operand orders), chained
 * null-safe accesses, {@code memberOf}, inner constraints, and nested
 * accessor groups.
 */
public class NullSafeDereferencingTest extends CommonTestMethodBase {

    @Test
    public void testNullSafeBinding() {
        String str = "import org.drools.compiler.*;\n" +
                "rule R1 when\n" +
                " Person( $streetName : address!.street ) \n" +
                "then\n" +
                "end";

        KieBase kbase = loadKnowledgeBaseFromString(str);
        KieSession ksession = kbase.newKieSession();

        // Mario has no address at all; the null-safe binding must not NPE.
        ksession.insert(new Person("Mario", 38));

        Person mark = new Person("Mark", 37);
        mark.setAddress(new Address("Main Street"));
        ksession.insert(mark);

        Person edson = new Person("Edson", 34);
        edson.setAddress(new Address(null));
        ksession.insert(edson);

        // Mark and Edson both have an address (street may be null), Mario does not.
        assertEquals(2, ksession.fireAllRules());
        ksession.dispose();
    }

    @Test
    public void testNullSafeNullComparison() {
        String str = "import org.drools.compiler.*;\n" +
                "rule R1 when\n" +
                " Person( address!.street == null ) \n" +
                "then\n" +
                "end";

        KieBase kbase = loadKnowledgeBaseFromString(str);
        KieSession ksession = kbase.newKieSession();

        ksession.insert(new Person("Mario", 38));

        Person mark = new Person("Mark", 37);
        mark.setAddress(new Address("Main Street"));
        ksession.insert(mark);

        Person edson = new Person("Edson", 34);
        edson.setAddress(new Address(null));
        ksession.insert(edson);

        // Only Edson has an address whose street is null.
        assertEquals(1, ksession.fireAllRules());
        ksession.dispose();
    }

    @Test
    public void testNullSafeNullComparison2() {
        String str = "import org.drools.compiler.*;\n" +
                "rule R1 when\n" +
                " $street : String()\n" +
                " Person( address!.street == $street ) \n" +
                "then\n" +
                "end";

        KieBase kbase = loadKnowledgeBaseFromString(str);
        KieSession ksession = kbase.newKieSession();

        ksession.insert(new Person("Mario", 38));
        ksession.insert("Main Street");

        Person mark = new Person("Mark", 37);
        mark.setAddress(new Address("Main Street"));
        ksession.insert(mark);

        Person edson = new Person("Edson", 34);
        edson.setAddress(new Address(null));
        ksession.insert(edson);

        // Only Mark's street matches the inserted String fact.
        assertEquals(1, ksession.fireAllRules());
        ksession.dispose();
    }

    @Test
    public void testNullSafeNullComparisonReverse() {
        // DROOLS-82: null-safe access as an argument of a method call
        String str = "import org.drools.compiler.*;\n" +
                "rule R1 when\n" +
                " Person( \"Main Street\".equalsIgnoreCase(address!.street) )\n" +
                "then\n" +
                "end";

        KieBase kbase = loadKnowledgeBaseFromString(str);
        KieSession ksession = kbase.newKieSession();

        ksession.insert(new Person("Mario", 38));

        Person mark = new Person("Mark", 37);
        mark.setAddress(new Address("Main Street"));
        ksession.insert(mark);

        Person edson = new Person("Edson", 34);
        edson.setAddress(new Address(null));
        ksession.insert(edson);

        assertEquals(1, ksession.fireAllRules());
        ksession.dispose();
    }

    @Test
    public void testNullSafeNullComparisonReverseComplex() {
        // DROOLS-82: multiple chained null-safe accesses inside one expression
        String str = "import org.drools.compiler.*;\n" +
                "rule R1 when\n" +
                " Person( \"Main\".equalsIgnoreCase(address!.street!.substring(0, address!.street!.indexOf(' '))) )\n" +
                "then\n" +
                "end";

        KieBase kbase = loadKnowledgeBaseFromString(str);
        KieSession ksession = kbase.newKieSession();

        ksession.insert(new Person("Mario", 38));

        Person mark = new Person("Mark", 37);
        mark.setAddress(new Address("Main Street"));
        ksession.insert(mark);

        Person edson = new Person("Edson", 34);
        edson.setAddress(new Address(null));
        ksession.insert(edson);

        assertEquals(1, ksession.fireAllRules());
        ksession.dispose();
    }

    @Test
    public void testDoubleNullSafe() {
        String str = "import org.drools.compiler.*;\n" +
                "rule R1 when\n" +
                " Person( address!.street!.length > 15 ) \n" +
                "then\n" +
                "end";

        KieBase kbase = loadKnowledgeBaseFromString(str);
        KieSession ksession = kbase.newKieSession();

        ksession.insert(new Person("Mario", 38));

        Person mark = new Person("Mark", 37);
        mark.setAddress(new Address("Main Street"));
        ksession.insert(mark);

        Person edson = new Person("Edson", 34);
        edson.setAddress(new Address(null));
        ksession.insert(edson);

        Person alex = new Person("Alex", 34);
        alex.setAddress(new Address("The Main Very Big Street"));
        ksession.insert(alex);

        // Only Alex's street name is longer than 15 characters.
        assertEquals(1, ksession.fireAllRules());
        ksession.dispose();
    }

    @Test
    public void testMixedNullSafes() {
        String str = "import org.drools.compiler.*;\n" +
                "rule R1 when\n" +
                " $p : Person( " +
                " address!.street!.length > 0 && ( address!.street!.length < 15 || > 20 && < 30 ) " +
                " && address!.zipCode!.length > 0 && address.zipCode == \"12345\" " +
                " ) \n" +
                "then\n" +
                " System.out.println( $p ); \n" +
                "end";

        KieBase kbase = loadKnowledgeBaseFromString(str);
        KieSession ksession = kbase.newKieSession();

        ksession.insert(new Person("Mario", 38));

        Person mark = new Person("Mark", 37);
        mark.setAddress(new Address("Main Street", null, "12345"));
        ksession.insert(mark);

        Person edson = new Person("Edson", 34);
        edson.setAddress(new Address(null));
        ksession.insert(edson);

        Person alex = new Person("Alex", 34);
        alex.setAddress(new Address("The Main Verrry Long Street"));
        ksession.insert(alex);

        Person frank = new Person("Frank", 24);
        frank.setAddress(new Address("Long Street number 21", null, "12345"));
        ksession.insert(frank);

        // Mark (short street + zip) and Frank (21-char street + zip) match.
        assertEquals(2, ksession.fireAllRules());
        ksession.dispose();
    }

    @Test
    public void testNullSafeMemberOf() {
        // DROOLS-50: null-safe dereference on the right side of memberOf
        String str = "declare A\n" +
                " list : java.util.List\n" +
                "end\n" +
                "\n" +
                "rule Init when\n" +
                "then\n" +
                " insert( new A( java.util.Arrays.asList( \"test\" ) ) );" +
                " insert( \"test\" );" +
                "end\n" +
                "rule R when\n" +
                " $a : A()\n" +
                " $s : String( this memberOf $a!.list )\n" +
                "then\n" +
                "end";

        KieBase kbase = loadKnowledgeBaseFromString(str);
        KieSession ksession = kbase.newKieSession();

        // Init fires once, R fires once for the inserted String.
        assertEquals(2, ksession.fireAllRules());
        ksession.dispose();
    }

    @Test
    public void testNullSafeInnerConstraint() {
        String str = "declare Content\n" +
                " complexContent : Content\n" +
                " extension : Content\n" +
                "end\n" +
                "\n" +
                "declare Context\n" +
                " ctx : Content\n" +
                "end\n" +
                "\n" +
                "rule \"Complex Type Attribute\"\n" +
                "when\n" +
                " $con : Content()\n" +
                " Context( ctx == $con || == $con!.complexContent!.extension )\n" +
                "then\n" +
                " System.out.println( $con ); \n" +
                "end\n" +
                "\n" +
                "rule \"Init\"\n" +
                "when\n" +
                "then\n" +
                " Content ext = new Content();\n" +
                " Content complex = new Content( new Content( null, ext ), null );\n" +
                " Content complex2 = new Content( null, null );\n" +
                " Context ctx = new Context( ext );\n" +
                " Context ctx2 = new Context( complex2 );\n" +
                " insert( complex );\n" +
                " insert( complex2 );\n" +
                " insert( ctx );\n" +
                " insert( ctx2 );\n" +
                "end";

        KieBase kbase = loadKnowledgeBaseFromString(str);
        KieSession ksession = kbase.newKieSession();

        assertEquals(3, ksession.fireAllRules());
        ksession.dispose();
    }

    @Test
    public void testNullSafeNestedAccessors() {
        String str = "package org.drools.test; " +
                "import " + Person.class.getName() + "; " +
                "global java.util.List list; " +
                "rule R1 when " +
                " $street : String() " +
                " Person( address!.( street == $street, $zip : zipCode ) ) " +
                "then " +
                " list.add( $zip ); " +
                "end";

        KieBase kbase = loadKnowledgeBaseFromString(str);
        KieSession ksession = kbase.newKieSession();

        // Was a raw List; the zip codes collected are Strings.
        List<String> list = new ArrayList<>();
        ksession.setGlobal("list", list);

        ksession.insert(new Person("Mario", 38));

        Person mark = new Person("Mark", 37);
        mark.setAddress(new Address("Main Street", "", "123456"));
        ksession.insert(mark);

        ksession.insert("Main Street");

        assertEquals(1, ksession.fireAllRules());
        ksession.dispose();

        assertEquals(Arrays.asList("123456"), list);
    }
}
/*
 * Copyright 2002-2020 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.web.servlet.function;

import java.io.IOException;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;

import org.junit.jupiter.api.Test;

import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.Resource;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.lang.Nullable;
import org.springframework.web.servlet.handler.PathPatternsTestUtils;
import org.springframework.web.testfixture.servlet.MockHttpServletRequest;

import static java.util.Collections.emptyList;
import static org.assertj.core.api.Assertions.assertThat;
import static org.springframework.web.servlet.function.RequestPredicates.HEAD;

/**
 * Unit tests for {@link RouterFunctionBuilder}.
 *
 * @author Arjen Poutsma
 */
class RouterFunctionBuilderTests {

	@Test
	void route() {
		RouterFunction<ServerResponse> route = RouterFunctions.route()
				.GET("/foo", request -> ServerResponse.ok().build())
				.POST("/", RequestPredicates.contentType(MediaType.TEXT_PLAIN),
						request -> ServerResponse.noContent().build())
				.route(HEAD("/foo"), request -> ServerResponse.accepted().build())
				.build();

		ServerRequest getFooRequest = initRequest("GET", "/foo");

		Optional<HttpStatus> responseStatus = route.route(getFooRequest)
				.map(handlerFunction -> handle(handlerFunction, getFooRequest))
				.map(ServerResponse::statusCode);
		assertThat(responseStatus).contains(HttpStatus.OK);

		ServerRequest headFooRequest = initRequest("HEAD", "/foo");

		// Fixed: the handler was previously (and incorrectly) invoked with
		// getFooRequest even though routing used headFooRequest.
		responseStatus = route.route(headFooRequest)
				.map(handlerFunction -> handle(handlerFunction, headFooRequest))
				.map(ServerResponse::statusCode);
		assertThat(responseStatus).contains(HttpStatus.ACCEPTED);

		ServerRequest barRequest = initRequest("POST", "/", req -> req.setContentType("text/plain"));

		responseStatus = route.route(barRequest)
				.map(handlerFunction -> handle(handlerFunction, barRequest))
				.map(ServerResponse::statusCode);
		assertThat(responseStatus).contains(HttpStatus.NO_CONTENT);

		// A POST without the required text/plain content type must not match.
		ServerRequest invalidRequest = initRequest("POST", "/");

		responseStatus = route.route(invalidRequest)
				.map(handlerFunction -> handle(handlerFunction, invalidRequest))
				.map(ServerResponse::statusCode);
		assertThat(responseStatus).isEmpty();
	}

	/**
	 * Invokes the handler, converting any checked exception into an
	 * AssertionError (preserving the cause) so tests read cleanly.
	 */
	private static ServerResponse handle(HandlerFunction<ServerResponse> handlerFunction,
			ServerRequest request) {
		try {
			return handlerFunction.handle(request);
		}
		catch (Exception ex) {
			throw new AssertionError(ex.getMessage(), ex);
		}
	}

	@Test
	void resources() {
		Resource resource = new ClassPathResource("/org/springframework/web/servlet/function/");
		assertThat(resource.exists()).isTrue();

		RouterFunction<ServerResponse> route = RouterFunctions.route()
				.resources("/resources/**", resource)
				.build();

		ServerRequest resourceRequest = initRequest("GET", "/resources/response.txt");

		Optional<HttpStatus> responseStatus = route.route(resourceRequest)
				.map(handlerFunction -> handle(handlerFunction, resourceRequest))
				.map(ServerResponse::statusCode);
		assertThat(responseStatus).contains(HttpStatus.OK);

		// POST is not served by the resource route.
		ServerRequest invalidRequest = initRequest("POST", "/resources/foo.txt");

		responseStatus = route.route(invalidRequest)
				.map(handlerFunction -> handle(handlerFunction, invalidRequest))
				.map(ServerResponse::statusCode);
		assertThat(responseStatus).isEmpty();
	}

	@Test
	void nest() {
		RouterFunction<ServerResponse> route = RouterFunctions.route()
				.path("/foo", builder ->
						builder.path("/bar", () -> RouterFunctions.route()
								.GET("/baz", request -> ServerResponse.ok().build())
								.build()))
				.build();

		ServerRequest fooRequest = initRequest("GET", "/foo/bar/baz");

		Optional<HttpStatus> responseStatus = route.route(fooRequest)
				.map(handlerFunction -> handle(handlerFunction, fooRequest))
				.map(ServerResponse::statusCode);
		assertThat(responseStatus).contains(HttpStatus.OK);
	}

	@Test
	void filters() {
		// Counts filter invocations to verify before/filter/after ordering:
		// before (0), filter-pre (1), filter-post (2), after (3).
		AtomicInteger filterCount = new AtomicInteger();

		RouterFunction<ServerResponse> route = RouterFunctions.route()
				.GET("/foo", request -> ServerResponse.ok().build())
				.GET("/bar", request -> {
					throw new IllegalStateException();
				})
				.before(request -> {
					int count = filterCount.getAndIncrement();
					assertThat(count).isEqualTo(0);
					return request;
				})
				.after((request, response) -> {
					int count = filterCount.getAndIncrement();
					assertThat(count).isEqualTo(3);
					return response;
				})
				.filter((request, next) -> {
					int count = filterCount.getAndIncrement();
					assertThat(count).isEqualTo(1);
					ServerResponse responseMono = next.handle(request);
					count = filterCount.getAndIncrement();
					assertThat(count).isEqualTo(2);
					return responseMono;
				})
				.onError(IllegalStateException.class,
						(e, request) -> ServerResponse.status(HttpStatus.INTERNAL_SERVER_ERROR)
								.build())
				.build();

		ServerRequest fooRequest = initRequest("GET", "/foo");

		route.route(fooRequest)
				.map(handlerFunction -> handle(handlerFunction, fooRequest));
		assertThat(filterCount.get()).isEqualTo(4);

		filterCount.set(0);

		// The thrown IllegalStateException is translated by onError.
		ServerRequest barRequest = initRequest("GET", "/bar");

		Optional<HttpStatus> responseStatus = route.route(barRequest)
				.map(handlerFunction -> handle(handlerFunction, barRequest))
				.map(ServerResponse::statusCode);
		assertThat(responseStatus).contains(HttpStatus.INTERNAL_SERVER_ERROR);
	}

	@Test
	void multipleOnErrors() {
		// The first matching onError handler wins: IOException maps to 200.
		RouterFunction<ServerResponse> route = RouterFunctions.route()
				.GET("/error", request -> {
					throw new IOException();
				})
				.onError(IOException.class, (t, r) -> ServerResponse.status(200).build())
				.onError(Exception.class, (t, r) -> ServerResponse.status(201).build())
				.build();

		MockHttpServletRequest servletRequest = new MockHttpServletRequest("GET", "/error");
		ServerRequest serverRequest = new DefaultServerRequest(servletRequest, emptyList());

		Optional<HttpStatus> responseStatus = route.route(serverRequest)
				.map(handlerFunction -> handle(handlerFunction, serverRequest))
				.map(ServerResponse::statusCode);
		assertThat(responseStatus).contains(HttpStatus.OK);
	}

	private ServerRequest initRequest(String httpMethod, String requestUri) {
		return initRequest(httpMethod, requestUri, null);
	}

	private ServerRequest initRequest(
			String httpMethod, String requestUri, @Nullable Consumer<MockHttpServletRequest> consumer) {

		return new DefaultServerRequest(
				PathPatternsTestUtils.initRequest(httpMethod, null, requestUri, true, consumer),
				emptyList());
	}

	@Test
	void attributes() {
		// withAttribute/withAttributes attach metadata to the preceding route.
		RouterFunction<ServerResponse> route = RouterFunctions.route()
				.GET("/atts/1", request -> ServerResponse.ok().build())
				.withAttribute("foo", "bar")
				.withAttribute("baz", "qux")
				.GET("/atts/2", request -> ServerResponse.ok().build())
				.withAttributes(atts -> {
					atts.put("foo", "bar");
					atts.put("baz", "qux");
				})
				.build();

		AttributesTestVisitor visitor = new AttributesTestVisitor();
		route.accept(visitor);
		assertThat(visitor.visitCount()).isEqualTo(2);
	}

}
/*
 * $Id: PlotLayerHints.java,v 1.1 2004/12/27 16:15:22 luca Exp $
 *
 * This software is provided by NOAA for full, free and open release. It is
 * understood by the recipient/user that NOAA assumes no liability for any
 * errors contained in the code. Although this software is released without
 * conditions or restrictions in its use, it is expected that appropriate
 * credit be given to its author and to the National Oceanic and Atmospheric
 * Administration should the software be included by the recipient as an
 * element in other product development.
 */

package gov.noaa.pmel.sgt.plot;

import java.util.Map;
import java.util.Set;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;

/**
 * A String-to-String hint map controlling how plot layers are laid out and
 * keyed, in the style of {@code java.awt.RenderingHints}: well-known hint
 * keys and values are exposed as {@code KEY_*} / {@code VALUE_*} constants,
 * and the class implements the raw {@link Map} interface by delegating to an
 * internal {@link HashMap}. The {@code (String)} casts in the Map methods
 * intentionally enforce that only String keys/values are used (a non-String
 * argument fails with a ClassCastException).
 *
 * @since 2.x
 */
public class PlotLayerHints implements Map, Cloneable {
  // Backing store for all hints; initial capacity 5 since most plots set
  // only a handful of hints.
  HashMap hintmap = new HashMap(5);
  /**
   * PlotKey Type hint key
   */
  public static final String KEY_PLOTKEY_TYPE = "PlotKeyType";
  /**
   * PlotKey Type hint values -- on layer
   */
  public static final String VALUE_PLOTKEY_ON_LAYER = "OnLayer";
  /**
   * PlotKey Type hint values -- on separate layer
   */
  public static final String VALUE_PLOTKEY_ON_SEPERATE_LAYER = "OnSeperateLayer";
  /**
   * PlotKey Type hint values -- in pop-up window
   */
  public static final String VALUE_PLOTKEY_IN_POPUP = "InPopUp";
  /**
   * PlotKey Type hint values -- in JTable
   */
  public static final String VALUE_PLOTKEY_IN_TABLE = "InTable";
  /**
   * PlotKey Type hint values -- no key
   */
  public static final String VALUE_PLOTKEY_NONE = "None";
  /**
   * PlotKey location hint key
   */
  public static final String KEY_PLOTKEY_LOCATION = "PlotKeyLocation";
  /**
   * Layer Placement hint key
   */
  public static final String KEY_LAYER_PLACEMENT = "LayerPlacement";
  /**
   * Layer Placement hint values -- overlay
   */
  public static final String VALUE_LAYER_PLACEMENT_OVERLAY = "Overlay";
  /**
   * AspectRatio hint key
   */
  public static final String KEY_ASPECT_RATIO = "AspectRatio";
  /**
   * AspectRatio hint values -- lock X and Y scales
   */
  public static final String VALUE_ASPECT_RATIO_LOCK = "Lock";
  /**
   * AspectRatio hint values -- don't lock X and Y scales (during resize)
   */
  public static final String VALUE_ASPECT_RATIO_NO_LOCK = "NoLock";
  /**
   * Axis location should actually go through a series of steps
   *
   * X Axis Location
   * 1) bottom of plot region
   * 2) top of plot region
   * 3) below bottom axis (increase border if needed)
   * 4) above top axis (increase border if needed)
   *
   * Y Axis Locations
   * 1) left of plot region
   * 2) right of plot region
   * 3) outside left axis (increase border if needed)
   * 4) outside right axis (increase border if needed)
   *
   *
   * X Axis Location hint key
   */
  public static final String KEY_X_AXIS_LOCATION = "XAxisLocation";
  /**
   * X Axis Location hint values -- default
   * First try bottom, top, below bottom, then above top
   */
  public static final String VALUE_X_AXIS_LOCATION_DEFAULT = "Default";
  /**
   * X Axis Location hint values -- bottom
   */
  public static final String VALUE_X_AXIS_LOCATION_BOTTOM = "Bottom";
  /**
   * X Axis Location hint values -- top
   */
  public static final String VALUE_X_AXIS_LOCATION_TOP = "Top";
  /**
   * Y Axis Location hint key
   */
  // NOTE(review): this value contains a space ("YAxis Location"), unlike
  // KEY_X_AXIS_LOCATION ("XAxisLocation"). Looks like a typo, but changing
  // the string would break any persisted/externally-built hint maps that
  // use the literal value -- confirm before fixing.
  public static final String KEY_Y_AXIS_LOCATION = "YAxis Location";
  /**
   * Y Axis Location hint values -- default
   * First try left, right, outside right, then outside left
   */
  public static final String VALUE_Y_AXIS_LOCATION_DEFAULT = "Default";
  /**
   * Y Axis Location hint values -- left
   */
  public static final String VALUE_Y_AXIS_LOCATION_LEFT = "Left";
  /**
   * Y Axis Location hint values -- right
   */
  public static final String VALUE_Y_AXIS_LOCATION_RIGHT = "Right";
  /**
   * Decision to create a new transform or re-use an existing
   * transform should follow the following steps
   *
   * 1) Use transform from same LayerStack
   * 2) Use transform from same JPlotPane
   * 3) Create a new transform
   *
   * to use existing transform
   * 1) both must be space or both time (test can't be defeated)
   * 2) must have units that are convertible to existing transform
   * 3) must have identical units
   *
   *
   * X Transform hint key
   */
  public static final String KEY_X_TRANSFORM = "XTransform";
  /**
   * X Transform hint values -- default
   * First try LayerStack, JPlotPane, then create new transform
   */
  public static final String VALUE_X_TRANSFORM_DEFAULT = "Default";
  /**
   * X Transform hint values -- new
   */
  public static final String VALUE_X_TRANSFORM_NEW = "New";
  /**
   * X Transform hint values -- use JPlotPane
   */
  public static final String VALUE_X_TRANSFORM_USEPLOTPANE = "UsePlotPane";
  /**
   * Y Transform hint key
   */
  public static final String KEY_Y_TRANSFORM = "YTransform";
  /**
   * Y Transform hint values -- default
   * First try LayerStack, JPlotPane, then create new transform
   */
  public static final String VALUE_Y_TRANSFORM_DEFAULT = "Default";
  /**
   * Y Transform hint values -- new
   */
  public static final String VALUE_Y_TRANSFORM_NEW = "New";
  /**
   * Y Transform hint values -- use JPlotPane
   */
  public static final String VALUE_Y_TRANSFORM_USEPLOTPANE = "UsePlotPane";

  /**
   * Creates a hint map seeded from the given map; a null argument yields an
   * empty hint map.
   */
  public PlotLayerHints(Map init) {
    if(init != null) {
      hintmap.putAll(init);
    }
  }

  /**
   * Convenience constructor for a hint map holding a single key/value pair.
   */
  public PlotLayerHints(String key, String value) {
    hintmap.put(key, value);
  }

  /** Returns the number of hints currently set. */
  public int size() {
    return hintmap.size();
  }

  /** Returns true if no hints are set. */
  public boolean isEmpty() {
    return hintmap.isEmpty();
  }

  // The cast enforces String keys: a non-String key throws
  // ClassCastException rather than silently returning false.
  public boolean containsKey(Object key) {
    return hintmap.containsKey((String)key);
  }

  // Likewise, values must be Strings.
  public boolean containsValue(Object value) {
    return hintmap.containsValue((String)value);
  }

  /** Returns the hint value for the given String key, or null if unset. */
  public Object get(Object key) {
    return hintmap.get((String)key);
  }

  /**
   * Sets a hint; both key and value must be Strings. Returns the previous
   * value for the key, or null.
   */
  public Object put(Object key, Object value) {
    return hintmap.put((String) key, (String)value);
  }

  /** Merges all hints from another PlotLayerHints into this one. */
  public void add(PlotLayerHints hints) {
    hintmap.putAll(hints.hintmap);
  }

  /** Removes all hints. */
  public void clear() {
    hintmap.clear();
  }

  /** Removes the hint for the given String key; returns the old value. */
  public Object remove(Object key) {
    return hintmap.remove((String)key);
  }

  /**
   * Copies all entries from the given map. A PlotLayerHints source is
   * copied directly; any other Map is funneled entry-by-entry through
   * put() so the String-only type check is applied.
   */
  public void putAll(Map m) {
    if(m instanceof PlotLayerHints) {
      hintmap.putAll(((PlotLayerHints)m).hintmap);
    } else {
      // Funnel each key/value pair though our method
      Iterator iter = m.entrySet().iterator();
      while(iter.hasNext()) {
        Map.Entry entry = (Map.Entry) iter.next();
        put(entry.getKey(), entry.getValue());
      }
    }
  }

  /** Returns the (live) set of hint keys. */
  public Set keySet() {
    return hintmap.keySet();
  }

  /** Returns the (live) collection of hint values. */
  public Collection values() {
    return hintmap.values();
  }

  // Wrapped in an unmodifiable view so callers cannot mutate entries and
  // bypass the String-only enforcement in put().
  public Set entrySet() {
    return Collections.unmodifiableMap(hintmap).entrySet();
  }

  /**
   * Equal to another PlotLayerHints with the same hints, or to any Map
   * with equal contents.
   */
  public boolean equals(Object o) {
    if(o instanceof PlotLayerHints) {
      return hintmap.equals(((PlotLayerHints)o).hintmap);
    } else if(o instanceof Map) {
      return hintmap.equals(o);
    }
    return false;
  }

  /** Hash code is that of the underlying map, consistent with equals(). */
  public int hashCode() {
    return hintmap.hashCode();
  }

  /**
   * Returns a copy whose hint map is independent of this one (the backing
   * HashMap is cloned; keys/values are Strings and thus immutable).
   */
  public Object clone() {
    PlotLayerHints plh;
    try {
      plh = (PlotLayerHints) super.clone();
      if(hintmap != null) {
        plh.hintmap = (HashMap) hintmap.clone();
      }
    } catch (CloneNotSupportedException e) {
      // this shouldn't happen since we are Cloneable
      throw new InternalError();
    }
    return plh;
  }

  /**
   * Returns the hint map's string form. The null branch is defensive only:
   * hintmap is initialized at declaration and never assigned null here.
   */
  public String toString() {
    if(hintmap == null) {
      return getClass().getName() + "@" +
        Integer.toHexString(hashCode()) +
        " (0 hints)";
    }
    return hintmap.toString();
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.drill.exec.physical.impl.union; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import org.apache.calcite.util.Pair; import org.apache.drill.common.exceptions.DrillRuntimeException; import org.apache.drill.common.expression.ErrorCollector; import org.apache.drill.common.expression.ErrorCollectorImpl; import org.apache.drill.common.expression.LogicalExpression; import org.apache.drill.common.expression.SchemaPath; import org.apache.drill.common.types.TypeProtos; import org.apache.drill.common.types.Types; import org.apache.drill.exec.exception.ClassTransformationException; import org.apache.drill.exec.exception.OutOfMemoryException; import org.apache.drill.exec.exception.SchemaChangeException; import org.apache.drill.exec.expr.ClassGenerator; import org.apache.drill.exec.expr.CodeGenerator; import org.apache.drill.exec.expr.ExpressionTreeMaterializer; import org.apache.drill.exec.expr.ValueVectorWriteExpression; import org.apache.drill.exec.ops.FragmentContext; import org.apache.drill.exec.physical.config.UnionAll; import org.apache.drill.exec.record.AbstractBinaryRecordBatch; import org.apache.drill.exec.record.BatchSchema; import 
org.apache.drill.exec.record.MaterializedField; import org.apache.drill.exec.record.RecordBatch; import org.apache.drill.exec.record.TransferPair; import org.apache.drill.exec.record.TypedFieldId; import org.apache.drill.exec.record.VectorAccessibleUtilities; import org.apache.drill.exec.record.VectorWrapper; import org.apache.drill.exec.resolver.TypeCastRules; import org.apache.drill.exec.util.VectorUtil; import org.apache.drill.exec.vector.FixedWidthVector; import org.apache.drill.exec.vector.SchemaChangeCallBack; import org.apache.drill.exec.vector.ValueVector; import java.io.IOException; import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; import java.util.Stack; public class UnionAllRecordBatch extends AbstractBinaryRecordBatch<UnionAll> { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(UnionAllRecordBatch.class); private SchemaChangeCallBack callBack = new SchemaChangeCallBack(); private UnionAller unionall; private final List<TransferPair> transfers = Lists.newArrayList(); private List<ValueVector> allocationVectors = Lists.newArrayList(); private int recordCount = 0; private UnionInputIterator unionInputIterator; public UnionAllRecordBatch(UnionAll config, List<RecordBatch> children, FragmentContext context) throws OutOfMemoryException { super(config, context, true, children.get(0), children.get(1)); } @Override protected void killIncoming(boolean sendUpstream) { left.kill(sendUpstream); right.kill(sendUpstream); } protected void buildSchema() throws SchemaChangeException { if (! 
prefetchFirstBatchFromBothSides()) { return; } unionInputIterator = new UnionInputIterator(leftUpstream, left, rightUpstream, right); if (leftUpstream == IterOutcome.NONE && rightUpstream == IterOutcome.OK_NEW_SCHEMA) { inferOutputFieldsOneSide(right.getSchema()); } else if (rightUpstream == IterOutcome.NONE && leftUpstream == IterOutcome.OK_NEW_SCHEMA) { inferOutputFieldsOneSide((left.getSchema())); } else if (leftUpstream == IterOutcome.OK_NEW_SCHEMA && rightUpstream == IterOutcome.OK_NEW_SCHEMA) { inferOutputFieldsBothSide(left.getSchema(), right.getSchema()); } container.buildSchema(BatchSchema.SelectionVectorMode.NONE); VectorAccessibleUtilities.allocateVectors(container, 0); VectorAccessibleUtilities.setValueCount(container,0); } @Override public IterOutcome innerNext() { try { while (true) { if (!unionInputIterator.hasNext()) { return IterOutcome.NONE; } Pair<IterOutcome, RecordBatch> nextBatch = unionInputIterator.next(); IterOutcome upstream = nextBatch.left; RecordBatch incoming = nextBatch.right; switch (upstream) { case NONE: case OUT_OF_MEMORY: case STOP: return upstream; case OK_NEW_SCHEMA: return doWork(nextBatch.right, true); case OK: // skip batches with same schema as the previous one yet having 0 row. 
if (incoming.getRecordCount() == 0) { VectorAccessibleUtilities.clear(incoming); continue; } return doWork(nextBatch.right, false); default: throw new IllegalStateException(String.format("Unknown state %s.", upstream)); } } } catch (ClassTransformationException | IOException | SchemaChangeException ex) { context.fail(ex); killIncoming(false); return IterOutcome.STOP; } } @Override public int getRecordCount() { return recordCount; } @SuppressWarnings("resource") private IterOutcome doWork(RecordBatch inputBatch, boolean newSchema) throws ClassTransformationException, IOException, SchemaChangeException { Preconditions.checkArgument(inputBatch.getSchema().getFieldCount() == container.getSchema().getFieldCount(), "Input batch and output batch have different field counthas!"); if (newSchema) { createUnionAller(inputBatch); } container.zeroVectors(); VectorUtil.allocateVectors(allocationVectors, inputBatch.getRecordCount()); recordCount = unionall.unionRecords(0, inputBatch.getRecordCount(), 0); VectorUtil.setValueCount(allocationVectors, recordCount); if (callBack.getSchemaChangedAndReset()) { return IterOutcome.OK_NEW_SCHEMA; } else { return IterOutcome.OK; } } private void createUnionAller(RecordBatch inputBatch) throws ClassTransformationException, IOException, SchemaChangeException { transfers.clear(); allocationVectors.clear(); final ClassGenerator<UnionAller> cg = CodeGenerator.getRoot(UnionAller.TEMPLATE_DEFINITION, context.getOptions()); cg.getCodeGenerator().plainJavaCapable(true); // Uncomment out this line to debug the generated code. 
// cg.getCodeGenerator().saveCodeForDebugging(true); int index = 0; for(VectorWrapper<?> vw : inputBatch) { ValueVector vvIn = vw.getValueVector(); ValueVector vvOut = container.getValueVector(index).getValueVector(); final ErrorCollector collector = new ErrorCollectorImpl(); // According to input data names, Minortypes, Datamodes, choose to // transfer directly, // rename columns or // cast data types (Minortype or DataMode) if (container.getSchema().getColumn(index).hasSameTypeAndMode(vvIn.getField()) && vvIn.getField().getType().getMinorType() != TypeProtos.MinorType.MAP // Per DRILL-5521, existing bug for map transfer ) { // Transfer column TransferPair tp = vvIn.makeTransferPair(vvOut); transfers.add(tp); } else if (vvIn.getField().getType().getMinorType() == TypeProtos.MinorType.NULL) { continue; } else { // Copy data in order to rename the column SchemaPath inputPath = SchemaPath.getSimplePath(vvIn.getField().getName()); MaterializedField inField = vvIn.getField(); MaterializedField outputField = vvOut.getField(); LogicalExpression expr = ExpressionTreeMaterializer.materialize(inputPath, inputBatch, collector, context.getFunctionRegistry()); if (collector.hasErrors()) { throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema. Errors:\n %s.", collector.toErrorString())); } // If the inputs' DataMode is required and the outputs' DataMode is not required // cast to the one with the least restriction if(inField.getType().getMode() == TypeProtos.DataMode.REQUIRED && outputField.getType().getMode() != TypeProtos.DataMode.REQUIRED) { expr = ExpressionTreeMaterializer.convertToNullableType(expr, inField.getType().getMinorType(), context.getFunctionRegistry(), collector); if (collector.hasErrors()) { throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema. 
Errors:\n %s.", collector.toErrorString())); } } // If two inputs' MinorTypes are different, // Insert a cast before the Union operation if(inField.getType().getMinorType() != outputField.getType().getMinorType()) { expr = ExpressionTreeMaterializer.addCastExpression(expr, outputField.getType(), context.getFunctionRegistry(), collector); if (collector.hasErrors()) { throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema. Errors:\n %s.", collector.toErrorString())); } } TypedFieldId fid = container.getValueVectorId(SchemaPath.getSimplePath(outputField.getName())); boolean useSetSafe = !(vvOut instanceof FixedWidthVector); ValueVectorWriteExpression write = new ValueVectorWriteExpression(fid, expr, useSetSafe); cg.addExpr(write); allocationVectors.add(vvOut); } ++index; } unionall = context.getImplementationClass(cg.getCodeGenerator()); unionall.setup(context, inputBatch, this, transfers); } // The output table's column names always follow the left table, // where the output type is chosen based on DRILL's implicit casting rules private void inferOutputFieldsBothSide(final BatchSchema leftSchema, final BatchSchema rightSchema) { // outputFields = Lists.newArrayList(); final Iterator<MaterializedField> leftIter = leftSchema.iterator(); final Iterator<MaterializedField> rightIter = rightSchema.iterator(); int index = 1; while (leftIter.hasNext() && rightIter.hasNext()) { MaterializedField leftField = leftIter.next(); MaterializedField rightField = rightIter.next(); if (leftField.hasSameTypeAndMode(rightField)) { TypeProtos.MajorType.Builder builder = TypeProtos.MajorType.newBuilder().setMinorType(leftField.getType().getMinorType()).setMode(leftField.getDataMode()); builder = Types.calculateTypePrecisionAndScale(leftField.getType(), rightField.getType(), builder); container.addOrGet(MaterializedField.create(leftField.getName(), builder.build()), callBack); } else if (Types.isUntypedNull(rightField.getType())) { 
container.addOrGet(leftField, callBack); } else if (Types.isUntypedNull(leftField.getType())) { container.addOrGet(MaterializedField.create(leftField.getName(), rightField.getType()), callBack); } else { // If the output type is not the same, // cast the column of one of the table to a data type which is the Least Restrictive TypeProtos.MajorType.Builder builder = TypeProtos.MajorType.newBuilder(); if (leftField.getType().getMinorType() == rightField.getType().getMinorType()) { builder.setMinorType(leftField.getType().getMinorType()); builder = Types.calculateTypePrecisionAndScale(leftField.getType(), rightField.getType(), builder); } else { List<TypeProtos.MinorType> types = Lists.newLinkedList(); types.add(leftField.getType().getMinorType()); types.add(rightField.getType().getMinorType()); TypeProtos.MinorType outputMinorType = TypeCastRules.getLeastRestrictiveType(types); if (outputMinorType == null) { throw new DrillRuntimeException("Type mismatch between " + leftField.getType().getMinorType().toString() + " on the left side and " + rightField.getType().getMinorType().toString() + " on the right side in column " + index + " of UNION ALL"); } builder.setMinorType(outputMinorType); } // The output data mode should be as flexible as the more flexible one from the two input tables List<TypeProtos.DataMode> dataModes = Lists.newLinkedList(); dataModes.add(leftField.getType().getMode()); dataModes.add(rightField.getType().getMode()); builder.setMode(TypeCastRules.getLeastRestrictiveDataMode(dataModes)); container.addOrGet(MaterializedField.create(leftField.getName(), builder.build()), callBack); } ++index; } assert !leftIter.hasNext() && ! 
rightIter.hasNext() : "Mis-match of column count should have been detected when validating sqlNode at planning"; } private void inferOutputFieldsOneSide(final BatchSchema schema) { for (MaterializedField field : schema) { container.addOrGet(field, callBack); } } private static boolean hasSameTypeAndMode(MaterializedField leftField, MaterializedField rightField) { return (leftField.getType().getMinorType() == rightField.getType().getMinorType()) && (leftField.getType().getMode() == rightField.getType().getMode()); } private class BatchStatusWrappper { boolean prefetched; final RecordBatch batch; final int inputIndex; final IterOutcome outcome; BatchStatusWrappper(boolean prefetched, IterOutcome outcome, RecordBatch batch, int inputIndex) { this.prefetched = prefetched; this.outcome = outcome; this.batch = batch; this.inputIndex = inputIndex; } } private class UnionInputIterator implements Iterator<Pair<IterOutcome, RecordBatch>> { private Stack<BatchStatusWrappper> batchStatusStack = new Stack<>(); UnionInputIterator(IterOutcome leftOutCome, RecordBatch left, IterOutcome rightOutCome, RecordBatch right) { if (rightOutCome == IterOutcome.OK_NEW_SCHEMA) { batchStatusStack.push(new BatchStatusWrappper(true, IterOutcome.OK_NEW_SCHEMA, right, 1)); } if (leftOutCome == IterOutcome.OK_NEW_SCHEMA) { batchStatusStack.push(new BatchStatusWrappper(true, IterOutcome.OK_NEW_SCHEMA, left, 0)); } } @Override public boolean hasNext() { return ! 
batchStatusStack.isEmpty(); } @Override public Pair<IterOutcome, RecordBatch> next() { while (!batchStatusStack.isEmpty()) { BatchStatusWrappper topStatus = batchStatusStack.peek(); if (topStatus.prefetched) { topStatus.prefetched = false; return Pair.of(topStatus.outcome, topStatus.batch); } else { IterOutcome outcome = UnionAllRecordBatch.this.next(topStatus.inputIndex, topStatus.batch); switch (outcome) { case OK: case OK_NEW_SCHEMA: return Pair.of(outcome, topStatus.batch); case OUT_OF_MEMORY: case STOP: batchStatusStack.pop(); return Pair.of(outcome, topStatus.batch); case NONE: batchStatusStack.pop(); if (batchStatusStack.isEmpty()) { return Pair.of(IterOutcome.NONE, null); } break; default: throw new IllegalStateException(String.format("Unexpected state %s", outcome)); } } } throw new NoSuchElementException(); } @Override public void remove() { throw new UnsupportedOperationException(); } } }
package com.github.davidmoten.rtree; import static com.github.davidmoten.rtree.RTreeTest.e; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; import org.junit.Test; import org.mockito.Mockito; import rx.Subscriber; import rx.Subscription; import rx.functions.Func1; import com.github.davidmoten.rtree.geometry.Geometry; import com.github.davidmoten.rtree.geometry.Rectangle; import com.github.davidmoten.util.ImmutableStack; import com.github.davidmoten.util.TestingUtil; public class BackpressureTest { @Test public void testConstructorIsPrivate() { TestingUtil.callConstructorAndCheckIsPrivate(Backpressure.class); } @SuppressWarnings("unchecked") @Test public void testBackpressureSearch() { Subscriber<Object> sub = Mockito.mock(Subscriber.class); ImmutableStack<NodePosition<Object, Geometry>> stack = ImmutableStack.empty(); Func1<Geometry, Boolean> condition = Mockito.mock(Func1.class); Backpressure.search(condition, sub, stack, 1); Mockito.verify(sub, Mockito.never()).onNext(Mockito.any()); } @Test public void testBackpressureSearchNodeWithConditionThatAlwaysReturnsFalse() { RTree<Object, Rectangle> tree = RTree.maxChildren(3).<Object, Rectangle> create().add(e(1)) .add(e(3)).add(e(5)).add(e(7)); Set<Entry<Object, Rectangle>> found = new HashSet<Entry<Object, Rectangle>>(); tree.search(e(1).geometry()).subscribe(backpressureSubscriber(found)); assertEquals(1, found.size()); } @SuppressWarnings("unchecked") @Test public void testRequestZero() { Subscriber<Object> sub = new Subscriber<Object>() { @Override public void onCompleted() { } @Override public void onError(Throwable e) { } @Override public void onNext(Object t) { } }; sub.add(new Subscription() { volatile boolean subscribed = true; @Override public void unsubscribe() { subscribed = false; } @Override public boolean 
isUnsubscribed() { return !subscribed; } }); Node<Object, Geometry> node = Mockito.mock(Node.class); NodePosition<Object, Geometry> np = new NodePosition<Object, Geometry>(node, 1); ImmutableStack<NodePosition<Object, Geometry>> stack = ImmutableStack .<NodePosition<Object, Geometry>> empty().push(np); Func1<Geometry, Boolean> condition = Mockito.mock(Func1.class); ImmutableStack<NodePosition<Object, Geometry>> stack2 = Backpressure.search(condition, sub, stack, 0); assertTrue(stack2 == stack); } @SuppressWarnings("unchecked") @Test public void testRequestZeroWhenUnsubscribed() { Subscriber<Object> sub = new Subscriber<Object>() { @Override public void onCompleted() { } @Override public void onError(Throwable e) { } @Override public void onNext(Object t) { } }; sub.add(new Subscription() { volatile boolean subscribed = true; @Override public void unsubscribe() { subscribed = false; } @Override public boolean isUnsubscribed() { return !subscribed; } }); sub.unsubscribe(); Node<Object, Geometry> node = Mockito.mock(Node.class); NodePosition<Object, Geometry> np = new NodePosition<Object, Geometry>(node, 1); ImmutableStack<NodePosition<Object, Geometry>> stack = ImmutableStack .<NodePosition<Object, Geometry>> empty().push(np); Func1<Geometry, Boolean> condition = Mockito.mock(Func1.class); ImmutableStack<NodePosition<Object, Geometry>> stack2 = Backpressure.search(condition, sub, stack, 1); assertTrue(stack2.isEmpty()); } @Test public void testBackpressureIterateWhenNodeHasMaxChildrenAndIsRoot() { Entry<Object, Rectangle> e1 = RTreeTest.e(1); @SuppressWarnings("unchecked") List<Entry<Object, Rectangle>> list = Arrays.asList(e1, e1, e1, e1); RTree<Object, Rectangle> tree = RTree.star().maxChildren(4).<Object, Rectangle> create() .add(list); HashSet<Entry<Object, Rectangle>> expected = new HashSet<Entry<Object, Rectangle>>(list); final HashSet<Entry<Object, Rectangle>> found = new HashSet<Entry<Object, Rectangle>>(); 
tree.entries().subscribe(backpressureSubscriber(found)); assertEquals(expected, found); } @Test public void testBackpressureRequestZero() { Entry<Object, Rectangle> e1 = RTreeTest.e(1); @SuppressWarnings("unchecked") List<Entry<Object, Rectangle>> list = Arrays.asList(e1, e1, e1, e1); RTree<Object, Rectangle> tree = RTree.star().maxChildren(4).<Object, Rectangle> create() .add(list); HashSet<Entry<Object, Rectangle>> expected = new HashSet<Entry<Object, Rectangle>>(list); final HashSet<Entry<Object, Rectangle>> found = new HashSet<Entry<Object, Rectangle>>(); tree.entries().subscribe(new Subscriber<Entry<Object, Rectangle>>() { @Override public void onStart() { request(1); } @Override public void onCompleted() { } @Override public void onError(Throwable e) { } @Override public void onNext(Entry<Object, Rectangle> t) { found.add(t); request(0); } }); assertEquals(expected, found); } @Test public void testBackpressureIterateWhenNodeHasMaxChildrenAndIsNotRoot() { Entry<Object, Rectangle> e1 = RTreeTest.e(1); List<Entry<Object, Rectangle>> list = new ArrayList<Entry<Object, Rectangle>>(); for (int i = 1; i <= 17; i++) list.add(e1); RTree<Object, Rectangle> tree = RTree.star().maxChildren(4).<Object, Rectangle> create() .add(list); HashSet<Entry<Object, Rectangle>> expected = new HashSet<Entry<Object, Rectangle>>(list); final HashSet<Entry<Object, Rectangle>> found = new HashSet<Entry<Object, Rectangle>>(); tree.entries().subscribe(backpressureSubscriber(found)); assertEquals(expected, found); } @Test public void testBackpressureIterateWhenConditionFailsAgainstNonLeafNode() { Entry<Object, Rectangle> e1 = e(1); List<Entry<Object, Rectangle>> list = new ArrayList<Entry<Object, Rectangle>>(); for (int i = 1; i <= 17; i++) list.add(e1); list.add(e(2)); RTree<Object, Rectangle> tree = RTree.star().maxChildren(4).<Object, Rectangle> create() .add(list); HashSet<Entry<Object, Rectangle>> expected = new HashSet<Entry<Object, Rectangle>>(list); final HashSet<Entry<Object, 
Rectangle>> found = new HashSet<Entry<Object, Rectangle>>(); tree.entries().subscribe(backpressureSubscriber(found)); assertEquals(expected, found); } @Test public void testBackpressureIterateWhenConditionFailsAgainstLeafNode() { Entry<Object, Rectangle> e3 = e(3); RTree<Object, Rectangle> tree = RTree.star().maxChildren(4).<Object, Rectangle> create() .add(e(1)).add(e3); Set<Entry<Object, Rectangle>> expected = Collections.singleton(e3); final Set<Entry<Object, Rectangle>> found = new HashSet<Entry<Object, Rectangle>>(); tree.search(e3.geometry()).subscribe(backpressureSubscriber(found)); assertEquals(expected, found); } private static Subscriber<Entry<Object, Rectangle>> backpressureSubscriber( final Set<Entry<Object, Rectangle>> found) { return new Subscriber<Entry<Object, Rectangle>>() { @Override public void onStart() { request(1); } @Override public void onCompleted() { } @Override public void onError(Throwable e) { } @Override public void onNext(Entry<Object, Rectangle> t) { found.add(t); request(1); } }; } }
package lighthouse.files;

import com.google.common.base.*;
import com.google.common.collect.*;
import javafx.beans.*;
import javafx.collections.*;
import lighthouse.*;
import lighthouse.protocol.*;
import lighthouse.threading.*;
import net.jcip.annotations.*;
import org.bitcoinj.core.*;
import org.bitcoinj.protocols.payments.*;
import org.slf4j.*;

import javax.annotation.*;
import java.io.*;
import java.nio.file.*;
import java.util.*;

import static com.google.common.base.Preconditions.*;
import static lighthouse.protocol.LHUtils.*;

/**
 * Provides an observable list of projects and pledges that this app is managing, using the AppDirectory class.
 * Projects can be indirect: the app dir can contain text files containing the real path of the project. This is useful
 * to let the user gather pledges using the file system e.g. a shared folder on Google Drive/Dropbox/etc.
 *
 * Note that pledges can be in two places. One is the users wallet. That's the pledges they have made. The other
 * is on disk. That's the pledges other people have made, when in decentralised mode.
 *
 * The logic implemented here is a bit complicated:
 *
 * - Projects and pledges are automatically imported when they are dropped into the app directory. The app will copy
 *   project files to the app directory for the user so if the original file is deleted it doesn't disappear.
 * - Pledges are automatically loaded from disk from the directory that a project was originally imported from in
 *   client mode.
 * - The user is allowed to delete / rename any directories other than the app directory at any time and we have to
 *   handle that.
 *
 * The projects.txt file stores a list of paths. If a path is a file that ends with .lighthouse-project, it is loaded.
 * Paths can be just filenames, in that case they are expected to be relative to the app directory. If it is a path
 * to a directory then all pledges there will be loaded and the directory will be watched. The app directory is
 * always watched.
 */
public class DiskManager {
    private static final Logger log = LoggerFactory.getLogger(DiskManager.class);

    public static final String PROJECT_FILE_EXTENSION = ".lighthouse-project";
    public static final String PLEDGE_FILE_EXTENSION = ".lighthouse-pledge";
    public static final String PROJECT_STATUS_FILENAME = "project-status.txt";
    // For historical reasons this is called projects.txt although it contains paths of directories to watch for
    // pledges. Older files may also contain absolute file paths to project files.
    public static final String PLEDGE_PATHS_FILENAME = "projects.txt";

    // All private methods and private variables are used from this executor.
    private final AffinityExecutor.ServiceAffinityExecutor executor;
    // Observable list of loaded projects, kept in import order (see loadAll's sort).
    private final ObservableList<Project> projects;
    // Maps the on-disk file path to the project/pledge loaded from it, for handling delete/modify events.
    private final Map<Path, Project> projectsByPath;
    private final Map<Path, LHProtos.Pledge> pledgesByPath;
    // These are locked so other threads can reach in and read them without having to do cross-thread RPCs.
    @GuardedBy("this") private final ObservableMap<String, Project> projectsById;
    @GuardedBy("this") private final Map<Project, ObservableSet<LHProtos.Pledge>> pledges;
    // Directories (and, historically, project file paths) listed in projects.txt to watch for pledges.
    private final List<Path> pledgePaths;
    private final NetworkParameters params;
    private DirectoryWatcher directoryWatcher;
    // Ordered map: the ordering is needed to keep the UI showing projects in import order instead of whatever order
    // is returned by the disk file system. Keys are hex hashes (project.getId()).
    private final ObservableMap<String, LighthouseBackend.ProjectStateInfo> projectStates;
    private final LinkedHashMap<String, LighthouseBackend.ProjectStateInfo> projectStatesMap;

    /**
     * Creates a disk manager that reloads data from disk when a new project path is added or the directories change.
     * This object should be owned by the thread backing owningExecutor: changes will all be queued onto this
     * thread.
     */
    public DiskManager(NetworkParameters params, AffinityExecutor.ServiceAffinityExecutor owningExecutor) {
        // Initialize projects by selecting files matching the right name pattern and then trying to load, ignoring
        // failures (nulls).
        this.params = params;
        executor = owningExecutor;
        projects = FXCollections.observableArrayList();
        projectsById = FXCollections.observableHashMap();
        projectsByPath = new HashMap<>();
        projectStatesMap = new LinkedHashMap<>();
        projectStates = FXCollections.observableMap(projectStatesMap);
        pledgesByPath = new HashMap<>();
        pledges = new HashMap<>();
        pledgePaths = new ArrayList<>();

        // Use execute() rather than executeASAP() so that if we're being invoked from the owning thread, the caller
        // has a chance to set up observers and the like before the thread event loops and starts loading stuff. That
        // way the observers will run for the newly loaded data.
        owningExecutor.execute(() -> uncheck(this::init));
    }

    // Runs once on the owning thread: read projects.txt, load everything from disk, then start watching directories.
    private void init() throws IOException {
        executor.checkOnThread();
        if (Files.exists(getPledgePathsFile()))
            readPledgePaths();
        // Reload them on the UI thread if any files show up behind our back, i.e. from Drive/Dropbox/being put there
        // manually by the user.
        loadAll();
        directoryWatcher = createDirWatcher();
    }

    /** Stops the background directory watcher. */
    public void shutdown() {
        directoryWatcher.stop();
    }

    // Builds a watcher over the app directory plus every directory named in projects.txt.
    private DirectoryWatcher createDirWatcher() {
        Set<Path> directories = new HashSet<>();
        // We always watch the app directory.
        directories.add(AppDirectory.dir());
        // We also watch the list of origin directories where serverless projects were imported from.
        for (Path path : pledgePaths) {
            if (!path.isAbsolute()) continue;  // Old projects.txt that has names of imported projects in it.
            if (Files.isDirectory(path))
                directories.add(path);
            else if (path.toString().endsWith(PROJECT_FILE_EXTENSION))
                // For backwards compat: watch origin dirs recorded as paths to project files.
                directories.add(path.getParent());
        }
        return new DirectoryWatcher(ImmutableSet.copyOf(directories), this::onDirectoryChanged, executor);
    }

    // Reacts to file system events: (re)loads or removes projects and pledges as files appear/change/vanish.
    private void onDirectoryChanged(Path path, WatchEvent.Kind<Path> kind) {
        executor.checkOnThread();
        boolean isProject = path.toString().endsWith(PROJECT_FILE_EXTENSION);
        boolean isPledge = path.toString().endsWith(PLEDGE_FILE_EXTENSION);
        boolean isCreate = kind == StandardWatchEventKinds.ENTRY_CREATE;
        boolean isDelete = kind == StandardWatchEventKinds.ENTRY_DELETE;
        boolean isModify = kind == StandardWatchEventKinds.ENTRY_MODIFY;
        if (isProject || isPledge)
            log.info("{} -> {}", path, kind);
        // Project files are only auto loaded from the app directory. If the user downloads a serverless project to their
        // Downloads folder, imports it, then downloads a second project, we don't want it to automatically appear.
        //
        // TODO: This is all a load of crap. Windows especially has weird habits when it comes to reporting file changes.
        // We should just scrap file watching for projects at least, and do things the old fashioned way.
        if (isProject && path.getParent().equals(AppDirectory.dir())) {
            if (isDelete || isModify) {
                log.info("Project file deleted/modified: {}", path);
                Project project = projectsByPath.get(path);
                if (project != null) {
                    if (kind == StandardWatchEventKinds.ENTRY_MODIFY) {
                        log.info("Project file modified, reloading ...");
                        tryLoadProject(path, projects.indexOf(project));
                    } else {
                        log.info("Project file deleted, removing ...");
                        projects.remove(project);
                        projectsByPath.remove(path);
                        synchronized (this) {
                            projectsById.remove(project.getID());
                        }
                    }
                } else if (isModify) {
                    log.info("Project file modified, but we don't know about it: last load might have failed. Retrying");
                    this.tryLoadProject(path);
                }
            } else if (isCreate) {
                log.info("New project found: {}", path);
                this.tryLoadProject(path);
            }
        } else if (isPledge) {
            if (isDelete || isModify) {
                LHProtos.Pledge pledge = pledgesByPath.get(path);
                if (pledge != null) {
                    log.info("Pledge file deleted/modified: {}", path);
                    synchronized (this) {
                        Project project = projectsById.get(pledge.getPledgeDetails().getProjectId());
                        ObservableSet<LHProtos.Pledge> projectPledges = this.getPledgesFor(project);
                        checkNotNull(projectPledges);   // Project should be in both sets or neither.
                        projectPledges.remove(pledge);
                    }
                    pledgesByPath.remove(path);
                } else {
                    log.error("Got delete event for a pledge we had not loaded, maybe missing project? {}", path);
                }
            }
            if (isCreate || isModify) {
                this.tryLoadPledge(path);
            }
        }
    }

    // Loads every *.lighthouse-pledge file found directly in the given directory.
    private void loadPledgesFromDirectory(Path directory) throws IOException {
        executor.checkOnThread();
        for (Path path : LHUtils.listDir(directory)) {
            if (!path.toString().endsWith(PLEDGE_FILE_EXTENSION)) continue;
            tryLoadPledge(path);
        }
    }

    // Parses a pledge file and attaches it to its project; logs (but does not throw) on failure.
    private void tryLoadPledge(Path path) {
        LHProtos.Pledge pledge = loadPledge(path);
        if (pledge != null) {
            Project project = getProjectById(pledge.getPledgeDetails().getProjectId());
            if (project != null) {
                pledgesByPath.put(path, pledge);
                getPledgesOrCreate(project).add(pledge);
            } else {
                // TODO: This can happen if we're importing a project that already has pledges next to the file.
                // In that case, the app will copy the project into the app dir, then ask us to watch the origin dir
                // for pledges, but then the project load and scanning the origin dir are racing. So we need to
                // just put these pledges to one side and try again next time we find a new project.
                log.error("Found pledge on disk we don't have the project for: {}", path);
            }
        } else {
            log.error("Unable to load pledge from {}", path);
        }
    }

    // Reads and parses a single pledge protobuf file; returns null on I/O or parse failure.
    @Nullable
    private LHProtos.Pledge loadPledge(Path file) {
        executor.checkOnThread();
        log.info("Attempting to load {}", file);
        try (InputStream stream = Files.newInputStream(file)) {
            return LHProtos.Pledge.parseFrom(stream);
        } catch (IOException e) {
            log.error("Failed to load pledge from " + file, e);
            return null;
        }
    }

    // Replaces the in-memory pledgePaths list with the contents of projects.txt (one path per line).
    private void readPledgePaths() throws IOException {
        executor.checkOnThread();
        pledgePaths.clear();
        Files.readAllLines(getPledgePathsFile()).forEach(line -> pledgePaths.add(Paths.get(line)));
        log.info("{} project dirs read", pledgePaths.size());
        for (Path dir : pledgePaths) {
            log.info(dir.toString());
        }
    }

    // Persists pledgePaths back to projects.txt, one path per line.
    private void writePledgePaths() throws IOException {
        executor.checkOnThread();
        log.info("Writing {}", getPledgePathsFile());
        Path path = getPledgePathsFile();
        Files.write(path, (Iterable<String>) pledgePaths.stream().map(Path::toString)::iterator, Charsets.UTF_8);
    }

    private Path getPledgePathsFile() {
        return AppDirectory.dir().resolve(PLEDGE_PATHS_FILENAME);
    }

    // Full reload: statuses first (to get the import ordering), then projects, then pledges from all watched dirs.
    private void loadAll() throws IOException {
        executor.checkOnThread();
        log.info("Updating all data from disk");
        loadProjectStatuses();
        List<String> ids = new ArrayList<>(projectStatesMap.keySet());
        for (Path path : LHUtils.listDir(AppDirectory.dir())) {
            if (!path.toString().endsWith(PROJECT_FILE_EXTENSION)) continue;
            if (!Files.isRegularFile(path)) continue;
            if (tryLoadProject(path) == null)
                log.warn("Failed to load project {}", path);
        }
        // Sort projects into the order recorded in project-status.txt so the UI shows import order.
        projects.sort(new Comparator<Project>() {
            @Override
            public int compare(Project o1, Project o2) {
                int o1i = ids.indexOf(o1.getID());
                int o2i = ids.indexOf(o2.getID());
                // Project might have appeared on disk when we were not running and thus not have a status. This should
                // not happen in GUI mode unless the user dicks around with our private app directory so we can just
                // allow an unstable sort in this case. For servers it is expected but they don't care about the order.
                if (o1i == -1) o1i = Integer.MAX_VALUE;
                if (o2i == -1) o2i = Integer.MAX_VALUE;
                return Integer.compare(o1i, o2i);
            }
        });
        // Load pledges from each project path.
        loadPledgesFromDirectory(AppDirectory.dir());
        for (Path path : pledgePaths) {
            if (!Files.isDirectory(path)) continue;  // Can be from an old version or deleted by user.
            loadPledgesFromDirectory(path);
        }
        log.info("... disk data loaded");
    }

    // Reads project-status.txt ("id=OPEN" or "id=<claim tx hash>") preserving line order, and hooks up
    // auto-save on any later change to the map.
    private void loadProjectStatuses() throws IOException {
        Path path = AppDirectory.dir().resolve(PROJECT_STATUS_FILENAME);
        projectStates.addListener((InvalidationListener) x -> saveProjectStatuses());
        if (!Files.exists(path)) return;
        // Parse, paying attention to ordering.
        List<String> lines = Files.readAllLines(path);
        for (String line : lines) {
            if (line.startsWith("#")) continue;  // Backwards compat.
            List<String> parts = Splitter.on("=").splitToList(line);
            String key = parts.get(0);
            String val = parts.get(1);
            if (val.equals("OPEN")) {
                projectStates.put(key, new LighthouseBackend.ProjectStateInfo(LighthouseBackend.ProjectState.OPEN, null));
            } else {
                Sha256Hash claimedBy = new Sha256Hash(val);   // Treat as hex string.
                log.info("Project {} is marked as claimed by {}", key, claimedBy);
                projectStates.put(key, new LighthouseBackend.ProjectStateInfo(LighthouseBackend.ProjectState.CLAIMED, claimedBy));
            }
        }
    }

    // Writes the current projectStates map back to project-status.txt (invoked via the listener above).
    private void saveProjectStatuses() {
        log.info("Saving project statuses");
        Path path = AppDirectory.dir().resolve(PROJECT_STATUS_FILENAME);
        List<String> lines = new ArrayList<>();
        for (Map.Entry<String, LighthouseBackend.ProjectStateInfo> entry : projectStates.entrySet()) {
            String val = entry.getValue().state == LighthouseBackend.ProjectState.OPEN ?
                    "OPEN" : checkNotNull(entry.getValue().claimedBy).toString();
            lines.add(entry.getKey() + "=" + val);
        }
        uncheck(() -> Files.write(path, lines));
    }

    /** Loads (or reloads) a project file, registering it in all indexes; returns null on failure. */
    @Nullable
    public Project tryLoadProject(Path path) {
        return tryLoadProject(path, -1);
    }

    /**
     * Loads a project file. If indexToReplace is >= 0 the project at that position in the list is
     * replaced (used when a file on disk was modified); otherwise the project is appended.
     * Returns null on failure, or when a modify notification turned out to be a no-op.
     */
    @Nullable
    public Project tryLoadProject(Path path, int indexToReplace) {
        executor.checkOnThread();
        Project p = loadProject(path);
        if (p != null) {
            synchronized (this) {
                Project preExisting = projectsById.get(p.getID());
                if (preExisting != null) {
                    if (indexToReplace < 0) {
                        log.info("Already have project id {}, skipping load", p.getID());
                        return preExisting;
                    }
                    if (preExisting.equals(p)) {
                        // This can happen on Windows: the OS tells us the file was modified, but then we load it and
                        // discover it's really not different at all. This seems to happen a lot just after a file was
                        // created. Hack: to avoid weird races and problems elsewhere like the UI trying to update a
                        // project ui widget that is waiting for an animation to finish, we just ignore this here.
                        log.info("Got bogus project modify notification, ignoring");
                        return null;   // Not used.
                    }
                }
                projectsById.put(p.getID(), p);
            }
            if (indexToReplace >= 0) {
                projects.set(indexToReplace, p);
                log.info("Replaced project at index {} with newly loaded project", indexToReplace);
            } else {
                projects.add(p);
            }
            projectsByPath.put(path, p);
            if (!projectStates.containsKey(p.getID())) {
                // Assume new projects are open: we have no other way to tell for now: would require a block explorer
                // lookup to detect that the project came and went already. But we do remember even if the project
                // is deleted and came back.
                projectStates.put(p.getID(), new LighthouseBackend.ProjectStateInfo(LighthouseBackend.ProjectState.OPEN, null));
            }
        }
        return p;
    }

    // Parses a project protobuf file, rejecting projects for a different network; returns null on failure.
    @Nullable
    private Project loadProject(Path from) {
        log.info("Attempting to load project file {}", from);
        try (InputStream is = Files.newInputStream(from)) {
            LHProtos.Project proto = LHProtos.Project.parseFrom(is);
            Project project = new Project(proto);
            if (!project.getParams().equals(params)) {
                log.warn("Ignoring project with mismatched network params: {} vs {}",
                        project.getParams(), params);
                return null;
            }
            return project;
        } catch (IOException e) {
            log.error("File appeared in directory but could not be read, ignoring: {}", e.getMessage());
            return null;
        } catch (PaymentProtocolException e) {
            // Don't know how to load this file!
            log.error("Failed reading file", e);
            return null;
        }
    }

    /**
     * Writes the given project protobuf into the app directory atomically (via a .tmp file + move).
     * The resulting directory-change notification is what actually loads it into memory.
     */
    public Project saveProject(LHProtos.Project project, String fileID) throws IOException {
        // Probably on the UI thread here. Do the IO write on the UI thread to simplify error handling.
        final Project obj = unchecked(() -> new Project(project));
        final Path filename = Paths.get(obj.getSuggestedFileName());
        final Path path = AppDirectory.dir().resolve(filename + ".tmp");
        log.info("Saving project to: {}", path);
        // Do a write to a temp file name here to ensure a project file is not partially written and becomes partially
        // visible via directory notifications.
        try (OutputStream stream = new BufferedOutputStream(Files.newOutputStream(path))) {
            project.writeTo(stream);
        }
        // This should trigger a directory change notification that loads the project.
        if (Files.exists(AppDirectory.dir().resolve(filename)))
            log.info("... and replacing");
        Files.move(path, AppDirectory.dir().resolve(filename), StandardCopyOption.REPLACE_EXISTING);
        return obj;
    }

    /** Adds a directory that will be watched for pledge files. */
    public void addPledgePath(Path dir) {
        checkArgument(Files.isDirectory(dir));
        executor.executeASAP(() -> {
            log.info("Adding pledge path {}", dir);
            pledgePaths.add(dir);
            ignoreAndLog(this::writePledgePaths);
            loadPledgesFromDirectory(dir);
            // The watcher's directory set is immutable, so rebuild it to pick up the new path.
            directoryWatcher.stop();
            directoryWatcher = createDirWatcher();
        });
    }

    /** Registers a listener for changes to the project list (fires on the owning thread). */
    public void observeProjects(ListChangeListener<Project> listener) {
        projects.addListener(listener);
    }

    /** Returns the project list, mirrored onto the given executor's thread when it differs from ours. */
    public ObservableList<Project> mirrorProjects(AffinityExecutor executor) {
        if (executor == this.executor)
            return projects;
        else
            return ObservableMirrors.mirrorList(projects, executor);
    }

    /** Thread-safe lookup of a project by its hex id, or null if unknown. */
    @Nullable
    public synchronized Project getProjectById(String id) {
        return projectsById.get(id);
    }

    /** Returns an observable set of pledges for the project. */
    public synchronized ObservableSet<LHProtos.Pledge> getPledgesOrCreate(Project forProject) {
        ObservableSet<LHProtos.Pledge> result = pledges.get(forProject);
        if (result == null) {
            result = FXCollections.observableSet();
            pledges.put(forProject, result);
        }
        return result;
    }

    /** Returns an observable set of pledges if this project was found on disk, otherwise null. */
    @Nullable
    public synchronized ObservableSet<LHProtos.Pledge> getPledgesFor(Project forProject) {
        return pledges.get(forProject);
    }

    /** Finds the project whose outputs structurally match the given claim transaction, or null. */
    @Nullable
    public Project getProjectFromClaim(Transaction claim) {
        executor.checkOnThread();
        for (Project project : projects) {
            if (LHUtils.compareOutputsStructurally(claim, project))
                return project;
        }
        return null;
    }

    // TODO: Remove me when a new block doesn't require checking every new project.
    public Set<Project> getProjects() {
        return new HashSet<>(projects);
    }

    /** Records the open/claimed state of a project (queued onto the owning thread; auto-persisted). */
    public void setProjectState(Project project, LighthouseBackend.ProjectStateInfo state) {
        executor.executeASAP(() -> projectStates.put(project.getID(), state));
    }

    public LighthouseBackend.ProjectStateInfo getProjectState(Project project) {
        executor.checkOnThread();
        return projectStates.get(project.getID());
    }

    /** Returns the project-state map, mirrored onto the given executor's thread when it differs from ours. */
    public ObservableMap<String, LighthouseBackend.ProjectStateInfo> mirrorProjectStates(AffinityExecutor runChangesIn) {
        if (executor == runChangesIn)
            return projectStates;
        else
            return executor.fetchFrom(() -> ObservableMirrors.mirrorMap(projectStates, runChangesIn));
    }
}