gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * Copyright 2012-2015 org.opencloudb.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/**
 * (created at 2012-8-14)
 */
package org.opencloudb.paser.ast.ddl;

import java.util.List;

import org.opencloudb.paser.ast.ASTNode;
import org.opencloudb.paser.ast.expression.Expression;
import org.opencloudb.paser.ast.expression.function.literal.LiteralString;
import org.opencloudb.paser.ast.expression.primary.Identifier;
import org.opencloudb.paser.visitor.SQLASTVisitor;

/**
 * AST node holding the optional {@code table_option} clauses of a MySQL
 * {@code CREATE TABLE} / {@code ALTER TABLE} statement. This is a plain data
 * holder: every option is nullable (a {@code null} field means the option was
 * not specified in the SQL text), and all values are set by the parser via the
 * setters below.
 *
 * @author mycat
 */
public class TableOptions implements ASTNode {
    /** Values accepted by {@code INSERT_METHOD [=] {NO | FIRST | LAST}}. */
    public static enum InsertMethod {
        NO, FIRST, LAST
    }

    /** Values accepted by {@code PACK_KEYS [=] {0 | 1 | DEFAULT}}. */
    public static enum PackKeys {
        FALSE, TRUE, DEFAULT
    }

    /** Values accepted by {@code ROW_FORMAT [=] ...}. */
    public static enum RowFormat {
        DEFAULT, DYNAMIC, FIXED, COMPRESSED, REDUNDANT, COMPACT
    }

    private Identifier engine;             // ENGINE [=] engine_name
    private Expression autoIncrement;      // AUTO_INCREMENT [=] value
    private Expression avgRowLength;       // AVG_ROW_LENGTH [=] value
    private Identifier charSet;            // [DEFAULT] CHARACTER SET [=] charset_name
    private Identifier collation;          // [DEFAULT] COLLATE [=] collation_name
    private Boolean checkSum;              // CHECKSUM [=] {0 | 1}
    private LiteralString comment;         // COMMENT [=] 'string'
    private LiteralString connection;      // CONNECTION [=] 'connect_string'
    private LiteralString dataDir;         // DATA DIRECTORY [=] 'absolute path'
    private LiteralString indexDir;        // INDEX DIRECTORY [=] 'absolute path'
    private Boolean delayKeyWrite;         // DELAY_KEY_WRITE [=] {0 | 1}
    private InsertMethod insertMethod;     // INSERT_METHOD [=] {NO | FIRST | LAST}
    private Expression keyBlockSize;       // KEY_BLOCK_SIZE [=] value
    private Expression maxRows;            // MAX_ROWS [=] value
    private Expression minRows;            // MIN_ROWS [=] value
    private PackKeys packKeys;             // PACK_KEYS [=] {0 | 1 | DEFAULT}
    private LiteralString password;        // PASSWORD [=] 'string'
    private RowFormat rowFormat;           // ROW_FORMAT [=] {...}
    private List<Identifier> union;        // UNION [=] (tbl_name[, tbl_name]...)

    // table_option:
    //     ENGINE [=] engine_name
    //   | AUTO_INCREMENT [=] value
    //   | AVG_ROW_LENGTH [=] value
    //   | [DEFAULT] CHARACTER SET [=] charset_name
    //   | CHECKSUM [=] {0 | 1}
    //   | [DEFAULT] COLLATE [=] collation_name
    //   | COMMENT [=] 'string'
    //   | CONNECTION [=] 'connect_string'
    //   | DATA DIRECTORY [=] 'absolute path to directory'
    //   | DELAY_KEY_WRITE [=] {0 | 1}
    //   | INDEX DIRECTORY [=] 'absolute path to directory'
    //   | INSERT_METHOD [=] { NO | FIRST | LAST }
    //   | KEY_BLOCK_SIZE [=] value
    //   | MAX_ROWS [=] value
    //   | MIN_ROWS [=] value
    //   | PACK_KEYS [=] {0 | 1 | DEFAULT}
    //   | PASSWORD [=] 'string'
    //   | ROW_FORMAT [=] {DEFAULT|DYNAMIC|FIXED|COMPRESSED|REDUNDANT|COMPACT}
    //   | UNION [=] (tbl_name[,tbl_name]...)
    public TableOptions() {
    }

    // --- trivial accessors; each pair reads/writes the matching field above ---

    public Identifier getEngine() {
        return engine;
    }

    public void setEngine(Identifier engine) {
        this.engine = engine;
    }

    public Expression getAutoIncrement() {
        return autoIncrement;
    }

    public void setAutoIncrement(Expression autoIncrement) {
        this.autoIncrement = autoIncrement;
    }

    public Expression getAvgRowLength() {
        return avgRowLength;
    }

    public void setAvgRowLength(Expression avgRowLength) {
        this.avgRowLength = avgRowLength;
    }

    public Identifier getCharSet() {
        return charSet;
    }

    public void setCharSet(Identifier charSet) {
        this.charSet = charSet;
    }

    public Identifier getCollation() {
        return collation;
    }

    public void setCollation(Identifier collation) {
        this.collation = collation;
    }

    public Boolean getCheckSum() {
        return checkSum;
    }

    public void setCheckSum(Boolean checkSum) {
        this.checkSum = checkSum;
    }

    public LiteralString getComment() {
        return comment;
    }

    public void setComment(LiteralString comment) {
        this.comment = comment;
    }

    public LiteralString getConnection() {
        return connection;
    }

    public void setConnection(LiteralString connection) {
        this.connection = connection;
    }

    public LiteralString getDataDir() {
        return dataDir;
    }

    public void setDataDir(LiteralString dataDir) {
        this.dataDir = dataDir;
    }

    public LiteralString getIndexDir() {
        return indexDir;
    }

    public void setIndexDir(LiteralString indexDir) {
        this.indexDir = indexDir;
    }

    public Boolean getDelayKeyWrite() {
        return delayKeyWrite;
    }

    public void setDelayKeyWrite(Boolean delayKeyWrite) {
        this.delayKeyWrite = delayKeyWrite;
    }

    public InsertMethod getInsertMethod() {
        return insertMethod;
    }

    public void setInsertMethod(InsertMethod insertMethod) {
        this.insertMethod = insertMethod;
    }

    public Expression getKeyBlockSize() {
        return keyBlockSize;
    }

    public void setKeyBlockSize(Expression keyBlockSize) {
        this.keyBlockSize = keyBlockSize;
    }

    public Expression getMaxRows() {
        return maxRows;
    }

    public void setMaxRows(Expression maxRows) {
        this.maxRows = maxRows;
    }

    public Expression getMinRows() {
        return minRows;
    }

    public void setMinRows(Expression minRows) {
        this.minRows = minRows;
    }

    public PackKeys getPackKeys() {
        return packKeys;
    }

    public void setPackKeys(PackKeys packKeys) {
        this.packKeys = packKeys;
    }

    public LiteralString getPassword() {
        return password;
    }

    public void setPassword(LiteralString password) {
        this.password = password;
    }

    public RowFormat getRowFormat() {
        return rowFormat;
    }

    public void setRowFormat(RowFormat rowFormat) {
        this.rowFormat = rowFormat;
    }

    public List<Identifier> getUnion() {
        return union;
    }

    public void setUnion(List<Identifier> union) {
        this.union = union;
    }

    /** Visitor entry point: dispatches this node to the given visitor. */
    @Override
    public void accept(SQLASTVisitor visitor) {
        visitor.visit(this);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.zeppelin.interpreter;

import org.apache.zeppelin.conf.ZeppelinConfiguration;
import org.apache.zeppelin.dep.Dependency;
import org.apache.zeppelin.display.AngularObjectRegistryListener;
import org.apache.zeppelin.helium.ApplicationEventListener;
import org.apache.zeppelin.interpreter.remote.RemoteInterpreterProcessListener;
import org.junit.Test;
import org.sonatype.aether.RepositoryException;
import org.sonatype.aether.repository.RemoteRepository;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;

/**
 * Tests for {@code InterpreterSettingManager}: loading settings from
 * interpreter-setting.json, create/update/remove of settings, note/user
 * interpreter bindings, editor-setting lookup, and restart semantics under
 * the shared/scoped/isolated perUser/perNote modes.
 *
 * <p>The fixtures ({@code interpreterSettingManager}, {@code interpreterFactory},
 * {@code conf}) are inherited from {@code AbstractInterpreterTest}.
 */
public class InterpreterSettingManagerTest extends AbstractInterpreterTest {

  /**
   * Verifies the initial state loaded from the test configuration, then
   * constructs a second manager against the same config to confirm the
   * persisted state round-trips identically.
   */
  @Test
  public void testInitInterpreterSettingManager() throws IOException, RepositoryException {
    assertEquals(5, interpreterSettingManager.get().size());
    InterpreterSetting interpreterSetting = interpreterSettingManager.getByName("test");
    assertEquals("test", interpreterSetting.getName());
    assertEquals("test", interpreterSetting.getGroup());
    assertEquals(3, interpreterSetting.getInterpreterInfos().size());
    // 3 other builtin properties:
    //   * zeppelin.interpreter.output.limit
    //   * zeppelin.interpreter.localRepo
    //   * zeppelin.interpreter.max.poolsize
    assertEquals(6, interpreterSetting.getJavaProperties().size());
    assertEquals("value_1", interpreterSetting.getJavaProperties().getProperty("property_1"));
    assertEquals("new_value_2", interpreterSetting.getJavaProperties().getProperty("property_2"));
    assertEquals("value_3", interpreterSetting.getJavaProperties().getProperty("property_3"));
    assertEquals("shared", interpreterSetting.getOption().perNote);
    assertEquals("shared", interpreterSetting.getOption().perUser);
    assertEquals(0, interpreterSetting.getDependencies().size());
    assertNotNull(interpreterSetting.getAngularObjectRegistryListener());
    assertNotNull(interpreterSetting.getRemoteInterpreterProcessListener());
    assertNotNull(interpreterSetting.getAppEventListener());
    assertNotNull(interpreterSetting.getDependencyResolver());
    assertNotNull(interpreterSetting.getInterpreterSettingManager());

    List<RemoteRepository> repositories = interpreterSettingManager.getRepositories();
    assertEquals(2, repositories.size());
    assertEquals("central", repositories.get(0).getId());

    // Load it again
    InterpreterSettingManager interpreterSettingManager2 = new InterpreterSettingManager(conf,
        mock(AngularObjectRegistryListener.class),
        mock(RemoteInterpreterProcessListener.class),
        mock(ApplicationEventListener.class));
    assertEquals(5, interpreterSettingManager2.get().size());
    interpreterSetting = interpreterSettingManager2.getByName("test");
    assertEquals("test", interpreterSetting.getName());
    assertEquals("test", interpreterSetting.getGroup());
    assertEquals(3, interpreterSetting.getInterpreterInfos().size());
    assertEquals(6, interpreterSetting.getJavaProperties().size());
    assertEquals("value_1", interpreterSetting.getJavaProperties().getProperty("property_1"));
    assertEquals("new_value_2", interpreterSetting.getJavaProperties().getProperty("property_2"));
    assertEquals("value_3", interpreterSetting.getJavaProperties().getProperty("property_3"));
    assertEquals("shared", interpreterSetting.getOption().perNote);
    assertEquals("shared", interpreterSetting.getOption().perUser);
    assertEquals(0, interpreterSetting.getDependencies().size());
    repositories = interpreterSettingManager2.getRepositories();
    assertEquals(2, repositories.size());
    assertEquals("central", repositories.get(0).getId());
  }

  /**
   * Full lifecycle: duplicate-name rejection, creation of a new setting,
   * reload from the persisted interpreter-setting.json, property/option
   * update with restart, session-scoped restart, and removal.
   */
  @Test
  public void testCreateUpdateRemoveSetting() throws IOException, InterpreterException {
    // create new interpreter setting
    InterpreterOption option = new InterpreterOption();
    option.setPerNote("scoped");
    option.setPerUser("scoped");
    Map<String, InterpreterProperty> properties = new HashMap<>();
    properties.put("property_4", new InterpreterProperty("property_4","value_4"));
    try {
      interpreterSettingManager.createNewSetting("test2", "test", new ArrayList<Dependency>(), option, properties);
      fail("Should fail due to interpreter already existed");
    } catch (IOException e) {
      assertTrue(e.getMessage().contains("already existed"));
    }

    interpreterSettingManager.createNewSetting("test3", "test", new ArrayList<Dependency>(), option, properties);
    assertEquals(6, interpreterSettingManager.get().size());
    InterpreterSetting interpreterSetting = interpreterSettingManager.getByName("test3");
    assertEquals("test3", interpreterSetting.getName());
    assertEquals("test", interpreterSetting.getGroup());
    // 3 other builtin properties:
    //   * zeppelin.interpreter.output.limit
    //   * zeppelin.interpreter.localRepo
    //   * zeppelin.interpreter.max.poolsize
    assertEquals(4, interpreterSetting.getJavaProperties().size());
    assertEquals("value_4", interpreterSetting.getJavaProperties().getProperty("property_4"));
    assertEquals("scoped", interpreterSetting.getOption().perNote);
    assertEquals("scoped", interpreterSetting.getOption().perUser);
    assertEquals(0, interpreterSetting.getDependencies().size());
    assertNotNull(interpreterSetting.getAngularObjectRegistryListener());
    assertNotNull(interpreterSetting.getRemoteInterpreterProcessListener());
    assertNotNull(interpreterSetting.getAppEventListener());
    assertNotNull(interpreterSetting.getDependencyResolver());
    assertNotNull(interpreterSetting.getInterpreterSettingManager());

    // load it again, it should be saved in interpreter-setting.json. So we can restore it properly
    InterpreterSettingManager interpreterSettingManager2 = new InterpreterSettingManager(conf,
        mock(AngularObjectRegistryListener.class),
        mock(RemoteInterpreterProcessListener.class),
        mock(ApplicationEventListener.class));
    assertEquals(6, interpreterSettingManager2.get().size());
    interpreterSetting = interpreterSettingManager2.getByName("test3");
    assertEquals("test3", interpreterSetting.getName());
    assertEquals("test", interpreterSetting.getGroup());
    // NOTE(review): 6 here vs 4 before the reload — presumably the reload path
    // merges extra builtin properties; confirm this asymmetry is intended.
    assertEquals(6, interpreterSetting.getJavaProperties().size());
    assertEquals("value_4", interpreterSetting.getJavaProperties().getProperty("property_4"));
    assertEquals("scoped", interpreterSetting.getOption().perNote);
    assertEquals("scoped", interpreterSetting.getOption().perUser);
    assertEquals(0, interpreterSetting.getDependencies().size());

    // update interpreter setting
    InterpreterOption newOption = new InterpreterOption();
    newOption.setPerNote("scoped");
    newOption.setPerUser("isolated");
    Map<String, InterpreterProperty> newProperties = new HashMap<>(properties);
    newProperties.put("property_4", new InterpreterProperty("property_4", "new_value_4"));
    List<Dependency> newDependencies = new ArrayList<>();
    newDependencies.add(new Dependency("com.databricks:spark-avro_2.11:3.1.0"));
    interpreterSettingManager.setPropertyAndRestart(interpreterSetting.getId(), newOption, newProperties, newDependencies);
    interpreterSetting = interpreterSettingManager.get(interpreterSetting.getId());
    assertEquals("test3", interpreterSetting.getName());
    assertEquals("test", interpreterSetting.getGroup());
    assertEquals(4, interpreterSetting.getJavaProperties().size());
    assertEquals("new_value_4", interpreterSetting.getJavaProperties().getProperty("property_4"));
    assertEquals("scoped", interpreterSetting.getOption().perNote);
    assertEquals("isolated", interpreterSetting.getOption().perUser);
    assertEquals(1, interpreterSetting.getDependencies().size());
    assertNotNull(interpreterSetting.getAngularObjectRegistryListener());
    assertNotNull(interpreterSetting.getRemoteInterpreterProcessListener());
    assertNotNull(interpreterSetting.getAppEventListener());
    assertNotNull(interpreterSetting.getDependencyResolver());
    assertNotNull(interpreterSetting.getInterpreterSettingManager());

    // restart in note page
    interpreterSettingManager.setInterpreterBinding("user1", "note1", interpreterSettingManager.getSettingIds());
    interpreterSettingManager.setInterpreterBinding("user2", "note2", interpreterSettingManager.getSettingIds());
    interpreterSettingManager.setInterpreterBinding("user3", "note3", interpreterSettingManager.getSettingIds());
    // create 3 sessions as it is scoped mode
    interpreterSetting.getOption().setPerUser("scoped");
    interpreterSetting.getDefaultInterpreter("user1", "note1");
    interpreterSetting.getDefaultInterpreter("user2", "note2");
    interpreterSetting.getDefaultInterpreter("user3", "note3");
    InterpreterGroup interpreterGroup = interpreterSetting.getInterpreterGroup("user1", "note1");
    assertEquals(3, interpreterGroup.getSessionNum());
    // only close user1's session
    interpreterSettingManager.restart(interpreterSetting.getId(), "note1", "user1");
    assertEquals(2, interpreterGroup.getSessionNum());

    // remove interpreter setting
    interpreterSettingManager.remove(interpreterSetting.getId());
    assertEquals(5, interpreterSettingManager.get().size());

    // load it again
    InterpreterSettingManager interpreterSettingManager3 = new InterpreterSettingManager(new ZeppelinConfiguration(),
        mock(AngularObjectRegistryListener.class),
        mock(RemoteInterpreterProcessListener.class),
        mock(ApplicationEventListener.class));
    assertEquals(5, interpreterSettingManager3.get().size());
  }

  /** A note has no binding until one is set; the binding echoes the ids given. */
  @Test
  public void testInterpreterBinding() throws IOException {
    assertNull(interpreterSettingManager.getInterpreterBinding("note1"));
    interpreterSettingManager.setInterpreterBinding("user1", "note1", interpreterSettingManager.getInterpreterSettingIds());
    assertEquals(interpreterSettingManager.getInterpreterSettingIds(), interpreterSettingManager.getInterpreterBinding("note1"));
  }

  /** Re-binding a note must keep a perNote=shared InterpreterGroup alive. */
  @Test
  public void testUpdateInterpreterBinding_PerNoteShared() throws IOException {
    InterpreterSetting defaultInterpreterSetting = interpreterSettingManager.get().get(0);
    defaultInterpreterSetting.getOption().setPerNote("shared");

    interpreterSettingManager.setInterpreterBinding("user1", "note1", interpreterSettingManager.getInterpreterSettingIds());
    // create interpreter of the first binded interpreter setting
    interpreterFactory.getInterpreter("user1", "note1", "");
    assertEquals(1, defaultInterpreterSetting.getAllInterpreterGroups().size());

    // choose the first setting
    List<String> newSettingIds = new ArrayList<>();
    newSettingIds.add(interpreterSettingManager.getInterpreterSettingIds().get(1));
    interpreterSettingManager.setInterpreterBinding("user1", "note1", newSettingIds);
    assertEquals(newSettingIds, interpreterSettingManager.getInterpreterBinding("note1"));
    // InterpreterGroup will still be alive as it is shared
    assertEquals(1, defaultInterpreterSetting.getAllInterpreterGroups().size());
  }

  /** Re-binding a note must close a perNote=isolated InterpreterGroup. */
  @Test
  public void testUpdateInterpreterBinding_PerNoteIsolated() throws IOException {
    InterpreterSetting defaultInterpreterSetting = interpreterSettingManager.get().get(0);
    defaultInterpreterSetting.getOption().setPerNote("isolated");

    interpreterSettingManager.setInterpreterBinding("user1", "note1", interpreterSettingManager.getInterpreterSettingIds());
    // create interpreter of the first binded interpreter setting
    interpreterFactory.getInterpreter("user1", "note1", "");
    assertEquals(1, defaultInterpreterSetting.getAllInterpreterGroups().size());

    // choose the first setting
    List<String> newSettingIds = new ArrayList<>();
    newSettingIds.add(interpreterSettingManager.getInterpreterSettingIds().get(1));
    interpreterSettingManager.setInterpreterBinding("user1", "note1", newSettingIds);
    assertEquals(newSettingIds, interpreterSettingManager.getInterpreterBinding("note1"));
    // InterpreterGroup will be closed as it is only belong to this note
    assertEquals(0, defaultInterpreterSetting.getAllInterpreterGroups().size());
  }

  /**
   * Re-binding one of two notes under perNote=scoped keeps the group alive
   * but closes only that note's session.
   */
  @Test
  public void testUpdateInterpreterBinding_PerNoteScoped() throws IOException {
    InterpreterSetting defaultInterpreterSetting = interpreterSettingManager.get().get(0);
    defaultInterpreterSetting.getOption().setPerNote("scoped");

    interpreterSettingManager.setInterpreterBinding("user1", "note1", interpreterSettingManager.getInterpreterSettingIds());
    interpreterSettingManager.setInterpreterBinding("user1", "note2", interpreterSettingManager.getInterpreterSettingIds());
    // create 2 interpreter of the first binded interpreter setting for note1 and note2
    interpreterFactory.getInterpreter("user1", "note1", "");
    interpreterFactory.getInterpreter("user1", "note2", "");
    assertEquals(1, defaultInterpreterSetting.getAllInterpreterGroups().size());
    assertEquals(2, defaultInterpreterSetting.getAllInterpreterGroups().get(0).getSessionNum());

    // choose the first setting
    List<String> newSettingIds = new ArrayList<>();
    newSettingIds.add(interpreterSettingManager.getInterpreterSettingIds().get(1));
    interpreterSettingManager.setInterpreterBinding("user1", "note1", newSettingIds);
    assertEquals(newSettingIds, interpreterSettingManager.getInterpreterBinding("note1"));
    // InterpreterGroup will be still alive but session belong to note1 will be closed
    assertEquals(1, defaultInterpreterSetting.getAllInterpreterGroups().size());
    assertEquals(1, defaultInterpreterSetting.getAllInterpreterGroups().get(0).getSessionNum());
  }

  /** Editor settings come from interpreter-setting.json, with a text fallback. */
  @Test
  public void testGetEditor() throws IOException {
    interpreterSettingManager.setInterpreterBinding("user1", "note1", interpreterSettingManager.getInterpreterSettingIds());
    Interpreter echoInterpreter = interpreterFactory.getInterpreter("user1", "note1", "test.echo");
    // get editor setting from interpreter-setting.json
    Map<String, Object> editor = interpreterSettingManager.getEditorSetting(echoInterpreter, "user1", "note1", "test.echo");
    assertEquals("java", editor.get("language"));

    // when editor setting doesn't exit, return the default editor
    Interpreter mock1Interpreter = interpreterFactory.getInterpreter("user1", "note1", "mock1");
    editor = interpreterSettingManager.getEditorSetting(mock1Interpreter,"user1", "note1", "mock1");
    assertEquals("text", editor.get("language"));
  }

  /** shared/shared: one group for everyone; restarting tears it all down. */
  @Test
  public void testRestartShared() throws InterpreterException {
    InterpreterSetting interpreterSetting = interpreterSettingManager.getByName("test");
    interpreterSetting.getOption().setPerUser("shared");
    interpreterSetting.getOption().setPerNote("shared");

    interpreterSetting.getOrCreateSession("user1", "note1");
    interpreterSetting.getOrCreateInterpreterGroup("user2", "note2");
    assertEquals(1, interpreterSetting.getAllInterpreterGroups().size());

    // NOTE(review): arguments here are ("user1", "note1") while every other
    // restart call in this class passes ("note1", "user1") — verify the
    // intended parameter order of InterpreterSettingManager.restart.
    interpreterSettingManager.restart(interpreterSetting.getId(), "user1", "note1");
    assertEquals(0, interpreterSetting.getAllInterpreterGroups().size());
  }

  /** perUser=isolated: one group per user; restart removes only that user's group. */
  @Test
  public void testRestartPerUserIsolated() throws InterpreterException {
    InterpreterSetting interpreterSetting = interpreterSettingManager.getByName("test");
    interpreterSetting.getOption().setPerUser("isolated");
    interpreterSetting.getOption().setPerNote("shared");

    interpreterSetting.getOrCreateSession("user1", "note1");
    interpreterSetting.getOrCreateSession("user2", "note2");
    assertEquals(2, interpreterSetting.getAllInterpreterGroups().size());

    interpreterSettingManager.restart(interpreterSetting.getId(), "note1", "user1");
    assertEquals(1, interpreterSetting.getAllInterpreterGroups().size());
  }

  /** perNote=isolated: one group per note; restart removes only that note's group. */
  @Test
  public void testRestartPerNoteIsolated() throws InterpreterException {
    InterpreterSetting interpreterSetting = interpreterSettingManager.getByName("test");
    interpreterSetting.getOption().setPerUser("shared");
    interpreterSetting.getOption().setPerNote("isolated");

    interpreterSetting.getOrCreateSession("user1", "note1");
    interpreterSetting.getOrCreateSession("user2", "note2");
    assertEquals(2, interpreterSetting.getAllInterpreterGroups().size());

    interpreterSettingManager.restart(interpreterSetting.getId(), "note1", "user1");
    assertEquals(1, interpreterSetting.getAllInterpreterGroups().size());
  }

  /** perUser=scoped: one shared group, per-user sessions; restart closes one session. */
  @Test
  public void testRestartPerUserScoped() throws InterpreterException {
    InterpreterSetting interpreterSetting = interpreterSettingManager.getByName("test");
    interpreterSetting.getOption().setPerUser("scoped");
    interpreterSetting.getOption().setPerNote("shared");

    interpreterSetting.getOrCreateSession("user1", "note1");
    interpreterSetting.getOrCreateSession("user2", "note2");
    assertEquals(1, interpreterSetting.getAllInterpreterGroups().size());
    assertEquals(2, interpreterSetting.getAllInterpreterGroups().get(0).getSessionNum());

    interpreterSettingManager.restart(interpreterSetting.getId(), "note1", "user1");
    assertEquals(1, interpreterSetting.getAllInterpreterGroups().size());
    assertEquals(1, interpreterSetting.getAllInterpreterGroups().get(0).getSessionNum());
  }

  /** perNote=scoped: one shared group, per-note sessions; restart closes one session. */
  @Test
  public void testRestartPerNoteScoped() throws InterpreterException {
    InterpreterSetting interpreterSetting = interpreterSettingManager.getByName("test");
    interpreterSetting.getOption().setPerUser("shared");
    interpreterSetting.getOption().setPerNote("scoped");

    interpreterSetting.getOrCreateSession("user1", "note1");
    interpreterSetting.getOrCreateSession("user2", "note2");
    assertEquals(1, interpreterSetting.getAllInterpreterGroups().size());
    assertEquals(2, interpreterSetting.getAllInterpreterGroups().get(0).getSessionNum());

    interpreterSettingManager.restart(interpreterSetting.getId(), "note1", "user1");
    assertEquals(1, interpreterSetting.getAllInterpreterGroups().size());
    assertEquals(1, interpreterSetting.getAllInterpreterGroups().get(0).getSessionNum());
  }
}
/*
 * Copyright 2007 Sascha Weinreuter
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.intellij.plugins.relaxNG.convert;

import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.ArrayUtilRt;
import com.thaiopensource.relaxng.edit.SchemaCollection;
import com.thaiopensource.relaxng.input.InputFailedException;
import com.thaiopensource.relaxng.input.InputFormat;
import com.thaiopensource.relaxng.input.MultiInputFormat;
import com.thaiopensource.relaxng.input.dtd.DtdInputFormat;
import com.thaiopensource.relaxng.input.parse.compact.CompactParseInputFormat;
import com.thaiopensource.relaxng.input.parse.sax.SAXParseInputFormat;
import com.thaiopensource.relaxng.input.xml.XmlInputFormat;
import com.thaiopensource.relaxng.output.LocalOutputDirectory;
import com.thaiopensource.relaxng.output.OutputDirectory;
import com.thaiopensource.relaxng.output.OutputFailedException;
import com.thaiopensource.relaxng.output.OutputFormat;
import com.thaiopensource.relaxng.output.dtd.DtdOutputFormat;
import com.thaiopensource.relaxng.output.rnc.RncOutputFormat;
import com.thaiopensource.relaxng.output.rng.RngOutputFormat;
import com.thaiopensource.relaxng.output.xsd.XsdOutputFormat;
import com.thaiopensource.relaxng.translate.util.InvalidParamsException;
import com.thaiopensource.util.UriOrFile;
import org.intellij.plugins.relaxNG.RelaxngBundle;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;

import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.nio.charset.Charset;
import java.util.ArrayList;

/**
 * Drives the Trang schema converter (thaiopensource) from the IDE: loads one
 * or more input schema files in the given format (DTD/RNC/RNG/XML), converts
 * them to the output format selected in {@link ConvertSchemaSettings}, and
 * writes the result next to (or at) the configured destination, asking the
 * user before overwriting secondary output files.
 */
public class IdeaDriver {
  /** Trang defaults used when the settings supply non-positive values. */
  private static final int DEFAULT_LINE_LENGTH = 72;
  private static final int DEFAULT_INDENT = 2;

  private final ConvertSchemaSettings settings;
  private final Project myProject;

  public IdeaDriver(ConvertSchemaSettings settings, Project project) {
    this.settings = settings;
    myProject = project;
  }

  /**
   * Converts {@code inputFiles} (all of type {@code inputType}) to the output
   * type configured in the settings. Conversion errors are routed through
   * {@code errorHandler}; most converter exceptions are assumed to have been
   * reported there already and are therefore swallowed below.
   *
   * @param inputType    format of every input file
   * @param errorHandler sink for parse/convert diagnostics
   * @param inputFiles   one or more input files; multiple files are only
   *                     allowed when the input format supports multi-input
   * @throws IllegalArgumentException if no input files are given, or several
   *                                  are given for a single-input format
   */
  public void convert(SchemaType inputType, IdeaErrorHandler errorHandler, VirtualFile... inputFiles) {
    if (inputFiles.length == 0) {
      throw new IllegalArgumentException();
    }
    try {
      final InputFormat inFormat = getInputFormat(inputType);
      if (inputFiles.length > 1) {
        if (!(inFormat instanceof MultiInputFormat)) {
          throw new IllegalArgumentException();
        }
      }

      final VirtualFile inputFile = inputFiles[0];
      final SchemaType type = settings.getOutputType();
      final String outputType = StringUtil.toLowerCase(type.toString());

      final ArrayList<String> inputParams = new ArrayList<>();
      if (inputType != SchemaType.DTD) {
        // DTD input has no encoding parameter; for the others, pass the IDE's
        // detected charset through to the parser.
        final Charset charset = inputFile.getCharset();
        inputParams.add("encoding=" + charset.name());
      }

      final ArrayList<String> outputParams = new ArrayList<>();
      settings.addAdvancedSettings(inputParams, outputParams);

      final SchemaCollection sc;
      final String input = inputFile.getPath();
      final String uri = UriOrFile.toUri(input);
      try {
        if (inFormat instanceof MultiInputFormat) {
          final MultiInputFormat format = (MultiInputFormat)inFormat;
          final String[] uris = new String[inputFiles.length];
          for (int i = 0; i < inputFiles.length; i++) {
            uris[i] = UriOrFile.toUri(inputFiles[i].getPath());
          }
          sc = format.load(uris, ArrayUtilRt.toStringArray(inputParams), outputType, errorHandler);
        }
        else {
          sc = inFormat.load(uri, ArrayUtilRt.toStringArray(inputParams), outputType, errorHandler);
        }
      }
      catch (IOException e) {
        errorHandler.fatalError(new SAXParseException(e.getMessage(), null, uri, -1, -1, e));
        return;
      }

      // Destination may be a directory (derive the file name from the input)
      // or an explicit output file.
      final File destination = new File(settings.getOutputDestination());
      final File outputFile;
      if (destination.isDirectory()) {
        final String name = new File(input).getName();
        final int ext = name.lastIndexOf('.');
        outputFile = new File(destination, (ext > 0 ? name.substring(0, ext) : name) + "." + outputType);
      }
      else {
        outputFile = destination;
      }

      try {
        final int indent = settings.getIndent();
        final int length = settings.getLineLength();
        final OutputDirectory od =
          new LocalOutputDirectory(sc.getMainUri(), outputFile, "." + outputType,
                                   settings.getOutputEncoding(),
                                   length > 0 ? length : DEFAULT_LINE_LENGTH,
                                   indent > 0 ? indent : DEFAULT_INDENT) {
            @Override
            public Stream open(String sourceUri, String encoding) throws IOException {
              // Trang may emit additional files (e.g. included schemas);
              // confirm before overwriting any that already exist.
              final String s = reference(null, sourceUri);
              final File file = new File(outputFile.getParentFile(), s);
              if (file.exists()) {
                final String msg = RelaxngBundle.message("relaxng.convert-schema.dialog.file-exists.message", file.getAbsolutePath());
                final int choice = Messages.showYesNoDialog(myProject, msg,
                                                            RelaxngBundle.message("relaxng.convert-schema.dialog.file-exists.title"),
                                                            Messages.getWarningIcon());
                // Fix: the original compared the "No" answer against the magic
                // literal 1 instead of Messages.NO, and duplicated the
                // super.open(...) call on the "Yes" branch. showYesNoDialog
                // returns only YES or NO, so anything other than YES aborts.
                if (choice != Messages.YES) {
                  throw new CanceledException();
                }
              }
              return super.open(sourceUri, encoding);
            }
          };
        final OutputFormat of = getOutputFormat(settings.getOutputType());

        of.output(sc, od, ArrayUtilRt.toStringArray(outputParams), StringUtil.toLowerCase(inputType.toString()), errorHandler);
      }
      catch (IOException e) {
        errorHandler.fatalError(new SAXParseException(e.getMessage(), null, UriOrFile.fileToUri(outputFile), -1, -1, e));
      }
    }
    catch (CanceledException e) {
      // user abort
    }
    catch (SAXParseException e) {
      errorHandler.error(e);
    }
    catch (MalformedURLException e) {
      Logger.getInstance(getClass().getName()).error(e);
    }
    catch (InputFailedException e) {
      // handled by ErrorHandler
    }
    catch (InvalidParamsException e) {
      // handled by ErrorHandler
    }
    catch (OutputFailedException e) {
      // handled by ErrorHandler
    }
    catch (SAXException e) {
      // cannot happen or is already handled
    }
  }

  /** Maps the selected output schema type to the corresponding Trang writer. */
  private static OutputFormat getOutputFormat(SchemaType outputType) {
    switch (outputType) {
      case DTD:
        return new DtdOutputFormat();
      case RNC:
        return new RncOutputFormat();
      case RNG:
        return new RngOutputFormat();
      case XSD:
        return new XsdOutputFormat();
      default:
        assert false : "Unsupported output type: " + outputType;
        return null;
    }
  }

  /** Maps the input schema type to the corresponding Trang reader. */
  private static InputFormat getInputFormat(SchemaType type) {
    switch (type) {
      case DTD:
        return new DtdInputFormat();
      case RNC:
        return new CompactParseInputFormat();
      case RNG:
        return new SAXParseInputFormat();
      case XML:
        return new XmlInputFormat();
      default:
        assert false : "Unsupported input type: " + type;
        return null;
    }
  }

  /** Internal control-flow signal for a user-cancelled overwrite prompt. */
  private static class CanceledException extends RuntimeException {
  }
}
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.flowable.cmmn.test.logging; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; import java.util.HashMap; import java.util.Map; import org.flowable.cmmn.api.repository.CaseDefinition; import org.flowable.cmmn.api.runtime.CaseInstance; import org.flowable.cmmn.api.runtime.PlanItemInstance; import org.flowable.cmmn.engine.CmmnEngineConfiguration; import org.flowable.cmmn.engine.test.CmmnDeployment; import org.flowable.cmmn.engine.test.impl.CmmnJobTestHelper; import org.flowable.cmmn.test.impl.CustomCmmnConfigurationFlowableTestCase; import org.flowable.common.engine.api.FlowableException; import org.flowable.common.engine.api.scope.ScopeTypes; import org.flowable.common.engine.impl.logging.CmmnLoggingSessionConstants; import org.flowable.common.engine.impl.logging.LoggingSessionConstants; import org.flowable.common.engine.impl.logging.LoggingSessionUtil; import org.flowable.job.api.Job; import org.flowable.task.api.Task; import org.junit.Test; import com.fasterxml.jackson.databind.node.ObjectNode; public class ServiceTaskLoggingTest extends CustomCmmnConfigurationFlowableTestCase { protected Task task; @Override protected String getEngineName() { return "cmmnEngineWithServiceTaskLogging"; } @Override protected void configureConfiguration(CmmnEngineConfiguration cmmnEngineConfiguration) { cmmnEngineConfiguration.setLoggingListener(new 
FlowableLoggingListener()); // continuation of configureConfiguration started on the previous line
    }

    /**
     * Starts a one-human-task case and checks the six logging entries produced
     * by that single command: case started, plan item created, human task
     * created, assignee set, plan item state change, command context closed.
     * Entries are checked in order; the log number restarts at 1 per command.
     */
    @Test
    @CmmnDeployment(resources = "org/flowable/cmmn/test/logging/oneHumanTaskCase.cmmn")
    public void testBasicLogging() {
        FlowableLoggingListener.clear();
        CaseDefinition caseDefinition = cmmnRepositoryService.createCaseDefinitionQuery().caseDefinitionKey("oneHumanTaskCase").latestVersion().singleResult();
        CaseInstance caseInstance = cmmnRuntimeService.createCaseInstanceBuilder().caseDefinitionKey("oneHumanTaskCase").start();
        Task task = cmmnTaskService.createTaskQuery().caseInstanceId(caseInstance.getId()).singleResult();
        assertThat(FlowableLoggingListener.TEST_LOGGING_NODES).hasSize(6);
        // loggingItemCounter indexes the captured list; loggingNumberCounter is the
        // engine-assigned per-command sequence number embedded in each node.
        int loggingItemCounter = 0;
        int loggingNumberCounter = 1;

        // Entry 1: case instance started (no element fields on case-level entries).
        ObjectNode loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(loggingItemCounter++);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_CASE_STARTED);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Started case instance with id " + caseInstance.getId());
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.has("elementId")).isFalse();
        assertThat(loggingNode.has("elementName")).isFalse();
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(loggingNumberCounter++);
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // Entry 2: plan item instance created in state "available".
        loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(loggingItemCounter++);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_PLAN_ITEM_CREATED);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Plan item instance created with type humantask, new state available");
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get("elementId").asText()).isEqualTo("theTask");
        assertThat(loggingNode.get("elementName").asText()).isEqualTo("The Task");
        assertThat(loggingNode.get("elementType").asText()).isEqualTo("HumanTask");
        assertThat(loggingNode.get("state").asText()).isEqualTo("available");
        assertThat(loggingNode.get("newState").asText()).isEqualTo("available");
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(loggingNumberCounter++);
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // Entry 3: the human task entity is created and linked to the plan item.
        loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(loggingItemCounter++);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_HUMAN_TASK_CREATE);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Human task 'The Task' created");
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get("taskId").asText()).isEqualTo(task.getId());
        assertThat(loggingNode.get("taskName").asText()).isEqualTo("The Task");
        assertThat(loggingNode.get("elementId").asText()).isEqualTo("theTask");
        assertThat(loggingNode.get("elementName").asText()).isEqualTo("The Task");
        assertThat(loggingNode.get("elementType").asText()).isEqualTo("HumanTask");
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(loggingNumberCounter++);
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // Entry 4: the assignee from the case model is applied to the task.
        loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(loggingItemCounter++);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_HUMAN_TASK_SET_ASSIGNEE);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Set task assignee value to johnDoe");
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get("taskId").asText()).isEqualTo(task.getId());
        assertThat(loggingNode.get("taskName").asText()).isEqualTo("The Task");
        assertThat(loggingNode.get("elementId").asText()).isEqualTo("theTask");
        assertThat(loggingNode.get("elementName").asText()).isEqualTo("The Task");
        assertThat(loggingNode.get("elementType").asText()).isEqualTo("HumanTask");
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(loggingNumberCounter++);
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // Entry 5: plan item transitions available -> active.
        loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(loggingItemCounter++);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_PLAN_ITEM_NEW_STATE);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Plan item instance state change with type humantask, old state available, new state active");
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get("elementId").asText()).isEqualTo("theTask");
        assertThat(loggingNode.get("elementName").asText()).isEqualTo("The Task");
        assertThat(loggingNode.get("elementType").asText()).isEqualTo("HumanTask");
        assertThat(loggingNode.get("state").asText()).isEqualTo("active");
        assertThat(loggingNode.get("oldState").asText()).isEqualTo("available");
        assertThat(loggingNode.get("newState").asText()).isEqualTo("active");
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(loggingNumberCounter++);
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // Entry 6: command context closed — note the generic (non-CMMN) constant here.
        loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(loggingItemCounter++);
        assertThat(loggingNode.get("type").asText()).isEqualTo(LoggingSessionConstants.TYPE_COMMAND_CONTEXT_CLOSE);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Closed command context for cmmn engine");
        assertThat(loggingNode.get("engineType").asText()).isEqualTo("cmmn");
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(loggingNumberCounter++);
assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull(); // final check of testBasicLogging
    }

    /**
     * Starts a one-service-task case (a Java delegate that sets the variable
     * 'javaDelegate' to "executed") and checks all nine logging entries of the
     * start command: case started, plan item created, service task enter,
     * variable create, service task exit, two plan item state changes
     * (available->active, active->completed), case completed, context close.
     */
    @Test
    @CmmnDeployment(resources = "org/flowable/cmmn/test/logging/oneServiceTaskCase.cmmn")
    public void testServiceTaskLogging() {
        FlowableLoggingListener.clear();
        CaseDefinition caseDefinition = cmmnRepositoryService.createCaseDefinitionQuery().caseDefinitionKey("oneServiceTaskCase").latestVersion().singleResult();
        CaseInstance caseInstance = cmmnRuntimeService.createCaseInstanceBuilder().caseDefinitionKey("oneServiceTaskCase").start();
        assertThat(FlowableLoggingListener.TEST_LOGGING_NODES).hasSize(9);
        int loggingItemCounter = 0;
        int loggingNumberCounter = 1;

        // Entry 1: case instance started.
        ObjectNode loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(loggingItemCounter++);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_CASE_STARTED);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Started case instance with id " + caseInstance.getId());
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.has("elementId")).isFalse();
        assertThat(loggingNode.has("elementName")).isFalse();
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(loggingNumberCounter++);
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // Entry 2: service-task plan item created in state "available".
        loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(loggingItemCounter++);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_PLAN_ITEM_CREATED);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Plan item instance created with type servicetask, new state available");
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get("elementId").asText()).isEqualTo("theTask");
        assertThat(loggingNode.get("elementName").asText()).isEqualTo("The Task");
        assertThat(loggingNode.get("elementType").asText()).isEqualTo("ServiceTask");
        assertThat(loggingNode.get("state").asText()).isEqualTo("available");
        assertThat(loggingNode.get("newState").asText()).isEqualTo("available");
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(loggingNumberCounter++);
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // Entry 3: delegate execution begins.
        loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(loggingItemCounter++);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_SERVICE_TASK_ENTER);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Executing service task with java class org.flowable.cmmn.test.delegate.TestJavaDelegate");
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get("elementId").asText()).isEqualTo("theTask");
        assertThat(loggingNode.get("elementName").asText()).isEqualTo("The Task");
        assertThat(loggingNode.get("elementType").asText()).isEqualTo("ServiceTask");
        assertThat(loggingNode.get("state").asText()).isEqualTo("active");
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(loggingNumberCounter++);
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // Entry 4: the delegate creates the 'javaDelegate' variable; variable
        // entries carry variable fields instead of element fields, and no subScopeId.
        loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(loggingItemCounter++);
        assertThat(loggingNode.get("type").asText()).isEqualTo(LoggingSessionConstants.TYPE_VARIABLE_CREATE);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Variable 'javaDelegate' created");
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.has("subScopeId")).isFalse();
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get("variableName").asText()).isEqualTo("javaDelegate");
        assertThat(loggingNode.get("variableType").asText()).isEqualTo("string");
        assertThat(loggingNode.get("variableValue").asText()).isEqualTo("executed");
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(loggingNumberCounter++);
        assertThat(loggingNode.get("variableRawValue").asText()).isEqualTo("executed");

        // Entry 5: delegate execution ends.
        loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(loggingItemCounter++);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_SERVICE_TASK_EXIT);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Executed service task with java class org.flowable.cmmn.test.delegate.TestJavaDelegate");
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get("elementId").asText()).isEqualTo("theTask");
        assertThat(loggingNode.get("elementName").asText()).isEqualTo("The Task");
        assertThat(loggingNode.get("elementType").asText()).isEqualTo("ServiceTask");
        assertThat(loggingNode.get("state").asText()).isEqualTo("active");
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(loggingNumberCounter++);
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // Entry 6: state change available -> active (logged after enter/exit above).
        loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(loggingItemCounter++);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_PLAN_ITEM_NEW_STATE);
        assertThat(loggingNode.get("message").asText())
                .isEqualTo("Plan item instance state change with type servicetask, old state available, new state active");
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get("elementId").asText()).isEqualTo("theTask");
        assertThat(loggingNode.get("elementName").asText()).isEqualTo("The Task");
        assertThat(loggingNode.get("elementType").asText()).isEqualTo("ServiceTask");
        assertThat(loggingNode.get("state").asText()).isEqualTo("active");
        assertThat(loggingNode.get("oldState").asText()).isEqualTo("available");
        assertThat(loggingNode.get("newState").asText()).isEqualTo("active");
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(loggingNumberCounter++);
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // Entry 7: state change active -> completed.
        loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(loggingItemCounter++);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_PLAN_ITEM_NEW_STATE);
        assertThat(loggingNode.get("message").asText())
                .isEqualTo("Plan item instance state change with type servicetask, old state active, new state completed");
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get("elementId").asText()).isEqualTo("theTask");
        assertThat(loggingNode.get("elementName").asText()).isEqualTo("The Task");
        assertThat(loggingNode.get("elementType").asText()).isEqualTo("ServiceTask");
        assertThat(loggingNode.get("state").asText()).isEqualTo("completed");
        assertThat(loggingNode.get("oldState").asText()).isEqualTo("active");
        assertThat(loggingNode.get("newState").asText()).isEqualTo("completed");
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(loggingNumberCounter++);
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // Entry 8: the case auto-completes once the only plan item is done.
        loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(loggingItemCounter++);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_CASE_COMPLETED);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Completed case instance with id " + caseInstance.getId());
assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId()); // continuing entry 8 (case completed) of testServiceTaskLogging
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.has("elementId")).isFalse();
        assertThat(loggingNode.has("elementName")).isFalse();
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(loggingNumberCounter++);
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // Entry 9: command context closed — here the CMMN-specific constant is used
        // (testBasicLogging uses the generic LoggingSessionConstants one; both are
        // compared as strings, so they presumably hold the same value — TODO confirm).
        loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(loggingItemCounter++);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_COMMAND_CONTEXT_CLOSE);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Closed command context for cmmn engine");
        assertThat(loggingNode.get("engineType").asText()).isEqualTo("cmmn");
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(loggingNumberCounter++);
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();
    }

    /**
     * Exercises logging across three separate commands on a one-human-task
     * case: (1) start with an initial variable, (2) setVariables — one update,
     * one create, one context close, (3) task completion — task complete, plan
     * item state change, case completed, context close. Log numbers restart at
     * 1 for each command, which is why the indices below reset mid-test.
     */
    @Test
    @CmmnDeployment(resources="org/flowable/cmmn/test/logging/oneHumanTaskCase.cmmn")
    public void testCompleteTaskLogging() {
        FlowableLoggingListener.clear();
        CaseDefinition caseDefinition = cmmnRepositoryService.createCaseDefinitionQuery().caseDefinitionKey("oneHumanTaskCase").latestVersion().singleResult();
        CaseInstance caseInstance = cmmnRuntimeService.createCaseInstanceBuilder()
                .variable("newVariable", "test")
                .caseDefinitionKey("oneHumanTaskCase")
                .start();
        PlanItemInstance planItemInstance = cmmnRuntimeService.createPlanItemInstanceQuery().caseInstanceId(caseInstance.getId()).singleResult();
        // Start command produced 7 entries; discard them, the focus here is what follows.
        assertThat(FlowableLoggingListener.TEST_LOGGING_NODES).hasSize(7);
        FlowableLoggingListener.clear();
        Map<String, Object> variableMap = new HashMap<>();
        variableMap.put("newVariable", "newValue");
        variableMap.put("numVar", 123);
        cmmnRuntimeService.setVariables(caseInstance.getId(), variableMap);
        Task task = cmmnTaskService.createTaskQuery().caseInstanceId(caseInstance.getId()).singleResult();
        cmmnTaskService.complete(task.getId());
        // setVariables contributes 3 entries, complete contributes 4.
        assertThat(FlowableLoggingListener.TEST_LOGGING_NODES).hasSize(7);

        // Node 0: existing variable updated — carries both new and old value fields.
        ObjectNode loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(0);
        assertThat(loggingNode.get("type").asText()).isEqualTo(LoggingSessionConstants.TYPE_VARIABLE_UPDATE);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Variable 'newVariable' updated");
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.has("subScopeId")).isFalse();
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get("variableName").asText()).isEqualTo("newVariable");
        assertThat(loggingNode.get("variableType").asText()).isEqualTo("string");
        assertThat(loggingNode.get("variableValue").asText()).isEqualTo("newValue");
        assertThat(loggingNode.get("variableRawValue").asText()).isEqualTo("newValue");
        assertThat(loggingNode.get("oldVariableType").asText()).isEqualTo("string");
        assertThat(loggingNode.get("oldVariableValue").asText()).isEqualTo("test");
        assertThat(loggingNode.get("oldVariableRawValue").asText()).isEqualTo("test");
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(1);
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // Node 1: new integer variable created; raw value is its string form.
        loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(1);
        assertThat(loggingNode.get("type").asText()).isEqualTo(LoggingSessionConstants.TYPE_VARIABLE_CREATE);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Variable 'numVar' created");
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.has("subScopeId")).isFalse();
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get("variableName").asText()).isEqualTo("numVar");
        assertThat(loggingNode.get("variableType").asText()).isEqualTo("integer");
        assertThat(loggingNode.get("variableValue").asInt()).isEqualTo(123);
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(2);
        assertThat(loggingNode.get("variableRawValue").asText()).isEqualTo("123");

        // Node 2: context close ending the setVariables command.
        loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(2);
        assertThat(loggingNode.get("type").asText()).isEqualTo(LoggingSessionConstants.TYPE_COMMAND_CONTEXT_CLOSE);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Closed command context for cmmn engine");
        assertThat(loggingNode.get("engineType").asText()).isEqualTo("cmmn");
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(3);
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // Node 3: human task completed — includes task detail fields and the
        // plan item instance id as subScopeId; log number restarts at 1 (new command).
        loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(3);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_HUMAN_TASK_COMPLETE);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Human task 'The Task' completed");
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("subScopeId").asText()).isEqualTo(planItemInstance.getId());
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get("elementId").asText()).isEqualTo("theTask");
        assertThat(loggingNode.get("elementType").asText()).isEqualTo("HumanTask");
        assertThat(loggingNode.get("elementName").asText()).isEqualTo("The Task");
        assertThat(loggingNode.get("taskId").asText()).isNotNull();
        assertThat(loggingNode.get("taskName").asText()).isEqualTo("The Task");
        assertThat(loggingNode.has("taskCategory")).isFalse();
        assertThat(loggingNode.has("taskDescription")).isFalse();
        assertThat(loggingNode.get("taskFormKey").asText()).isEqualTo("someKey");
        assertThat(loggingNode.get("taskPriority").asInt()).isEqualTo(50);
        assertThat(loggingNode.has("taskDueDate")).isFalse();
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(1);
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // Node 4: plan item transitions active -> completed.
        loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(4);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_PLAN_ITEM_NEW_STATE);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Plan item instance state change with type humantask, old state active, new state completed");
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get("elementId").asText()).isEqualTo("theTask");
        assertThat(loggingNode.get("elementName").asText()).isEqualTo("The Task");
        assertThat(loggingNode.get("elementType").asText()).isEqualTo("HumanTask");
        assertThat(loggingNode.get("state").asText()).isEqualTo("completed");
        assertThat(loggingNode.get("oldState").asText()).isEqualTo("active");
        assertThat(loggingNode.get("newState").asText()).isEqualTo("completed");
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(2);
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // Node 5: the case completes once its only plan item is done.
        loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(5);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_CASE_COMPLETED);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Completed case instance with id " + caseInstance.getId());
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(3);
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // Node 6: context close ending the task-completion command.
        loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(6);
        assertThat(loggingNode.get("type").asText()).isEqualTo(LoggingSessionConstants.TYPE_COMMAND_CONTEXT_CLOSE);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Closed command context for cmmn engine");
        assertThat(loggingNode.get("engineType").asText()).isEqualTo("cmmn");
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(4);
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();
        FlowableLoggingListener.clear();
    }

    /**
     * Starts a case with a sentry condition and checks the start-command log
     * entries; this method continues beyond the visible part of the file.
     */
    @Test
    @CmmnDeployment(resources = "org/flowable/cmmn/test/logging/sentryConditionCase.cmmn")
    public void testSentryConditionLogging() {
        FlowableLoggingListener.clear();
        CaseDefinition caseDefinition = cmmnRepositoryService.createCaseDefinitionQuery().caseDefinitionKey("conditionCase").latestVersion().singleResult();
        CaseInstance caseInstance = cmmnRuntimeService.createCaseInstanceBuilder().caseDefinitionKey("conditionCase").start();
        Task task = cmmnTaskService.createTaskQuery().caseInstanceId(caseInstance.getId()).singleResult();
        PlanItemInstance planItemInstance
= cmmnRuntimeService.createPlanItemInstanceQuery().caseInstanceId(caseInstance.getId()) // continuation of the planItemInstance lookup started on the previous line
                .planItemDefinitionId(task.getTaskDefinitionKey())
                .singleResult();
        assertThat(FlowableLoggingListener.TEST_LOGGING_NODES).hasSize(6);
        int loggingItemCounter = 0;
        int loggingNumberCounter = 1;

        // Entry 1: case instance started.
        ObjectNode loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(loggingItemCounter++);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_CASE_STARTED);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Started case instance with id " + caseInstance.getId());
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.has("elementId")).isFalse();
        assertThat(loggingNode.has("elementName")).isFalse();
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(loggingNumberCounter++);
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // Entry 2: human-task plan item "Task A" created in state "available".
        loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(loggingItemCounter++);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_PLAN_ITEM_CREATED);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Plan item instance created with type humantask, new state available");
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get("elementId").asText()).isEqualTo("taskA");
        assertThat(loggingNode.get("elementName").asText()).isEqualTo("Task A");
        assertThat(loggingNode.get("elementType").asText()).isEqualTo("HumanTask");
        assertThat(loggingNode.get("state").asText()).isEqualTo("available");
        assertThat(loggingNode.get("newState").asText()).isEqualTo("available");
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(loggingNumberCounter++);
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // Entry 3: the stage plan item is created (its sentry not yet satisfied).
        loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(loggingItemCounter++);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_PLAN_ITEM_CREATED);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Plan item instance created with type stage, new state available");
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get("elementId").asText()).isEqualTo("stage1");
        assertThat(loggingNode.get("elementName").asText()).isEqualTo("Stage 1");
        assertThat(loggingNode.get("elementType").asText()).isEqualTo("Stage");
        assertThat(loggingNode.get("state").asText()).isEqualTo("available");
        assertThat(loggingNode.get("newState").asText()).isEqualTo("available");
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(loggingNumberCounter++);
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // Entry 4: human task 'Task A' created.
        loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(loggingItemCounter++);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_HUMAN_TASK_CREATE);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Human task 'Task A' created");
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get("taskId").asText()).isEqualTo(task.getId());
        assertThat(loggingNode.get("taskName").asText()).isEqualTo("Task A");
        assertThat(loggingNode.get("elementId").asText()).isEqualTo("taskA");
        assertThat(loggingNode.get("elementName").asText()).isEqualTo("Task A");
        assertThat(loggingNode.get("elementType").asText()).isEqualTo("HumanTask");
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(loggingNumberCounter++);
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // Entry 5: Task A transitions available -> active.
        loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(loggingItemCounter++);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_PLAN_ITEM_NEW_STATE);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Plan item instance state change with type humantask, old state available, new state active");
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get("elementId").asText()).isEqualTo("taskA");
        assertThat(loggingNode.get("elementName").asText()).isEqualTo("Task A");
        assertThat(loggingNode.get("elementType").asText()).isEqualTo("HumanTask");
        assertThat(loggingNode.get("state").asText()).isEqualTo("active");
        assertThat(loggingNode.get("oldState").asText()).isEqualTo("available");
        assertThat(loggingNode.get("newState").asText()).isEqualTo("active");
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(loggingNumberCounter++);
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // Entry 6: context close ending the start command.
        loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(loggingItemCounter++);
        assertThat(loggingNode.get("type").asText()).isEqualTo(LoggingSessionConstants.TYPE_COMMAND_CONTEXT_CLOSE);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Closed command context for cmmn engine");
        assertThat(loggingNode.get("engineType").asText()).isEqualTo("cmmn");
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(loggingNumberCounter++);
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // Completing the task violates the sentry condition, so the command fails
        // and only the failure-path entries (5 of them) are logged.
        FlowableLoggingListener.clear();
        assertThatThrownBy(() -> cmmnTaskService.complete(task.getId()))
                .isInstanceOf(FlowableException.class);
        assertThat(FlowableLoggingListener.TEST_LOGGING_NODES).hasSize(5);
        loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(0);
assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_HUMAN_TASK_COMPLETE); assertThat(loggingNode.get("message").asText()).isEqualTo("Human task 'Task A' completed"); assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId()); assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN); assertThat(loggingNode.get("subScopeId").asText()).isEqualTo(planItemInstance.getId()); assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId()); assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey()); assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName()); assertThat(loggingNode.get("elementId").asText()).isEqualTo("taskA"); assertThat(loggingNode.get("elementType").asText()).isEqualTo("HumanTask"); assertThat(loggingNode.get("elementName").asText()).isEqualTo("Task A"); assertThat(loggingNode.get("taskId").asText()).isNotNull(); assertThat(loggingNode.get("taskName").asText()).isEqualTo("Task A"); assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(1); assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull(); loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(1); assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_PLAN_ITEM_NEW_STATE); assertThat(loggingNode.get("message").asText()).isEqualTo("Plan item instance state change with type humantask, old state active, new state completed"); assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId()); assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN); assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId()); assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey()); 
assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName()); assertThat(loggingNode.get("elementId").asText()).isEqualTo("taskA"); assertThat(loggingNode.get("elementName").asText()).isEqualTo("Task A"); assertThat(loggingNode.get("elementType").asText()).isEqualTo("HumanTask"); assertThat(loggingNode.get("state").asText()).isEqualTo("completed"); assertThat(loggingNode.get("oldState").asText()).isEqualTo("active"); assertThat(loggingNode.get("newState").asText()).isEqualTo("completed"); assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(2); assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull(); loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(2); assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_EVALUATE_SENTRY); assertThat(loggingNode.get("message").asText()).isEqualTo("Evaluate sentry parts for Stage 1"); assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId()); assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN); assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId()); assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey()); assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName()); assertThat(loggingNode.get("elementId").asText()).isEqualTo("stage1"); assertThat(loggingNode.get("elementName").asText()).isEqualTo("Stage 1"); assertThat(loggingNode.get("elementType").asText()).isEqualTo("Stage"); assertThat(loggingNode.get("onParts")).hasSize(1); assertThat(loggingNode.get("onParts").get(0).get("id").asText()).isEqualTo("sentryOnPart1"); assertThat(loggingNode.get("onParts").get(0).get("source").asText()).isEqualTo("planItem1"); assertThat(loggingNode.get("onParts").get(0).get("elementId").asText()).isEqualTo("taskA"); 
assertThat(loggingNode.get("onParts").get(0).get("standardEvent").asText()).isEqualTo("complete"); assertThat(loggingNode.get("ifPart").get("condition").asText()).isEqualTo("${gotoStage1}"); assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(3); assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull(); loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(3); assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_EVALUATE_SENTRY_FAILED); assertThat(loggingNode.get("message").asText()).isEqualTo("IfPart evaluation failed for Stage 1"); assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId()); assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN); assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId()); assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey()); assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName()); assertThat(loggingNode.get("elementId").asText()).isEqualTo("stage1"); assertThat(loggingNode.get("elementName").asText()).isEqualTo("Stage 1"); assertThat(loggingNode.get("elementType").asText()).isEqualTo("Stage"); assertThat(loggingNode.get("exception").get("message").asText()).isEqualTo("Unknown property used in expression: ${gotoStage1}"); assertThat(loggingNode.get("exception").get("stackTrace").asText()).isNotNull(); assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(4); assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull(); loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(4); assertThat(loggingNode.get("type").asText()).isEqualTo(LoggingSessionConstants.TYPE_COMMAND_CONTEXT_CLOSE_FAILURE); assertThat(loggingNode.get("message").asText()).isEqualTo("Exception at closing command context for cmmn engine"); 
        // (continuation of the preceding test) final command-context-close entry: engine metadata and scope info
        assertThat(loggingNode.get("engineType").asText()).isEqualTo("cmmn");
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(5);
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        FlowableLoggingListener.clear();
    }

    /**
     * Verifies the logging-session entries produced for a case with a single async service task
     * (deployment {@code oneAsyncServiceTaskCase.cmmn}, case definition key {@code oneServiceTaskCase}).
     * Phase 1 (case start): exactly 5 entries in a fixed order — case started, plan item created,
     * async job created, plan item state change to async-active, command context closed.
     * Phase 2 (async job execution): exactly 11 entries; only the lock-job entry has a guaranteed
     * position (index 0), so the remaining entries are indexed by type into a map and checked via
     * relative log-number ordering instead of absolute positions.
     */
    @Test
    @CmmnDeployment(resources = "org/flowable/cmmn/test/logging/oneAsyncServiceTaskCase.cmmn")
    public void testAsyncServiceTaskLogging() {
        FlowableLoggingListener.clear();
        CaseDefinition caseDefinition = cmmnRepositoryService.createCaseDefinitionQuery().caseDefinitionKey("oneServiceTaskCase").latestVersion().singleResult();
        CaseInstance caseInstance = cmmnRuntimeService.createCaseInstanceBuilder().caseDefinitionKey("oneServiceTaskCase").start();
        PlanItemInstance planItemInstance = cmmnRuntimeService.createPlanItemInstanceQuery().caseInstanceId(caseInstance.getId()).planItemDefinitionId("theTask").singleResult();
        Job job = cmmnManagementService.createJobQuery().caseInstanceId(caseInstance.getId()).singleResult();

        // Phase 1: starting the case (up to the async boundary) must log exactly 5 entries.
        assertThat(FlowableLoggingListener.TEST_LOGGING_NODES).hasSize(5);
        int loggingItemCounter = 0;   // index into TEST_LOGGING_NODES
        int loggingNumberCounter = 1; // expected monotonically increasing __logNumber

        // Entry 1: case instance started (no elementId/elementName on case-level entries).
        ObjectNode loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(loggingItemCounter++);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_CASE_STARTED);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Started case instance with id " + caseInstance.getId());
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.has("elementId")).isFalse();
        assertThat(loggingNode.has("elementName")).isFalse();
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(loggingNumberCounter++);
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // Entry 2: service-task plan item created in state "available".
        loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(loggingItemCounter++);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_PLAN_ITEM_CREATED);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Plan item instance created with type servicetask, new state available");
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("subScopeId").asText()).isEqualTo(planItemInstance.getId());
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get("elementId").asText()).isEqualTo("theTask");
        assertThat(loggingNode.get("elementName").asText()).isEqualTo("The Task");
        assertThat(loggingNode.get("elementType").asText()).isEqualTo("ServiceTask");
        assertThat(loggingNode.get("state").asText()).isEqualTo("available");
        assertThat(loggingNode.get("newState").asText()).isEqualTo("available");
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(loggingNumberCounter++);
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // Entry 3: async job created for the service task; carries the job id.
        loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(loggingItemCounter++);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_SERVICE_TASK_ASYNC_JOB);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Created async job for theTask, with job id " + job.getId());
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("subScopeId").asText()).isEqualTo(planItemInstance.getId());
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get("elementId").asText()).isEqualTo("theTask");
        assertThat(loggingNode.get("elementName").asText()).isEqualTo("The Task");
        assertThat(loggingNode.get("elementType").asText()).isEqualTo("ServiceTask");
        assertThat(loggingNode.get("jobId").asText()).isEqualTo(job.getId());
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(loggingNumberCounter++);
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // Entry 4: plan item state change available -> async-active.
        loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(loggingItemCounter++);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_PLAN_ITEM_NEW_STATE);
        assertThat(loggingNode.get("message").asText())
                .isEqualTo("Plan item instance state change with type servicetask, old state available, new state async-active");
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("subScopeId").asText()).isEqualTo(planItemInstance.getId());
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get("elementId").asText()).isEqualTo("theTask");
        assertThat(loggingNode.get("elementName").asText()).isEqualTo("The Task");
        assertThat(loggingNode.get("elementType").asText()).isEqualTo("ServiceTask");
        assertThat(loggingNode.get("state").asText()).isEqualTo("async-active");
        assertThat(loggingNode.get("oldState").asText()).isEqualTo("available");
        assertThat(loggingNode.get("newState").asText()).isEqualTo("async-active");
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(loggingNumberCounter++);
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // Entry 5: the command context that started the case is closed.
        loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(loggingItemCounter++);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_COMMAND_CONTEXT_CLOSE);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Closed command context for cmmn engine");
        assertThat(loggingNode.get("engineType").asText()).isEqualTo("cmmn");
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(loggingNumberCounter++);
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // Phase 2: run the async executor until the job completes, which should log exactly 11 entries.
        FlowableLoggingListener.clear();
        CmmnJobTestHelper.waitForJobExecutorToProcessAllJobs(cmmnEngineConfiguration, 5000, 200, true);
        assertThat(FlowableLoggingListener.TEST_LOGGING_NODES).hasSize(11);

        // The lock-job entry is the only one with a guaranteed absolute position (index 0, log number 1).
        loggingNode = FlowableLoggingListener.TEST_LOGGING_NODES.get(0);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_SERVICE_TASK_LOCK_JOB);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Locking job for theTask, with job id " + job.getId());
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("subScopeId").asText()).isEqualTo(planItemInstance.getId());
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get("elementId").asText()).isEqualTo("theTask");
        assertThat(loggingNode.get("elementName").asText()).isEqualTo("The Task");
        assertThat(loggingNode.get("elementType").asText()).isEqualTo("ServiceTask");
        assertThat(loggingNode.get("elementSubType").asText()).isEqualTo("org.flowable.cmmn.test.delegate.TestJavaDelegate");
        assertThat(loggingNode.get("jobId").asText()).isEqualTo(job.getId());
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt()).isEqualTo(1);
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // Index the remaining entries by type. Types that occur more than once
        // (plan-item state changes, command-context closes) get an occurrence-number
        // prefix ("1...", "2...") so each instance stays addressable.
        Map<String, ObjectNode> loggingMap = new HashMap<>();
        int planItemNewStateCounter = 1;
        int commandContextCounter = 1;
        for (ObjectNode logObjectNode : FlowableLoggingListener.TEST_LOGGING_NODES) {
            String logType = logObjectNode.get("type").asText();
            if (CmmnLoggingSessionConstants.TYPE_PLAN_ITEM_NEW_STATE.equals(logType)) {
                logType = planItemNewStateCounter + logType;
                planItemNewStateCounter++;
            } else if (CmmnLoggingSessionConstants.TYPE_COMMAND_CONTEXT_CLOSE.equals(logType)) {
                logType = commandContextCounter + logType;
                commandContextCounter++;
            }
            loggingMap.put(logType, logObjectNode);
        }

        // Async job execution entry. beforeJobNumber anchors the relative ordering checks below.
        loggingNode = loggingMap.get(CmmnLoggingSessionConstants.TYPE_SERVICE_TASK_EXECUTE_ASYNC_JOB);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_SERVICE_TASK_EXECUTE_ASYNC_JOB);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Executing async job for theTask, with job id " + job.getId());
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("subScopeId").asText()).isEqualTo(planItemInstance.getId());
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get("elementId").asText()).isEqualTo("theTask");
        assertThat(loggingNode.get("elementName").asText()).isEqualTo("The Task");
        assertThat(loggingNode.get("elementType").asText()).isEqualTo("ServiceTask");
        assertThat(loggingNode.get("jobId").asText()).isEqualTo(job.getId());
        int beforeJobNumber = loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt();
        assertThat(beforeJobNumber).isGreaterThan(0);
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // Delegate entry: the service task's java class starts executing (must be logged after job pickup).
        loggingNode = loggingMap.get(CmmnLoggingSessionConstants.TYPE_SERVICE_TASK_ENTER);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_SERVICE_TASK_ENTER);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Executing service task with java class org.flowable.cmmn.test.delegate.TestJavaDelegate");
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get("elementId").asText()).isEqualTo("theTask");
        assertThat(loggingNode.get("elementName").asText()).isEqualTo("The Task");
        assertThat(loggingNode.get("elementType").asText()).isEqualTo("ServiceTask");
        assertThat(loggingNode.get("state").asText()).isEqualTo("active");
        int newJobNumber = loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt();
        assertThat(newJobNumber).isGreaterThan(beforeJobNumber);
        beforeJobNumber = newJobNumber;
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // Delegate exit entry: the java class finished (logged after the enter entry).
        loggingNode = loggingMap.get(CmmnLoggingSessionConstants.TYPE_SERVICE_TASK_EXIT);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_SERVICE_TASK_EXIT);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Executed service task with java class org.flowable.cmmn.test.delegate.TestJavaDelegate");
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get("elementId").asText()).isEqualTo("theTask");
        assertThat(loggingNode.get("elementName").asText()).isEqualTo("The Task");
        assertThat(loggingNode.get("elementType").asText()).isEqualTo("ServiceTask");
        assertThat(loggingNode.get("state").asText()).isEqualTo("active");
        newJobNumber = loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt();
        assertThat(newJobNumber).isGreaterThan(beforeJobNumber);
        beforeJobNumber = newJobNumber;
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // Job unlock entry. Note: only presence of the log number is asserted here,
        // not its ordering relative to beforeJobNumber.
        loggingNode = loggingMap.get(CmmnLoggingSessionConstants.TYPE_SERVICE_TASK_UNLOCK_JOB);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_SERVICE_TASK_UNLOCK_JOB);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Unlocking job for theTask, with job id " + job.getId());
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("subScopeId").asText()).isEqualTo(planItemInstance.getId());
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get("elementId").asText()).isEqualTo("theTask");
        assertThat(loggingNode.get("elementName").asText()).isEqualTo("The Task");
        assertThat(loggingNode.get("elementType").asText()).isEqualTo("ServiceTask");
        assertThat(loggingNode.get("jobId").asText()).isEqualTo(job.getId());
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER)).isNotNull();
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // First state change of phase 2: async-active -> active.
        loggingNode = loggingMap.get("1" + CmmnLoggingSessionConstants.TYPE_PLAN_ITEM_NEW_STATE);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_PLAN_ITEM_NEW_STATE);
        assertThat(loggingNode.get("message").asText())
                .isEqualTo("Plan item instance state change with type servicetask, old state async-active, new state active");
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("subScopeId").asText()).isEqualTo(planItemInstance.getId());
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get("elementId").asText()).isEqualTo("theTask");
        assertThat(loggingNode.get("elementName").asText()).isEqualTo("The Task");
        assertThat(loggingNode.get("elementType").asText()).isEqualTo("ServiceTask");
        assertThat(loggingNode.get("state").asText()).isEqualTo("active");
        assertThat(loggingNode.get("oldState").asText()).isEqualTo("async-active");
        assertThat(loggingNode.get("newState").asText()).isEqualTo("active");
        newJobNumber = loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt();
        assertThat(newJobNumber).isGreaterThan(beforeJobNumber);
        beforeJobNumber = newJobNumber;
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // Second state change of phase 2: active -> completed.
        loggingNode = loggingMap.get("2" + CmmnLoggingSessionConstants.TYPE_PLAN_ITEM_NEW_STATE);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_PLAN_ITEM_NEW_STATE);
        assertThat(loggingNode.get("message").asText())
                .isEqualTo("Plan item instance state change with type servicetask, old state active, new state completed");
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("subScopeId").asText()).isEqualTo(planItemInstance.getId());
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get("elementId").asText()).isEqualTo("theTask");
        assertThat(loggingNode.get("elementName").asText()).isEqualTo("The Task");
        assertThat(loggingNode.get("elementType").asText()).isEqualTo("ServiceTask");
        assertThat(loggingNode.get("state").asText()).isEqualTo("completed");
        assertThat(loggingNode.get("oldState").asText()).isEqualTo("active");
        assertThat(loggingNode.get("newState").asText()).isEqualTo("completed");
        newJobNumber = loggingNode.get(LoggingSessionUtil.LOG_NUMBER).asInt();
        assertThat(newJobNumber).isGreaterThan(beforeJobNumber);
        beforeJobNumber = newJobNumber;
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).asText()).isNotNull();

        // Case completion entry (case-level: no elementId/elementName).
        loggingNode = loggingMap.get(CmmnLoggingSessionConstants.TYPE_CASE_COMPLETED);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_CASE_COMPLETED);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Completed case instance with id " + caseInstance.getId());
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.has("elementId")).isFalse();
        assertThat(loggingNode.has("elementName")).isFalse();
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).isNull()).isFalse();
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).isNull()).isFalse();

        // Two command contexts were opened and closed during async execution; check both close entries.
        loggingNode = loggingMap.get("1" + CmmnLoggingSessionConstants.TYPE_COMMAND_CONTEXT_CLOSE);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_COMMAND_CONTEXT_CLOSE);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Closed command context for cmmn engine");
        assertThat(loggingNode.get("engineType").asText()).isEqualTo("cmmn");
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).isNull()).isFalse();
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).isNull()).isFalse();

        loggingNode = loggingMap.get("2" + CmmnLoggingSessionConstants.TYPE_COMMAND_CONTEXT_CLOSE);
        assertThat(loggingNode.get("type").asText()).isEqualTo(CmmnLoggingSessionConstants.TYPE_COMMAND_CONTEXT_CLOSE);
        assertThat(loggingNode.get("message").asText()).isEqualTo("Closed command context for cmmn engine");
        assertThat(loggingNode.get("engineType").asText()).isEqualTo("cmmn");
        assertThat(loggingNode.get("scopeId").asText()).isEqualTo(caseInstance.getId());
        assertThat(loggingNode.get("scopeType").asText()).isEqualTo(ScopeTypes.CMMN);
        assertThat(loggingNode.get("scopeDefinitionId").asText()).isEqualTo(caseDefinition.getId());
        assertThat(loggingNode.get("scopeDefinitionKey").asText()).isEqualTo(caseDefinition.getKey());
        assertThat(loggingNode.get("scopeDefinitionName").asText()).isEqualTo(caseDefinition.getName());
        assertThat(loggingNode.get(LoggingSessionUtil.LOG_NUMBER).isNull()).isFalse();
        assertThat(loggingNode.get(LoggingSessionUtil.TIMESTAMP).isNull()).isFalse();
    }
}
/***
 * Copyright (c) 2010 Attentec AB, http://www.attentec.se
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.attentec;

import java.io.ByteArrayInputStream;
import java.text.ParseException;
import java.util.Date;

import android.app.Dialog;
import android.content.SharedPreferences;
import android.content.res.Resources;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.drawable.ColorDrawable;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.util.Log;

import com.attentec.AttentecService.ServiceUpdateUIListener;
import com.google.android.maps.GeoPoint;
import com.google.android.maps.MapActivity;
import com.google.android.maps.MapController;
import com.google.android.maps.MapView;
import com.google.android.maps.MyLocationOverlay;

/**
 * Shows a map with contacts at their location, with
 * clickable markers for easy contact.
 *
 * <p>Contact positions are read from the local database ({@link DatabaseAdapter});
 * the map is re-centered and re-zoomed in {@link #onResume()} so that all fresh
 * locations (and, when known, the user's own position) are visible.
 *
 * @author David Granqvist
 * @author Malte Lenz
 * @author Johannes Nordkvist
 */
public class CloseToYou extends MapActivity {

    /** Tag used for logging. */
    private static final String TAG = "Attentec";

    /** Minimum distance to show on screen, so defines a limit on max zoom-in. */
    private static final int MAX_ZOOM = 3000;

    /** Maximum distance to show on screen. */
    private static final int MIN_ZOOM = 3000000;

    // Declares map and its controller.
    /** Instance of the map view. */
    private MapView mapView;

    /** Instance of the map controller. */
    private MapController mc;

    /** Database adapter. */
    private DatabaseAdapter dbh;

    /** Overlay that should show my location. */
    private MyLocationOverlay mlo;

    /** Database cursor for handling locations. */
    private Cursor locationcursor;

    /** Used to show contact info dialog. */
    private long dialogid;

    /** Used to show contact info dialog. */
    private ContactDialogCreator cdc;

    /** Resources . */
    private Resources res;

    /** Maximum latitude that should be showed on the map. */
    private int maxLat = Integer.MIN_VALUE;

    /** Maximum longitude that should be showed on the map. */
    private int maxLng = Integer.MIN_VALUE;

    /** Minimum latitude that should be showed on the map. */
    private int minLat = Integer.MAX_VALUE;

    /** Minimum longitude that should be showed on the map. */
    private int minLng = Integer.MAX_VALUE;

    /** Vertical padding for map bounds. */
    private final Float vpadding = 0.1f;

    /** Bottom padding for map bounds. */
    private final Float hpaddingBottom = 0.1f;

    /** Top padding for map bounds (larger so marker balloons fit above the pins). */
    private final Float hpaddingTop = 0.4f;

    /** Used for converting to GeoPoint integer (degrees to micro-degrees). */
    private static final int ONE_MILLION = 1000000;

    /** Minimum length for contact info (email/phone). */
    private static final int MINIMUM_ACCEPTED_CONTACT_INFO_LENGTH = 4;

    /** Overlay holding one tappable marker per contact. */
    private ContactItemizedOverlay itemizedOverlay;

    /**
     * Called when the activity is first created: opens the database, wires up
     * the map view and overlay, and registers for service-driven UI updates.
     *
     * @param savedInstanceState previously saved state, passed to the superclass
     */
    @Override
    public final void onCreate(final Bundle savedInstanceState) {
        Log.d(TAG, "CloseToYou onCreate");
        super.onCreate(savedInstanceState);
        setContentView(R.layout.closetoyou);
        res = getResources();

        //Setup database connection
        dbh = new DatabaseAdapter(this);
        dbh.open();
        locationcursor = dbh.getLocations();
        locationcursor.moveToFirst();
        startManagingCursor(locationcursor);

        //Setup the map
        mapView = (MapView) findViewById(R.id.mapView);

        //Create ContactItemizedOverlay for mapView
        itemizedOverlay = new ContactItemizedOverlay(new ColorDrawable(), mapView);

        mapView.setBuiltInZoomControls(true);
        mapView.displayZoomControls(true);
        mapView.setSatellite(true);
        mc = mapView.getController();

        // Let the background service push location refreshes into this activity.
        AttentecService.setCloseToYouUpdateListener(new ServiceUpdateUIListener() {
            public void updateUI() {
                //update the list of contacts
                updateLocations();
            }

            public void endActivity() {
                //Close activity (go back to login screen)
                finish();
            }
        });

        cdc = new ContactDialogCreator(this, res, 0, dbh);
    }

    @Override
    protected final void onPause() {
        //disable own location, as it will keep the GPS alive
        // mlo is created in onResume(); guard defensively in case it was never set.
        if (mlo != null) {
            mlo.disableMyLocation();
        }
        super.onPause();
    }

    @Override
    protected final void onResume() {
        mlo = new MyLocationOverlay(this, mapView);
        mlo.enableMyLocation();
        updateLocations();

        // Pad the computed bounds. Capture the spans BEFORE mutating the bounds:
        // the previous code updated maxLat/maxLng first and then computed the
        // min-side padding from the already-inflated span, giving more bottom/left
        // padding than configured.
        final int latSpan = maxLat - minLat;
        final int lngSpan = maxLng - minLng;
        maxLat += (int) (latSpan * hpaddingTop);
        minLat -= (int) (latSpan * hpaddingBottom);
        maxLng += (int) (lngSpan * vpadding);
        minLng -= (int) (lngSpan * vpadding);

        // Never zoom in tighter than MAX_ZOOM micro-degrees of latitude.
        mc.zoomToSpan(Math.max(Math.abs(maxLat - minLat), MAX_ZOOM), Math.abs(maxLng - minLng));
        mc.animateTo(new GeoPoint((maxLat + minLat) / 2, (maxLng + minLng) / 2));
        super.onResume();
    }

    /**
     * Paint all contacts on map.
     *
     * <p>Runs on the UI thread: recomputes the map bounds (including the user's own
     * position from shared preferences, when available), clears and repopulates the
     * marker overlay from the location cursor, and chooses each marker's status
     * circle based on how recently the contact connected.
     */
    protected final void updateLocations() {
        this.runOnUiThread(new Runnable() {
            public void run() {
                if (locationcursor.isClosed()) {
                    Log.w(TAG, "locationcursor was closed when we tried accessing");
                    return;
                }
                maxLat = Integer.MIN_VALUE;
                maxLng = Integer.MIN_VALUE;
                minLat = Integer.MAX_VALUE;
                minLng = Integer.MAX_VALUE;

                //add own location into the calculation for zoom
                SharedPreferences sp = getSharedPreferences("attentec_preferences", MODE_PRIVATE);
                int ownLat = (int) (sp.getFloat("latitude", DevelopmentSettings.DEFAULT_LATITUDE) * ONE_MILLION);
                int ownLng = (int) (sp.getFloat("longitude", DevelopmentSettings.DEFAULT_LONGITUDE) * ONE_MILLION);
                // The defaults coming back unchanged means we have no real fix yet.
                boolean haveOwnLocation = true;
                if (ownLat == DevelopmentSettings.DEFAULT_LATITUDE * ONE_MILLION
                        && ownLng == DevelopmentSettings.DEFAULT_LONGITUDE * ONE_MILLION) {
                    haveOwnLocation = false;
                }
                maxLat = Math.max(ownLat, maxLat);
                minLat = Math.min(ownLat, minLat);
                maxLng = Math.max(ownLng, maxLng);
                minLng = Math.min(ownLng, minLng);

                //Add own position
                mapView.getOverlays().clear();
                mapView.getOverlays().add(mlo);
                mapView.invalidate();

                locationcursor.requery();
                if (locationcursor.getCount() <= 0) {
                    Log.d(TAG, "Close to you, locationcursor is empty");
                    return;
                }
                locationcursor.moveToFirst();

                //Observe that Views has to be removed since the balloons are views.
                mapView.removeAllViews();
                itemizedOverlay.clear();

                int lat, lng;
                do {
                    lat = (int) (Double.valueOf(locationcursor.getString(
                            locationcursor.getColumnIndex(DatabaseAdapter.KEY_LATITUDE))) * ONE_MILLION);
                    lng = (int) (Double.valueOf(locationcursor.getString(
                            locationcursor.getColumnIndex(DatabaseAdapter.KEY_LONGITUDE))) * ONE_MILLION);
                    maxLat = Math.max(lat, maxLat);
                    minLat = Math.min(lat, minLat);
                    maxLng = Math.max(lng, maxLng);
                    minLng = Math.min(lng, minLng);
                    if (haveOwnLocation) {
                        // Clamp the bounds to within MIN_ZOOM micro-degrees of our own
                        // position so one far-away contact cannot zoom the map out to the world.
                        maxLat = Math.min(maxLat, ownLat + MIN_ZOOM);
                        minLat = Math.max(minLat, ownLat - MIN_ZOOM);
                        maxLng = Math.min(maxLng, ownLng + MIN_ZOOM);
                        minLng = Math.max(minLng, ownLng - MIN_ZOOM);
                    }
                    String firstName = locationcursor.getString(
                            locationcursor.getColumnIndex(DatabaseAdapter.KEY_FIRST_NAME));
                    String lastName = locationcursor.getString(
                            locationcursor.getColumnIndex(DatabaseAdapter.KEY_LAST_NAME));
                    // Abbreviate to "First L." — guard against a missing/empty last name,
                    // which previously crashed the UI thread on charAt(0).
                    String name;
                    if (lastName != null && lastName.length() > 0) {
                        name = firstName + " " + lastName.charAt(0) + ".";
                    } else {
                        name = firstName;
                    }
                    Long contactId = locationcursor.getLong(
                            locationcursor.getColumnIndex(DatabaseAdapter.KEY_ROWID));

                    byte[] photo = locationcursor.getBlob(
                            locationcursor.getColumnIndex(DatabaseAdapter.KEY_PHOTO));
                    Bitmap photoBitmap;
                    if (photo != null) {
                        ByteArrayInputStream imageStream = new ByteArrayInputStream(photo);
                        photoBitmap = BitmapFactory.decodeStream(imageStream);
                    } else {
                        photoBitmap = null;
                    }

                    GeoPoint point = new GeoPoint(lat, lng);

                    // Pick the status circle: online statuses come from the stored status
                    // string, anything older than TIME_INTERVAL_ONLINE_MILLISECONDS is shown offline.
                    Drawable statusCircle;
                    String connectedAt = locationcursor.getString(
                            locationcursor.getColumnIndex(DatabaseAdapter.KEY_CONNECTED_AT));
                    Date d = new Date(0); //long time ago
                    if (connectedAt != null) {
                        try {
                            d = DatabaseAdapter.DATE_FORMAT.parse(connectedAt);
                        } catch (ParseException e) {
                            Log.e(TAG, "Could not parse locationUpdatedAt: " + e);
                        }
                    }
                    long offset = new Date().getTime() - d.getTime();
                    if (offset < ContactsActivity.TIME_INTERVAL_ONLINE_MILLISECONDS) {
                        String statusString = locationcursor.getString(
                                locationcursor.getColumnIndex(DatabaseAdapter.KEY_STATUS));
                        statusCircle = res.getDrawable(
                                Status.buildStatusFromString(statusString).getStatusCircleResourceId());
                    } else {
                        Log.d(TAG, "OFFLINE");
                        statusCircle = res.getDrawable(
                                Status.buildStatusFromString(Status.STATUS_OFFLINE_STRING).getStatusCircleResourceId());
                    }

                    ContactOverlayItem overlayItem =
                            new ContactOverlayItem(point, name, "", contactId, photoBitmap, statusCircle);
                    itemizedOverlay.addOverlay(overlayItem);
                } while (locationcursor.moveToNext());
                mapView.invalidate();
            }
        });
    }

    /**
     * Show contact dialog for one contact.
     *
     * <p>The dialog id doubles as a bit-set of which actions are available
     * (phone / email / location), accumulated from the database row.
     *
     * @param id row for user in database
     */
    protected final void showContact(final long id) {
        dialogid = id;
        int hasThings = 0;
        if (dbh.getContactPhone(dialogid).length() > MINIMUM_ACCEPTED_CONTACT_INFO_LENGTH) {
            hasThings += ContactDialogCreator.DIALOG_HAS_PHONE;
        }
        if (dbh.getContactEmail(dialogid).length() > MINIMUM_ACCEPTED_CONTACT_INFO_LENGTH) {
            hasThings += ContactDialogCreator.DIALOG_HAS_EMAIL;
        }
        if (dbh.isLocationFresh(dialogid)) {
            String lat = dbh.getContactLatitude(dialogid);
            if (lat != null && !lat.equals("null") && !lat.equals("")) {
                hasThings += ContactDialogCreator.DIALOG_HAS_LOCATION;
            }
        }
        showDialog(hasThings);
    }

    @Override
    protected final void onPrepareDialog(final int id, final Dialog dialog) {
        // Refresh the title and target contact each time the (cached) dialog is shown.
        dialog.setTitle(res.getString(R.string.contact) + " " + dbh.getContactName(dialogid) + ":");
        cdc.setDialogId(dialogid);
    }

    @Override
    protected final Dialog onCreateDialog(final int id) {
        cdc.setDialogType(id);
        return cdc.getDialog();
    }

    @Override
    protected final void onDestroy() {
        // Unregister from the service and release the database connection.
        cdc = null;
        AttentecService.setCloseToYouUpdateListener(null);
        dbh.close();
        Log.d(TAG, "CloseToYou onDestroy");
        super.onDestroy();
    }

    @Override
    protected final boolean isRouteDisplayed() {
        return false;
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.master.procedure;

import java.io.IOException;
import java.util.concurrent.Semaphore;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.procedure2.Procedure;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.NoopProcedure;
import org.apache.hadoop.hbase.procedure2.ProcedureYieldException;
import org.apache.hadoop.hbase.procedure2.store.wal.WALProcedureStore;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;

/**
 * Regression tests for dead locks in the {@link MasterProcedureScheduler} table queue:
 * a shared-lock procedure and an exclusive-lock procedure on the same table must not
 * block each other forever, either after an executor restart (procedures reloaded in
 * procId order) or when a shared-lock parent schedules a shared-lock child behind an
 * exclusive-lock procedure.
 */
@Category({ MasterTests.class, SmallTests.class })
public class TestSchedulerQueueDeadLock {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestSchedulerQueueDeadLock.class);

  private static final HBaseTestingUtil UTIL = new HBaseTestingUtil();

  /** All procedures in this test contend for locks on this one table. */
  private static final TableName TABLE_NAME = TableName.valueOf("deadlock");

  /** Minimal executor environment: only exposes the scheduler the procedures lock against. */
  private static final class TestEnv {
    private final MasterProcedureScheduler scheduler;

    public TestEnv(MasterProcedureScheduler scheduler) {
      this.scheduler = scheduler;
    }

    public MasterProcedureScheduler getScheduler() {
      return scheduler;
    }
  }

  /**
   * Procedure that takes the table SHARED lock, then blocks in execute() on its
   * {@link #latch} until the test releases it. holdLock() is true so the lock is
   * kept across suspensions.
   */
  public static class TableSharedProcedure extends NoopProcedure<TestEnv>
      implements TableProcedureInterface {

    // Starts with no permits: execute() parks here until the test calls latch.release().
    private final Semaphore latch = new Semaphore(0);

    @Override
    protected Procedure<TestEnv>[] execute(TestEnv env)
        throws ProcedureYieldException, ProcedureSuspendedException, InterruptedException {
      latch.acquire();
      return null;
    }

    @Override
    protected LockState acquireLock(TestEnv env) {
      // waitTableSharedLock returns true when we must wait for the lock event.
      if (env.getScheduler().waitTableSharedLock(this, getTableName())) {
        return LockState.LOCK_EVENT_WAIT;
      }
      return LockState.LOCK_ACQUIRED;
    }

    @Override
    protected void releaseLock(TestEnv env) {
      env.getScheduler().wakeTableSharedLock(this, getTableName());
    }

    @Override
    protected boolean holdLock(TestEnv env) {
      return true;
    }

    @Override
    public TableName getTableName() {
      return TABLE_NAME;
    }

    @Override
    public TableOperationType getTableOperationType() {
      return TableOperationType.READ;
    }
  }

  /**
   * Procedure that takes the table EXCLUSIVE lock, then blocks in execute() on its
   * {@link #latch} until the test releases it.
   */
  public static class TableExclusiveProcedure extends NoopProcedure<TestEnv>
      implements TableProcedureInterface {

    // Starts with no permits: execute() parks here until the test calls latch.release().
    private final Semaphore latch = new Semaphore(0);

    @Override
    protected Procedure<TestEnv>[] execute(TestEnv env)
        throws ProcedureYieldException, ProcedureSuspendedException, InterruptedException {
      latch.acquire();
      return null;
    }

    @Override
    protected LockState acquireLock(TestEnv env) {
      if (env.getScheduler().waitTableExclusiveLock(this, getTableName())) {
        return LockState.LOCK_EVENT_WAIT;
      }
      return LockState.LOCK_ACQUIRED;
    }

    @Override
    protected void releaseLock(TestEnv env) {
      env.getScheduler().wakeTableExclusiveLock(this, getTableName());
    }

    @Override
    protected boolean holdLock(TestEnv env) {
      return true;
    }

    @Override
    public TableName getTableName() {
      return TABLE_NAME;
    }

    @Override
    public TableOperationType getTableOperationType() {
      return TableOperationType.EDIT;
    }
  }

  @AfterClass
  public static void tearDownAfterClass() throws IOException {
    UTIL.cleanupTestDir();
  }

  private WALProcedureStore procStore;

  private ProcedureExecutor<TestEnv> procExec;

  @Rule
  public final TestName name = new TestName();

  @Before
  public void setUp() throws IOException {
    // Raise the stuck-worker threshold very high so the executor never spawns
    // replacement workers while our procedures are intentionally parked on latches.
    UTIL.getConfiguration().setInt("hbase.procedure.worker.stuck.threshold.msec", 6000000);
    procStore = ProcedureTestingUtility.createWalStore(UTIL.getConfiguration(),
      UTIL.getDataTestDir(name.getMethodName()));
    procStore.start(1);
    // The procedure-retriever function is never expected to resolve anything here.
    MasterProcedureScheduler scheduler = new MasterProcedureScheduler(pid -> null);
    procExec = new ProcedureExecutor<>(UTIL.getConfiguration(), new TestEnv(scheduler), procStore,
      scheduler);
    // Single worker thread; workers are started explicitly inside each test.
    procExec.init(1, false);
  }

  @After
  public void tearDown() {
    procExec.stop();
    procStore.stop(false);
  }

  public static final class TableSharedProcedureWithId extends TableSharedProcedure {

    @Override
    protected void setProcId(long procId) {
      // this is a hack to make this procedure be loaded after the procedure below as we will sort
      // the procedures by id when loading.
      super.setProcId(2L);
    }
  }

  public static final class TableExclusiveProcedureWithId extends TableExclusiveProcedure {

    @Override
    protected void setProcId(long procId) {
      // this is a hack to make this procedure be loaded before the procedure above as we will
      // sort the procedures by id when loading.
      super.setProcId(1L);
    }
  }

  /**
   * After a restart, procedures are reloaded sorted by procId, so the exclusive
   * procedure (id 1) is seen before the already-running shared procedure (id 2).
   * Both must still complete — the scheduler must not dead lock on the reloaded order.
   */
  @Test
  public void testTableProcedureDeadLockAfterRestarting() throws Exception {
    // let the shared procedure run first, but let it have a greater procId so when loading it will
    // be loaded at last.
    long procId1 = procExec.submitProcedure(new TableSharedProcedureWithId());
    long procId2 = procExec.submitProcedure(new TableExclusiveProcedureWithId());
    procExec.startWorkers();
    // Wait until the shared procedure is actually parked inside execute().
    UTIL.waitFor(10000,
      () -> ((TableSharedProcedure) procExec.getProcedure(procId1)).latch.hasQueuedThreads());

    ProcedureTestingUtility.restart(procExec);

    ((TableSharedProcedure) procExec.getProcedure(procId1)).latch.release();
    ((TableExclusiveProcedure) procExec.getProcedure(procId2)).latch.release();
    UTIL.waitFor(10000, () -> procExec.isFinished(procId1));
    UTIL.waitFor(10000, () -> procExec.isFinished(procId2));
  }

  /**
   * Parent procedure holding the table SHARED lock that, on its first execution,
   * schedules a {@link TableSharedProcedure} child — by then an exclusive procedure
   * is already queued ahead of the child.
   */
  public static final class TableShardParentProcedure extends NoopProcedure<TestEnv>
      implements TableProcedureInterface {

    // Ensures the child is scheduled exactly once; second execute() completes the parent.
    private boolean scheduled;

    @Override
    protected Procedure<TestEnv>[] execute(TestEnv env)
        throws ProcedureYieldException, ProcedureSuspendedException, InterruptedException {
      if (!scheduled) {
        scheduled = true;
        return new Procedure[] { new TableSharedProcedure() };
      }
      return null;
    }

    @Override
    protected LockState acquireLock(TestEnv env) {
      if (env.getScheduler().waitTableSharedLock(this, getTableName())) {
        return LockState.LOCK_EVENT_WAIT;
      }
      return LockState.LOCK_ACQUIRED;
    }

    @Override
    protected void releaseLock(TestEnv env) {
      env.getScheduler().wakeTableSharedLock(this, getTableName());
    }

    @Override
    protected boolean holdLock(TestEnv env) {
      return true;
    }

    @Override
    public TableName getTableName() {
      return TABLE_NAME;
    }

    @Override
    public TableOperationType getTableOperationType() {
      return TableOperationType.READ;
    }
  }

  /**
   * A shared-lock parent schedules a shared-lock sub-procedure after an exclusive
   * procedure has been submitted; both chains must still finish without dead lock.
   */
  @Test
  public void testTableProcedureSubProcedureDeadLock() throws Exception {
    // the shared procedure will also schedule a shared procedure, but after the exclusive procedure
    long procId1 = procExec.submitProcedure(new TableShardParentProcedure());
    long procId2 = procExec.submitProcedure(new TableExclusiveProcedure());
    procExec.startWorkers();
    // Wait for the child shared procedure to appear in the executor.
    UTIL.waitFor(10000,
      () -> procExec.getProcedures().stream().anyMatch(p -> p instanceof TableSharedProcedure));
    // Release every parked shared procedure (parent and child) and the exclusive one.
    procExec.getProcedures().stream().filter(p -> p instanceof TableSharedProcedure)
      .map(p -> (TableSharedProcedure) p).forEach(p -> p.latch.release());
    ((TableExclusiveProcedure) procExec.getProcedure(procId2)).latch.release();
    UTIL.waitFor(10000, () -> procExec.isFinished(procId1));
    UTIL.waitFor(10000, () -> procExec.isFinished(procId2));
  }
}
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.skyframe;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.MoreObjects;
import com.google.common.base.Preconditions;
import com.google.common.collect.Iterables;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.collect.nestedset.Order;
import com.google.devtools.build.lib.events.ExtendedEventHandler.Postable;
import java.util.Objects;
import javax.annotation.Nullable;

/**
 * Encapsulation of data stored by {@link NodeEntry} when the value has finished building.
 *
 * <p>This is intended only for use in alternative {@code MemoizingEvaluator} implementations.
 */
public abstract class ValueWithMetadata implements SkyValue {
  // The wrapped value; null only in the error case (see ErrorInfoValue).
  protected final SkyValue value;

  private static final NestedSet<TaggedEvents> NO_EVENTS =
      NestedSetBuilder.<TaggedEvents>emptySet(Order.STABLE_ORDER);
  private static final NestedSet<Postable> NO_POSTS =
      NestedSetBuilder.<Postable>emptySet(Order.STABLE_ORDER);

  // Private: instances are only created via the static factories below.
  private ValueWithMetadata(SkyValue value) {
    this.value = value;
  }

  /**
   * Builds a value entry value that has an error (and no value value).
   *
   * <p>This is intended only for use in alternative {@code MemoizingEvaluator} implementations.
   */
  public static ValueWithMetadata error(
      ErrorInfo errorInfo,
      NestedSet<TaggedEvents> transitiveEvents,
      NestedSet<Postable> transitivePostables) {
    // Safe cast: with a non-null errorInfo, normal() always returns an ErrorInfoValue.
    return (ValueWithMetadata) normal(null, errorInfo, transitiveEvents, transitivePostables);
  }

  /**
   * Builds a SkyValue that has a value, and possibly an error, and possibly events/postables. If it
   * has only a value, returns just the value in order to save memory.
   *
   * <p>This is public only for use in alternative {@code MemoizingEvaluator} implementations.
   */
  public static SkyValue normal(
      @Nullable SkyValue value,
      @Nullable ErrorInfo errorInfo,
      NestedSet<TaggedEvents> transitiveEvents,
      NestedSet<Postable> transitivePostables) {
    Preconditions.checkState(value != null || errorInfo != null,
        "Value and error cannot both be null");
    if (errorInfo == null) {
      // No error: return the bare value when there is no metadata to carry,
      // avoiding an unnecessary wrapper object.
      return (transitiveEvents.isEmpty() && transitivePostables.isEmpty())
          ? value
          : ValueWithEvents.createValueWithEvents(value, transitiveEvents, transitivePostables);
    }
    return new ErrorInfoValue(errorInfo, value, transitiveEvents, transitivePostables);
  }

  /** Returns the wrapped value, or null when this wraps only an error. */
  @Nullable
  SkyValue getValue() {
    return value;
  }

  /** Returns the error, or null for the value-only case. */
  @Nullable
  abstract ErrorInfo getErrorInfo();

  public abstract NestedSet<TaggedEvents> getTransitiveEvents();

  public abstract NestedSet<Postable> getTransitivePostables();

  /** Implementation of {@link ValueWithMetadata} for the value case. */
  @VisibleForTesting
  public static class ValueWithEvents extends ValueWithMetadata {

    private final NestedSet<TaggedEvents> transitiveEvents;
    private final NestedSet<Postable> transitivePostables;

    private ValueWithEvents(
        SkyValue value,
        NestedSet<TaggedEvents> transitiveEvents,
        NestedSet<Postable> transitivePostables) {
      super(Preconditions.checkNotNull(value));
      this.transitiveEvents = Preconditions.checkNotNull(transitiveEvents);
      this.transitivePostables = Preconditions.checkNotNull(transitivePostables);
    }

    // Chooses the NotComparableValueWithEvents subclass so that the
    // NotComparableSkyValue marker of the wrapped value is preserved on the wrapper.
    private static ValueWithEvents createValueWithEvents(
        SkyValue value,
        NestedSet<TaggedEvents> transitiveEvents,
        NestedSet<Postable> transitivePostables) {
      if (value instanceof NotComparableSkyValue) {
        return new NotComparableValueWithEvents(value, transitiveEvents, transitivePostables);
      } else {
        return new ValueWithEvents(value, transitiveEvents, transitivePostables);
      }
    }

    @Nullable
    @Override
    ErrorInfo getErrorInfo() {
      return null;
    }

    @Override
    public NestedSet<TaggedEvents> getTransitiveEvents() {
      return transitiveEvents;
    }

    @Override
    public NestedSet<Postable> getTransitivePostables() {
      return transitivePostables;
    }

    /**
     * We override equals so that if the same value is written to a {@link NodeEntry} twice, it can
     * verify that the two values are equal, and avoid incrementing its version.
     */
    @Override
    public boolean equals(Object o) {
      if (this == o) {
        return true;
      }
      if (o == null || getClass() != o.getClass()) {
        return false;
      }

      ValueWithEvents that = (ValueWithEvents) o;

      // Shallow equals is a middle ground between using default equals, which might miss
      // nested sets with the same elements, and deep equality checking, which would be expensive.
      // All three choices are sound, since shallow equals and default equals are more
      // conservative than deep equals. Using shallow equals means that we may unnecessarily
      // consider some values unequal that are actually equal, but this is still a net win over
      // deep equals.
      return value.equals(that.value)
          && transitiveEvents.shallowEquals(that.transitiveEvents)
          && transitivePostables.shallowEquals(that.transitivePostables);
    }

    @Override
    public int hashCode() {
      // Mirrors equals(): shallow hash codes for the nested sets.
      return 31 * value.hashCode()
          + transitiveEvents.shallowHashCode()
          + 3 * transitivePostables.shallowHashCode();
    }

    @Override
    public String toString() {
      return MoreObjects.toStringHelper(this)
          .add("value", value)
          .add("transitiveEvents size", Iterables.size(transitiveEvents))
          .add("transitivePostables size", Iterables.size(transitivePostables))
          .toString();
    }
  }

  // Variant of ValueWithEvents carrying the NotComparableSkyValue marker of its
  // wrapped value; behavior is otherwise identical to the superclass.
  private static final class NotComparableValueWithEvents extends ValueWithEvents
      implements NotComparableSkyValue {
    private NotComparableValueWithEvents(
        SkyValue value,
        NestedSet<TaggedEvents> transitiveEvents,
        NestedSet<Postable> transitivePostables) {
      super(value, transitiveEvents, transitivePostables);
    }
  }

  /**
   * Implementation of {@link ValueWithMetadata} for the error case.
   *
   * <p>Mark NotComparableSkyValue because it's unlikely that re-evaluation gives the same error.
   */
  private static final class ErrorInfoValue extends ValueWithMetadata
      implements NotComparableSkyValue {

    private final ErrorInfo errorInfo;
    private final NestedSet<TaggedEvents> transitiveEvents;
    private final NestedSet<Postable> transitivePostables;

    // value may be null here: an error may exist with or without an accompanying value.
    public ErrorInfoValue(
        ErrorInfo errorInfo,
        @Nullable SkyValue value,
        NestedSet<TaggedEvents> transitiveEvents,
        NestedSet<Postable> transitivePostables) {
      super(value);
      this.errorInfo = Preconditions.checkNotNull(errorInfo);
      this.transitiveEvents = Preconditions.checkNotNull(transitiveEvents);
      this.transitivePostables = Preconditions.checkNotNull(transitivePostables);
    }

    @Nullable
    @Override
    ErrorInfo getErrorInfo() {
      return errorInfo;
    }

    @Override
    public NestedSet<TaggedEvents> getTransitiveEvents() {
      return transitiveEvents;
    }

    @Override
    public NestedSet<Postable> getTransitivePostables() {
      return transitivePostables;
    }

    @Override
    public boolean equals(Object o) {
      if (this == o) {
        return true;
      }
      if (o == null || getClass() != o.getClass()) {
        return false;
      }

      ErrorInfoValue that = (ErrorInfoValue) o;

      // Shallow equals is a middle ground between using default equals, which might miss
      // nested sets with the same elements, and deep equality checking, which would be expensive.
      // All three choices are sound, since shallow equals and default equals are more
      // conservative than deep equals. Using shallow equals means that we may unnecessarily
      // consider some values unequal that are actually equal, but this is still a net win over
      // deep equals.
      return Objects.equals(this.value, that.value)
          && Objects.equals(this.errorInfo, that.errorInfo)
          && transitiveEvents.shallowEquals(that.transitiveEvents)
          && transitivePostables.shallowEquals(that.transitivePostables);
    }

    @Override
    public int hashCode() {
      // Objects.hash tolerates the possibly-null value, unlike ValueWithEvents#hashCode.
      return 31 * Objects.hash(value, errorInfo)
          + transitiveEvents.shallowHashCode()
          + 3 * transitivePostables.shallowHashCode();
    }

    @Override
    public String toString() {
      StringBuilder result = new StringBuilder();
      if (value != null) {
        result.append("Value: ").append(value);
      }
      if (errorInfo != null) {
        if (result.length() > 0) {
          result.append("; ");
        }
        result.append("Error: ").append(errorInfo);
      }
      return result.toString();
    }
  }

  /** Unwraps the bare value from a possibly-wrapped SkyValue (identity otherwise). */
  public static SkyValue justValue(SkyValue value) {
    if (value instanceof ValueWithMetadata) {
      return ((ValueWithMetadata) value).getValue();
    }
    return value;
  }

  /** Wraps a bare value with empty metadata; identity if already wrapped. */
  public static ValueWithMetadata wrapWithMetadata(SkyValue value) {
    if (value instanceof ValueWithMetadata) {
      return (ValueWithMetadata) value;
    }
    return ValueWithEvents.createValueWithEvents(value, NO_EVENTS, NO_POSTS);
  }

  /** Returns the error info, or null if {@code value} is not an error wrapper. */
  @Nullable
  public static ErrorInfo getMaybeErrorInfo(SkyValue value) {
    // Exact-class check: only ErrorInfoValue carries an error, so this skips the
    // instanceof/virtual-call path for the common non-error case.
    if (value.getClass() == ErrorInfoValue.class) {
      return ((ValueWithMetadata) value).getErrorInfo();
    }
    return null;
  }

  /** Returns the transitive events, or an empty set for unwrapped values. */
  @VisibleForTesting
  public static NestedSet<TaggedEvents> getEvents(SkyValue value) {
    if (value instanceof ValueWithMetadata) {
      return ((ValueWithMetadata) value).getTransitiveEvents();
    }
    // NOTE(review): freshly builds an empty set instead of reusing NO_EVENTS; harmless.
    return NestedSetBuilder.emptySet(Order.STABLE_ORDER);
  }

  /** Returns the transitive postables, or an empty set for unwrapped values. */
  static NestedSet<Postable> getPosts(SkyValue value) {
    if (value instanceof ValueWithMetadata) {
      return ((ValueWithMetadata) value).getTransitivePostables();
    }
    return NestedSetBuilder.emptySet(Order.STABLE_ORDER);
  }
}
/* * #%L * BroadleafCommerce Open Admin Platform * %% * Copyright (C) 2009 - 2013 Broadleaf Commerce * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.broadleafcommerce.openadmin.server.service.persistence.module; import org.apache.commons.beanutils.PropertyUtils; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang3.BooleanUtils; import org.apache.commons.lang3.reflect.MethodUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.broadleafcommerce.common.admin.domain.AdminMainEntity; import org.broadleafcommerce.common.exception.ExceptionHelper; import org.broadleafcommerce.common.exception.SecurityServiceException; import org.broadleafcommerce.common.exception.ServiceException; import org.broadleafcommerce.common.money.Money; import org.broadleafcommerce.common.presentation.client.OperationType; import org.broadleafcommerce.common.presentation.client.PersistencePerspectiveItemType; import org.broadleafcommerce.common.presentation.client.SupportedFieldType; import org.broadleafcommerce.common.presentation.client.VisibilityEnum; import org.broadleafcommerce.common.util.FormatUtil; import org.broadleafcommerce.common.util.dao.TQJoin; import org.broadleafcommerce.common.util.dao.TQOrder; import org.broadleafcommerce.common.util.dao.TQRestriction; import 
org.broadleafcommerce.common.util.dao.TypedQueryBuilder; import org.broadleafcommerce.common.web.BroadleafRequestContext; import org.broadleafcommerce.openadmin.dto.BasicFieldMetadata; import org.broadleafcommerce.openadmin.dto.CriteriaTransferObject; import org.broadleafcommerce.openadmin.dto.DynamicResultSet; import org.broadleafcommerce.openadmin.dto.Entity; import org.broadleafcommerce.openadmin.dto.EntityResult; import org.broadleafcommerce.openadmin.dto.FieldMetadata; import org.broadleafcommerce.openadmin.dto.ForeignKey; import org.broadleafcommerce.openadmin.dto.MergedPropertyType; import org.broadleafcommerce.openadmin.dto.PersistencePackage; import org.broadleafcommerce.openadmin.dto.PersistencePerspective; import org.broadleafcommerce.openadmin.dto.Property; import org.broadleafcommerce.openadmin.dto.SortDirection; import org.broadleafcommerce.openadmin.server.dao.provider.metadata.AdvancedCollectionFieldMetadataProvider; import org.broadleafcommerce.openadmin.server.service.ValidationException; import org.broadleafcommerce.openadmin.server.service.persistence.PersistenceException; import org.broadleafcommerce.openadmin.server.service.persistence.PersistenceManager; import org.broadleafcommerce.openadmin.server.service.persistence.module.criteria.CriteriaConversionException; import org.broadleafcommerce.openadmin.server.service.persistence.module.criteria.CriteriaTranslator; import org.broadleafcommerce.openadmin.server.service.persistence.module.criteria.FieldPath; import org.broadleafcommerce.openadmin.server.service.persistence.module.criteria.FilterMapping; import org.broadleafcommerce.openadmin.server.service.persistence.module.criteria.RestrictionFactory; import org.broadleafcommerce.openadmin.server.service.persistence.module.criteria.converter.FilterValueConverter; import org.broadleafcommerce.openadmin.server.service.persistence.module.criteria.predicate.EqPredicateProvider; import 
org.broadleafcommerce.openadmin.server.service.persistence.module.criteria.predicate.LikePredicateProvider; import org.broadleafcommerce.openadmin.server.service.persistence.module.criteria.predicate.PredicateProvider; import org.broadleafcommerce.openadmin.server.service.persistence.module.provider.FieldPersistenceProvider; import org.broadleafcommerce.openadmin.server.service.persistence.module.provider.request.AddFilterPropertiesRequest; import org.broadleafcommerce.openadmin.server.service.persistence.module.provider.request.AddSearchMappingRequest; import org.broadleafcommerce.openadmin.server.service.persistence.module.provider.request.ExtractValueRequest; import org.broadleafcommerce.openadmin.server.service.persistence.module.provider.request.PopulateValueRequest; import org.broadleafcommerce.openadmin.server.service.persistence.validation.EntityValidatorService; import org.broadleafcommerce.openadmin.server.service.persistence.validation.PopulateValueRequestValidator; import org.broadleafcommerce.openadmin.server.service.persistence.validation.PropertyValidationResult; import org.broadleafcommerce.openadmin.server.service.type.FieldProviderResponse; import org.hibernate.FlushMode; import org.hibernate.Session; import org.springframework.beans.BeansException; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContextAware; import org.springframework.context.annotation.Scope; import org.springframework.stereotype.Component; import org.springframework.util.Assert; import java.io.Serializable; import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.ParameterizedType; import java.math.BigDecimal; import java.sql.Timestamp; import java.text.DecimalFormat; import java.text.NumberFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.Collection; import 
java.util.Collections; import java.util.Comparator; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Map.Entry; import java.util.StringTokenizer; import javax.annotation.Resource; /** * @author jfischer */ @Component("blBasicPersistenceModule") @Scope("prototype") public class BasicPersistenceModule implements PersistenceModule, RecordHelper, ApplicationContextAware { private static final Log LOG = LogFactory.getLog(BasicPersistenceModule.class); public static final String MAIN_ENTITY_NAME_PROPERTY = "MAIN_ENTITY_NAME"; public static final String ALTERNATE_ID_PROPERTY = "ALTERNATE_ID"; protected ApplicationContext applicationContext; protected PersistenceManager persistenceManager; @Resource(name = "blEntityValidatorService") protected EntityValidatorService entityValidatorService; @Resource(name="blPersistenceProviders") protected List<FieldPersistenceProvider> fieldPersistenceProviders = new ArrayList<FieldPersistenceProvider>(); @Resource(name="blPopulateValueRequestValidators") protected List<PopulateValueRequestValidator> populateValidators; @Resource(name= "blDefaultFieldPersistenceProvider") protected FieldPersistenceProvider defaultFieldPersistenceProvider; @Resource(name="blCriteriaTranslator") protected CriteriaTranslator criteriaTranslator; @Resource(name="blRestrictionFactory") protected RestrictionFactory restrictionFactory; @Override public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { this.applicationContext = applicationContext; } @Override public boolean isCompatible(OperationType operationType) { return OperationType.BASIC == operationType || OperationType.NONDESTRUCTIVEREMOVE == operationType; } @Override public FieldManager getFieldManager() { return persistenceManager.getDynamicEntityDao().getFieldManager(); } @Override public DecimalFormat getDecimalFormatter() { BroadleafRequestContext 
brc = BroadleafRequestContext.getBroadleafRequestContext();
        Locale locale = brc.getJavaLocale();
        DecimalFormat format = (DecimalFormat) NumberFormat.getInstance(locale);
        // Fixed pattern and no grouping so values serialize as plain decimal strings
        format.applyPattern("0.########");
        format.setGroupingUsed(false);
        return format;
    }

    @Override
    public SimpleDateFormat getSimpleDateFormatter() {
        return FormatUtil.getDateFormat();
    }

    /**
     * Returns a copy of the given metadata map containing only entries usable as basic
     * fields: BasicFieldMetadata entries, plus collection metadata that carries a
     * foreign-key entry in its additional metadata. Returns null for a null input.
     */
    protected Map<String, FieldMetadata> filterOutCollectionMetadata(Map<String, FieldMetadata> metadata) {
        if (metadata == null) {
            return null;
        }
        Map<String, FieldMetadata> newMap = new HashMap<String, FieldMetadata>();
        for (Map.Entry<String, FieldMetadata> entry : metadata.entrySet()) {
            String fieldName = entry.getKey();
            FieldMetadata md = entry.getValue();
            // Detect instances where the actual metadata for the field is some sort of
            // CollectionMetadata but also corresponds to a ForeignKey and ensure that gets
            // included in the filtered map. That way the {@link BasicPersistenceModule} can
            // appropriate handle filtration and population
            if (entry.getValue() instanceof BasicFieldMetadata) {
                newMap.put(fieldName, md);
            } else if (md.getAdditionalMetadata().containsKey(AdvancedCollectionFieldMetadataProvider.FOREIGN_KEY_ADDITIONAL_METADATA_KEY)) {
                newMap.put(fieldName, (BasicFieldMetadata) md.getAdditionalMetadata().get(AdvancedCollectionFieldMetadataProvider.FOREIGN_KEY_ADDITIONAL_METADATA_KEY));
            }
        }
        return newMap;
    }

    /**
     * Maps a SupportedFieldType to the Java type used when populating values
     * (e.g. BOOLEAN -> boolean, DECIMAL -> BigDecimal). Returns null for UNKNOWN
     * and String for any unlisted type.
     */
    protected Class<?> getBasicBroadleafType(SupportedFieldType fieldType) {
        Class<?> response;
        switch (fieldType) {
            case BOOLEAN:
                response = Boolean.TYPE;
                break;
            case DATE:
                response = Date.class;
                break;
            case DECIMAL:
                response = BigDecimal.class;
                break;
            case MONEY:
                response = Money.class;
                break;
            case INTEGER:
                response = Integer.TYPE;
                break;
            case UNKNOWN:
                response = null;
                break;
            default:
                response = String.class;
                break;
        }
        return response;
    }

    /**
     * Convenience overload of
     * {@link #createPopulatedInstance(Serializable, Entity, Map, Boolean, Boolean)}
     * that also validates unsubmitted properties.
     */
    @Override
    public Serializable createPopulatedInstance(Serializable instance, Entity entity, Map<String, FieldMetadata> unfilteredProperties, Boolean setId) throws ValidationException {
return createPopulatedInstance(instance, entity, unfilteredProperties, setId, true);
    }

    /**
     * Copies the submitted {@link Entity} property values onto the given persistent
     * instance. Properties are first filtered through the provider chain, then each
     * mutable, non-read-only property is populated via the providers (after running the
     * configured validators). Runs with the Hibernate session in MANUAL flush mode so
     * nothing is written until validation completes.
     */
    @Override
    public Serializable createPopulatedInstance(Serializable instance, Entity entity, Map<String, FieldMetadata> unfilteredProperties, Boolean setId, Boolean validateUnsubmittedProperties) throws ValidationException {
        Map<String, FieldMetadata> mergedProperties = filterOutCollectionMetadata(unfilteredProperties);
        FieldManager fieldManager = getFieldManager();
        boolean handled = false;
        // Give each provider a chance to filter the submitted properties; stop on HANDLED_BREAK
        for (FieldPersistenceProvider fieldPersistenceProvider : fieldPersistenceProviders) {
            FieldProviderResponse response = fieldPersistenceProvider.filterProperties(new AddFilterPropertiesRequest(entity), unfilteredProperties);
            if (FieldProviderResponse.NOT_HANDLED != response) {
                handled = true;
            }
            if (FieldProviderResponse.HANDLED_BREAK == response) {
                break;
            }
        }
        if (!handled) {
            defaultFieldPersistenceProvider.filterProperties(new AddFilterPropertiesRequest(entity), unfilteredProperties);
        }
        Session session = getPersistenceManager().getDynamicEntityDao().getStandardEntityManager().unwrap(Session.class);
        FlushMode originalFlushMode = session.getFlushMode();
        try {
            // Defer flushing so a validation failure can abandon all changes (restored in finally)
            session.setFlushMode(FlushMode.MANUAL);
            for (Property property : entity.getProperties()) {
                BasicFieldMetadata metadata = (BasicFieldMetadata) mergedProperties.get(property.getName());
                Class<?> returnType;
                if (!property.getName().contains(FieldManager.MAPFIELDSEPARATOR) && !property.getName().startsWith("__")) {
                    // Regular bean property -- resolve the field directly
                    Field field = fieldManager.getField(instance.getClass(), property.getName());
                    if (field == null) {
                        LOG.debug("Unable to find a bean property for the reported property: " + property.getName() + ". Ignoring property.");
                        continue;
                    }
                    returnType = field.getType();
                } else {
                    // Map field (or synthetic "__" property) -- type comes from metadata
                    if (metadata == null) {
                        LOG.debug("Unable to find a metadata property for the reported property: " + property.getName() + ". Ignoring property.");
                        continue;
                    }
                    returnType = getMapFieldType(instance, fieldManager, property);
                    if (returnType == null) {
                        returnType = getBasicBroadleafType(metadata.getFieldType());
                    }
                }
                if (returnType == null) {
                    throw new IllegalAccessException("Unable to determine the value type for the property ("+property.getName()+")");
                }
                String value = property.getValue();
                if (metadata != null) {
                    Boolean mutable = metadata.getMutable();
                    Boolean readOnly = metadata.getReadOnly();
                    // Unsubmitted booleans (e.g. unchecked checkboxes) default to "false"
                    if (metadata.getFieldType().equals(SupportedFieldType.BOOLEAN)) {
                        if (value == null) {
                            value = "false";
                        }
                    }
                    if ((mutable == null || mutable) && (readOnly == null || !readOnly)) {
                        if (value != null) {
                            handled = false;
                            PopulateValueRequest request = new PopulateValueRequest(setId, fieldManager, property, metadata, returnType, value, persistenceManager, this);
                            // Run validators first; on failure record the error and skip population
                            boolean attemptToPopulate = true;
                            for (PopulateValueRequestValidator validator : populateValidators) {
                                PropertyValidationResult validationResult = validator.validate(request, instance);
                                if (!validationResult.isValid()) {
                                    entity.addValidationError(property.getName(), validationResult.getErrorMessage());
                                    attemptToPopulate = false;
                                }
                            }
                            if (attemptToPopulate) {
                                for (FieldPersistenceProvider fieldPersistenceProvider : fieldPersistenceProviders) {
                                    FieldProviderResponse response = fieldPersistenceProvider.populateValue(request, instance);
                                    if (FieldProviderResponse.NOT_HANDLED != response) {
                                        handled = true;
                                    }
                                    if (FieldProviderResponse.HANDLED_BREAK == response) {
                                        break;
                                    }
                                }
                                if (!handled) {
                                    defaultFieldPersistenceProvider.populateValue(new PopulateValueRequest(setId, fieldManager, property, metadata, returnType, value, persistenceManager, this), instance);
                                }
                            }
                        } else {
                            // Null submission: clear the current value (except protected field types)
                            try {
                                if (fieldManager.getFieldValue(instance, property.getName()) != null && (metadata.getFieldType() != SupportedFieldType.ID || setId) && metadata.getFieldType() != SupportedFieldType.PASSWORD) {
                                    if (fieldManager.getFieldValue(instance, property.getName()) != null) {
property.setIsDirty(true);
                                    }
                                    fieldManager.setFieldValue(instance, property.getName(), null);
                                }
                            } catch (FieldNotAvailableException e) {
                                throw new IllegalArgumentException(e);
                            }
                        }
                    }
                }
            }
            validate(entity, instance, mergedProperties, validateUnsubmittedProperties);
            //if validation failed, refresh the current instance so that none of the changes will be persisted
            if (entity.isValidationFailure()) {
                //only refresh the instance if it was managed to begin with
                if (persistenceManager.getDynamicEntityDao().getStandardEntityManager().contains(instance)) {
                    persistenceManager.getDynamicEntityDao().refresh(instance);
                }
                //re-initialize the valid properties for the entity in order to deal with the potential of not
                //completely sending over all checkbox/radio fields
                List<Serializable> entityList = new ArrayList<Serializable>(1);
                entityList.add(instance);
                Entity invalid = getRecords(mergedProperties, entityList, null, null)[0];
                invalid.setPropertyValidationErrors(entity.getPropertyValidationErrors());
                invalid.overridePropertyValues(entity);
                // Assemble a readable "field : message" summary of every validation error
                StringBuilder sb = new StringBuilder();
                for (Map.Entry<String, List<String>> entry : invalid.getPropertyValidationErrors().entrySet()) {
                    Iterator<String> itr = entry.getValue().iterator();
                    while(itr.hasNext()) {
                        sb.append(entry.getKey());
                        sb.append(" : ");
                        sb.append(itr.next());
                        if (itr.hasNext()) {
                            sb.append(" / ");
                        }
                    }
                }
                throw new ValidationException(invalid, "The entity has failed validation - " + sb.toString());
            } else {
                fieldManager.persistMiddleEntities();
            }
        } catch (IllegalAccessException e) {
            throw new PersistenceException(e);
        } catch (InstantiationException e) {
            throw new PersistenceException(e);
        } finally {
            // Always restore the session's original flush mode
            session.setFlushMode(originalFlushMode);
        }
        return instance;
    }

    /**
     * Resolves the value type for a map-backed property by inspecting the generic
     * signature of the underlying map field. Returns null when the type cannot be
     * determined (caller falls back to metadata-derived types).
     */
    protected Class<?> getMapFieldType(Serializable instance, FieldManager fieldManager, Property property) {
        Class<?> returnType = null;
        Field field = fieldManager.getField(instance.getClass(), property.getName().substring(0,
property.getName().indexOf(FieldManager.MAPFIELDSEPARATOR)));
        java.lang.reflect.Type type = field.getGenericType();
        if (type instanceof ParameterizedType) {
            ParameterizedType pType = (ParameterizedType) type;
            // Index [1] is the map's value type parameter; resolve to the most derived
            // polymorphic implementation registered for that ceiling
            Class<?> clazz = (Class<?>) pType.getActualTypeArguments()[1];
            Class<?>[] entities = persistenceManager.getDynamicEntityDao().getAllPolymorphicEntitiesFromCeiling(clazz);
            if (!ArrayUtils.isEmpty(entities)) {
                returnType = entities[entities.length-1];
            }
        }
        return returnType;
    }

    /**
     * Single-record convenience wrapper around
     * {@link #getRecords(Map, List, Map, String)}.
     */
    @Override
    public Entity getRecord(Map<String, FieldMetadata> primaryMergedProperties, Serializable record, Map<String, FieldMetadata> alternateMergedProperties, String pathToTargetObject) {
        List<Serializable> records = new ArrayList<Serializable>(1);
        records.add(record);
        Entity[] productEntities = getRecords(primaryMergedProperties, records, alternateMergedProperties, pathToTargetObject);
        return productEntities[0];
    }

    @Override
    public Entity getRecord(Class<?> ceilingEntityClass, PersistencePerspective persistencePerspective, Serializable record) {
        Map<String, FieldMetadata> mergedProperties = getSimpleMergedProperties(ceilingEntityClass.getName(), persistencePerspective);
        return getRecord(mergedProperties, record, null, null);
    }

    @Override
    public Entity[] getRecords(Class<?> ceilingEntityClass, PersistencePerspective persistencePerspective, List<? extends Serializable> records) {
        Map<String, FieldMetadata> mergedProperties = getSimpleMergedProperties(ceilingEntityClass.getName(), persistencePerspective);
        return getRecords(mergedProperties, records, null, null);
    }

    @Override
    public Map<String, FieldMetadata> getSimpleMergedProperties(String entityName, PersistencePerspective persistencePerspective) {
        return persistenceManager.getDynamicEntityDao().getSimpleMergedProperties(entityName, persistencePerspective);
    }

    @Override
    public Entity[] getRecords(Map<String, FieldMetadata> primaryMergedProperties, List<?
extends Serializable> records) {
        return getRecords(primaryMergedProperties, records, null, null);
    }

    /**
     * Converts a list of persistent instances into admin {@link Entity} DTOs. When
     * pathToTargetObject is supplied, each record is first dereferenced through that
     * property path. Also attaches the synthetic MAIN_ENTITY_NAME and ALTERNATE_ID
     * properties when available.
     */
    @Override
    public Entity[] getRecords(Map<String, FieldMetadata> primaryUnfilteredMergedProperties, List<? extends Serializable> records, Map<String, FieldMetadata> alternateUnfilteredMergedProperties, String pathToTargetObject) {
        Map<String, FieldMetadata> primaryMergedProperties = filterOutCollectionMetadata(primaryUnfilteredMergedProperties);
        Map<String, FieldMetadata> alternateMergedProperties = filterOutCollectionMetadata(alternateUnfilteredMergedProperties);
        Entity[] entities = new Entity[records.size()];
        int j = 0;
        for (Serializable recordEntity : records) {
            Serializable entity;
            if (pathToTargetObject != null) {
                // Dereference through the configured path to reach the real target object
                try {
                    entity = (Serializable) getFieldManager().getFieldValue(recordEntity, pathToTargetObject);
                } catch (Exception e) {
                    throw new PersistenceException(e);
                }
            } else {
                entity = recordEntity;
            }
            Entity entityItem = new Entity();
            entityItem.setType(new String[]{entity.getClass().getName()});
            entities[j] = entityItem;
            List<Property> props = new ArrayList<Property>(primaryMergedProperties.size());
            extractPropertiesFromPersistentEntity(primaryMergedProperties, entity, props);
            if (alternateMergedProperties != null) {
                extractPropertiesFromPersistentEntity(alternateMergedProperties, recordEntity, props);
            }
            // Try to add the "main name" property. Log a debug message if we can't
            try {
                Property p = new Property();
                p.setName(MAIN_ENTITY_NAME_PROPERTY);
                String mainEntityName = (String) MethodUtils.invokeMethod(entity, "getMainEntityName");
                p.setValue(mainEntityName);
                props.add(p);
            } catch (Exception e) {
                LOG.debug(String.format("Could not execute the getMainEntityName() method for [%s]", entity.getClass().getName()), e);
            }
            // Try to add the alternate id property if available
            if (alternateMergedProperties != null) {
                for (Entry<String, FieldMetadata> entry : alternateMergedProperties.entrySet()) {
                    if (entry.getValue() instanceof BasicFieldMetadata) {
                        if (((BasicFieldMetadata) entry.getValue()).getFieldType() == SupportedFieldType.ID) {
                            Map<String, FieldMetadata> alternateOnEntity = new HashMap<String, FieldMetadata>();
                            alternateOnEntity.put(entry.getKey(), entry.getValue());
                            List<Property> props2 = new ArrayList<Property>();
                            extractPropertiesFromPersistentEntity(alternateOnEntity, recordEntity, props2);
                            if (props2.size() == 1) {
                                Property alternateIdProp = props2.get(0);
                                alternateIdProp.setName(ALTERNATE_ID_PROPERTY);
                                props.add(alternateIdProp);
                            }
                        }
                    }
                }
            }
            Property[] properties = new Property[props.size()];
            properties = props.toArray(properties);
            entityItem.setProperties(properties);
            j++;
        }
        return entities;
    }

    /**
     * Reads each mapped property off the persistent entity (via the FieldManager, with a
     * reflective getter fallback) and appends the resulting {@link Property} DTOs to props.
     * Duplicate property names already present in props are skipped.
     */
    protected void extractPropertiesFromPersistentEntity(Map<String, FieldMetadata> mergedProperties, Serializable entity, List<Property> props) {
        FieldManager fieldManager = getFieldManager();
        try {
            if (entity instanceof AdminMainEntity) {
                //Create an invisible property for the admin main entity name, if applicable.
                //This is useful for ToOneLookups if that ToOneLookup uses AdminMainEntity to drive
                //its display name.
                try {
                    Property propertyItem = new Property();
                    propertyItem.setName(AdminMainEntity.MAIN_ENTITY_NAME_PROPERTY);
                    propertyItem.setValue(((AdminMainEntity) entity).getMainEntityName());
                    props.add(propertyItem);
                } catch (Exception e) {
                    //do nothing here except for not add the property. Exceptions could occur when there is a validation
                    //issue and some properties/relationships that are used for gleaning the main entity name end up
                    //not being set
                }
            }
            for (Entry<String, FieldMetadata> entry : mergedProperties.entrySet()) {
                String property = entry.getKey();
                BasicFieldMetadata metadata = (BasicFieldMetadata) entry.getValue();
                // Only extract when the entity's class and the metadata's declaring type are related
                if (Class.forName(metadata.getInheritedFromType()).isAssignableFrom(entity.getClass()) || entity.getClass().isAssignableFrom(Class.forName(metadata.getInheritedFromType()))) {
                    boolean proceed = true;
                    if (property.contains(".")) {
                        // Walk dotted sub-property paths; a null intermediate yields a null-valued Property
                        StringTokenizer tokens = new StringTokenizer(property, ".");
                        Object testObject = entity;
                        while (tokens.hasMoreTokens()) {
                            String token = tokens.nextToken();
                            if (tokens.hasMoreTokens()) {
                                try {
                                    testObject = fieldManager.getFieldValue(testObject, token);
                                } catch (FieldNotAvailableException e) {
                                    proceed = false;
                                    break;
                                }
                                if (testObject == null) {
                                    Property propertyItem = new Property();
                                    propertyItem.setName(property);
                                    if (props.contains(propertyItem)) {
                                        proceed = false;
                                        break;
                                    }
                                    propertyItem.setValue(null);
                                    props.add(propertyItem);
                                    proceed = false;
                                    break;
                                }
                            }
                        }
                    }
                    if (!proceed) {
                        continue;
                    }
                    boolean isFieldAccessible = true;
                    Object value = null;
                    try {
                        value = fieldManager.getFieldValue(entity, property);
                    } catch (FieldNotAvailableException e) {
                        isFieldAccessible = false;
                    }
                    checkField: {
                        if (isFieldAccessible) {
                            Property propertyItem = new Property();
                            propertyItem.setName(property);
                            if (props.contains(propertyItem)) {
                                continue;
                            }
                            props.add(propertyItem);
                            String displayVal = propertyItem.getDisplayValue();
                            // Delegate value extraction to the provider chain; default provider on fallthrough
                            boolean handled = false;
                            for (FieldPersistenceProvider fieldPersistenceProvider : fieldPersistenceProviders) {
                                FieldProviderResponse response = fieldPersistenceProvider.extractValue(
                                        new ExtractValueRequest(props, fieldManager, metadata, value, displayVal, persistenceManager, this, entity), propertyItem);
                                if (FieldProviderResponse.NOT_HANDLED != response) {
                                    handled = true;
                                }
                                if
(FieldProviderResponse.HANDLED_BREAK == response) {
                                    break;
                                }
                            }
                            if (!handled) {
                                defaultFieldPersistenceProvider.extractValue(
                                        new ExtractValueRequest(props, fieldManager, metadata, value, displayVal, persistenceManager, this, entity), propertyItem);
                            }
                            break checkField;
                        }
                        //try a direct property acquisition via reflection
                        try {
                            String strVal = null;
                            Method method;
                            try {
                                //try a 'get' prefixed mutator first
                                String temp = "get" + property.substring(0, 1).toUpperCase() + property.substring(1, property.length());
                                method = entity.getClass().getMethod(temp, new Class[]{});
                            } catch (NoSuchMethodException e) {
                                // Fall back to a method named exactly like the property
                                method = entity.getClass().getMethod(property, new Class[]{});
                            }
                            value = method.invoke(entity, new String[]{});
                            Property propertyItem = new Property();
                            propertyItem.setName(property);
                            if (props.contains(propertyItem)) {
                                continue;
                            }
                            props.add(propertyItem);
                            // Format temporal and decimal values with the shared formatters
                            if (value == null) {
                                strVal = null;
                            } else {
                                if (Date.class.isAssignableFrom(value.getClass())) {
                                    strVal = getSimpleDateFormatter().format((Date) value);
                                } else if (Timestamp.class.isAssignableFrom(value.getClass())) {
                                    strVal = getSimpleDateFormatter().format(new Date(((Timestamp) value).getTime()));
                                } else if (Calendar.class.isAssignableFrom(value.getClass())) {
                                    strVal = getSimpleDateFormatter().format(((Calendar) value).getTime());
                                } else if (Double.class.isAssignableFrom(value.getClass())) {
                                    strVal = getDecimalFormatter().format(value);
                                } else if (BigDecimal.class.isAssignableFrom(value.getClass())) {
                                    strVal = getDecimalFormatter().format(value);
                                } else {
                                    strVal = value.toString();
                                }
                            }
                            propertyItem.setValue(strVal);
                        } catch (NoSuchMethodException e) {
                            LOG.debug("Unable to find a specified property in the entity: " + property);
                            //do nothing - this property is simply not in the bean
                        }
                    }
                }
            }
        } catch (ClassNotFoundException e) {
            throw new PersistenceException(e);
        } catch (IllegalAccessException e) {
            throw new PersistenceException(e);
        } catch (InvocationTargetException e) {
            throw new PersistenceException(e);
        }
    }
@Override
    public String getStringValueFromGetter(Serializable instance, String propertyName) throws IllegalAccessException, InvocationTargetException, NoSuchMethodException {
        Object value = PropertyUtils.getProperty(instance, propertyName);
        return formatValue(value);
    }

    /**
     * Formats a raw value as a display string: Date/Timestamp/Calendar via the shared
     * date formatter, Double/BigDecimal via the decimal formatter, anything else via
     * toString(). A null value stays null.
     */
    @Override
    public String formatValue(Object value) {
        String strVal;
        if (value == null) {
            strVal = null;
        } else {
            if (Date.class.isAssignableFrom(value.getClass())) {
                strVal = getSimpleDateFormatter().format((Date) value);
            } else if (Timestamp.class.isAssignableFrom(value.getClass())) {
                strVal = getSimpleDateFormatter().format(new Date(((Timestamp) value).getTime()));
            } else if (Calendar.class.isAssignableFrom(value.getClass())) {
                strVal = getSimpleDateFormatter().format(((Calendar) value).getTime());
            } else if (Double.class.isAssignableFrom(value.getClass())) {
                strVal = getDecimalFormatter().format(value);
            } else if (BigDecimal.class.isAssignableFrom(value.getClass())) {
                strVal = getDecimalFormatter().format(value);
            } else {
                strVal = value.toString();
            }
        }
        return strVal;
    }

    /**
     * Core update routine: resolves the target instance (deriving the primary key from
     * the submitted entity when not supplied), populates it from the entity and, if
     * validation passes, merges it and returns the refreshed record. Refuses to update
     * through an immutable foreign key.
     */
    protected EntityResult update(PersistencePackage persistencePackage, Object primaryKey, boolean includeRealEntity) throws ServiceException {
        EntityResult entityResult = new EntityResult();
        Entity entity = persistencePackage.getEntity();
        PersistencePerspective persistencePerspective = persistencePackage.getPersistencePerspective();
        ForeignKey foreignKey = (ForeignKey) persistencePerspective.getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.FOREIGNKEY);
        if (foreignKey != null && !foreignKey.getMutable()) {
            throw new SecurityServiceException("Entity not mutable");
        }
        try {
            Class<?>[] entities = persistenceManager.getPolymorphicEntities(persistencePackage.getCeilingEntityFullyQualifiedClassname());
            Map<String, FieldMetadata> mergedProperties = persistenceManager.getDynamicEntityDao().getMergedProperties(
                    persistencePackage.getCeilingEntityFullyQualifiedClassname(),
                    entities,
                    foreignKey,
persistencePerspective.getAdditionalNonPersistentProperties(),
                    persistencePerspective.getAdditionalForeignKeys(),
                    MergedPropertyType.PRIMARY,
                    persistencePerspective.getPopulateToOneFields(),
                    persistencePerspective.getIncludeFields(),
                    persistencePerspective.getExcludeFields(),
                    persistencePerspective.getConfigurationKey(),
                    ""
            );
            if (primaryKey == null) {
                primaryKey = getPrimaryKey(entity, mergedProperties);
            }
            Serializable instance = persistenceManager.getDynamicEntityDao().retrieve(Class.forName(entity.getType()[0]), primaryKey);
            Assert.isTrue(instance != null, "Entity not found");
            instance = createPopulatedInstance(instance, entity, mergedProperties, false, persistencePackage.isValidateUnsubmittedProperties());
            if (!entity.isValidationFailure()) {
                instance = persistenceManager.getDynamicEntityDao().merge(instance);
                if (includeRealEntity) {
                    entityResult.setEntityBackingObject(instance);
                }
                // Re-read the merged instance so the returned Entity reflects persisted state
                List<Serializable> entityList = new ArrayList<Serializable>(1);
                entityList.add(instance);
                entity = getRecords(mergedProperties, entityList, null, null)[0];
                entityResult.setEntity(entity);
                return entityResult;
            } else {
                // Validation failed: hand back the submitted entity (with its errors) unmerged
                entityResult.setEntity(entity);
                return entityResult;
            }
        } catch (Exception e) {
            throw new ServiceException("Problem updating entity : " + e.getMessage(), e);
        }
    }

    /**
     * Derives the primary key value from the submitted entity using the first top-level
     * ID-typed property in the merged metadata. INTEGER secondary types are parsed as Long.
     *
     * @throws RuntimeException when no id property or id value can be found
     */
    @Override
    public Object getPrimaryKey(Entity entity, Map<String, FieldMetadata> mergedUnfilteredProperties) {
        Map<String, FieldMetadata> mergedProperties = filterOutCollectionMetadata(mergedUnfilteredProperties);
        Object primaryKey = null;
        String idPropertyName = null;
        BasicFieldMetadata metaData = null;
        for (String property : mergedProperties.keySet()) {
            BasicFieldMetadata temp = (BasicFieldMetadata) mergedProperties.get(property);
            // Only a direct (non-dotted) ID property qualifies
            if (temp.getFieldType() == SupportedFieldType.ID && !property.contains(".")) {
                idPropertyName = property;
                metaData = temp;
                break;
            }
        }
        if (idPropertyName == null) {
            throw new RuntimeException("Could not find a primary key property in the passed entity with type: " +
entity.getType()[0]);
        }
        for (Property property : entity.getProperties()) {
            if (property.getName().equals(idPropertyName)) {
                switch(metaData.getSecondaryType()) {
                    case INTEGER:
                        primaryKey = (property.getValue() == null) ? null : Long.valueOf(property.getValue());
                        break;
                    case STRING:
                        primaryKey = property.getValue();
                        break;
                }
                break;
            }
        }
        if (primaryKey == null) {
            throw new RuntimeException("Could not find the primary key property (" + idPropertyName + ") in the passed entity with type: " + entity.getType()[0]);
        }
        return primaryKey;
    }

    /**
     * Builds {@link FilterMapping}s for every criteria entry in the cto that matches a
     * basic merged property, delegating to the provider chain (with the default provider
     * as fallback). A customRestrictionFactory, when supplied, overrides the injected one.
     */
    @Override
    public List<FilterMapping> getFilterMappings(PersistencePerspective persistencePerspective,
                                                 CriteriaTransferObject cto,
                                                 String ceilingEntityFullyQualifiedClassname,
                                                 Map<String, FieldMetadata> mergedUnfilteredProperties,
                                                 RestrictionFactory customRestrictionFactory) {
        Map<String, FieldMetadata> mergedProperties = filterOutCollectionMetadata(mergedUnfilteredProperties);
        List<FilterMapping> filterMappings = new ArrayList<FilterMapping>();
        for (String propertyId : cto.getCriteriaMap().keySet()) {
            if (mergedProperties.containsKey(propertyId)) {
                boolean handled = false;
                for (FieldPersistenceProvider fieldPersistenceProvider : fieldPersistenceProviders) {
                    FieldProviderResponse response = fieldPersistenceProvider.addSearchMapping(
                            new AddSearchMappingRequest(persistencePerspective, cto,
                                    ceilingEntityFullyQualifiedClassname, mergedProperties, propertyId, getFieldManager(),
                                    this, this, customRestrictionFactory==null?restrictionFactory :customRestrictionFactory),
                            filterMappings);
                    if (FieldProviderResponse.NOT_HANDLED != response) {
                        handled = true;
                    }
                    if (FieldProviderResponse.HANDLED_BREAK == response) {
                        break;
                    }
                }
                if (!handled) {
                    defaultFieldPersistenceProvider.addSearchMapping(
                            new AddSearchMappingRequest(persistencePerspective, cto, ceilingEntityFullyQualifiedClassname,
                                    mergedProperties, propertyId, getFieldManager(), this, this,
                                    customRestrictionFactory==null?restrictionFactory :customRestrictionFactory),
                            filterMappings);
                }
            }
        }
        return
filterMappings;
    }

    @Override
    public List<FilterMapping> getFilterMappings(PersistencePerspective persistencePerspective,
                                                 CriteriaTransferObject cto,
                                                 String ceilingEntityFullyQualifiedClassname,
                                                 Map<String, FieldMetadata> mergedUnfilteredProperties) {
        return getFilterMappings(persistencePerspective, cto, ceilingEntityFullyQualifiedClassname,
                mergedUnfilteredProperties, null);
    }

    @Override
    public void extractProperties(Class<?>[] inheritanceLine, Map<MergedPropertyType, Map<String, FieldMetadata>> mergedProperties, List<Property> properties) {
        extractPropertiesFromMetadata(inheritanceLine, mergedProperties.get(MergedPropertyType.PRIMARY), properties, false, MergedPropertyType.PRIMARY);
    }

    /**
     * Converts merged metadata entries into {@link Property} instances and appends them to
     * properties, skipping (and for non-id fields, warning about) name collisions with
     * properties already present. When isHiddenOverride is set, basic fields are forced
     * to HIDDEN_ALL visibility.
     */
    protected void extractPropertiesFromMetadata(Class<?>[] inheritanceLine, Map<String, FieldMetadata> mergedProperties, List<Property> properties, Boolean isHiddenOverride, MergedPropertyType type) {
        for (Map.Entry<String, FieldMetadata> entry : mergedProperties.entrySet()) {
            String property = entry.getKey();
            Property prop = new Property();
            FieldMetadata metadata = mergedProperties.get(property);
            prop.setName(property);
            Comparator<Property> comparator = new Comparator<Property>() {
                @Override
                public int compare(Property o1, Property o2) {
                    return o1.getName().compareTo(o2.getName());
                }
            };
            // NOTE(review): properties is re-sorted and binary-searched on every iteration
            // purely to detect name collisions -- O(n^2 log n) overall
            Collections.sort(properties, comparator);
            int pos = Collections.binarySearch(properties, prop, comparator);
            if (pos >= 0 && MergedPropertyType.MAPSTRUCTUREKEY != type && MergedPropertyType.MAPSTRUCTUREVALUE != type) {
                logWarn: {
                    if ((metadata instanceof BasicFieldMetadata) && SupportedFieldType.ID.equals(((BasicFieldMetadata) metadata).getFieldType())) {
                        //don't warn for id field collisions, but still ignore the colliding fields
                        break logWarn;
                    }
                    LOG.warn("Detected a field name collision (" + metadata.getTargetClass() + "." + property + ") during inspection for the inheritance line starting with (" + inheritanceLine[0].getName() + "). Ignoring the additional field. This can occur most commonly when using the @AdminPresentationAdornedTargetCollection and the collection type and target class have field names in common. This situation should be avoided, as the system will strip the repeated fields, which can cause unpredictable behavior.");
                }
                continue;
            }
            properties.add(prop);
            prop.setMetadata(metadata);
            if (isHiddenOverride && prop.getMetadata() instanceof BasicFieldMetadata) {
                //this only makes sense for non collection types
                ((BasicFieldMetadata) prop.getMetadata()).setVisibility(VisibilityEnum.HIDDEN_ALL);
            }
        }
    }

    /**
     * Loads the PRIMARY merged properties for the package's ceiling entity into the
     * supplied map.
     */
    @Override
    public void updateMergedProperties(PersistencePackage persistencePackage, Map<MergedPropertyType, Map<String, FieldMetadata>> allMergedProperties) throws ServiceException {
        String ceilingEntityFullyQualifiedClassname = persistencePackage.getCeilingEntityFullyQualifiedClassname();
        try {
            PersistencePerspective persistencePerspective = persistencePackage.getPersistencePerspective();
            Class<?>[] entities = persistenceManager.getPolymorphicEntities(ceilingEntityFullyQualifiedClassname);
            Map<String, FieldMetadata> mergedProperties = persistenceManager.getDynamicEntityDao().getMergedProperties(
                    ceilingEntityFullyQualifiedClassname,
                    entities,
                    (ForeignKey) persistencePerspective.getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.FOREIGNKEY),
                    persistencePerspective.getAdditionalNonPersistentProperties(),
                    persistencePerspective.getAdditionalForeignKeys(),
                    MergedPropertyType.PRIMARY,
                    persistencePerspective.getPopulateToOneFields(),
                    persistencePerspective.getIncludeFields(),
                    persistencePerspective.getExcludeFields(),
                    persistencePerspective.getConfigurationKey(),
                    ""
            );
            allMergedProperties.put(MergedPropertyType.PRIMARY, mergedProperties);
        } catch (Exception e) {
            throw new ServiceException("Unable to fetch results for " + ceilingEntityFullyQualifiedClassname, e);
        }
    }

    @Override
    public EntityResult update(PersistencePackage persistencePackage, boolean includeRealEntityObject) throws ServiceException {
        return
update(persistencePackage, null, true); } @Override public Entity update(PersistencePackage persistencePackage) throws ServiceException { EntityResult er = update(persistencePackage, null, false); return er.getEntity(); } @Override public Entity add(PersistencePackage persistencePackage) throws ServiceException { EntityResult entityResult = add(persistencePackage, false); return entityResult.getEntity(); } @Override public EntityResult add(PersistencePackage persistencePackage, boolean includeRealEntityObject) throws ServiceException { EntityResult entityResult = new EntityResult(); Entity entity = persistencePackage.getEntity(); PersistencePerspective persistencePerspective = persistencePackage.getPersistencePerspective(); ForeignKey foreignKey = (ForeignKey) persistencePerspective.getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.FOREIGNKEY); if (foreignKey != null && !foreignKey.getMutable()) { throw new SecurityServiceException("Entity not mutable"); } try { Class<?>[] entities = persistenceManager.getPolymorphicEntities(persistencePackage.getCeilingEntityFullyQualifiedClassname()); Map<String, FieldMetadata> mergedUnfilteredProperties = persistenceManager.getDynamicEntityDao().getMergedProperties( persistencePackage.getCeilingEntityFullyQualifiedClassname(), entities, foreignKey, persistencePerspective.getAdditionalNonPersistentProperties(), persistencePerspective.getAdditionalForeignKeys(), MergedPropertyType.PRIMARY, persistencePerspective.getPopulateToOneFields(), persistencePerspective.getIncludeFields(), persistencePerspective.getExcludeFields(), persistencePerspective.getConfigurationKey(), "" ); Map<String, FieldMetadata> mergedProperties = filterOutCollectionMetadata(mergedUnfilteredProperties); String idProperty = null; for (String property : mergedProperties.keySet()) { if (((BasicFieldMetadata) mergedProperties.get(property)).getFieldType() == SupportedFieldType.ID) { idProperty = property; break; } } if (idProperty == null) { 
throw new RuntimeException("Could not find a primary key property in the passed entity with type: " + entity.getType()[0]);
            }
            Object primaryKey = null;
            try {
                primaryKey = getPrimaryKey(entity, mergedProperties);
            } catch (Exception e) {
                //don't do anything - this is a valid case
            }
            if (primaryKey == null) {
                // True add: instantiate, populate (never setting the id), and merge
                Serializable instance = (Serializable) Class.forName(entity.getType()[0]).newInstance();
                instance = createPopulatedInstance(instance, entity, mergedProperties, false);
                instance = persistenceManager.getDynamicEntityDao().merge(instance);
                if (includeRealEntityObject) {
                    entityResult.setEntityBackingObject(instance);
                }
                List<Serializable> entityList = new ArrayList<Serializable>(1);
                entityList.add(instance);
                entity = getRecords(mergedProperties, entityList, null, null)[0];
                entityResult.setEntity(entity);
                return entityResult;
            } else {
                // A primary key was submitted -- treat as an update of the existing record
                return update(persistencePackage, primaryKey, includeRealEntityObject);
            }
        } catch (Exception e) {
            throw new ServiceException("Problem adding new entity : " + e.getMessage(), e);
        }
    }

    /**
     * Removes the targeted entity. For NONDESTRUCTIVEREMOVE the instance is only detached
     * from the owning collection (and the many-to-one field nulled when applicable);
     * for BASIC the instance is deleted. Refuses to remove through an immutable foreign key.
     */
    @Override
    public void remove(PersistencePackage persistencePackage) throws ServiceException {
        Entity entity = persistencePackage.getEntity();
        PersistencePerspective persistencePerspective = persistencePackage.getPersistencePerspective();
        ForeignKey foreignKey = (ForeignKey) persistencePerspective.getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.FOREIGNKEY);
        if (foreignKey != null && !foreignKey.getMutable()) {
            throw new SecurityServiceException("Entity not mutable");
        }
        try {
            Class<?>[] entities = persistenceManager.getPolymorphicEntities(persistencePackage.getCeilingEntityFullyQualifiedClassname());
            Map<String, FieldMetadata> mergedUnfilteredProperties = persistenceManager.getDynamicEntityDao().getMergedProperties(
                    persistencePackage.getCeilingEntityFullyQualifiedClassname(),
                    entities,
                    foreignKey,
                    persistencePerspective.getAdditionalNonPersistentProperties(),
                    persistencePerspective.getAdditionalForeignKeys(),
                    MergedPropertyType.PRIMARY,
persistencePerspective.getPopulateToOneFields(),
                    persistencePerspective.getIncludeFields(),
                    persistencePerspective.getExcludeFields(),
                    persistencePerspective.getConfigurationKey(),
                    ""
            );
            Map<String, FieldMetadata> mergedProperties = filterOutCollectionMetadata(mergedUnfilteredProperties);
            Object primaryKey = getPrimaryKey(entity, mergedProperties);
            Serializable instance = persistenceManager.getDynamicEntityDao().retrieve(Class.forName(entity.getType()[0]), primaryKey);
            Assert.isTrue(instance != null, "Entity not found");
            switch (persistencePerspective.getOperationTypes().getRemoveType()) {
                case NONDESTRUCTIVEREMOVE:
                    FieldManager fieldManager = getFieldManager();
                    FieldMetadata manyToFieldMetadata = mergedUnfilteredProperties.get(foreignKey.getManyToField());
                    Object foreignKeyValue = entity.getPMap().get(foreignKey.getManyToField()).getValue();
                    // Keys are usually numeric ids; fall back to the raw String on parse failure
                    try {
                        foreignKeyValue = Long.valueOf((String) foreignKeyValue);
                    } catch (NumberFormatException e) {
                        LOG.warn("Foreign primary key is not of type Long, assuming String for remove lookup");
                    }
                    Serializable foreignInstance = persistenceManager.getDynamicEntityDao().retrieve(Class.forName(foreignKey.getForeignKeyClass()), foreignKeyValue);
                    Collection collection = (Collection) fieldManager.getFieldValue(foreignInstance, foreignKey.getOriginatingField());
                    collection.remove(instance);
                    // if this is a bi-directional @OneToMany/@ManyToOne and there is no @JoinTable (just a foreign key on
                    // the @ManyToOne side) then it will not be updated. In that instance, we have to explicitly
                    // set the manyTo field to null so that subsequent lookups will not find it
                    if (manyToFieldMetadata instanceof BasicFieldMetadata) {
                        if (BooleanUtils.isTrue(((BasicFieldMetadata) manyToFieldMetadata).getRequired())) {
                            throw new ServiceException("Could not remove from the collection as the ManyToOne side is a" +
                                    " non-optional relationship. Consider changing 'optional=true' in the @ManyToOne annotation" +
                                    " or nullable=true within the @JoinColumn annotation");
                        }
                        Field manyToField = fieldManager.getField(instance.getClass(), foreignKey.getManyToField());
                        Object manyToObject = manyToField.get(instance);
                        if (manyToObject != null && !(manyToObject instanceof Collection) && !(manyToObject instanceof Map)) {
                            manyToField.set(instance, null);
                            instance = persistenceManager.getDynamicEntityDao().merge(instance);
                        }
                    }
                    break;
                case BASIC:
                    persistenceManager.getDynamicEntityDao().remove(instance);
                    break;
            }
        } catch (Exception e) {
            throw new ServiceException("Problem removing entity : " + e.getMessage(), e);
        }
    }

    /**
     * Resolves the PRIMARY merged properties for the package's ceiling entity, defaulting
     * the package's fetch type to the ceiling entity when it is not set.
     */
    public Map<String, FieldMetadata> getMergedProperties(PersistencePackage persistencePackage, CriteriaTransferObject cto) throws ServiceException {
        PersistencePerspective persistencePerspective = persistencePackage.getPersistencePerspective();
        String ceilingEntityFullyQualifiedClassname = persistencePackage.getCeilingEntityFullyQualifiedClassname();
        if (StringUtils.isEmpty(persistencePackage.getFetchTypeFullyQualifiedClassname())) {
            persistencePackage.setFetchTypeFullyQualifiedClassname(ceilingEntityFullyQualifiedClassname);
        }
        try {
            Class<?>[] entities = persistenceManager.getDynamicEntityDao().getAllPolymorphicEntitiesFromCeiling(Class.forName(ceilingEntityFullyQualifiedClassname));
            Map<String, FieldMetadata> mergedProperties = persistenceManager.getDynamicEntityDao().getMergedProperties(
                    ceilingEntityFullyQualifiedClassname,
                    entities,
                    (ForeignKey) persistencePerspective.getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.FOREIGNKEY),
                    persistencePerspective.getAdditionalNonPersistentProperties(),
                    persistencePerspective.getAdditionalForeignKeys(),
                    MergedPropertyType.PRIMARY,
                    persistencePerspective.getPopulateToOneFields(),
                    persistencePerspective.getIncludeFields(),
                    persistencePerspective.getExcludeFields(),
                    persistencePerspective.getConfigurationKey(),
                    ""
            );
            return mergedProperties;
        } catch
(Exception e) {
            throw new ServiceException("Unable to fetch results for " + ceilingEntityFullyQualifiedClassname, e);
        }
    }

    /**
     * Fetches a page of records for the ceiling entity, applying the filter/sort
     * criteria from the transfer object plus any additional filter mappings it
     * carries. Returns both the page payload and the unpaged total count.
     *
     * @param persistencePackage descriptor naming the entity to fetch
     * @param cto paging, filtering and sorting criteria
     * @return result set containing the Entity DTO page and total record count
     * @throws ServiceException wrapping any failure during the fetch
     */
    @Override
    public DynamicResultSet fetch(PersistencePackage persistencePackage, CriteriaTransferObject cto) throws ServiceException {
        Entity[] payload;
        int totalRecords;
        PersistencePerspective persistencePerspective = persistencePackage.getPersistencePerspective();
        String ceilingEntityFullyQualifiedClassname = persistencePackage.getCeilingEntityFullyQualifiedClassname();
        try {
            Map<String, FieldMetadata> mergedProperties = getMergedProperties(persistencePackage, cto);
            List<FilterMapping> filterMappings = getFilterMappings(persistencePerspective, cto, persistencePackage.getFetchTypeFullyQualifiedClassname(), mergedProperties);
            // Callers may attach extra, pre-built restrictions on the CTO itself.
            if (CollectionUtils.isNotEmpty(cto.getAdditionalFilterMappings())) {
                filterMappings.addAll(cto.getAdditionalFilterMappings());
            }
            List<Serializable> records = getPersistentRecords(persistencePackage.getFetchTypeFullyQualifiedClassname(), filterMappings, cto.getFirstResult(), cto.getMaxResults());
            // Total count ignores paging (firstResult/maxResults) by design.
            totalRecords = getTotalRecords(persistencePackage.getFetchTypeFullyQualifiedClassname(), filterMappings);
            payload = getRecords(mergedProperties, records, null, null);
        } catch (Exception e) {
            throw new ServiceException("Unable to fetch results for " + ceilingEntityFullyQualifiedClassname, e);
        }
        return new DynamicResultSet(null, payload, totalRecords);
    }

    /**
     * Counts the records matching the given filters. Falls back to the HQL-based
     * special-case builder when JPA criteria conversion fails (embedded-collection
     * paths — see getSpecialCaseQueryBuilder).
     *
     * @param ceilingEntity fully qualified classname of the entity being counted
     * @param filterMappings restrictions to apply
     * @return the matching record count
     */
    @Override
    public Integer getTotalRecords(String ceilingEntity, List<FilterMapping> filterMappings) {
        try {
            return ((Long) criteriaTranslator.translateCountQuery(persistenceManager.getDynamicEntityDao(), ceilingEntity, filterMappings).getSingleResult()).intValue();
        } catch (CriteriaConversionException e) {
            // The offending field path is carried on the exception.
            TypedQueryBuilder builder = getSpecialCaseQueryBuilder(e.getFieldPath(), filterMappings, ceilingEntity);
            return ((Long) builder.toCountQuery(getPersistenceManager().getDynamicEntityDao().getStandardEntityManager()).getSingleResult()).intValue();
        }
    }

    /**
     * Returns the maximum value of {@code maxField} across the records matching the
     * given filters.
     *
     * @param ceilingEntity fully qualified classname of the entity queried
     * @param filterMappings restrictions to apply
     * @param maxField property whose maximum is requested
     * @return the maximum value found
     */
    @Override
    public Serializable getMaxValue(String ceilingEntity, List<FilterMapping> filterMappings, String maxField) {
        return criteriaTranslator.translateMaxQuery(persistenceManager.getDynamicEntityDao(), ceilingEntity, filterMappings, maxField).getSingleResult();
    }

    /**
     * Retrieves the persistent instances matching the filters, honoring paging.
     * Uses the same HQL fallback as getTotalRecords when criteria conversion fails.
     *
     * @param ceilingEntity fully qualified classname of the entity queried
     * @param filterMappings restrictions to apply
     * @param firstResult zero-based offset of the first record (may be null)
     * @param maxResults page size cap (may be null)
     * @return the matching persistent instances
     */
    @Override
    public List<Serializable> getPersistentRecords(String ceilingEntity, List<FilterMapping> filterMappings, Integer firstResult, Integer maxResults) {
        try {
            return criteriaTranslator.translateQuery(persistenceManager.getDynamicEntityDao(), ceilingEntity, filterMappings, firstResult, maxResults).getResultList();
        } catch (CriteriaConversionException e) {
            TypedQueryBuilder builder = getSpecialCaseQueryBuilder(e.getFieldPath(), filterMappings, ceilingEntity);
            return builder.toQuery(getPersistenceManager().getDynamicEntityDao().getStandardEntityManager()).getResultList();
        }
    }

    /**
     * Validates the entity against the populated instance, including properties that
     * were not submitted (convenience overload; delegates with
     * validateUnsubmittedProperties = true).
     */
    @Override
    public boolean validate(Entity entity, Serializable populatedInstance, Map<String, FieldMetadata> mergedProperties) {
        return validate(entity, populatedInstance, mergedProperties, true);
    }

    /**
     * Validates the entity via the configured EntityValidatorService. Validation
     * errors are recorded on the entity itself; the return value reflects that flag.
     *
     * @return true when validation passed (no failure recorded on the entity)
     */
    @Override
    public boolean validate(Entity entity, Serializable populatedInstance, Map<String, FieldMetadata> mergedProperties, boolean validateUnsubmittedProperties) {
        entityValidatorService.validate(entity, populatedInstance, mergedProperties, this, validateUnsubmittedProperties);
        return !entity.isValidationFailure();
    }

    @Override
    public void setPersistenceManager(PersistenceManager persistenceManager) {
        this.persistenceManager = persistenceManager;
    }

    // Delegates module lookup to the owning PersistenceManager (which is expected to
    // implement InspectHelper — the cast will fail fast otherwise).
    @Override
    public PersistenceModule getCompatibleModule(OperationType operationType) {
        return ((InspectHelper) persistenceManager).getCompatibleModule(operationType);
    }

    public FieldPersistenceProvider getDefaultFieldPersistenceProvider() {
        return defaultFieldPersistenceProvider;
    }

    public void setDefaultFieldPersistenceProvider(FieldPersistenceProvider defaultFieldPersistenceProvider) {
        this.defaultFieldPersistenceProvider = defaultFieldPersistenceProvider;
    }

    public List<FieldPersistenceProvider>
getFieldPersistenceProviders() {
        return fieldPersistenceProviders;
    }

    public void setFieldPersistenceProviders(List<FieldPersistenceProvider> fieldPersistenceProviders) {
        this.fieldPersistenceProviders = fieldPersistenceProviders;
    }

    public CriteriaTranslator getCriteriaTranslator() {
        return criteriaTranslator;
    }

    public void setCriteriaTranslator(CriteriaTranslator criteriaTranslator) {
        this.criteriaTranslator = criteriaTranslator;
    }

    public EntityValidatorService getEntityValidatorService() {
        return entityValidatorService;
    }

    public void setEntityValidatorService(EntityValidatorService entityValidatorService) {
        this.entityValidatorService = entityValidatorService;
    }

    public RestrictionFactory getRestrictionFactory() {
        return restrictionFactory;
    }

    public void setRestrictionFactory(RestrictionFactory restrictionFactory) {
        this.restrictionFactory = restrictionFactory;
    }

    public PersistenceManager getPersistenceManager() {
        return persistenceManager;
    }

    /**
     * Use an alternate approach to generating a fetch query for a collection located inside of an @Embeddable object. Related
     * to https://hibernate.atlassian.net/browse/HHH-8802. The alternate approach leverages HQL rather than JPA criteria,
     * which seems to alleviate the problem.
     *
     * @param embeddedCollectionPath the path to the collection field itself
     * @param filterMappings all the fetch restrictions for this request
     * @param collectionClass the type of the collection members
     * @return the builder capable of generating an appropriate HQL query
     * @throws CriteriaConversionException if the path is not of the expected three-part shape
     */
    protected TypedQueryBuilder getSpecialCaseQueryBuilder(FieldPath embeddedCollectionPath, List<FilterMapping> filterMappings, String collectionClass) {
        String specialPath = embeddedCollectionPath.getTargetProperty();
        // The path must look like [embedded field].[collection field].[property].
        String[] pieces = specialPath.split("\\.");
        if (pieces.length != 3) {
            throw new CriteriaConversionException(String.format("Expected to find a target property of format [embedded field].[collection field].[property] for the embedded collection path (%s)", specialPath), embeddedCollectionPath);
        }
        // Everything before the final ".": the [embedded field].[collection field] prefix.
        String expression = specialPath.substring(0, specialPath.lastIndexOf("."));
        TypedQueryBuilder builder;
        try {
            // "specialEntity" aliases the root entity; "embeddedCollection" aliases the
            // joined collection inside the embeddable.
            builder = new TypedQueryBuilder(Class.forName(collectionClass), "specialEntity")
                    .addJoin(new TQJoin("specialEntity." + expression, "embeddedCollection"));
        } catch (Exception e) {
            throw ExceptionHelper.refineException(e);
        }
        // Restrictions on the embedded collection members themselves.
        for (TQRestriction restriction : buildSpecialRestrictions(expression, filterMappings)) {
            builder = builder.addRestriction(restriction);
        }
        // Restrictions on the root entity's ordinary properties.
        for (TQRestriction restriction : buildStandardRestrictions(embeddedCollectionPath, filterMappings)) {
            builder = builder.addRestriction(restriction);
        }
        // Carry over any requested sort orders against the root entity.
        for (FilterMapping mapping : filterMappings) {
            if (mapping.getSortDirection() != null) {
                String mappingProperty = mapping.getFieldPath() == null ? null : mapping.getFieldPath().getTargetProperty();
                if (StringUtils.isEmpty(mappingProperty)) {
                    mappingProperty = mapping.getFullPropertyName();
                }
                builder = builder.addOrder(new TQOrder("specialEntity." + mappingProperty, SortDirection.ASCENDING == mapping.getSortDirection()));
            }
        }
        return builder;
    }

    /**
     * Generate LIKE or EQUALS restrictions for any filter property specified on the root entity (not the collection field in the @Embeddable object)
     *
     * @see #getSpecialCaseQueryBuilder(org.broadleafcommerce.openadmin.server.service.persistence.module.criteria.FieldPath, java.util.List, String)
     * @param embeddedCollectionPath the path for the collection field in the @Embeddable object - this is what caused the whole thing
     * @param filterMappings all the fetch restrictions for this request
     * @return the list of restrictions on the root entity
     */
    protected List<TQRestriction> buildStandardRestrictions(FieldPath embeddedCollectionPath, List<FilterMapping> filterMappings) {
        String expression = embeddedCollectionPath.getTargetProperty().substring(0, embeddedCollectionPath.getTargetProperty().lastIndexOf("."));
        List<TQRestriction> restrictions = new ArrayList<TQRestriction>();
        for (FilterMapping mapping : filterMappings) {
            // Labeled block: "break checkProperty" skips the unsupported-property warning
            // once a restriction has been emitted for this mapping.
            checkProperty: {
                String mappingProperty = mapping.getFieldPath() == null ? null : mapping.getFieldPath().getTargetProperty();
                if (StringUtils.isEmpty(mappingProperty)) {
                    mappingProperty = mapping.getFullPropertyName();
                }
                // Skip the embedded collection path itself; only plain root properties apply.
                if (!embeddedCollectionPath.getTargetProperty().equals(mappingProperty) && !StringUtils.isEmpty(mappingProperty)) {
                    PredicateProvider predicateProvider = mapping.getRestriction().getPredicateProvider();
                    if (predicateProvider != null) {
                        FilterValueConverter converter = mapping.getRestriction().getFilterValueConverter();
                        if (converter != null && CollectionUtils.isNotEmpty(mapping.getFilterValues())) {
                            // Only the first filter value is honored here.
                            Object val = converter.convert(mapping.getFilterValues().get(0));
                            if (predicateProvider instanceof LikePredicateProvider) {
                                // Prefix match, mirroring the LIKE semantics of the criteria path.
                                restrictions.add(new TQRestriction("specialEntity." + mappingProperty, "LIKE", val + "%"));
                                break checkProperty;
                            } else if (predicateProvider instanceof EqPredicateProvider) {
                                restrictions.add(new TQRestriction("specialEntity." + mappingProperty, "=", val));
                                break checkProperty;
                            }
                        }
                    }
                    // Any other predicate type cannot be expressed in this fallback; warn and drop it.
                    LOG.warn(String.format("Unable to filter the embedded collection (%s) on an additional property (%s)", expression, mappingProperty));
                }
            }
        }
        return restrictions;
    }

    /**
     * Generate EQUALS restrictions for any filter property specified on the entity member of the collection field in the @Embeddable object
     *
     * @see #getSpecialCaseQueryBuilder(org.broadleafcommerce.openadmin.server.service.persistence.module.criteria.FieldPath, java.util.List, String)
     * @param specialExpression the String representation of the path for the collection field in the @Embeddable object
     * @param filterMappings all the fetch restrictions for this request
     * @return the list of restrictions on the collection in the @Embeddable object
     */
    protected List<TQRestriction> buildSpecialRestrictions(String specialExpression, List<FilterMapping> filterMappings) {
        List<TQRestriction> restrictions = new ArrayList<TQRestriction>();
        for (FilterMapping mapping : filterMappings) {
            // Only mappings that target a property under the embedded collection prefix apply.
            if (mapping.getFieldPath() != null && mapping.getFieldPath().getTargetProperty() != null && mapping.getFieldPath().getTargetProperty().startsWith(specialExpression)) {
                FilterValueConverter converter = mapping.getRestriction().getFilterValueConverter();
                if (converter != null && CollectionUtils.isNotEmpty(mapping.getFilterValues())) {
                    // Only the first filter value is honored here.
                    Object val = converter.convert(mapping.getFilterValues().get(0));
                    // Trailing segment after the last "." is the member property name.
                    String property = mapping.getFieldPath().getTargetProperty().substring(mapping.getFieldPath().getTargetProperty().lastIndexOf(".") + 1, mapping.getFieldPath().getTargetProperty().length());
                    restrictions.add(new TQRestriction("embeddedCollection." + property, "=", val));
                }
            }
        }
        return restrictions;
    }
}
/* * Copyright (c) 1995, 2008, Oracle and/or its affiliates. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * - Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * - Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * - Neither the name of Oracle or the names of its * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS * IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package components; import javax.swing.*; import javax.swing.SpringLayout; import java.awt.*; /** * A 1.4 file that provides utility methods for creating form- or grid-style * layouts with SpringLayout. These utilities are used by several programs, such * as SpringBox and SpringCompactGrid. */ public class SpringUtilities { /** * A debugging utility that prints to stdout the component's minimum, * preferred, and maximum sizes. 
*/ public static void printSizes(Component c) { System.out.println("minimumSize = " + c.getMinimumSize()); System.out.println("preferredSize = " + c.getPreferredSize()); System.out.println("maximumSize = " + c.getMaximumSize()); } /** * Aligns the first <code>rows</code> * <code>cols</code> components of * <code>parent</code> in a grid. Each component is as big as the maximum * preferred width and height of the components. The parent is made just big * enough to fit them all. * * @param rows * number of rows * @param cols * number of columns * @param initialX * x location to start the grid at * @param initialY * y location to start the grid at * @param xPad * x padding between cells * @param yPad * y padding between cells */ public static void makeGrid(Container parent, int rows, int cols, int initialX, int initialY, int xPad, int yPad) { SpringLayout layout; try { layout = (SpringLayout) parent.getLayout(); } catch (ClassCastException exc) { System.err.println("The first argument to makeGrid must use SpringLayout."); return; } Spring xPadSpring = Spring.constant(xPad); Spring yPadSpring = Spring.constant(yPad); Spring initialXSpring = Spring.constant(initialX); Spring initialYSpring = Spring.constant(initialY); int max = rows * cols; // Calculate Springs that are the max of the width/height so that all // cells have the same size. Spring maxWidthSpring = layout.getConstraints(parent.getComponent(0)).getWidth(); Spring maxHeightSpring = layout.getConstraints(parent.getComponent(0)).getWidth(); for (int i = 1; i < max; i++) { SpringLayout.Constraints cons = layout.getConstraints(parent.getComponent(i)); maxWidthSpring = Spring.max(maxWidthSpring, cons.getWidth()); maxHeightSpring = Spring.max(maxHeightSpring, cons.getHeight()); } // Apply the new width/height Spring. This forces all the // components to have the same size. 
for (int i = 0; i < max; i++) { SpringLayout.Constraints cons = layout.getConstraints(parent.getComponent(i)); cons.setWidth(maxWidthSpring); cons.setHeight(maxHeightSpring); } // Then adjust the x/y constraints of all the cells so that they // are aligned in a grid. SpringLayout.Constraints lastCons = null; SpringLayout.Constraints lastRowCons = null; for (int i = 0; i < max; i++) { SpringLayout.Constraints cons = layout.getConstraints(parent.getComponent(i)); if (i % cols == 0) { // start of new row lastRowCons = lastCons; cons.setX(initialXSpring); } else { // x position depends on previous component cons.setX(Spring.sum(lastCons.getConstraint(SpringLayout.EAST), xPadSpring)); } if (i / cols == 0) { // first row cons.setY(initialYSpring); } else { // y position depends on previous row cons.setY(Spring.sum(lastRowCons.getConstraint(SpringLayout.SOUTH), yPadSpring)); } lastCons = cons; } // Set the parent's size. SpringLayout.Constraints pCons = layout.getConstraints(parent); pCons.setConstraint(SpringLayout.SOUTH, Spring.sum(Spring.constant(yPad), lastCons.getConstraint(SpringLayout.SOUTH))); pCons.setConstraint(SpringLayout.EAST, Spring.sum(Spring.constant(xPad), lastCons.getConstraint(SpringLayout.EAST))); } /* Used by makeCompactGrid. */ private static SpringLayout.Constraints getConstraintsForCell(int row, int col, Container parent, int cols) { SpringLayout layout = (SpringLayout) parent.getLayout(); Component c = parent.getComponent(row * cols + col); return layout.getConstraints(c); } /** * Aligns the first <code>rows</code> * <code>cols</code> components of * <code>parent</code> in a grid. Each component in a column is as wide as * the maximum preferred width of the components in that column; height is * similarly determined for each row. The parent is made just big enough to * fit them all. 
* * @param rows * number of rows * @param cols * number of columns * @param initialX * x location to start the grid at * @param initialY * y location to start the grid at * @param xPad * x padding between cells * @param yPad * y padding between cells */ public static void makeCompactGrid(Container parent, int rows, int cols, int initialX, int initialY, int xPad, int yPad) { SpringLayout layout; try { layout = (SpringLayout) parent.getLayout(); } catch (ClassCastException exc) { System.err.println("The first argument to makeCompactGrid must use SpringLayout."); return; } // Align all cells in each column and make them the same width. Spring x = Spring.constant(initialX); for (int c = 0; c < cols; c++) { Spring width = Spring.constant(0); for (int r = 0; r < rows; r++) { width = Spring.max(width, getConstraintsForCell(r, c, parent, cols).getWidth()); } for (int r = 0; r < rows; r++) { SpringLayout.Constraints constraints = getConstraintsForCell(r, c, parent, cols); constraints.setX(x); constraints.setWidth(width); } x = Spring.sum(x, Spring.sum(width, Spring.constant(xPad))); } // Align all cells in each row and make them the same height. Spring y = Spring.constant(initialY); for (int r = 0; r < rows; r++) { Spring height = Spring.constant(0); for (int c = 0; c < cols; c++) { height = Spring.max(height, getConstraintsForCell(r, c, parent, cols).getHeight()); } for (int c = 0; c < cols; c++) { SpringLayout.Constraints constraints = getConstraintsForCell(r, c, parent, cols); constraints.setY(y); constraints.setHeight(height); } y = Spring.sum(y, Spring.sum(height, Spring.constant(yPad))); } // Set the parent's size. SpringLayout.Constraints pCons = layout.getConstraints(parent); pCons.setConstraint(SpringLayout.SOUTH, y); pCons.setConstraint(SpringLayout.EAST, x); } }
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2011.06.04 at 07:58:30 PM BST
//

package pubmed.openAccess.jaxb.generated;

import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElementRef;
import javax.xml.bind.annotation.XmlElementRefs;
import javax.xml.bind.annotation.XmlID;
import javax.xml.bind.annotation.XmlMixed;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.CollapsedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import elsevier.jaxb.math.mathml.Math;

/**
 * <p>Java class for anonymous complex type.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
* * <pre> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;choice maxOccurs="unbounded" minOccurs="0"> * &lt;element ref="{}inline-supplementary-material"/> * &lt;element ref="{}related-article"/> * &lt;element ref="{}related-object"/> * &lt;element ref="{}hr"/> * &lt;element ref="{}break"/> * &lt;element ref="{}bold"/> * &lt;element ref="{}italic"/> * &lt;element ref="{}monospace"/> * &lt;element ref="{}overline"/> * &lt;element ref="{}overline-start"/> * &lt;element ref="{}overline-end"/> * &lt;element ref="{}roman"/> * &lt;element ref="{}sans-serif"/> * &lt;element ref="{}sc"/> * &lt;element ref="{}strike"/> * &lt;element ref="{}underline"/> * &lt;element ref="{}underline-start"/> * &lt;element ref="{}underline-end"/> * &lt;element ref="{}alternatives"/> * &lt;element ref="{}inline-graphic"/> * &lt;element ref="{}private-char"/> * &lt;element ref="{}chem-struct"/> * &lt;element ref="{}inline-formula"/> * &lt;element ref="{}label"/> * &lt;element ref="{}tex-math"/> * &lt;element ref="{http://www.w3.org/1998/Math/MathML}math"/> * &lt;element ref="{}abbrev"/> * &lt;element ref="{}milestone-end"/> * &lt;element ref="{}milestone-start"/> * &lt;element ref="{}named-content"/> * &lt;element ref="{}styled-content"/> * &lt;element ref="{}price"/> * &lt;element ref="{}annotation"/> * &lt;element ref="{}article-title"/> * &lt;element ref="{}chapter-title"/> * &lt;element ref="{}collab"/> * &lt;element ref="{}comment"/> * &lt;element ref="{}conf-date"/> * &lt;element ref="{}conf-loc"/> * &lt;element ref="{}conf-name"/> * &lt;element ref="{}conf-sponsor"/> * &lt;element ref="{}date"/> * &lt;element ref="{}date-in-citation"/> * &lt;element ref="{}day"/> * &lt;element ref="{}edition"/> * &lt;element ref="{}email"/> * &lt;element ref="{}elocation-id"/> * &lt;element ref="{}etal"/> * &lt;element ref="{}ext-link"/> * &lt;element ref="{}fpage"/> * &lt;element ref="{}gov"/> * &lt;element ref="{}institution"/> * 
 * ... (remainder of the generated XML-schema fragment for the "product"
 * element; the full content model — an open choice of citation/formatting
 * elements plus id/product-type and the xlink attribute group — is defined
 * in the source schema)
 * </pre>
 *
 * <p>NOTE(review): this class is JAXB-generated from the publishing schema.
 * Do not hand-edit; regenerate from the schema instead.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "content"
})
@XmlRootElement(name = "product")
public class Product {

    // Mixed content: character data interleaved with any of the elements below.
    @XmlElementRefs({
        @XmlElementRef(name = "lpage", type = Lpage.class),
        @XmlElementRef(name = "hr", type = Hr.class),
        @XmlElementRef(name = "conf-name", type = ConfName.class),
        @XmlElementRef(name = "conf-sponsor", type = ConfSponsor.class),
        @XmlElementRef(name = "date-in-citation", type = DateInCitation.class),
        @XmlElementRef(name = "inline-formula", type = InlineFormula.class),
        @XmlElementRef(name = "day", type = Day.class),
        @XmlElementRef(name = "conf-date", type = ConfDate.class),
        @XmlElementRef(name = "annotation", type = Annotation.class),
        @XmlElementRef(name = "label", type = Label.class),
        @XmlElementRef(name = "volume", type = Volume.class),
        @XmlElementRef(name = "year", type = Year.class),
        @XmlElementRef(name = "issue-id", type = IssueId.class),
        @XmlElementRef(name = "issn", type = Issn.class),
        @XmlElementRef(name = "private-char", type = PrivateChar.class),
        @XmlElementRef(name = "related-object", type = RelatedObject.class),
        @XmlElementRef(name = "gov", type = Gov.class),
        @XmlElementRef(name = "overline", type = Overline.class),
        @XmlElementRef(name = "price", type = Price.class),
        @XmlElementRef(name = "etal", type = Etal.class),
        @XmlElementRef(name = "fpage", type = Fpage.class),
        @XmlElementRef(name = "uri", type = Uri.class),
        @XmlElementRef(name = "role", type = Role.class),
        @XmlElementRef(name = "strike", type = Strike.class),
        @XmlElementRef(name = "overline-end", type = OverlineEnd.class),
        @XmlElementRef(name = "milestone-start", type = MilestoneStart.class),
        @XmlElementRef(name = "chapter-title", type = ChapterTitle.class),
        @XmlElementRef(name = "series", type = Series.class),
        @XmlElementRef(name = "target", type = Target.class),
        @XmlElementRef(name = "trans-title", type = TransTitle.class),
        @XmlElementRef(name = "ext-link", type = ExtLink.class),
        @XmlElementRef(name = "underline-end", type = UnderlineEnd.class),
        @XmlElementRef(name = "edition", type = Edition.class),
        // MathML lives in its own namespace, unlike the other refs.
        @XmlElementRef(name = "math", namespace = "http://www.w3.org/1998/Math/MathML", type = Math.class),
        @XmlElementRef(name = "named-content", type = NamedContent.class),
        @XmlElementRef(name = "underline", type = Underline.class),
        @XmlElementRef(name = "issue-title", type = IssueTitle.class),
        @XmlElementRef(name = "patent", type = Patent.class),
        @XmlElementRef(name = "volume-series", type = VolumeSeries.class),
        @XmlElementRef(name = "related-article", type = RelatedArticle.class),
        @XmlElementRef(name = "underline-start", type = UnderlineStart.class),
        @XmlElementRef(name = "xref", type = Xref.class),
        @XmlElementRef(name = "comment", type = Comment.class),
        @XmlElementRef(name = "trans-source", type = TransSource.class),
        @XmlElementRef(name = "roman", type = Roman.class),
        @XmlElementRef(name = "size", type = Size.class),
        @XmlElementRef(name = "volume-id", type = VolumeId.class),
        @XmlElementRef(name = "italic", type = Italic.class),
        @XmlElementRef(name = "fn", type = Fn.class),
        @XmlElementRef(name = "source", type = Source.class),
        @XmlElementRef(name = "break", type = Break.class),
        @XmlElementRef(name = "elocation-id", type = ElocationId.class),
        @XmlElementRef(name = "person-group", type = PersonGroup.class),
        @XmlElementRef(name = "publisher-name", type = PublisherName.class),
        @XmlElementRef(name = "part-title", type = PartTitle.class),
        @XmlElementRef(name = "month", type = Month.class),
        @XmlElementRef(name = "supplement", type = Supplement.class),
        @XmlElementRef(name = "issue-part", type = IssuePart.class),
        @XmlElementRef(name = "isbn", type = Isbn.class),
        @XmlElementRef(name = "object-id", type = ObjectId.class),
        @XmlElementRef(name = "email", type = Email.class),
        @XmlElementRef(name = "alternatives", type = Alternatives.class),
        @XmlElementRef(name = "article-title", type = ArticleTitle.class),
        @XmlElementRef(name = "inline-graphic", type = InlineGraphic.class),
        @XmlElementRef(name = "chem-struct", type = ChemStruct.class),
        @XmlElementRef(name = "x", type = X.class),
        @XmlElementRef(name = "styled-content", type = StyledContent.class),
        @XmlElementRef(name = "inline-supplementary-material", type = InlineSupplementaryMaterial.class),
        @XmlElementRef(name = "collab", type = Collab.class),
        @XmlElementRef(name = "pub-id", type = PubId.class),
        @XmlElementRef(name = "name", type = Name.class),
        @XmlElementRef(name = "conf-loc", type = ConfLoc.class),
        @XmlElementRef(name = "publisher-loc", type = PublisherLoc.class),
        @XmlElementRef(name = "monospace", type = Monospace.class),
        @XmlElementRef(name = "sup", type = Sup.class),
        @XmlElementRef(name = "abbrev", type = Abbrev.class),
        @XmlElementRef(name = "string-name", type = StringName.class),
        @XmlElementRef(name = "milestone-end", type = MilestoneEnd.class),
        @XmlElementRef(name = "bold", type = Bold.class),
        @XmlElementRef(name = "sub", type = Sub.class),
        @XmlElementRef(name = "issue", type = Issue.class),
        @XmlElementRef(name = "sans-serif", type = SansSerif.class),
        @XmlElementRef(name = "sc", type = Sc.class),
        @XmlElementRef(name = "season", type = Season.class),
        @XmlElementRef(name = "tex-math", type = TexMath.class),
        @XmlElementRef(name = "date", type = Date.class),
        @XmlElementRef(name = "overline-start", type = OverlineStart.class),
        @XmlElementRef(name = "std", type = Std.class),
        @XmlElementRef(name = "institution", type = Institution.class),
        @XmlElementRef(name = "page-range", type = PageRange.class)
    })
    @XmlMixed
    protected List<Object> content;
    // xsd:ID attribute; collapsed whitespace per the ID lexical space.
    @XmlAttribute
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlID
    @XmlSchemaType(name = "ID")
    protected String id;
    @XmlAttribute(name = "product-type")
    @XmlSchemaType(name = "anySimpleType")
    protected String productType;
    // The remaining attributes form the xlink attribute group.
    @XmlAttribute(namespace = "http://www.w3.org/1999/xlink")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    protected String type;
    @XmlAttribute(namespace = "http://www.w3.org/1999/xlink")
    @XmlSchemaType(name = "anySimpleType")
    protected String href;
    @XmlAttribute(namespace = "http://www.w3.org/1999/xlink")
    @XmlSchemaType(name = "anySimpleType")
    protected String role;
    @XmlAttribute(namespace = "http://www.w3.org/1999/xlink")
    @XmlSchemaType(name = "anySimpleType")
    protected String title;
    @XmlAttribute(namespace = "http://www.w3.org/1999/xlink")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    protected String show;
    @XmlAttribute(namespace = "http://www.w3.org/1999/xlink")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    protected String actuate;

    /**
     * Gets the value of the content property.
     *
     * <p>
     * This accessor method returns a reference to the live list, not a
     * snapshot. Therefore any modification you make to the returned list will
     * be present inside the JAXB object. This is why there is not a
     * <CODE>set</CODE> method for the content property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getContent().add(newItem);
     * </pre>
     *
     * <p>
     * The list holds {@link String} character data mixed with instances of the
     * element classes enumerated in the {@code @XmlElementRefs} annotation on
     * the {@code content} field above.
     */
    public List<Object> getContent() {
        if (content == null) {
            content = new ArrayList<Object>();
        }
        return this.content;
    }

    /**
     * Gets the value of the id property.
     *
     * @return
     *     possible object is
     *     {@link String }
     */
    public String getId() {
        return id;
    }

    /**
     * Sets the value of the id property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     */
    public void setId(String value) {
        this.id = value;
    }

    /**
     * Gets the value of the productType property.
     *
     * @return
     *     possible object is
     *     {@link String }
     */
    public String getProductType() {
        return productType;
    }

    /**
     * Sets the value of the productType property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     */
    public void setProductType(String value) {
        this.productType = value;
    }

    /**
     * Gets the value of the type property.
     *
     * @return
     *     possible object is
     *     {@link String }
     */
    public String getType() {
        return type;
    }

    /**
     * Sets the value of the type property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     */
    public void setType(String value) {
        this.type = value;
    }

    /**
     * Gets the value of the href property.
     *
     * @return
     *     possible object is
     *     {@link String }
     */
    public String getHref() {
        return href;
    }

    /**
     * Sets the value of the href property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     */
    public void setHref(String value) {
        this.href = value;
    }

    /**
     * Gets the value of the role property.
     *
     * @return
     *     possible object is
     *     {@link String }
     */
    public String getRole() {
        return role;
    }

    /**
     * Sets the value of the role property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     */
    public void setRole(String value) {
        this.role = value;
    }

    /**
     * Gets the value of the title property.
     *
     * @return
     *     possible object is
     *     {@link String }
     */
    public String getTitle() {
        return title;
    }

    /**
     * Sets the value of the title property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     */
    public void setTitle(String value) {
        this.title = value;
    }

    /**
     * Gets the value of the show property.
     *
     * @return
     *     possible object is
     *     {@link String }
     */
    public String getShow() {
        return show;
    }

    /**
     * Sets the value of the show property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     */
    public void setShow(String value) {
        this.show = value;
    }

    /**
     * Gets the value of the actuate property.
     *
     * @return
     *     possible object is
     *     {@link String }
     */
    public String getActuate() {
        return actuate;
    }

    /**
     * Sets the value of the actuate property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     */
    public void setActuate(String value) {
        this.actuate = value;
    }

}
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.session.tests; import static org.apache.geode.session.tests.ContainerInstall.TMP_DIR; import java.io.File; import java.io.IOException; import java.util.Arrays; import java.util.HashMap; import org.apache.commons.io.FileUtils; import org.apache.logging.log4j.Logger; import org.codehaus.cargo.container.ContainerType; import org.codehaus.cargo.container.InstalledLocalContainer; import org.codehaus.cargo.container.State; import org.codehaus.cargo.container.configuration.ConfigurationType; import org.codehaus.cargo.container.configuration.LocalConfiguration; import org.codehaus.cargo.container.deployable.WAR; import org.codehaus.cargo.container.property.GeneralPropertySet; import org.codehaus.cargo.container.property.LoggingLevel; import org.codehaus.cargo.container.property.ServletPropertySet; import org.codehaus.cargo.container.tomcat.TomcatPropertySet; import org.codehaus.cargo.generic.DefaultContainerFactory; import org.codehaus.cargo.generic.configuration.DefaultConfigurationFactory; import org.apache.geode.internal.AvailablePortHelper; import org.apache.geode.internal.logging.LogService; /** * Base class for handling the setup and configuration of cargo containers * * This 
class contains common logic for setting up and configuring cargo containers for J2EE * container installations. Also includes some common methods for applying geode session replication * configuration to those containers. * * Subclasses provide setup and configuration of specific containers. */ public abstract class ServerContainer { private final File containerConfigHome; private InstalledLocalContainer container; private ContainerInstall install; private String locatorAddress; private int locatorPort; private File warFile; public String description; public File gemfireLogFile; public File cacheXMLFile; public File logDir; public String loggingLevel; public HashMap<String, String> cacheProperties; public HashMap<String, String> systemProperties; public final String DEFAULT_CONF_DIR; public static final String DEFAULT_LOGGING_LEVEL = LoggingLevel.LOW.getLevel(); public static final String DEFAULT_LOG_DIR = "cargo_logs/"; public static final String DEFAULT_CONFIG_DIR = TMP_DIR + "/cargo_configs/"; public static final Logger logger = LogService.getLogger(); /** * Sets up the container using the given installation * * Sets up a bunch of logging files, default locations, and container properties. * * Creates a whole new cargo configuration and cargo container for the {@link #container} * variable. 
* * @param containerConfigHome The folder that the container configuration folder should be setup * in * @param containerDescriptors A string of extra descriptors for the container used in the * containers {@link #description} */ public ServerContainer(ContainerInstall install, File containerConfigHome, String containerDescriptors) throws IOException { this.install = install; // Get a container description for logging and output description = generateUniqueContainerDescription(containerDescriptors); // Setup logging loggingLevel = DEFAULT_LOGGING_LEVEL; logDir = new File(DEFAULT_LOG_DIR + description); logDir.mkdirs(); logger.info("Creating new container " + description); DEFAULT_CONF_DIR = install.getHome() + "/conf/"; // Use the default configuration home path if not passed a config home this.containerConfigHome = containerConfigHome == null ? new File(DEFAULT_CONFIG_DIR + description) : containerConfigHome; // Init the property lists cacheProperties = new HashMap<>(); systemProperties = new HashMap<>(); // Set WAR file to session testing war warFile = new File(install.getWarFilePath()); // Create the Cargo Container instance wrapping our physical container LocalConfiguration configuration = (LocalConfiguration) new DefaultConfigurationFactory() .createConfiguration(install.getInstallId(), ContainerType.INSTALLED, ConfigurationType.STANDALONE, this.containerConfigHome.getAbsolutePath()); // Set configuration/container logging level configuration.setProperty(GeneralPropertySet.LOGGING, loggingLevel); // Removes secureRandom generation so that container startup is much faster configuration.setProperty(GeneralPropertySet.JVMARGS, "-Djava.security.egd=file:/dev/./urandom"); // Setup the gemfire log file for this container gemfireLogFile = new File(logDir.getAbsolutePath() + "/gemfire.log"); gemfireLogFile.getParentFile().mkdirs(); setSystemProperty("log-file", gemfireLogFile.getAbsolutePath()); logger.info("Gemfire logs can be found in " + 
gemfireLogFile.getAbsolutePath()); // Create the container container = (InstalledLocalContainer) (new DefaultContainerFactory()) .createContainer(install.getInstallId(), ContainerType.INSTALLED, configuration); // Set container's home dir to where it was installed container.setHome(install.getHome()); // Set container output log to directory setup for it container.setOutput(logDir.getAbsolutePath() + "/container.log"); // Set cacheXML file File installXMLFile = install.getCacheXMLFile(); // Sets the cacheXMLFile variable and adds the cache XML file server system property map setCacheXMLFile(new File(logDir.getAbsolutePath() + "/" + installXMLFile.getName())); // Copy the cacheXML file to a new, unique location for this container FileUtils.copyFile(installXMLFile, cacheXMLFile); } /** * Generates a unique, mostly human readable, description string of the container using the * installation's description, extraIdentifiers, and the current system nano time */ public String generateUniqueContainerDescription(String extraIdentifiers) { return String.join("_", Arrays.asList(install.getInstallDescription(), extraIdentifiers, Long.toString(System.nanoTime()))); } /** * Deploys the {@link #warFile} to the cargo container ({@link #container}). 
*/ public void deployWar() { // Get the cargo war from the war file WAR war = new WAR(warFile.getAbsolutePath()); // Set context access to nothing war.setContext(""); // Deploy the war the container's configuration getConfiguration().addDeployable(war); logger.info("Deployed WAR file at " + war.getFile()); } /** * Starts this cargo container by picking the container's ports (RMI, AJP, and regular) and * calling the cargo container's start function */ public void start() { if (container.getState().isStarted()) throw new IllegalArgumentException("Container " + description + " failed to start because it is currently " + container.getState()); LocalConfiguration config = getConfiguration(); int[] ports = AvailablePortHelper.getRandomAvailableTCPPorts(4); // Set container ports from available ports config.setProperty(ServletPropertySet.PORT, Integer.toString(ports[0])); config.setProperty(GeneralPropertySet.RMI_PORT, Integer.toString(ports[1])); config.setProperty(TomcatPropertySet.AJP_PORT, Integer.toString(ports[2])); config.setProperty(GeneralPropertySet.PORT_OFFSET, "0"); config.setProperty(GeneralPropertySet.START_JVMARGS, "-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=" + ports[3]); container.setConfiguration(config); try { logger.info("Starting container " + description + "RMI Port: " + ports[3]); // Writes settings to the expected form (either XML or WAR file) writeSettings(); // Start the container through cargo container.start(); } catch (Exception e) { throw new RuntimeException( "Something very bad happened to this container when starting. 
Check the cargo_logs folder for container logs.", e); } } /** * Stops this cargo container */ public void stop() { if (!container.getState().isStarted()) { throw new IllegalArgumentException("Container " + description + " failed to stop because it is currently " + container.getState()); } container.stop(); } public void dumpLogs() throws IOException { for (File file : logDir.listFiles()) { dumpToStdOut(file); } for (File file : new File(containerConfigHome, "logs").listFiles()) { dumpToStdOut(file); } } private void dumpToStdOut(final File file) throws IOException { System.out.println("-------------------------------------------"); System.out.println(file.getAbsolutePath()); System.out.println("-------------------------------------------"); FileUtils.copyFile(file, System.out); System.out.println("-------------------------------------------"); System.out.println(""); } /** * Copies the container configuration (found through {@link #getConfiguration()}) to the logging * directory specified by {@link #logDir} */ public void cleanUp() throws IOException { File configDir = new File(getConfiguration().getHome()); if (configDir.exists()) { logger.info("Deleting configuration folder " + configDir.getAbsolutePath()); FileUtils.deleteDirectory(configDir); } } /** * Sets the container's locator * * Sets the two variables {@link #locatorAddress} and {@link #locatorPort}. Also calls the * {@link #updateLocator()} function to write the updated locator properties to the file. 
*/ public void setLocator(String address, int port) throws IOException { locatorAddress = address; locatorPort = port; updateLocator(); } /** * Sets the container's cache XML file */ public void setCacheXMLFile(File cacheXMLFile) throws IOException { setSystemProperty("cache-xml-file", cacheXMLFile.getAbsolutePath()); this.cacheXMLFile = cacheXMLFile; } /** * Set a geode session replication property */ public String setCacheProperty(String name, String value) throws IOException { return cacheProperties.put(name, value); } /** * Set geode distributed system property */ public String setSystemProperty(String name, String value) throws IOException { return systemProperties.put(name, value); } /** * Sets the war file for this container to deploy and use */ public void setWarFile(File warFile) { this.warFile = warFile; } /** * set the container's logging level */ public void setLoggingLevel(String loggingLevel) { this.loggingLevel = loggingLevel; LocalConfiguration config = getConfiguration(); config.setProperty(GeneralPropertySet.LOGGING, loggingLevel); container.setConfiguration(config); } public InstalledLocalContainer getContainer() { return container; } public ContainerInstall getInstall() { return install; } public File getWarFile() { return warFile; } public String getLoggingLevel() { return loggingLevel; } public LocalConfiguration getConfiguration() { return container.getConfiguration(); } public State getState() { return container.getState(); } public String getCacheProperty(String name) { return cacheProperties.get(name); } public String getSystemProperty(String name) { return systemProperties.get(name); } /** * Get the RMI port for the container * * Calls {@link #getPort()} with the {@link GeneralPropertySet#RMI_PORT} option. */ public String getRMIPort() { return getPort(GeneralPropertySet.RMI_PORT); } /** * Get the basic port for the container * * Calls {@link #getPort()} with the {@link ServletPropertySet#PORT} option. 
*/ public String getPort() { return getPort(ServletPropertySet.PORT); } /** * The container's port for the specified port type */ public String getPort(String portType) { LocalConfiguration config = getConfiguration(); config.applyPortOffset(); if (!container.getState().isStarted()) throw new IllegalStateException( "Container is not started, thus a port has not yet been assigned to the container."); return config.getPropertyValue(portType); } /** * Called before each container startup * * This is mainly used to write properties to whatever format they need to be in for a given * container before the container is started. The reason for doing this is to make sure that * expensive property updates (such as writing to an XML file or building WAR files from the * command line) only happen as often as they are needed. These kinds of updates usually only need * to happen on container startup. */ public abstract void writeSettings() throws Exception; /** * Human readable description of the container * * @return The {@link #description} variable along with the state of this {@link #container} */ @Override public String toString() { return description + "_<" + container.getState() + ">"; } /** * Updates the address and port of the locator for this container * * For Client Server installations the {@link #cacheXMLFile} is updated with the new address and * port. For Peer to Peer installations the locator must be specified as a system property and so * is added to the {@link #systemProperties} map under the 'locators' key in the form of * '{@link #locatorAddress}[{@link #locatorPort}]'. 
*/ private void updateLocator() throws IOException { if (getInstall().isClientServer()) { HashMap<String, String> attributes = new HashMap<>(); attributes.put("host", locatorAddress); attributes.put("port", Integer.toString(locatorPort)); ContainerInstall.editXMLFile(getSystemProperty("cache-xml-file"), "locator", "pool", attributes, true); } else { setSystemProperty("locators", locatorAddress + "[" + locatorPort + "]"); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.tinkerpop.gremlin.structure.io.gryo; import org.apache.tinkerpop.gremlin.structure.io.IoRegistry; import org.apache.tinkerpop.shaded.kryo.Kryo; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Queue; import java.util.concurrent.LinkedBlockingQueue; import java.util.function.Consumer; import java.util.function.Function; /** * Gryo objects are somewhat expensive to construct (given the dependency on Kryo), therefore this pool helps re-use * those objects. * * @author Marko A. 
Rodriguez (http://markorodriguez.com) * @author Stephen Mallette (http://stephen.genoprime.com) */ public final class GryoPool { public static final String CONFIG_IO_REGISTRY = "gremlin.io.registry"; public static final String CONFIG_IO_GRYO_POOL_SIZE = "gremlin.io.gryo.poolSize"; public static final int CONFIG_IO_GRYO_POOL_SIZE_DEFAULT = 256; public enum Type {READER, WRITER, READER_WRITER} private Queue<GryoReader> gryoReaders; private Queue<GryoWriter> gryoWriters; private Queue<Kryo> kryos; private GryoMapper mapper; public static GryoPool.Builder build() { return new GryoPool.Builder(); } /** * Used by {@code GryoPool.Builder}. */ private GryoPool() { } public GryoMapper getMapper() { return mapper; } public Kryo takeKryo() { final Kryo kryo = kryos.poll(); return null == kryo ? mapper.createMapper() : kryo; } public GryoReader takeReader() { final GryoReader reader = this.gryoReaders.poll(); return null == reader ? GryoReader.build().mapper(mapper).create() : reader; } public GryoWriter takeWriter() { final GryoWriter writer = this.gryoWriters.poll(); return null == writer ? 
GryoWriter.build().mapper(mapper).create() : writer; } public void offerKryo(final Kryo kryo) { kryos.offer(kryo); } public void offerReader(final GryoReader gryoReader) { gryoReaders.offer(gryoReader); } public void offerWriter(final GryoWriter gryoWriter) { gryoWriters.offer(gryoWriter); } public <A> A readWithKryo(final Function<Kryo, A> kryoFunction) { final Kryo kryo = takeKryo(); final A a = kryoFunction.apply(kryo); offerKryo(kryo); return a; } public void writeWithKryo(final Consumer<Kryo> kryoConsumer) { final Kryo kryo = takeKryo(); kryoConsumer.accept(kryo); offerKryo(kryo); } public <A> A doWithReader(final Function<GryoReader, A> readerFunction) { final GryoReader gryoReader = takeReader(); final A a = readerFunction.apply(gryoReader); offerReader(gryoReader); return a; } public void doWithWriter(final Consumer<GryoWriter> writerFunction) { final GryoWriter gryoWriter = takeWriter(); writerFunction.accept(gryoWriter); offerWriter(gryoWriter); } private void createPool(final int poolSize, final Type type, final GryoMapper gryoMapper) { this.mapper = gryoMapper; if (type.equals(Type.READER) || type.equals(Type.READER_WRITER)) { gryoReaders = new LinkedBlockingQueue<>(poolSize); for (int i = 0; i < poolSize; i++) { gryoReaders.add(GryoReader.build().mapper(gryoMapper).create()); } } if (type.equals(Type.WRITER) || type.equals(Type.READER_WRITER)) { gryoWriters = new LinkedBlockingQueue<>(poolSize); for (int i = 0; i < poolSize; i++) { gryoWriters.add(GryoWriter.build().mapper(gryoMapper).create()); } } kryos = new LinkedBlockingQueue<>(poolSize); for (int i = 0; i < poolSize; i++) { kryos.add(gryoMapper.createMapper()); } } //// public static class Builder { private int poolSize = 256; private List<IoRegistry> ioRegistries = new ArrayList<>(); private Type type = Type.READER_WRITER; private Consumer<GryoMapper.Builder> gryoMapperConsumer = null; /** * The {@code IoRegistry} class names to use for the {@code GryoPool} * * @param ioRegistryClassNames a list 
of class names * @return the update builder */ public Builder ioRegistries(final List<Object> ioRegistryClassNames) { this.ioRegistries.addAll(tryCreateIoRegistry(ioRegistryClassNames)); return this; } /** * The {@code IoRegistry} class name to use for the {@code GryoPool} * * @param ioRegistryClassName a class name * @return the update builder */ public Builder ioRegistry(final Object ioRegistryClassName) { this.ioRegistries.addAll(tryCreateIoRegistry(Collections.singletonList(ioRegistryClassName))); return this; } /** * The size of the {@code GryoPool}. The size can not be changed once created. * * @param poolSize the pool size * @return the updated builder */ public Builder poolSize(int poolSize) { this.poolSize = poolSize; return this; } /** * The type of {@code GryoPool} to support -- see {@code Type} * * @param type the pool type * @return the updated builder */ public Builder type(final Type type) { this.type = type; return this; } /** * A consumer to update the {@code GryoMapper.Builder} once constructed. * * @param gryoMapperConsumer the {@code GryoMapper.Builder} consumer * @return the updated builder */ public Builder initializeMapper(final Consumer<GryoMapper.Builder> gryoMapperConsumer) { this.gryoMapperConsumer = gryoMapperConsumer; return this; } /** * Create the {@code GryoPool} from this builder. 
* * @return the new pool */ public GryoPool create() { final GryoMapper.Builder mapper = GryoMapper.build(); final GryoPool gryoPool = new GryoPool(); if (null != this.ioRegistries) this.ioRegistries.forEach(mapper::addRegistry); if (null != this.gryoMapperConsumer) this.gryoMapperConsumer.accept(mapper); gryoPool.createPool(this.poolSize, this.type, mapper.create()); return gryoPool; } ///// private static List<IoRegistry> tryCreateIoRegistry(final List<Object> classNames) { if (classNames.isEmpty()) return Collections.emptyList(); final List<IoRegistry> registries = new ArrayList<>(); classNames.forEach(c -> { try { final String className = c.toString(); final Class<?> clazz = Class.forName(className); try { final Method instanceMethod = clazz.getDeclaredMethod("getInstance"); if (IoRegistry.class.isAssignableFrom(instanceMethod.getReturnType())) registries.add((IoRegistry) instanceMethod.invoke(null)); else throw new Exception(); } catch (Exception methodex) { // tried getInstance() and that failed so try newInstance() no-arg constructor registries.add((IoRegistry) clazz.newInstance()); } } catch (Exception ex) { throw new IllegalStateException(ex); } }); return registries; } } }
/* Copyright (c) 2017 lib4j
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * You should have received a copy of The MIT License (MIT) along with this
 * program. If not, see <http://opensource.org/licenses/MIT/>.
 */
package org.lib4j.util;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.Set;

import org.junit.Assert;
import org.junit.ComparisonFailure;
import org.junit.Test;

/**
 * Tests that every mutating/accessing path of {@code ObservableList} (direct
 * calls, iterators, list-iterators and sub-lists) fires the matching
 * before/after callback exactly once, and fires no unrelated callbacks.
 *
 * <p>The test works by flipping a boolean flag in each callback override and
 * then asserting the expected flag combination after each list operation.
 */
public class ObservableListTest {
  // When true, afterGet() replaces the fetched element via set(), so the
  // set-callback flags are expected alongside the get flags.
  private boolean testingGetReplace = false;
  // The element value the callbacks expect to see for the current operation.
  private String expectedString;
  private boolean beforeGet;
  private boolean afterGet;
  private boolean beforeAdd;
  private boolean afterAdd;
  private boolean beforeRemove;
  private boolean afterRemove;
  private boolean beforeSet;
  private boolean afterSet;
  // Offset added to callback indexes while operating through a subList() view.
  private int fromIndex;

  // Clears all callback flags between operations.
  private void reset() {
    beforeGet = false;
    afterGet = false;
    beforeAdd = false;
    afterAdd = false;
    beforeRemove = false;
    afterRemove = false;
    beforeSet = false;
    afterSet = false;
  }

  // Asserts only the get callbacks fired (set callbacks are tolerated while
  // testingGetReplace is active), then resets the flags.
  private void assertGot() {
    Assert.assertTrue(beforeGet && afterGet);
    if (!testingGetReplace)
      Assert.assertFalse(beforeRemove || afterRemove || beforeAdd || afterAdd || beforeSet || afterSet);

    reset();
  }

  // Asserts only the remove callbacks fired, then resets the flags.
  private void assertRemoved() {
    Assert.assertTrue(beforeRemove && afterRemove);
    Assert.assertFalse(beforeAdd || afterAdd || beforeSet || afterSet);
    reset();
  }

  // Asserts only the add callbacks fired, then resets the flags.
  private void assertAdded() {
    Assert.assertTrue(beforeAdd && afterAdd);
    Assert.assertFalse(beforeRemove || afterRemove || beforeSet || afterSet);
    reset();
  }

  // Asserts only the set callbacks fired, then resets the flags.
  private void assertSet() {
    Assert.assertTrue(beforeSet && afterSet);
    Assert.assertFalse(beforeRemove || afterRemove || beforeAdd || afterAdd);
    reset();
  }

  @Test
  public void test() {
    final ObservableList<String> list = new ObservableList<>(new ArrayList<String>()) {
      @Override
      protected void beforeGet(final int index, final ListIterator<String> iterator) {
        beforeGet = true;
      }

      @Override
      protected void afterGet(final int index, final String e, final ListIterator<String> iterator, final RuntimeException exception) {
        afterGet = true;
        // In replace mode, write the element back through whichever access
        // path was used (iterator vs. index), exercising the set callbacks.
        if (testingGetReplace) {
          if (iterator != null)
            iterator.set(e.intern());
          else
            set(index, e.intern());
        }
      }

      @Override
      protected boolean beforeAdd(final int index, final String e) {
        Assert.assertEquals(expectedString, e);
        // Element must not be present yet when beforeAdd fires.
        Assert.assertFalse(contains(e));
        beforeAdd = true;
        return true;
      }

      @Override
      protected boolean beforeRemove(final int index) {
        // fromIndex compensates for subList() views, whose callback indexes
        // are relative to the view, not the backing list.
        final String e = get(index + fromIndex);
        Assert.assertEquals(expectedString, e);
        Assert.assertTrue(contains(e));
        beforeRemove = true;
        return true;
      }

      @Override
      protected void afterAdd(final int index, final String e, final RuntimeException exception) {
        Assert.assertEquals(expectedString, e);
        // Element must be present once afterAdd fires.
        Assert.assertTrue(contains(e));
        afterAdd = true;
      }

      @Override
      protected void afterRemove(final Object e, final RuntimeException exception) {
        Assert.assertEquals(expectedString, e);
        Assert.assertFalse(contains(e));
        afterRemove = true;
      }

      @Override
      protected boolean beforeSet(final int index, final String newElement) {
        // In replace mode the "new" element equals the old one (intern()),
        // so the uniqueness assertions only apply outside that mode.
        if (!testingGetReplace) {
          Assert.assertEquals(expectedString, newElement);
          Assert.assertFalse(contains(newElement));
        }

        beforeSet = true;
        return true;
      }

      @Override
      protected void afterSet(final int index, final String oldElement, final RuntimeException exception) {
        if (!testingGetReplace) {
          final String e = get(index + fromIndex);
          Assert.assertEquals(expectedString, e);
          Assert.assertTrue(contains(e));
        }

        afterSet = true;
      }
    };

    // add()
    for (int i = 0; i < 100; i++) {
      list.add(expectedString = String.valueOf(i));
      assertAdded();
    }

    // get()
    for (int i = 0; i < 100; i++) {
      list.get(i);
      assertGot();
    }

    // iterator.get()
    for (final String s : list) {
      Assert.assertNotNull(s);
      assertGot();
    }

    // testingGetReplace: each get() now also triggers a set-back of the same
    // (interned) value, through both the index and iterator paths.
    testingGetReplace = true;
    for (int i = 0; i < 100; i++) {
      list.get(i);
      assertGot();
    }

    Iterator<String> iterator = list.iterator();
    while (iterator.hasNext()) {
      iterator.next();
      assertGot();
    }

    testingGetReplace = false;

    // addAll()
    list.addAll(Arrays.asList(expectedString = String.valueOf(101)));
    assertAdded();

    // subList(): removals through the view must fire callbacks on the parent,
    // with indexes offset by fromIndex.
    final int size = list.size();
    final List<String> subList = list.subList(fromIndex = 33, 44);
    Assert.assertEquals(11, subList.size());
    subList.remove(expectedString = String.valueOf(40));
    assertRemoved();

    final Iterator<String> subListIterator = subList.iterator();
    expectedString = subListIterator.next();
    subListIterator.remove();
    assertRemoved();

    Assert.assertEquals(9, subList.size());
    Assert.assertEquals(size, list.size() + 2);
    fromIndex = 0;

    // remove()
    for (int i = 0; i < 100; i += 7) {
      list.remove(expectedString = String.valueOf(i));
      assertRemoved();
    }

    // iterator()
    iterator = list.iterator();
    while (iterator.hasNext()) {
      final String element = iterator.next();
      if (String.valueOf(0).equals(element) || String.valueOf(33).equals(element) || String.valueOf(100).equals(element)) {
        expectedString = element;
        iterator.remove();
        assertRemoved();
      }
    }

    // listIterator(): exercise remove(), set() and add() through the iterator.
    final ListIterator<String> listIterator = list.listIterator();
    while (listIterator.hasNext()) {
      final String element = listIterator.next();
      if (String.valueOf(0).equals(element) || String.valueOf(33).equals(element) || String.valueOf(100).equals(element)) {
        expectedString = element;
        listIterator.remove();
        assertRemoved();
      }
      else if (String.valueOf(8).equals(element) || String.valueOf(45).equals(element) || String.valueOf(76).equals(element)) {
        listIterator.set(expectedString = String.valueOf(100 + Math.random() * 10));
        assertSet();
      }
      else if (String.valueOf(12).equals(element) || String.valueOf(24).equals(element) || String.valueOf(73).equals(element)) {
        listIterator.add(expectedString = String.valueOf(200 + Math.random() * 10));
        assertAdded();
      }
    }

    // removeIf()
    for (int i = 9; i < 100; i += 23) {
      final int index = i;
      list.removeIf(s -> (expectedString = String.valueOf(index)).equals(s));
      assertRemoved();
    }

    // removeAll()
    list.removeAll(Arrays.asList(expectedString = "97"));
    assertRemoved();

    // retainAll(): removing "37" from the retain-set forces exactly one removal.
    final Set<String> set = new HashSet<>(list);
    Assert.assertTrue(set.remove(expectedString = String.valueOf(37)));
    list.retainAll(set);
    assertRemoved();

    // clear(): the first removed element is "37" while expectedString is "1"...
    // the beforeRemove assertEquals is expected to fail with exactly this
    // ComparisonFailure message; any other message is a real failure.
    try {
      list.clear();
      Assert.fail("Expected ComparisonFailure");
    }
    catch (final ComparisonFailure e) {
      if (!"expected:<[37]> but was:<[1]>".equals(e.getMessage()))
        throw e;
    }
  }
}
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.identitymanagement.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for the IAM UpdateServerCertificate operation.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/iam-2010-05-08/UpdateServerCertificate" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UpdateServerCertificateRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * The name of the server certificate that you want to update. A string of upper and lowercase alphanumeric
     * characters with no spaces; may also include any of the following characters: _+=,.@-
     */
    private String serverCertificateName;

    /**
     * The new path for the server certificate. Include this only when updating the certificate's path. Either a
     * forward slash (/) by itself, or a string beginning and ending with forward slashes that may contain any ASCII
     * character from ! (\u0021) through DEL (\u007F).
     */
    private String newPath;

    /**
     * The new name for the server certificate. Include this only when renaming the certificate. Cannot contain
     * spaces; alphanumeric characters plus any of the following: _+=,.@-
     */
    private String newServerCertificateName;

    /**
     * Default constructor. Callers should populate the object afterwards via the setters or the fluent
     * {@code with*} methods.
     */
    public UpdateServerCertificateRequest() {
    }

    /**
     * Convenience constructor.
     *
     * @param serverCertificateName
     *        the name of the server certificate that you want to update
     */
    public UpdateServerCertificateRequest(String serverCertificateName) {
        setServerCertificateName(serverCertificateName);
    }

    /**
     * Sets the name of the server certificate that you want to update.
     *
     * @param serverCertificateName
     *        the name of the server certificate that you want to update
     */
    public void setServerCertificateName(String serverCertificateName) {
        this.serverCertificateName = serverCertificateName;
    }

    /**
     * @return the name of the server certificate that you want to update
     */
    public String getServerCertificateName() {
        return this.serverCertificateName;
    }

    /**
     * Fluent variant of {@link #setServerCertificateName(String)}.
     *
     * @param serverCertificateName
     *        the name of the server certificate that you want to update
     * @return this object, so that method calls can be chained together
     */
    public UpdateServerCertificateRequest withServerCertificateName(String serverCertificateName) {
        this.serverCertificateName = serverCertificateName;
        return this;
    }

    /**
     * Sets the new path for the server certificate. Include this only when updating the certificate's path.
     *
     * @param newPath
     *        the new path for the server certificate
     */
    public void setNewPath(String newPath) {
        this.newPath = newPath;
    }

    /**
     * @return the new path for the server certificate
     */
    public String getNewPath() {
        return this.newPath;
    }

    /**
     * Fluent variant of {@link #setNewPath(String)}.
     *
     * @param newPath
     *        the new path for the server certificate
     * @return this object, so that method calls can be chained together
     */
    public UpdateServerCertificateRequest withNewPath(String newPath) {
        this.newPath = newPath;
        return this;
    }

    /**
     * Sets the new name for the server certificate. Include this only when renaming the certificate.
     *
     * @param newServerCertificateName
     *        the new name for the server certificate; cannot contain spaces
     */
    public void setNewServerCertificateName(String newServerCertificateName) {
        this.newServerCertificateName = newServerCertificateName;
    }

    /**
     * @return the new name for the server certificate
     */
    public String getNewServerCertificateName() {
        return this.newServerCertificateName;
    }

    /**
     * Fluent variant of {@link #setNewServerCertificateName(String)}.
     *
     * @param newServerCertificateName
     *        the new name for the server certificate; cannot contain spaces
     * @return this object, so that method calls can be chained together
     */
    public UpdateServerCertificateRequest withNewServerCertificateName(String newServerCertificateName) {
        this.newServerCertificateName = newServerCertificateName;
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Output format (including the trailing commas between present fields) is kept
        // identical to the generated SDK representation.
        StringBuilder buf = new StringBuilder();
        buf.append("{");
        if (getServerCertificateName() != null)
            buf.append("ServerCertificateName: ").append(getServerCertificateName()).append(",");
        if (getNewPath() != null)
            buf.append("NewPath: ").append(getNewPath()).append(",");
        if (getNewServerCertificateName() != null)
            buf.append("NewServerCertificateName: ").append(getNewServerCertificateName());
        buf.append("}");
        return buf.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is false for null, so no separate null check is required.
        if (!(obj instanceof UpdateServerCertificateRequest))
            return false;
        UpdateServerCertificateRequest that = (UpdateServerCertificateRequest) obj;
        return java.util.Objects.equals(getServerCertificateName(), that.getServerCertificateName())
                && java.util.Objects.equals(getNewPath(), that.getNewPath())
                && java.util.Objects.equals(getNewServerCertificateName(), that.getNewServerCertificateName());
    }

    @Override
    public int hashCode() {
        // Same 31-based accumulation as the generated code; Objects.hashCode(x) is
        // exactly (x == null) ? 0 : x.hashCode().
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + java.util.Objects.hashCode(getServerCertificateName());
        hashCode = prime * hashCode + java.util.Objects.hashCode(getNewPath());
        hashCode = prime * hashCode + java.util.Objects.hashCode(getNewServerCertificateName());
        return hashCode;
    }

    @Override
    public UpdateServerCertificateRequest clone() {
        return (UpdateServerCertificateRequest) super.clone();
    }

}
package info.fingo.urlopia.request;

import info.fingo.urlopia.acceptance.Acceptance;
import info.fingo.urlopia.request.occasional.OccasionalType;
import info.fingo.urlopia.user.User;

import javax.persistence.*;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.util.Arrays;
import java.util.Set;
import java.util.stream.Collectors;

/**
 * JPA entity representing a single leave/absence request made by a {@link User}.
 *
 * <p>A request covers the inclusive date range {@code [startDate, endDate]}, carries a
 * {@link RequestType} (with an optional free-form {@code typeInfo} qualifier), a
 * {@link Status}, and the set of leader {@link Acceptance}s attached to it.
 */
@Entity
@Table(name = "Requests")
public class Request {

    @Id
    @SequenceGenerator(name = "requests_id_seq", sequenceName = "requests_id_seq", allocationSize = 1)
    // NOTE(review): GenerationType.IDENTITY ignores the "generator" attribute, so the
    // declared sequence generator above is unused; GenerationType.SEQUENCE looks intended.
    // Confirm against the database schema before changing — switching strategies changes
    // how ids are assigned.
    @GeneratedValue(strategy = GenerationType.IDENTITY, generator = "requests_id_seq")
    private Long id;

    // Creation timestamp; initialized to now() in the no-arg constructor.
    @Column(nullable = false)
    private LocalDateTime created;

    // Last-modification timestamp; initialized to now() in the no-arg constructor.
    @Column(nullable = false)
    private LocalDateTime modified;

    @ManyToOne
    @JoinColumn(nullable = false)
    private User requester;

    // First day of the absence (inclusive).
    @Column(nullable = false)
    private LocalDate startDate;

    // Last day of the absence (inclusive).
    @Column(nullable = false)
    private LocalDate endDate;

    @Column(nullable = false)
    private Integer workingDays;

    @Column(nullable = false)
    @Enumerated(EnumType.STRING)
    private RequestType type;

    // Name of the TypeInfo (e.g. an occasional-leave subtype); stored denormalized as a string.
    @Column
    private String typeInfo;

    @Column
    @Enumerated(EnumType.STRING)
    private Status status;

    @OneToMany(mappedBy = "request")
    private Set<Acceptance> acceptances;

    /** Required by JPA; also stamps {@code created} and {@code modified} with the current time. */
    public Request() {
        this.created = LocalDateTime.now();
        this.modified = LocalDateTime.now();
    }

    /**
     * Creates a fully-specified request.
     *
     * @param typeInfo optional subtype qualifier; may be null
     */
    public Request(User requester,
                   LocalDate startDate,
                   LocalDate endDate,
                   Integer workingDays,
                   RequestType type,
                   TypeInfo typeInfo,
                   Status status) {
        this();
        this.requester = requester;
        this.startDate = startDate;
        this.endDate = endDate;
        this.workingDays = workingDays;
        this.type = type;
        this.typeInfo = (typeInfo != null) ? typeInfo.getName() : null;
        this.status = status;
    }

    /**
     * Creates a request without a working-days count (left null for later computation).
     *
     * @param typeInfo optional subtype qualifier; may be null
     */
    public Request(User requester,
                   LocalDate startDate,
                   LocalDate endDate,
                   RequestType type,
                   TypeInfo typeInfo,
                   Status status) {
        this();
        this.requester = requester;
        this.startDate = startDate;
        this.endDate = endDate;
        this.type = type;
        this.typeInfo = (typeInfo != null) ? typeInfo.getName() : null;
        this.status = status;
    }

    /**
     * Creates a SPECIAL request that is immediately ACCEPTED, with a free-form type description.
     */
    public Request(User requester, LocalDate startDate, LocalDate endDate, Integer workingDays, String typeInfo) {
        this();
        this.requester = requester;
        this.startDate = startDate;
        this.endDate = endDate;
        this.workingDays = workingDays;
        this.typeInfo = typeInfo;
        this.type = RequestType.SPECIAL;
        this.status = Status.ACCEPTED;
    }

    public Long getId() {
        return id;
    }

    public LocalDateTime getCreated() {
        return created;
    }

    public void setCreated(LocalDateTime created) {
        this.created = created;
    }

    public LocalDateTime getModified() {
        return modified;
    }

    public void setModified(LocalDateTime modified) {
        this.modified = modified;
    }

    public User getRequester() {
        return requester;
    }

    public LocalDate getStartDate() {
        return startDate;
    }

    public LocalDate getEndDate() {
        return endDate;
    }

    public RequestType getType() {
        return type;
    }

    public void setType(RequestType type) {
        this.type = type;
    }

    /** @return human-readable description, e.g. {@code "2020-01-01 - 2020-01-05 (wedding)"} */
    public String getRequestDescription() {
        return "%s (%s)".formatted(getTerm(), typeInfo);
    }

    /** @return the raw stored type-info string (used for SPECIAL requests). */
    public String getSpecialTypeInfo() {
        return typeInfo;
    }

    /**
     * Resolves the stored type-info name back to a {@link TypeInfo} constant.
     *
     * @return the matching {@link OccasionalType}, or null when none matches
     */
    public TypeInfo getTypeInfo() {
        // Renamed the lambda parameter: the original "typeInfo" shadowed the entity field
        // of the same name, which was confusing to read.
        return Arrays.stream(OccasionalType.values()) // TODO: remove OccasionalType from here
                .filter(candidate -> candidate.getName().equals(this.typeInfo))
                .findFirst()
                .orElse(null);
    }

    /**
     * @param typeInfo new subtype qualifier; null clears the stored value
     *        (null-safe for consistency with the constructors)
     */
    public void setTypeInfo(TypeInfo typeInfo) {
        this.typeInfo = (typeInfo != null) ? typeInfo.getName() : null;
    }

    public Status getStatus() {
        return status;
    }

    public void setStatus(Status status) {
        this.status = status;
    }

    public Integer getWorkingDays() {
        return workingDays;
    }

    /** @return working days converted to hours using the requester's daily work time. */
    public float getWorkingHours() {
        return workingDays * requester.getWorkTime();
    }

    public void setWorkingDays(Integer workingDays) {
        this.workingDays = workingDays;
    }

    public Set<Acceptance> getAcceptances() {
        return acceptances;
    }

    /** @return full names of all leaders who have an acceptance attached to this request. */
    @Transient
    public Set<String> getDeciders() {
        return this.acceptances.stream()
                .map(Acceptance::getLeader)
                .map(User::getFullName)
                .collect(Collectors.toSet());
    }

    /** @return true when the request still counts against the requester's pool (accepted or pending). */
    @Transient
    public boolean isAffecting() {
        return this.status == Status.ACCEPTED || this.status == Status.PENDING;
    }

    /** @return true when this request's inclusive date range intersects the other request's range. */
    @Transient
    public boolean isOverlapping(Request request) {
        return !this.startDate.isAfter(request.endDate) && !this.endDate.isBefore(request.startDate);
    }

    @Transient
    public boolean isNormal() {
        return this.type == RequestType.NORMAL;
    }

    @Transient
    public boolean isPending() {
        return this.status == Status.PENDING;
    }

    /** @return "start - end" using ISO dates; missing dates render as empty strings. */
    @Transient
    public String getTerm() {
        var start = startDate == null ? "" : startDate.toString();
        var end = endDate == null ? "" : endDate.toString();
        return start + " - " + end;
    }

    /** Subtype qualifier contract for a request type. */
    public interface TypeInfo { // TODO: separate TypeInfo interface
        String getInfo();

        String getName();
    }

    /** Lifecycle states of a request. */
    public enum Status {
        PENDING,
        ACCEPTED,
        REJECTED,
        CANCELED
    }
}
/*
 * Copyright (C) 2008 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.gxp.compiler.scala;

import com.google.common.base.CharEscapers;
import com.google.common.base.Joiner;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.gxp.compiler.alerts.AlertSink;
import com.google.gxp.compiler.alerts.common.MissingTypeError;
import com.google.gxp.compiler.base.JavaAnnotation;
import com.google.gxp.compiler.base.NativeType;
import com.google.gxp.compiler.base.OutputLanguage;
import com.google.gxp.compiler.codegen.OutputLanguageUtil;
import com.google.gxp.compiler.codegen.IllegalTypeError;

import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Contains static functions for validating scala expressions,
 * and a couple additional scala utility functions.
 */
public class ScalaUtil extends OutputLanguageUtil {

  private ScalaUtil() {
    super(RESERVED_WORDS, FORBIDDEN_OPS, OPS_FINDER,
          // TODO(harryh): is javaStringUnicodeEscaper() really the right thing here?
          CharEscapers.javaStringUnicodeEscaper(),
          CharEscapers.javascriptEscaper());
  }

  //
  // READ THIS BEFORE YOU CHANGE THE LIST BELOW!
  //
  // This list of disabled operators was copied from the JavaScript backend,
  // which in turn was based on the list of disabled Java operators. If you
  // want to enable something here, see about getting it enabled for Java as
  // well.
  //
  private static final Set<String> FORBIDDEN_OPS = ImmutableSet.of(
      // simple boolean
      // "!",
      // "!=",
      // "==",
      // "===",

      // boolean connectives
      // "&&",
      // "||",

      // boolean comparators
      // ">",
      // ">=",
      // "<",
      // "<=",

      // arithmetic
      // "*",
      // "+",
      // "-",
      // "/",
      // "%",

      // conditional operator (really ?:)
      // "?",

      // type inspection
      "in", "instanceof", "typeof",

      // object instantiation/deletion
      // "new",
      // "delete",
      "void",

      // bitwise
      "^", "~", "&", "<<", ">>", ">>>", "|",

      // assignment -- I can't imagine any reason why it would ever be a good
      // idea to re-enable these,
      "--", "-=", "/=", "*=", "&=", "%=", "++", "+=",
      "<<=", "=", ">>=", ">>>=", "|=", "^=");

  // the order is important! The '|' operator is non-greedy in
  // regexes. Sorting in order of descending length works.
  //
  private static final Pattern OPS_FINDER = compileUnionPattern(
      "\\binstanceof\\b",
      "\\bdelete\\b",
      "\\btypeof\\b",
      "\\bvoid\\b",
      Pattern.quote(">>>="),
      Pattern.quote("<<="),
      Pattern.quote(">>="),
      Pattern.quote(">>>"),
      Pattern.quote("==="),
      Pattern.quote("--"),
      Pattern.quote("-="),
      Pattern.quote("!="),
      Pattern.quote("/="),
      Pattern.quote("^="),
      Pattern.quote("*="),
      Pattern.quote("&&"),
      Pattern.quote("&="),
      Pattern.quote("%="),
      Pattern.quote("++"),
      Pattern.quote("+="),
      Pattern.quote("<<"),
      Pattern.quote("<="),
      Pattern.quote("=="),
      Pattern.quote(">="),
      Pattern.quote(">>"),
      Pattern.quote("|="),
      Pattern.quote("||"),
      "\\bnew\\b",
      "\\bin\\b",
      Pattern.quote("-"),
      Pattern.quote("!"),
      Pattern.quote("/"),
      Pattern.quote("^"),
      Pattern.quote("~"),
      Pattern.quote("*"),
      Pattern.quote("&"),
      Pattern.quote("%"),
      Pattern.quote("+"),
      Pattern.quote("<"),
      Pattern.quote("="),
      Pattern.quote(">"),
      Pattern.quote("|"),
      Pattern.quote("?")); // just use ? to find ternary operator...

  private static final ImmutableSet<String> RESERVED_WORDS = ImmutableSet.of();
  /* TODO(harryh): fill this out
      "abstract", "as", "assert", "boolean", "break", "byte", "case", "catch",
      "char", "class", "continue", "const", "debugger", "default", "do",
      "double", "else", "enum", "export", "extends", "false", "final",
      "finally", "float", "for", "function", "goto", "if", "implements",
      "import", "in", "instanceof", "int", "interface", "is", "long",
      "namespace", "native", "new", "null", "package", "private", "protected",
      "public", "return", "short", "static", "super", "switch", "synchronized",
      "this", "throw", "throws", "transient", "true", "try", "typeof", "use",
      "var", "void", "volitile", "while", "with");*/

  // Wildcard bound keywords allowed in type arguments (ex: "? extends Foo").
  private static final Set<String> TYPE_ARGUMENT_QUALIFIERS =
      ImmutableSet.of("extends", "super");

  private static final Pattern IDENTIFIER_REGEX =
      Pattern.compile("^[A-Za-z_][A-Za-z0-9_]*$");

  // Matches one type token (identifier or punctuation) and captures the rest in group 2.
  private static final Pattern TYPE_TOKEN_REGEX =
      Pattern.compile("^([A-Za-z_][A-Za-z0-9_]*|[\\?\\[\\]<>{},\\.])(.*)", Pattern.DOTALL);

  private static final Map<String, String> PRIMITIVE_TO_BOXED_MAP =
      ImmutableMap.<String, String>builder()
        .put("boolean", "Boolean")
        .put("byte", "Number")
        .put("char", "Character")
        .put("double", "Number")
        .put("float", "Number")
        .put("int", "Number")
        .put("long", "Number")
        .put("short", "Number")
        .build();

  private static final Set<String> PRIMITIVE_TYPES =
      ImmutableSet.copyOf(PRIMITIVE_TO_BOXED_MAP.keySet());

  /** @return true if {@code s} names one of the recognized primitive types. */
  public static final boolean isPrimitiveType(String s) {
    return PRIMITIVE_TYPES.contains(s);
  }

  /** @return true if {@code s} is a legal, non-reserved identifier. */
  private static boolean isIdentifier(String s) {
    return s != null
        && !RESERVED_WORDS.contains(s)
        && IDENTIFIER_REGEX.matcher(s).matches();
  }

  /**
   * Validate the given NativeType and adds alerts to the sink if
   * necessary.
   *
   * @return a String representing the validated type
   */
  public static String validateType(AlertSink alertSink, NativeType type) {
    String ret = type.getNativeType(OutputLanguage.SCALA);
    if (ret == null) {
      alertSink.add(new MissingTypeError(type, OutputLanguage.SCALA));
      return ret;
    }

    // {}s are accepted as substitutes for []s; normalize the returned string.
    ret = ret.replace('{', '[').replace('}', ']').trim();

    // tokenize the type (the tokenizer itself understands {} directly,
    // so the un-normalized string is parsed here)
    Queue<String> tokens = new LinkedList<String>();
    String s = type.getNativeType(OutputLanguage.SCALA).trim();
    while (s.length() != 0) {
      Matcher m = TYPE_TOKEN_REGEX.matcher(s);
      if (m.find()) {
        tokens.add(m.group(1));
        s = m.group(2).trim();
      } else {
        alertSink.add(new IllegalTypeError(type, OutputLanguage.SCALA));
        return ret;
      }
    }

    // the full token stream must be consumed by a single well-formed type
    if (!(parseType(tokens) && tokens.isEmpty())) {
      alertSink.add(new IllegalTypeError(type, OutputLanguage.SCALA));
    }

    return ret;
  }

  /**
   * Validate the given NativeType and adds alerts to the sink if
   * necessary.  Allows for conjunctive types (ex: Foo & Bar).
   *
   * This is taking a bit of a shortcut, as if java ever allowed something like
   * "extends List<? extends Foo & Bar> & Baz" an alert would be incorrectly
   * be generated.  Java doesn't currently allow this though so we're fine.
   *
   * @return a String representing the validated type
   */
  public static String validateConjunctiveType(AlertSink alertSink, NativeType type) {
    List<String> subTypes = Lists.newArrayList();
    for (String subType : type.getNativeType(OutputLanguage.SCALA).split("&")) {
      subTypes.add(validateType(alertSink, new NativeType(type, subType)));
    }
    return Joiner.on(" & ").join(subTypes);
  }

  /**
   * Validate that the given {@link JavaAnnotation} contains a well formed
   * Scala annotation.
   *
   * @return the well formed annotation.
   */
  public static String validateAnnotation(AlertSink alertSink, JavaAnnotation annotation) {
    // TODO(harryh): actually do some validation
    return annotation.getWith();
  }

  /**
   * Parses the following rule from the JLS:
   *   Type:
   *     Identifier [TypeArguments]{ .   Identifier [TypeArguments]} {[]}
   *     BasicType {[]}
   * (actually, this rule deviates from the JLS, which seems to have a
   * bug in that it doesn't allow arrays of BasicTypes)
   *
   * MODIFIED so that {}s can sub for []s
   *
   * @return true if a well-formed type was consumed from the head of the queue
   */
  private static boolean parseType(Queue<String> tokens) {
    if (isPrimitiveType(tokens.peek())) {
      tokens.poll();
    } else {
      // dotted chain of identifiers, each optionally followed by type arguments
      while (true) {
        if (!isIdentifier(tokens.poll())) {
          return false;
        }
        if ("[".equals(tokens.peek())) {
          if (!parseTypeArguments(tokens, "[", "]")) {
            return false;
          }
        }
        if ("{".equals(tokens.peek())) {
          if (!parseTypeArguments(tokens, "{", "}")) {
            return false;
          }
        }
        if (".".equals(tokens.peek())) {
          tokens.poll();
        } else {
          break;
        }
      }
    }

    // trailing array dimensions: zero or more "[]" pairs
    while ("[".equals(tokens.peek())) {
      tokens.poll();
      if (!"]".equals(tokens.poll())) {
        return false;
      }
    }

    return true;
  }

  /**
   * Parses the following rule from the JLS:
   *   TypeArguments:
   *     < TypeArgument {, TypeArgument} >
   *
   * MODIFIED so that {}s can sub for []s
   *
   * @return true if a well-formed argument list delimited by start/end was consumed
   */
  private static boolean parseTypeArguments(Queue<String> tokens, String start, String end) {
    if (!tokens.poll().equals(start)) {
      return false;
    }

    while (true) {
      if (!parseTypeArgument(tokens)) {
        return false;
      }
      if (",".equals(tokens.peek())) {
        tokens.poll();
      } else {
        break;
      }
    }

    return (end.equals(tokens.poll()));
  }

  /**
   * Parses the following rule from the JLS:
   *   TypeArgument:
   *     Type
   *     ? [( extends | super ) Type]
   *
   * @return true if a well-formed type argument was consumed from the head of the queue
   */
  private static boolean parseTypeArgument(Queue<String> tokens) {
    if ("?".equals(tokens.peek())) {
      tokens.poll();
      if (TYPE_ARGUMENT_QUALIFIERS.contains(tokens.peek())) {
        tokens.poll();
        return parseType(tokens);
      }
    } else {
      return parseType(tokens);
    }

    return true;
  }

  //////////////////////////////////////////////////////////////////////
  // Functions for moving back and forth between reference and
  // primitive types
  //////////////////////////////////////////////////////////////////////

  /**
   * @return the most general reference type that corresponds to the specified
   * a Java type, or the specified Java type if it is already a reference type
   * (ie: a class/interface).
   */
  public static String toReferenceType(String type) {
    String result = PRIMITIVE_TO_BOXED_MAP.get(type);
    return (result == null) ? type : result;
  }

  /**
   * @return an expression that unboxes {@code expr} when {@code type} is a
   * primitive type, otherwise {@code expr} unchanged.
   */
  public static String unbox(String expr, String type) {
    if (PRIMITIVE_TO_BOXED_MAP.containsKey(type)) {
      return "(" + expr + ")." + type + "Value()";
    } else {
      return expr;
    }
  }

  //////////////////////////////////////////////////////////////////////
  // Primitive Parsing
  //////////////////////////////////////////////////////////////////////

  // strictly check for either "true" or "false"
  private static final Predicate<String> ISVALID_BOOLEAN =
    new Predicate<String>() {
      public boolean apply(String s) {
        s = s.trim();
        return (s.equals("true") || s.equals("false"));
      }
    };

  // relies on Byte.valueOf throwing NumberFormatException for bad input,
  // which isValidPrimitive converts into a false result
  private static final Predicate<String> ISVALID_BYTE =
    new Predicate<String>() {
      public boolean apply(String s) {
        Byte.valueOf(s.trim());
        return true;
      }
    };

  // as long as s is a single character return it unmodified
  private static final Predicate<String> ISVALID_CHAR =
    new Predicate<String>() {
      public boolean apply(String s) {
        return (s.length() == 1);
      }
    };

  private static final Predicate<String> ISVALID_DOUBLE =
    new Predicate<String>() {
      public boolean apply(String s) {
        Double.valueOf(s.trim());
        return true;
      }
    };

  private static final Predicate<String> ISVALID_FLOAT =
    new Predicate<String>() {
      public boolean apply(String s) {
        Float.valueOf(s.trim());
        return true;
      }
    };

  private static final Predicate<String> ISVALID_INT =
    new Predicate<String>() {
      public boolean apply(String s) {
        Integer.valueOf(s.trim());
        return true;
      }
    };

  private static final Predicate<String> ISVALID_LONG =
    new Predicate<String>() {
      public boolean apply(String s) {
        Long.valueOf(s.trim());
        return true;
      }
    };

  private static final Predicate<String> ISVALID_SHORT =
    new Predicate<String>() {
      public boolean apply(String s) {
        Short.valueOf(s.trim());
        return true;
      }
    };

  private static final Map<String, Predicate<String>> PRIMITIVE_TO_VALIDATOR =
      ImmutableMap.<String, Predicate<String>>builder()
        .put("boolean", ISVALID_BOOLEAN)
        .put("byte", ISVALID_BYTE)
        .put("char", ISVALID_CHAR)
        .put("double", ISVALID_DOUBLE)
        .put("float", ISVALID_FLOAT)
        .put("int", ISVALID_INT)
        .put("long", ISVALID_LONG)
        .put("short", ISVALID_SHORT)
        .build();

  /**
   * @return true if the primitive is a valid literal of the specified
   * type, false otherwise (including when {@code type} is not a known
   * primitive type).
   */
  public static final boolean isValidPrimitive(String primitive, String type) {
    Predicate<String> validator = PRIMITIVE_TO_VALIDATOR.get(type);
    if (validator == null) {
      // Previously an unknown type name caused a NullPointerException here
      // instead of the documented "false" result.
      return false;
    }
    try {
      return validator.apply(primitive);
    } catch (NumberFormatException e) {
      return false;
    }
  }

  /**
   * Static Singleton Instance
   *
   * Must be declared last in the source file.
   */
  public static final ScalaUtil INSTANCE = new ScalaUtil();
}
/*
 * Copyright 2015 Open Networking Laboratory
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.onosproject.pcepio.protocol.ver1;

import org.jboss.netty.buffer.ChannelBuffer;
import org.onosproject.pcepio.exceptions.PcepParseException;
import org.onosproject.pcepio.protocol.PcepMetricObject;
import org.onosproject.pcepio.types.PcepObjectHeader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.base.MoreObjects;

/**
 * Provides PCEP metric object.
 */
public class PcepMetricObjectVer1 implements PcepMetricObject {

    /*
     METRIC Object Body Format.

      0                   1                   2                   3
      0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
     +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     |          Reserved             |    Flags  |C|B|       T       |
     +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     |                          metric-value                         |
     +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     */

    protected static final Logger log = LoggerFactory.getLogger(PcepMetricObjectVer1.class);

    public static final byte METRIC_OBJ_TYPE = 1;
    public static final byte METRIC_OBJ_CLASS = 6;
    public static final byte METRIC_OBJECT_VERSION = 1;
    public static final short METRIC_OBJ_MINIMUM_LENGTH = 12;
    public static final int OBJECT_HEADER_LENGTH = 4;
    // Bit positions within the 32-bit body word used by write(): C flag at
    // bit 9, B flag at bit 8, T (type) in the low byte.
    public static final int IFLAG_SHIFT_VALUE = 9;
    public static final int BTYPE_SHIFT_VALUE = 8;
    public static final int CFLAG_SET = 1;
    public static final int CFLAG_RESET = 0;
    public static final int BFLAG_SET = 1;
    public static final int BFLAG_RESET = 0;
    // Mask for the C flag within the single flags byte read by read().
    public static final byte CFLAG_CHECK = 0x02;

    static final PcepObjectHeader DEFAULT_METRIC_OBJECT_HEADER = new PcepObjectHeader(METRIC_OBJ_CLASS,
            METRIC_OBJ_TYPE, PcepObjectHeader.REQ_OBJ_OPTIONAL_PROCESS, PcepObjectHeader.RSP_OBJ_PROCESSED,
            METRIC_OBJ_MINIMUM_LENGTH);

    private PcepObjectHeader metricObjHeader;
    private int iMetricVal;    // metric-value field (raw 32 bits)
    private byte yFlag; // 6-flags
    private boolean bCFlag;    // C (computed-metric) flag
    private boolean bBFlag;    // B (bound) flag
    private byte bType;        // T field: metric type

    /**
     * Default constructor.
     */
    public PcepMetricObjectVer1() {
        this.metricObjHeader = null;
        this.iMetricVal = 0;
        this.yFlag = 0;
        this.bCFlag = false;
        this.bBFlag = false;
        this.bType = 0;
    }

    /**
     * Constructor to initialize all member variables.
     *
     * @param metricObjHeader metric object header
     * @param iMetricVal metric value
     * @param yFlag Y flag
     * @param bCFlag C flag
     * @param bBFlag B flag
     * @param bType Type value
     */
    public PcepMetricObjectVer1(PcepObjectHeader metricObjHeader, int iMetricVal, byte yFlag, boolean bCFlag,
            boolean bBFlag, byte bType) {

        this.metricObjHeader = metricObjHeader;
        this.iMetricVal = iMetricVal;
        this.yFlag = yFlag;
        this.bCFlag = bCFlag;
        this.bBFlag = bBFlag;
        this.bType = bType;
    }

    @Override
    public void setMetricVal(int value) {
        this.iMetricVal = value;
    }

    @Override
    public int getMetricVal() {
        return this.iMetricVal;
    }

    @Override
    public byte getYFlag() {
        return this.yFlag;
    }

    @Override
    public void setYFlag(byte value) {
        this.yFlag = value;
    }

    @Override
    public boolean getCFlag() {
        return this.bCFlag;
    }

    @Override
    public void setCFlag(boolean value) {
        this.bCFlag = value;
    }

    @Override
    public boolean getBFlag() {
        return this.bBFlag;
    }

    @Override
    public void setBFlag(boolean value) {
        this.bBFlag = value;
    }

    @Override
    public byte getBType() {
        return this.bType;
    }

    @Override
    public void setBType(byte value) {
        this.bType = value;
    }

    /**
     * Sets metric Object Header.
     *
     * @param obj metric object header
     */
    public void setMetricObjHeader(PcepObjectHeader obj) {
        this.metricObjHeader = obj;
    }

    /**
     * Returns metric Object Header.
     *
     * @return metricObjHeader
     */
    public PcepObjectHeader getMetricObjHeader() {
        return this.metricObjHeader;
    }

    /**
     * Reads from channel buffer and returns object of PcepMetricObject.
     *
     * @param cb of channel buffer.
     * @return object of PcepMetricObject
     * @throws PcepParseException when metric object is not present in channel buffer
     */
    public static PcepMetricObject read(ChannelBuffer cb) throws PcepParseException {

        log.debug("MetricObject::read");
        PcepObjectHeader metricObjHeader;
        int iMetricVal;
        byte yFlag; // 6-flags
        boolean bCFlag;
        boolean bBFlag;
        byte bType;

        metricObjHeader = PcepObjectHeader.read(cb);

        if (metricObjHeader.getObjClass() != METRIC_OBJ_CLASS) {
            throw new PcepParseException("This object is not a Metric Object. Object Class: "
                    + metricObjHeader.getObjClass());
        }

        //take only metric buffer.
        ChannelBuffer tempCb = cb.readBytes(metricObjHeader.getObjLen() - OBJECT_HEADER_LENGTH);
        tempCb.readShort(); // skip the 16-bit Reserved field
        yFlag = tempCb.readByte();    // flags byte: C at bit 1, B at bit 0
        bType = tempCb.readByte();
        bCFlag = (yFlag & CFLAG_CHECK) == CFLAG_CHECK;
        // BFLAG_SET (1) doubles as the bit-0 mask for the B flag here.
        bBFlag = (yFlag & BFLAG_SET) == BFLAG_SET;
        iMetricVal = tempCb.readInt();

        return new PcepMetricObjectVer1(metricObjHeader, iMetricVal, yFlag, bCFlag, bBFlag, bType);
    }

    @Override
    public int write(ChannelBuffer cb) throws PcepParseException {

        //write Object header
        int objStartIndex = cb.writerIndex();
        int objLenIndex = metricObjHeader.write(cb);

        if (objLenIndex <= 0) {
            throw new PcepParseException("Error: ObjectLength is " + objLenIndex);
        }

        // Pack C flag (bit 9), B flag (bit 8) and T (low byte) into one word.
        int iFlag = (bCFlag) ? CFLAG_SET : CFLAG_RESET;
        int iTemp = iFlag << IFLAG_SHIFT_VALUE;
        iFlag = (bBFlag) ? BFLAG_SET : BFLAG_RESET;
        iTemp = iTemp | (iFlag << BTYPE_SHIFT_VALUE);
        iTemp = iTemp | bType;
        cb.writeInt(iTemp);
        cb.writeInt(iMetricVal);

        // Back-patch the object length now that the body size is known.
        short hLength = (short) (cb.writerIndex() - objStartIndex);
        cb.setShort(objLenIndex, hLength);
        //will be helpful during print().
        metricObjHeader.setObjLen(hLength);
        return hLength;
    }

    /**
     * Builder class for PCEP metric object.
     */
    public static class Builder implements PcepMetricObject.Builder {

        private boolean bIsHeaderSet = false;
        private PcepObjectHeader metricObjHeader;
        private int iMetricVal;
        private boolean bIsMetricValSet = false;
        private byte yFlag; // 6-flags
        private boolean bCFlag;
        private boolean bBFlag;
        private byte bType;
        private boolean bIsbTypeSet = false;

        private boolean bIsPFlagSet = false;
        private boolean bPFlag;

        private boolean bIsIFlagSet = false;
        private boolean bIFlag;

        @Override
        public PcepMetricObject build() throws PcepParseException {

            PcepObjectHeader metricObjHeader = this.bIsHeaderSet ? this.metricObjHeader : DEFAULT_METRIC_OBJECT_HEADER;

            // Metric value and type are mandatory fields of the object body.
            if (!this.bIsMetricValSet) {
                throw new PcepParseException(" Metric Value NOT Set while building PcepMetricObject.");
            }
            if (!this.bIsbTypeSet) {
                throw new PcepParseException(" Type NOT Set while building PcepMetricObject.");
            }

            if (bIsPFlagSet) {
                metricObjHeader.setPFlag(bPFlag);
            }

            if (bIsIFlagSet) {
                metricObjHeader.setIFlag(bIFlag);
            }

            return new PcepMetricObjectVer1(metricObjHeader, iMetricVal, yFlag, bCFlag, bBFlag, bType);
        }

        @Override
        public PcepObjectHeader getMetricObjHeader() {
            return this.metricObjHeader;
        }

        @Override
        public Builder setMetricObjHeader(PcepObjectHeader obj) {
            this.metricObjHeader = obj;
            this.bIsHeaderSet = true;
            return this;
        }

        @Override
        public int getMetricVal() {
            return this.iMetricVal;
        }

        @Override
        public Builder setMetricVal(int value) {
            this.iMetricVal = value;
            this.bIsMetricValSet = true;
            return this;
        }

        @Override
        public byte getYFlag() {
            return this.yFlag;
        }

        @Override
        public Builder setYFlag(byte value) {
            this.yFlag = value;
            return this;
        }

        @Override
        public boolean getCFlag() {
            return this.bCFlag;
        }

        @Override
        public Builder setCFlag(boolean value) {
            this.bCFlag = value;
            return this;
        }

        @Override
        public boolean getBFlag() {
            return this.bBFlag;
        }

        @Override
        public Builder setBFlag(boolean value) {
            this.bBFlag = value;
            return this;
        }

        @Override
        public byte getBType() {
            return this.bType;
        }

        @Override
        public Builder setBType(byte value) {
            this.bType = value;
            this.bIsbTypeSet = true;
            return this;
        }

        @Override
        public Builder setPFlag(boolean value) {
            this.bPFlag = value;
            this.bIsPFlagSet = true;
            return this;
        }

        @Override
        public Builder setIFlag(boolean value) {
            this.bIFlag = value;
            this.bIsIFlagSet = true;
            return this;
        }
    }

    @Override
    public String toString() {
        return MoreObjects.toStringHelper(getClass())
                .add("MetricValue", iMetricVal)
                .add("BFlag", bBFlag)
                .add("CFlag", bCFlag)
                .add("BType", bType)
                .toString();
    }
}
package test.Droidlogin;

import java.util.ArrayList;

import org.apache.http.NameValuePair;
import org.apache.http.message.BasicNameValuePair;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import test.Droidlogin.library.Httppostaux;
import test.Droidlogin.library.setting;
import android.app.Activity;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.SystemClock;
import android.os.Vibrator;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import android.widget.Toast;

/**
 * Login screen: validates the entered credentials against a remote PHP
 * endpoint (via {@link Httppostaux}) and, on success, launches the next
 * activity passing the username along.
 */
public class Login extends Activity {
    /** Called when the activity is first created. */
    EditText user;
    EditText pass;
    Button blogin;
    TextView registrar;
    TextView fpassw;
    Httppostaux post;
    boolean result_back;
    private ProgressDialog pDialog;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);

        post=new Httppostaux();

        user= (EditText) findViewById(R.id.edusuario);
        pass= (EditText) findViewById(R.id.edpassword);
        blogin= (Button) findViewById(R.id.Blogin);
        registrar=(TextView) findViewById(R.id.link_to_register);
        fpassw=(TextView) findViewById(R.id.forgotpassw);

        //Login button action
        blogin.setOnClickListener(new View.OnClickListener(){
            public void onClick(View view){
                // Extract the values from the EditText fields.
                String usuario=user.getText().toString();
                String passw=pass.getText().toString();

                // Check that neither field is blank.
                if( checklogindata( usuario , passw )==true){
                    // Validation passed: run the AsyncTask with user and
                    // password as parameters.
                    new asynclogin().execute(usuario,passw);
                }else{
                    // First-stage validation failed: vibrate and show a
                    // Toast with an error message.
                    err_login();
                }
            }
        });

        registrar.setOnClickListener(new View.OnClickListener(){
            public void onClick(View view){
                // Open the browser at the adduser.html registration form.
                String url = "http://"+setting.IP_Server+"/droidlogin/adduser.html";
                Intent i = new Intent(Intent.ACTION_VIEW);
                i.setData(Uri.parse(url));
                startActivity(i);
            }
        });

        fpassw.setOnClickListener(new View.OnClickListener(){
            public void onClick(View view){
                // "Forgot password" is not implemented yet; just show a Toast.
                Toast toast1 = Toast.makeText(getApplicationContext(),"Pos nimodo... es una alpha todavia :v", Toast.LENGTH_LONG);
                toast1.show();
            }
        });
    }

    // Vibrates the device and shows an error Toast.
    public void err_login(){
        Vibrator vibrator =(Vibrator) getSystemService(Context.VIBRATOR_SERVICE);
        vibrator.vibrate(200);
        Toast toast1 = Toast.makeText(getApplicationContext(),"Error:Nombre de usuario o password incorrectos", Toast.LENGTH_SHORT);
        toast1.show();
    }

    /* Checks the login status remotely; only needs the username and
     * password as parameters. Returns true when the server reports a
     * valid login. NOTE: performs blocking network I/O, so it must be
     * called off the UI thread (see asynclogin below). */
    public boolean loginstatus(String username ,String password )
    {
        int logstatus=-1;

        /* Build a name/value ArrayList with the credentials and send it
         * via POST to the backend for validation. */
        ArrayList<NameValuePair> postparameters2send= new ArrayList<NameValuePair>();

        postparameters2send.add(new BasicNameValuePair("usuario",username));
        postparameters2send.add(new BasicNameValuePair("password",password));

        // Perform the request; the response comes back as a JSON array.
        JSONArray jdata=post.getserverdata(postparameters2send, setting.URL_connect+"/acces.php");

        /* When running locally the round trip is almost instantaneous, so
         * sleep briefly to make the progress dialog visible. This line can
         * be removed if not wanted. */
        SystemClock.sleep(950);

        // If we got a non-empty response...
        if (jdata!=null && jdata.length() > 0){
            JSONObject json_data; // JSON object for the first element

            try {
                json_data = jdata.getJSONObject(0); // read the first (and only) segment
                logstatus=json_data.getInt("logstatus"); // extract the value
                Log.e("loginstatus","logstatus= "+logstatus); // log what we got
            } catch (JSONException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }

            // Interpret the received value.
            if (logstatus==0){// [{"logstatus":"0"}]
                Log.e("loginstatus ", "invalido");
                return false;
            }
            else{// [{"logstatus":"1"}]
                Log.e("loginstatus ", "valido");
                return true;
            }
        }else{
            // Invalid JSON received: check the web-side code.
            Log.e("JSON ", "ERROR");
            return false;
        }
    }

    // Validates that neither field is blank.
    public boolean checklogindata(String username ,String password ){
        if (username.equals("") || password.equals("")){
            Log.e("Login ui", "checklogindata user or pass error");
            return false;
        }else{
            return true;
        }
    }

    /* ASYNCTASK CLASS
     *
     * Used to show the progress dialog while the credentials are sent and
     * the response is received. The same could be done without it, but if
     * the response time is too long (slow connection or slow server) the
     * app would become unstable and an ANR dialog would appear. */
    class asynclogin extends AsyncTask< String, String, String > {

        String user,pass;

        protected void onPreExecute() {
            // Set up and show the progress dialog.
            pDialog = new ProgressDialog(Login.this);
            pDialog.setMessage("Autenticando....");
            pDialog.setIndeterminate(false);
            pDialog.setCancelable(false);
            pDialog.show();
        }

        protected String doInBackground(String... params) {
            // Pull user and password out of the parameters.
            user=params[0];
            pass=params[1];

            // Send, receive and analyze the data in the background.
            if (loginstatus(user,pass)==true){
                return "ok";  // valid login
            }else{
                return "err"; // invalid login
            }
        }

        /* Once doInBackground finishes, either move to the next activity
         * or show the error, depending on the result. */
        protected void onPostExecute(String result) {
            pDialog.dismiss(); // hide the progress dialog
            Log.e("onPostExecute=",""+result);

            if (result.equals("ok")){
                //Intent i=new Intent(Login.this, HiScreen.class);
                Intent i=new Intent(Login.this, Mesh.class);
                i.putExtra("user",user);
                startActivity(i);
            }else{
                err_login();
            }
        }
    }
}
//-----------------------------------------------------------------------
/**
 * Copyright (C) 2014 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.sesame.web.functionconfig;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import javax.annotation.Nullable;

import org.apache.commons.lang.StringUtils;

import com.google.common.collect.ImmutableMap;
import com.opengamma.core.config.impl.ConfigItem;
import com.opengamma.core.link.ConfigLink;
import com.opengamma.master.config.ConfigMaster;
import com.opengamma.master.config.ConfigSearchRequest;
import com.opengamma.master.config.ConfigSearchResult;
import com.opengamma.sesame.OutputName;
import com.opengamma.sesame.config.EngineUtils;
import com.opengamma.sesame.config.FunctionArguments;
import com.opengamma.sesame.config.FunctionModelConfig;
import com.opengamma.sesame.function.AvailableImplementations;
import com.opengamma.sesame.function.AvailableOutputs;
import com.opengamma.sesame.function.Parameter;
import com.opengamma.sesame.function.ParameterType;
import com.opengamma.sesame.graph.ArgumentConversionErrorNode;
import com.opengamma.sesame.graph.ArgumentNode;
import com.opengamma.sesame.graph.CannotBuildNode;
import com.opengamma.sesame.graph.ClassNode;
import com.opengamma.sesame.graph.FunctionModel;
import com.opengamma.sesame.graph.FunctionModelNode;
import com.opengamma.sesame.graph.InterfaceNode;
import com.opengamma.sesame.graph.MissingArgumentNode;
import com.opengamma.sesame.graph.MissingConfigNode;
import com.opengamma.sesame.graph.NoImplementationNode;
import com.opengamma.sesame.graph.convert.ArgumentConverter;
import com.opengamma.util.ArgumentChecker;

/**
 * Builds maps representing the JSON used in the function configuration web app.
 */
public class ConfigJsonBuilder {

  // Keys used in the JSON maps exchanged with the client.
  private static final String FUNC = "func";
  private static final String IMPL = "impl";
  private static final String IMPLS = "impls";
  private static final String ARGS = "args";
  private static final String NAME = "name";
  private static final String VALUE = "value";
  private static final String ERROR = "error";
  private static final String TYPE = "type";
  private static final String COL_NAME = "colName";
  private static final String INPUT_TYPES = "inputTypes";
  private static final String INPUT_TYPE = "inputType";
  private static final String OUTPUT_NAMES = "outputNames";
  private static final String OUTPUT_NAME = "outputName";
  private static final String FUNCTIONS = "functions";
  private static final String CONFIGS = "configs";

  private final AvailableOutputs _availableOutputs;
  private final AvailableImplementations _availableImplementations;
  private final ConfigMaster _configMaster;
  private final ArgumentConverter _argumentConverter;

  /**
   * @param availableOutputs the functions known to the engine that can calculate output values
   * @param availableImplementations the function implementations known to the engine
   * @param configMaster for looking up configuration
   * @param argumentConverter converts arguments to and from strings
   */
  ConfigJsonBuilder(AvailableOutputs availableOutputs,
                    AvailableImplementations availableImplementations,
                    ConfigMaster configMaster,
                    ArgumentConverter argumentConverter) {
    _argumentConverter = ArgumentChecker.notNull(argumentConverter, "argumentConverter");
    _configMaster = ArgumentChecker.notNull(configMaster, "configMaster");
    _availableOutputs = ArgumentChecker.notNull(availableOutputs, "availableOutputs");
    _availableImplementations = ArgumentChecker.notNull(availableImplementations, "availableImplementations");
  }

  /**
   * Builds a configuration object from JSON produced by the client.
   * The expected format of the JSON is:
   *
   * <pre>
   * {
   *   impls: {interface1: impl1, interface2: impl2, ... },
   *   args: {
   *     impl1: {
   *       propertyName1: arg1,
   *       propertyName2: arg2,
   *       ...
   *     },
   *     impl2: {
   *       propertyName3: arg3,
   *       ...
   *     },
   *     ...
   *   }
   * }
   * </pre>
   *
   * @param json JSON representing function configuration
   * @return the configuration as an object
   * @throws IllegalArgumentException if the JSON doesn't define valid configuration
   */
  @SuppressWarnings("unchecked")
  public FunctionModelConfig getConfigFromJson(Map<String, Object> json) {
    Map<String, String> implsJson = (Map<String, String>) json.get(IMPLS);
    Map<Class<?>, Class<?>> impls = new HashMap<>();

    // Resolve interface-name -> implementation-name pairs to Class objects,
    // silently skipping blank entries the UI may send.
    for (Map.Entry<String, String> entry : implsJson.entrySet()) {
      String fnType = entry.getKey();
      String implType = entry.getValue();
      if (!StringUtils.isEmpty(fnType) && !StringUtils.isEmpty(implType)) {
        try {
          impls.put(Class.forName(fnType), Class.forName(implType));
        } catch (ClassNotFoundException e) {
          throw new IllegalArgumentException(e);
        }
      }
    }
    Map<String, Map<String, String>> argsJson = (Map<String, Map<String, String>>) json.get(ARGS);
    Map<Class<?>, FunctionArguments> args = new HashMap<>();

    // Convert per-function argument strings back to typed values.
    for (Map.Entry<String, Map<String, String>> entry : argsJson.entrySet()) {
      String fnTypeName = entry.getKey();
      Class<?> fnType;
      try {
        fnType = Class.forName(fnTypeName);
      } catch (ClassNotFoundException e) {
        throw new IllegalArgumentException(e);
      }
      Map<String, String> fnArgStrs = entry.getValue();
      Map<String, Object> fnArgs = new HashMap<>();

      for (Map.Entry<String, String> fnArgEntry : fnArgStrs.entrySet()) {
        String paramName = fnArgEntry.getKey();
        String paramValueStr = fnArgEntry.getValue();
        // Empty strings mean "no value supplied" - omit the argument.
        if (StringUtils.isEmpty(paramValueStr)) {
          continue;
        }
        Parameter parameter = Parameter.named(paramName, fnType);
        if (EngineUtils.isConfig(parameter.getType())) {
          // Config arguments are stored as unresolved links keyed by name.
          fnArgs.put(paramName, ConfigLink.resolvable(paramValueStr, parameter.getType()));
        } else if (_argumentConverter.isConvertible(parameter.getParameterType())) {
          fnArgs.put(paramName, _argumentConverter.convertFromString(parameter.getParameterType(), paramValueStr));
        } else {
          throw new IllegalArgumentException("Cannot convert from string to parameter type " +
                                                 parameter.getParameterType());
        }
      }
      FunctionArguments simpleFunctionArguments = new FunctionArguments(fnArgs);
      args.put(fnType, simpleFunctionArguments);
    }
    return new FunctionModelConfig(impls, args);
  }

  /**
   * Converts a configuration instance to a map representing some JSON.
   * The format of the JSON is:
   *
   * <pre>
   * {
   *   impls: {interface1: impl1, interface2: impl2, ... },
   *   args: {
   *     impl1: {
   *       propertyName1: arg1,
   *       propertyName2: arg2,
   *       ...
   *     },
   *     impl2: {
   *       propertyName3: arg3,
   *       ...
   *     },
   *     ...
   *   }
   * }
   * </pre>
   *
   * @param config some configuration
   * @return the configuration as JSON
   */
  public Map<String, Object> getJsonFromConfig(FunctionModelConfig config) {
    Map<String, Object> jsonMap = new HashMap<>();
    Map<String, Object> implsMap = new HashMap<>();

    for (Map.Entry<Class<?>, Class<?>> entry : config.getImplementations().entrySet()) {
      implsMap.put(entry.getKey().getName(), entry.getValue().getName());
    }
    jsonMap.put(IMPLS, implsMap);
    Map<String, Object> argsMap = new HashMap<>();

    for (Map.Entry<Class<?>, FunctionArguments> entry : config.getArguments().entrySet()) {
      Map<String, String> fnArgsMap = new HashMap<>();
      Class<?> functionType = entry.getKey();
      FunctionArguments fnArgs = entry.getValue();

      for (Map.Entry<String, Object> argEntry : fnArgs.getArguments().entrySet()) {
        String parameterName = argEntry.getKey();
        Object argument = argEntry.getValue();
        String argumentStr;
        Parameter parameter = Parameter.named(parameterName, functionType);
        ParameterType parameterType = parameter.getParameterType();

        // Use the converter where possible; fall back to toString() for
        // types with no registered conversion.
        if (_argumentConverter.isConvertible(parameterType)) {
          argumentStr = _argumentConverter.convertToString(parameterType, argument);
        } else {
          argumentStr = argument.toString();
        }
        fnArgsMap.put(parameterName, argumentStr);
      }
      String typeName = functionType.getName();
      argsMap.put(typeName, fnArgsMap);
    }
    jsonMap.put(ARGS, argsMap);
    return jsonMap;
  }

  /**
   * Returns JSON containing the model for the function configuration page.
   * This contains the configuration for a single output associated with a column.
   *
   * @param columnName the name of the column containing the output
   * @param config the configuration
   * @param inputType the input type for the top level function
   * @param outputName the name of the output calculated by the function
   * @param model the function model of a function that can calculate the named output for the specified input
   *   type, built using the configuration
   * @return the page model for displaying and editing the configuration
   */
  public Map<String, Object> getConfigPageModel(String columnName,
                                                FunctionModelConfig config,
                                                @Nullable Class<?> inputType,
                                                @Nullable OutputName outputName,
                                                @Nullable FunctionModel model) {
    ArgumentChecker.notEmpty(columnName, "columnName");
    List<Map<String, Object>> inputTypeList = new ArrayList<>();

    // TODO if we're editing an existing config this should either be empty or only contain the selected type
    // the user shouldn't be able to change the input type for an existing config, that's part of the key
    // TODO if we're adding a new config the types shouldn't include the types for which the column already has config
    for (Class<?> type : _availableOutputs.getInputTypes()) {
      inputTypeList.add(typeMap(type));
    }
    Collections.sort(inputTypeList, TypeMapComparator.INSTANCE);
    Map<String, Object> jsonMap = new HashMap<>();

    jsonMap.put(COL_NAME, columnName);
    jsonMap.put(INPUT_TYPES, inputTypeList);

    if (inputType != null) {
      Set<OutputName> availableOutputs = _availableOutputs.getAvailableOutputs(inputType);
      List<String> outputNames = new ArrayList<>(availableOutputs.size());

      for (OutputName availableOutput : availableOutputs) {
        outputNames.add(availableOutput.getName());
      }
      Collections.sort(outputNames);
      // TODO output names needs to be filtered so it only includes names for which no config exists
      // need the existing ViewColumn
      jsonMap.put(INPUT_TYPE, typeMap(inputType));
      jsonMap.put(OUTPUT_NAMES, outputNames);

      // Only echo the output name back when it is valid for the input type.
      if (outputName != null && availableOutputs.contains(outputName)) {
        jsonMap.put(OUTPUT_NAME, outputName.getName());
      }
    }
    List<Map<String, Object>> functions = getFunctions(config, model);

    if (!functions.isEmpty()) {
      jsonMap.put(FUNCTIONS, functions);
    }
    return jsonMap;
  }

  /**
   * Builds the per-function JSON maps for the page model by walking the
   * flattened function model and describing each configurable node.
   *
   * @param config the configuration the model was built from
   * @param model the function model, possibly null
   * @return a list of maps, one per configurable function in the model
   */
  private List<Map<String, Object>> getFunctions(FunctionModelConfig config, FunctionModel model) {
    List<Map<String, Object>> functions = new ArrayList<>();
    LinkedHashSet<FunctionModelNode> nodes = flattenModel(model);

    for (FunctionModelNode node : nodes) {
      if (node instanceof InterfaceNode) {
        // Interface with a chosen implementation: show both plus alternatives.
        Class<?> functionType = node.getType();
        Class<?> selectedImpl = ((InterfaceNode) node).getImplementationType();
        Map<String, Object> map = new HashMap<>();
        map.put(FUNC, typeMap(functionType));
        map.put(IMPL, typeMap(selectedImpl));
        map.put(IMPLS, getImplementations(functionType));
        map.put(ARGS, getArguments(config, node.getDependencies()));
        functions.add(map);
      } else if (node instanceof ClassNode && hasArguments(node)) {
        // Concrete class: only interesting when it has constructor arguments.
        Map<String, Object> map = new HashMap<>();
        map.put(IMPL, typeMap(node.getType()));
        map.put(ARGS, getArguments(config, node.getDependencies()));
        functions.add(map);
      } else if (node instanceof NoImplementationNode) {
        // Interface with no implementation selected: offer the candidates.
        NoImplementationNode noImplementationNode = (NoImplementationNode) node;
        Class<?> functionType = noImplementationNode.getException().getInterfaceType();
        Map<String, Object> map = new HashMap<>();
        map.put(FUNC, typeMap(functionType));
        map.put(IMPLS, getImplementations(functionType));
        functions.add(map);
      }
    }
    return functions;
  }

  /**
   * Returns the known implementations of a function interface as type maps,
   * sorted by simple name.
   *
   * @param functionType the function interface
   * @return sorted list of {@code {name, type}} maps, one per implementation
   */
  private List<Map<String, Object>> getImplementations(Class<?> functionType) {
    List<Class<?>> impls = new ArrayList<>(_availableImplementations.getImplementationTypes(functionType));
    List<Map<String, Object>> implsList = new ArrayList<>(impls.size());

    for (Class<?> impl : impls) {
      implsList.add(typeMap(impl));
    }
    Collections.sort(implsList, TypeMapComparator.INSTANCE);
    return implsList;
  }

  /**
   * Builds the JSON maps describing the arguments of a function, including
   * current values, available config item names and any build errors.
   *
   * @param config the configuration supplying argument values
   * @param dependencies the function node's direct dependencies
   * @return a list of maps, one per argument-like dependency
   */
  @SuppressWarnings("unchecked")
  private List<Map<String, Object>> getArguments(FunctionModelConfig config, List<FunctionModelNode> dependencies) {
    List<Map<String, Object>> args = new ArrayList<>();

    for (FunctionModelNode node : dependencies) {
      Parameter parameter = node.getParameter();
      String paramName = parameter.getName();
      String value;
      String errorMessage;
      List<String> configNames = new ArrayList<>();
      Map<String, Object> map = new HashMap<>();
      map.put(NAME, paramName);
      map.put(TYPE, parameter.getParameterType().getName());

      if (EngineUtils.isConfig(parameter.getType())) {
        // Config-typed parameter: look up the names of all stored config
        // items of the right type so the UI can show a drop-down.
        Class<?> parameterType = parameter.getType();
        ConfigSearchRequest<?> searchRequest = new ConfigSearchRequest<>();
        searchRequest.setType(parameterType);
        ConfigSearchResult<?> searchResult = _configMaster.search(searchRequest);
        List<? extends ConfigItem<?>> configItems = searchResult.getValues();

        for (ConfigItem<?> configItem : configItems) {
          configNames.add(configItem.getName());
        }
        map.put(CONFIGS, configNames);
      }
      if (node instanceof ArgumentNode) {
        Object argument = config.getFunctionArguments(parameter.getDeclaringClass()).getArgument(paramName);

        if (argument == null) {
          value = null;
        } else if (argument instanceof ConfigLink<?>) {
          value = "TODO - need to expose config link name PLAT-6469";
        } else if (_argumentConverter.isConvertible(parameter.getParameterType())) {
          value = _argumentConverter.convertToString(parameter.getParameterType(), argument);
        } else {
          value = argument.toString();
        }
        errorMessage = null;
      } else if (node instanceof MissingConfigNode) {
        value = null;
        if (!configNames.isEmpty()) {
          errorMessage = "Configuration required";
        } else {
          errorMessage = "No configuration available";
        }
      } else if (node instanceof MissingArgumentNode) {
        value = null;
        errorMessage = "Value required";
      } else if (node instanceof CannotBuildNode) {
        value = null;
        errorMessage = "Unable to create value";
      } else if (node instanceof ArgumentConversionErrorNode) {
        // Keep the unconvertible raw value so the user can correct it.
        ArgumentConversionErrorNode conversionErrorNode = (ArgumentConversionErrorNode) node;
        value = conversionErrorNode.getValue();
        errorMessage = conversionErrorNode.getErrorMessage();
      } else {
        // Any other node kind is not user-editable; skip it.
        continue;
      }
      if (value != null) {
        map.put(VALUE, value);
      }
      if (errorMessage != null) {
        map.put(ERROR, errorMessage);
      }
      args.add(map);
    }
    return args;
  }

  /**
   * Returns <code>{name: "type name", type: "fully qualified class name"}</code>
   *
   * @param type the type
   * @return a map containing the type's name and fully qualified class name
   */
  private static Map<String, Object> typeMap(Class<?> type) {
    return ImmutableMap.<String, Object>of(NAME, getName(type), TYPE, type.getName());
  }

  /**
   * Returns the name for an input type.
   * Currently uses the class simple name, but in future could use a value from an annotations. See SSM-224.
   *
   * @param inputType a type that is the input to a calculation in the engine
   * @return the name used for the type in the user interface
   */
  static String getName(Class<?> inputType) {
    // TODO use annotation if available
    return inputType.getSimpleName();
  }

  /**
   * Returns true when the node has at least one dependency that represents a
   * constructor argument (present, missing or missing-config).
   */
  private static boolean hasArguments(FunctionModelNode node) {
    for (FunctionModelNode childNode : node.getDependencies()) {
      if (childNode instanceof ArgumentNode ||
          childNode instanceof MissingArgumentNode ||
          childNode instanceof MissingConfigNode) {
        return true;
      }
    }
    return false;
  }

  /**
   * Flattens the model's node tree into a depth-first, insertion-ordered set.
   * Returns an empty set when the model is null.
   */
  private static LinkedHashSet<FunctionModelNode> flattenModel(@Nullable FunctionModel model) {
    if (model == null) {
      return new LinkedHashSet<>();
    }
    LinkedHashSet<FunctionModelNode> nodes = new LinkedHashSet<>();
    flattenNode(model.getRoot(), nodes);
    return nodes;
  }

  /**
   * Recursively adds a node and all its dependencies to the accumulator.
   */
  private static void flattenNode(FunctionModelNode node, Set<FunctionModelNode> accumulator) {
    accumulator.add(node);

    for (FunctionModelNode childNode : node.getDependencies()) {
      flattenNode(childNode, accumulator);
    }
  }

  /**
   * Orders type maps (see {@link #typeMap}) alphabetically by their NAME entry.
   */
  private static class TypeMapComparator implements Comparator<Map<String, Object>> {

    private static final Comparator<Map<String, Object>> INSTANCE = new TypeMapComparator();

    @Override
    public int compare(Map<String, Object> o1, Map<String, Object> o2) {
      String name1 = (String) o1.get(NAME);
      String name2 = (String) o2.get(NAME);
      return name1.compareTo(name2);
    }
  }
}
/* * #%L * DurabilityTest.java - mongodb-async-driver - Allanbank Consulting, Inc. * %% * Copyright (C) 2011 - 2014 Allanbank Consulting, Inc. * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package com.allanbank.mongodb; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.sameInstance; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Random; import org.junit.Test; import com.allanbank.mongodb.bson.builder.BuilderFactory; import com.allanbank.mongodb.bson.builder.DocumentBuilder; import com.allanbank.mongodb.bson.impl.ImmutableDocument; /** * DurabilityTest provides tests for the {@link Durability} class. * * @copyright 2012-2013, Allanbank Consulting, Inc., All Rights Reserved */ public class DurabilityTest { /** * Test method for {@link Durability#asDocument()}. 
 */
@Test
public void testAsDocument() {
    // Builds the expected getlasterror command document by hand for each
    // durability flavor and compares against Durability.asDocument().
    final DocumentBuilder builder = BuilderFactory.start();
    builder.add("getlasterror", 1); // This is really bogus but reasonable
    assertEquals(builder.asDocument(), Durability.NONE.asDocument());

    builder.reset();
    builder.add("getlasterror", 1);
    builder.add("w", 1);
    assertEquals(builder.asDocument(), Durability.ACK.asDocument());

    builder.reset();
    builder.add("getlasterror", 1);
    builder.add("fsync", true);
    builder.add("wtimeout", 123);
    assertEquals(builder.asDocument(), Durability.fsyncDurable(123).asDocument());

    builder.reset();
    builder.add("getlasterror", 1);
    builder.add("j", true);
    builder.add("wtimeout", 124);
    assertEquals(builder.asDocument(), Durability.journalDurable(124).asDocument());

    builder.reset();
    builder.add("getlasterror", 1);
    builder.add("wtimeout", 125);
    builder.add("w", 2);
    assertEquals(builder.asDocument(), Durability.replicaDurable(125).asDocument());

    builder.reset();
    builder.add("getlasterror", 1);
    builder.add("wtimeout", 126);
    builder.add("w", 3);
    assertEquals(builder.asDocument(), Durability.replicaDurable(3, 126).asDocument());

    builder.reset();
    builder.add("getlasterror", 1);
    builder.add("wtimeout", 127);
    builder.add("w", "foo");
    assertEquals(builder.asDocument(), Durability.replicaDurable("foo", 127).asDocument());

    builder.reset();
    builder.add("getlasterror", 1);
    builder.add("j", true);
    builder.add("wtimeout", 128);
    builder.add("w", "bar");
    assertEquals(builder.asDocument(), Durability.replicaDurable(true, "bar", 128).asDocument());

    builder.reset();
    builder.add("getlasterror", 1);
    builder.add("j", true);
    builder.add("wtimeout", 129);
    builder.add("w", 4);
    assertEquals(builder.asDocument(), Durability.replicaDurable(true, 4, 129).asDocument());

    // Second call should return the same document.
    final Durability durability = Durability.replicaDurable(true, 4, 129);
    assertThat(durability.asDocument(), sameInstance(durability.asDocument()));
    assertThat(durability.asDocument(), instanceOf(ImmutableDocument.class));
}

/**
 * Test method for {@link Durability#equals(java.lang.Object)}.
 */
@SuppressWarnings("boxing")
@Test
public void testEqualsObject() {
    // objs1/objs2 hold pairwise-equal instances built with identical
    // arguments; any two entries at different indices must be unequal.
    final List<Durability> objs1 = new ArrayList<Durability>();
    final List<Durability> objs2 = new ArrayList<Durability>();

    objs1.add(Durability.NONE);
    objs2.add(Durability.NONE);
    objs1.add(Durability.ACK);
    objs2.add(Durability.ACK);

    // Cross product of fsync x journal x timeout x (replica count | mode tag).
    for (final Boolean waitForFsync : Arrays.asList(Boolean.TRUE, Boolean.FALSE)) {
        for (final Boolean waitForJournal : Arrays.asList(Boolean.TRUE, Boolean.FALSE)) {
            for (final Integer waitTimeoutMillis : Arrays.asList(1, 2, 3, 4, 10, 100, -1)) {
                for (final Integer waitForReplicas : Arrays.asList(1, 2, 3, 4, 10, 100, -1)) {
                    objs1.add(new Durability(waitForFsync, waitForJournal, waitForReplicas, waitTimeoutMillis));
                    objs2.add(new Durability(waitForFsync, waitForJournal, waitForReplicas, waitTimeoutMillis));
                }
                for (final String mode : Arrays.asList("a", "b")) {
                    objs1.add(new Durability(waitForFsync, waitForJournal, mode, waitTimeoutMillis));
                    objs2.add(new Durability(waitForFsync, waitForJournal, mode, waitTimeoutMillis));
                }
            }
        }
    }

    // Sanity check.
    assertEquals(objs1.size(), objs2.size());

    for (int i = 0; i < objs1.size(); ++i) {
        final Durability obj1 = objs1.get(i);
        Durability obj2 = objs2.get(i);

        // Reflexive, symmetric-with-equal-hash for equal pairs.
        assertTrue(obj1.equals(obj1));
        assertEquals(obj1, obj2);
        assertEquals(obj1.hashCode(), obj2.hashCode());

        // Distinct configurations are unequal (and assumed to hash differently).
        for (int j = i + 1; j < objs1.size(); ++j) {
            obj2 = objs2.get(j);
            assertFalse(obj1.equals(obj2));
            assertFalse(obj1.hashCode() == obj2.hashCode());
        }

        // Never equal to a foreign type or null.
        assertFalse(obj1.equals("foo"));
        assertFalse(obj1.equals(null));
    }
}

/**
 * Test method for {@link Durability#fsyncDurable(int)}.
 */
@Test
public void testFsyncDurable() {
    final Random random = new Random(System.currentTimeMillis());
    final int wait = random.nextInt(100000);

    final Durability durability = Durability.fsyncDurable(wait);
    assertTrue(durability.isWaitForFsync());
    assertFalse(durability.isWaitForJournal());
    assertTrue(durability.isWaitForReply());
    assertEquals(wait, durability.getWaitTimeoutMillis());
    assertEquals(0, durability.getWaitForReplicas());
    assertNull(durability.getWaitForReplicasByMode());
}

/**
 * Test method for {@link Durability#journalDurable(int)}.
 */
@Test
public void testJournalDurable() {
    final Random random = new Random(System.currentTimeMillis());
    final int wait = random.nextInt(100000);

    final Durability durability = Durability.journalDurable(wait);
    assertFalse(durability.isWaitForFsync());
    assertTrue(durability.isWaitForJournal());
    assertTrue(durability.isWaitForReply());
    assertEquals(wait, durability.getWaitTimeoutMillis());
    assertEquals(0, durability.getWaitForReplicas());
    assertNull(durability.getWaitForReplicasByMode());
}

/**
 * Test method for {@link Durability#readResolve} .
 *
 * @throws IOException
 *             On a failure.
 * @throws ClassNotFoundException
 *             On a failure.
*/ @Test public void testReadResolve() throws IOException, ClassNotFoundException { for (final Durability d : Arrays .asList(Durability.ACK, Durability.NONE)) { final ByteArrayOutputStream out = new ByteArrayOutputStream(); final ObjectOutputStream oout = new ObjectOutputStream(out); oout.writeObject(d); oout.close(); final ByteArrayInputStream in = new ByteArrayInputStream( out.toByteArray()); final ObjectInputStream oin = new ObjectInputStream(in); assertSame(d, oin.readObject()); } final Durability d = Durability.journalDurable(100); final ByteArrayOutputStream out = new ByteArrayOutputStream(); final ObjectOutputStream oout = new ObjectOutputStream(out); oout.writeObject(d); oout.close(); final ByteArrayInputStream in = new ByteArrayInputStream( out.toByteArray()); final ObjectInputStream oin = new ObjectInputStream(in); final Object read = oin.readObject(); assertEquals(d, read); assertFalse(d == read); } /** * Test method for {@link Durability#replicaDurable(boolean, int, int)} . */ @Test public void testReplicaDurableBooleanIntInt() { final Random random = new Random(System.currentTimeMillis()); final boolean journal = random.nextBoolean(); final int wait = random.nextInt(100000); final int replicaCount = random.nextInt(10000); final Durability durability = Durability.replicaDurable(journal, replicaCount, wait); assertFalse(durability.isWaitForFsync()); assertEquals(Boolean.valueOf(journal), Boolean.valueOf(durability.isWaitForJournal())); assertTrue(durability.isWaitForReply()); assertEquals(wait, durability.getWaitTimeoutMillis()); assertEquals(replicaCount, durability.getWaitForReplicas()); assertNull(durability.getWaitForReplicasByMode()); } /** * Test method for {@link Durability#replicaDurable(boolean, String, int)} . 
*/ @Test public void testReplicaDurableBooleanStringInt() { final Random random = new Random(System.currentTimeMillis()); final boolean journal = random.nextBoolean(); final int wait = random.nextInt(100000); final String tag = String.valueOf(random.nextInt(10000)); final Durability durability = Durability.replicaDurable(journal, tag, wait); assertFalse(durability.isWaitForFsync()); assertEquals(Boolean.valueOf(journal), Boolean.valueOf(durability.isWaitForJournal())); assertTrue(durability.isWaitForReply()); assertEquals(wait, durability.getWaitTimeoutMillis()); assertEquals(0, durability.getWaitForReplicas()); assertEquals(tag, durability.getWaitForReplicasByMode()); } /** * Test method for {@link Durability#replicaDurable(int)}. */ @Test public void testReplicaDurableInt() { final Random random = new Random(System.currentTimeMillis()); final int wait = random.nextInt(100000); final Durability durability = Durability.replicaDurable(wait); assertFalse(durability.isWaitForFsync()); assertFalse(durability.isWaitForJournal()); assertTrue(durability.isWaitForReply()); assertEquals(wait, durability.getWaitTimeoutMillis()); assertEquals(2, durability.getWaitForReplicas()); assertNull(durability.getWaitForReplicasByMode()); assertNull(durability.getWaitForReplicasByMode()); } /** * Test method for {@link Durability#replicaDurable(int, int)}. 
 */
@Test
public void testReplicaDurableIntInt() {
    final Random random = new Random(System.currentTimeMillis());
    final int wait = random.nextInt(100000);
    final int replicaCount = random.nextInt(10000);

    final Durability durability = Durability.replicaDurable(replicaCount, wait);
    assertFalse(durability.isWaitForFsync());
    assertFalse(durability.isWaitForJournal());
    assertTrue(durability.isWaitForReply());
    assertEquals(wait, durability.getWaitTimeoutMillis());
    assertEquals(replicaCount, durability.getWaitForReplicas());
    assertNull(durability.getWaitForReplicasByMode());
}

/**
 * Test method for {@link Durability#replicaDurable(String, int)}.
 */
@Test
public void testReplicaDurableStringInt() {
    final Random random = new Random(System.currentTimeMillis());
    final int wait = random.nextInt(100000);
    final String tag = String.valueOf(random.nextInt(10000));

    final Durability durability = Durability.replicaDurable(tag, wait);
    assertFalse(durability.isWaitForFsync());
    assertFalse(durability.isWaitForJournal());
    assertTrue(durability.isWaitForReply());
    assertEquals(wait, durability.getWaitTimeoutMillis());
    assertEquals(0, durability.getWaitForReplicas());
    assertEquals(tag, durability.getWaitForReplicasByMode());

    // Also pin the JSON-ish rendering of a mode-tag durability.
    assertEquals("{ getlasterror : 1, wtimeout : " + wait + ", w : '" + tag
            + "' }", durability.toString());
}

/**
 * Test method for {@link Durability#valueOf(String)}.
 */
@Test
public void testValueOf() {
    // Named forms are case-insensitive and resolve to the singletons.
    assertSame(Durability.ACK, Durability.valueOf("AcK"));
    assertSame(Durability.ACK, Durability.valueOf("sAfe"));
    assertSame(Durability.NONE, Durability.valueOf("NoNe"));

    // Document forms: getlasterror is optional, booleans accept 1/true,
    // and the w field may be quoted or bare.
    assertEquals(Durability.replicaDurable(true, 4, 129),
            Durability.valueOf("{ j : true, wtimeout : 129, w : 4 }"));
    assertEquals(
            Durability.replicaDurable(true, "bar", 128),
            Durability
                    .valueOf("{ getlasterror : 1, j : true, wtimeout : 128, w : 'bar' }"));
    assertEquals(
            Durability.replicaDurable(true, "bar", 128),
            Durability
                    .valueOf("{ getlasterror : 1, j : 1, wtimeout : 128, w : bar }"));
    assertEquals(
            Durability.replicaDurable("foo", 127),
            Durability
                    .valueOf("{ getlasterror : 1, wtimeout : 127, w : 'foo' }"));
    assertEquals(Durability.replicaDurable(3, 126), Durability
            .valueOf("{ getlasterror : 1, wtimeout : 126, w : 3 }"));
    assertEquals(Durability.replicaDurable(125),
            Durability.valueOf("{ wtimeout : 125, w : 2 }"));
    assertEquals(Durability.journalDurable(124),
            Durability.valueOf("{ wtimeout : 124, j : 1 }"));
    assertEquals(Durability.fsyncDurable(123),
            Durability.valueOf("{ wtimeout : 123, fsync : 1 }"));

    // Unparseable inputs yield null rather than throwing.
    assertNull(Durability.valueOf("{ wtimeout : 'a', fsync : 1 }"));
    assertNull(Durability.valueOf("{ foo : 1 }"));
    assertNull(Durability.valueOf("foo"));
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.metastore; import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.metastore.api.CreationMetadata; import org.apache.hadoop.hive.metastore.api.GetPartitionsFilterSpec; import org.apache.hadoop.hive.metastore.api.GetPartitionsProjectionSpec; import org.apache.hadoop.hive.metastore.api.ISchemaName; import org.apache.hadoop.hive.metastore.api.SchemaVersionDescriptor; import org.apache.hadoop.hive.metastore.api.Catalog; import org.apache.hadoop.hive.metastore.api.WMFullResourcePlan; import java.nio.ByteBuffer; import java.util.Collections; import java.util.List; import java.util.Map; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.metastore.api.AggrStats; import org.apache.hadoop.hive.metastore.api.AlreadyExistsException; import org.apache.hadoop.hive.metastore.api.ColumnStatistics; import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.FileMetadataExprType; import org.apache.hadoop.hive.metastore.api.Function; import 
org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege; import org.apache.hadoop.hive.metastore.api.HiveObjectRef; import org.apache.hadoop.hive.metastore.api.ISchema; import org.apache.hadoop.hive.metastore.api.InvalidInputException; import org.apache.hadoop.hive.metastore.api.InvalidObjectException; import org.apache.hadoop.hive.metastore.api.InvalidOperationException; import org.apache.hadoop.hive.metastore.api.InvalidPartitionException; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.NotificationEvent; import org.apache.hadoop.hive.metastore.api.NotificationEventRequest; import org.apache.hadoop.hive.metastore.api.NotificationEventResponse; import org.apache.hadoop.hive.metastore.api.NotificationEventsCountRequest; import org.apache.hadoop.hive.metastore.api.NotificationEventsCountResponse; import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.metastore.api.PartitionEventType; import org.apache.hadoop.hive.metastore.api.PartitionValuesResponse; import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet; import org.apache.hadoop.hive.metastore.api.PrincipalType; import org.apache.hadoop.hive.metastore.api.PrivilegeBag; import org.apache.hadoop.hive.metastore.api.WMNullablePool; import org.apache.hadoop.hive.metastore.api.WMNullableResourcePlan; import org.apache.hadoop.hive.metastore.api.WMResourcePlan; import org.apache.hadoop.hive.metastore.api.WMTrigger; import org.apache.hadoop.hive.metastore.api.WMValidateResourcePlanResponse; import org.apache.hadoop.hive.metastore.api.Role; import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant; import org.apache.hadoop.hive.metastore.api.RuntimeStat; import org.apache.hadoop.hive.metastore.api.ReplicationMetricList; import org.apache.hadoop.hive.metastore.api.GetReplicationMetricsRequest; import org.apache.hadoop.hive.metastore.api.SQLCheckConstraint; 
import org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint; import org.apache.hadoop.hive.metastore.api.SQLForeignKey; import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint; import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey; import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint; import org.apache.hadoop.hive.metastore.api.ScheduledQuery; import org.apache.hadoop.hive.metastore.api.ScheduledQueryKey; import org.apache.hadoop.hive.metastore.api.ScheduledQueryMaintenanceRequest; import org.apache.hadoop.hive.metastore.api.ScheduledQueryPollRequest; import org.apache.hadoop.hive.metastore.api.ScheduledQueryPollResponse; import org.apache.hadoop.hive.metastore.api.ScheduledQueryProgressInfo; import org.apache.hadoop.hive.metastore.api.SchemaVersion; import org.apache.hadoop.hive.metastore.api.SerDeInfo; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.metastore.api.TableMeta; import org.apache.hadoop.hive.metastore.api.Type; import org.apache.hadoop.hive.metastore.api.UnknownDBException; import org.apache.hadoop.hive.metastore.api.UnknownPartitionException; import org.apache.hadoop.hive.metastore.api.UnknownTableException; import org.apache.hadoop.hive.metastore.api.WMMapping; import org.apache.hadoop.hive.metastore.api.WMPool; import org.apache.hadoop.hive.metastore.api.WriteEventInfo; import org.apache.hadoop.hive.metastore.conf.MetastoreConf; import org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy; import org.apache.hadoop.hive.metastore.utils.MetaStoreServerUtils; import org.apache.thrift.TException; import org.junit.Assert; /** * * DummyRawStoreForJdoConnection. * * An implementation of RawStore that verifies the DummyJdoConnectionUrlHook has already been * applied when this class's setConf method is called, by checking that the value of the * METASTORECONNECTURLKEY ConfVar has been updated. * * All non-void methods return default values. 
 */
public class DummyRawStoreForJdoConnection implements RawStore {

    @Override
    public Configuration getConf() {
        return null;
    }

    @Override
    public void setConf(Configuration arg0) {
        // The one real check in this class: by the time the RawStore is
        // configured, DummyJdoConnectionUrlHook must already have rewritten
        // the metastore connect URL. Everything else below is a stub.
        String expected = DummyJdoConnectionUrlHook.newUrl;
        String actual = MetastoreConf.getVar(arg0, MetastoreConf.ConfVars.CONNECT_URL_KEY);

        Assert.assertEquals("The expected URL used by JDO to connect to the metastore: " + expected +
            " did not match the actual value when the Raw Store was initialized: " + actual,
            expected, actual);
    }

    // ---- Transactions: no-ops; booleans report "nothing happened". ----
    @Override public void shutdown() { }
    @Override public boolean openTransaction() { return false; }
    @Override public boolean commitTransaction() { return false; }
    @Override public boolean isActiveTransaction() { return false; }
    @Override public void rollbackTransaction() { }

    // ---- Catalog / database stubs. ----
    @Override public void createCatalog(Catalog cat) throws MetaException { }
    @Override public Catalog getCatalog(String catalogName) throws NoSuchObjectException, MetaException { return null; }
    @Override public void alterCatalog(String catName, Catalog cat) throws MetaException, InvalidOperationException { }
    @Override public List<String> getCatalogs() throws MetaException { return null; }
    @Override public void dropCatalog(String catalogName) throws NoSuchObjectException, MetaException { }
    @Override public void createDatabase(Database db) throws InvalidObjectException, MetaException { }
    @Override public Database getDatabase(String catName, String name) throws NoSuchObjectException { return null; }
    @Override public boolean dropDatabase(String catName, String dbname) throws NoSuchObjectException, MetaException { return false; }
    @Override public boolean alterDatabase(String catName, String dbname, Database db) throws NoSuchObjectException, MetaException { return false; }
    @Override public List<String> getDatabases(String catName, String pattern) throws MetaException { return Collections.emptyList(); }
    @Override public List<String> getAllDatabases(String catName) throws MetaException { return Collections.emptyList(); }

    // ---- Type / table / partition stubs. ----
    @Override public boolean createType(Type type) { return false; }
    @Override public Type getType(String typeName) { return null; }
    @Override public boolean dropType(String typeName) { return false; }
    @Override public void createTable(Table tbl) throws InvalidObjectException, MetaException { }
    @Override public boolean dropTable(String catName, String dbName, String tableName) throws MetaException { return false; }
    @Override public Table getTable(String catName, String dbName, String tableName) throws MetaException { return null; }
    @Override public Table getTable(String catalogName, String dbName, String tableName, String writeIdList) throws MetaException { return null; }
    @Override public boolean addPartition(Partition part) throws InvalidObjectException, MetaException { return false; }
    @Override public Partition getPartition(String catName, String dbName, String tableName, List<String> part_vals) throws MetaException, NoSuchObjectException { return null; }
    @Override public Partition getPartition(String catName, String dbName, String tableName, List<String> part_vals, String writeIdList) throws MetaException, NoSuchObjectException { return null; }
    @Override public boolean dropPartition(String catName, String dbName, String tableName, List<String> part_vals) throws MetaException { return false; }
    @Override public List<Partition> getPartitions(String catName, String dbName, String tableName, int max) throws MetaException { return Collections.emptyList(); }
    @Override public Map<String, String> getPartitionLocations(String catName, String dbName, String tblName, String baseLocationToNotShow, int max) { return Collections.emptyMap(); }
    // Note: alterTable/alterPartition(s) echo back their "new" argument.
    @Override public Table alterTable(String catName, String dbname, String name, Table newTable, String queryValidWriteIds) throws InvalidObjectException, MetaException { return newTable; }
    @Override public void updateCreationMetadata(String catName, String dbname, String tablename, CreationMetadata cm) throws MetaException { }
    @Override public List<String> getTables(String catName, String dbName, String pattern) throws MetaException { return Collections.emptyList(); }
    @Override public List<String> getTables(String catName, String dbName, String pattern, TableType tableType, int limit) throws MetaException { return Collections.emptyList(); }
    @Override public List<Table> getAllMaterializedViewObjectsForRewriting(String catName) throws MetaException { return Collections.emptyList(); }
    @Override public List<String> getMaterializedViewsForRewriting(String catName, String dbName) throws MetaException, NoSuchObjectException { return Collections.emptyList(); }
    @Override public List<TableMeta> getTableMeta(String catName, String dbNames, String tableNames, List<String> tableTypes) throws MetaException { return Collections.emptyList(); }
    @Override public List<Table> getTableObjectsByName(String catName, String dbname, List<String> tableNames) throws MetaException, UnknownDBException { return Collections.emptyList(); }
    @Override public List<String> getAllTables(String catName, String dbName) throws MetaException { return Collections.emptyList(); }
    @Override public List<String> listTableNamesByFilter(String catName, String dbName, String filter, short max_tables) throws MetaException, UnknownDBException { return Collections.emptyList(); }
    @Override public List<String> listPartitionNames(String catName, String db_name, String tbl_name, short max_parts) throws MetaException { return Collections.emptyList(); }
    @Override public List<String> listPartitionNames(String catName, String dbName, String tblName, String defaultPartName, byte[] exprBytes, String order, short maxParts) throws MetaException, NoSuchObjectException { return Collections.emptyList(); }
    @Override public PartitionValuesResponse listPartitionValues(String catName, String db_name, String tbl_name, List<FieldSchema> cols, boolean applyDistinct, String filter, boolean ascending, List<FieldSchema> order, long maxParts) throws MetaException { return null; }
    @Override public Partition alterPartition(String catName, String db_name, String tbl_name, List<String> part_vals, Partition new_part, String queryValidWriteIds) throws InvalidObjectException, MetaException { return new_part; }
    @Override public List<Partition> alterPartitions(String catName, String db_name, String tbl_name, List<List<String>> part_vals_list, List<Partition> new_parts, long writeId, String queryValidWriteIds) throws InvalidObjectException, MetaException { return new_parts; }
    @Override public List<Partition> getPartitionsByFilter(String catName, String dbName, String tblName, String filter, short maxParts) throws MetaException, NoSuchObjectException { return Collections.emptyList(); }
    @Override public List<Partition> getPartitionSpecsByFilterAndProjection(Table table, GetPartitionsProjectionSpec projectSpec, GetPartitionsFilterSpec filterSpec) throws MetaException, NoSuchObjectException { return Collections.emptyList(); }
    @Override public List<Partition> getPartitionsByNames(String catName, String dbName, String tblName, List<String> partNames) throws MetaException, NoSuchObjectException { return Collections.emptyList(); }
    @Override public boolean getPartitionsByExpr(String catName, String dbName, String tblName, byte[] expr, String defaultPartitionName, short maxParts, List<Partition> result) throws TException { return false; }
    // Counts report -1 (unknown) rather than 0.
    @Override public int getNumPartitionsByFilter(String catName, String dbName, String tblName, String filter) throws MetaException, NoSuchObjectException { return -1; }
    @Override public int getNumPartitionsByExpr(String catName, String dbName, String tblName, byte[] expr) throws MetaException, NoSuchObjectException { return -1; }
    @Override public Table markPartitionForEvent(String catName, String dbName, String tblName, Map<String, String> partVals, PartitionEventType evtType) throws MetaException, UnknownTableException, InvalidPartitionException, UnknownPartitionException { return null; }
@Override public boolean isPartitionMarkedForEvent(String catName, String dbName, String tblName, Map<String, String> partName, PartitionEventType evtType) throws MetaException, UnknownTableException, InvalidPartitionException, UnknownPartitionException { return false; } @Override public boolean addRole(String rowName, String ownerName) throws InvalidObjectException, MetaException, NoSuchObjectException { return false; } @Override public boolean removeRole(String roleName) throws MetaException, NoSuchObjectException { return false; } @Override public boolean grantRole(Role role, String userName, PrincipalType principalType, String grantor, PrincipalType grantorType, boolean grantOption) throws MetaException, NoSuchObjectException, InvalidObjectException { return false; } @Override public boolean revokeRole(Role role, String userName, PrincipalType principalType, boolean grantOption) throws MetaException, NoSuchObjectException { return false; } @Override public PrincipalPrivilegeSet getUserPrivilegeSet(String userName, List<String> groupNames) throws InvalidObjectException, MetaException { return null; } @Override public PrincipalPrivilegeSet getDBPrivilegeSet(String catName, String dbName, String userName, List<String> groupNames) throws InvalidObjectException, MetaException { return null; } @Override public PrincipalPrivilegeSet getTablePrivilegeSet(String catName, String dbName, String tableName, String userName, List<String> groupNames) throws InvalidObjectException, MetaException { return null; } @Override public PrincipalPrivilegeSet getPartitionPrivilegeSet(String catName, String dbName, String tableName, String partition, String userName, List<String> groupNames) throws InvalidObjectException, MetaException { return null; } @Override public PrincipalPrivilegeSet getColumnPrivilegeSet(String catName, String dbName, String tableName, String partitionName, String columnName, String userName, List<String> groupNames) throws InvalidObjectException, MetaException 
{ return null; } @Override public List<HiveObjectPrivilege> listPrincipalGlobalGrants(String principalName, PrincipalType principalType) { return Collections.emptyList(); } @Override public List<HiveObjectPrivilege> listPrincipalDBGrants(String principalName, PrincipalType principalType, String catName, String dbName) { return Collections.emptyList(); } @Override public List<HiveObjectPrivilege> listAllTableGrants(String principalName, PrincipalType principalType, String catName, String dbName, String tableName) { return Collections.emptyList(); } @Override public List<HiveObjectPrivilege> listPrincipalPartitionGrants(String principalName, PrincipalType principalType, String catName, String dbName, String tableName, List<String> partValues, String partName) { return Collections.emptyList(); } @Override public List<HiveObjectPrivilege> listPrincipalTableColumnGrants(String principalName, PrincipalType principalType, String catName, String dbName, String tableName, String columnName) { return Collections.emptyList(); } @Override public List<HiveObjectPrivilege> listPrincipalPartitionColumnGrants(String principalName, PrincipalType principalType, String catName, String dbName, String tableName, List<String> partVals, String partName, String columnName) { return Collections.emptyList(); } @Override public boolean grantPrivileges(PrivilegeBag privileges) throws InvalidObjectException, MetaException, NoSuchObjectException { return false; } @Override public boolean revokePrivileges(PrivilegeBag privileges, boolean grantOption) throws InvalidObjectException, MetaException, NoSuchObjectException { return false; } @Override public boolean refreshPrivileges(HiveObjectRef objToRefresh, String authorizer, PrivilegeBag grantPrivileges) throws InvalidObjectException, MetaException, NoSuchObjectException { return false; } @Override public Role getRole(String roleName) throws NoSuchObjectException { return null; } @Override public List<String> listRoleNames() { return 
Collections.emptyList(); } @Override public List<Role> listRoles(String principalName, PrincipalType principalType) { return Collections.emptyList(); } @Override public List<RolePrincipalGrant> listRolesWithGrants(String principalName, PrincipalType principalType) { return Collections.emptyList(); } @Override public List<RolePrincipalGrant> listRoleMembers(String roleName) { return null; } @Override public Partition getPartitionWithAuth(String catName, String dbName, String tblName, List<String> partVals, String user_name, List<String> group_names) throws MetaException, NoSuchObjectException, InvalidObjectException { return null; } @Override public List<Partition> getPartitionsWithAuth(String catName, String dbName, String tblName, short maxParts, String userName, List<String> groupNames) throws MetaException, NoSuchObjectException, InvalidObjectException { return Collections.emptyList(); } @Override public List<String> listPartitionNamesPs(String catName, String db_name, String tbl_name, List<String> part_vals, short max_parts) throws MetaException, NoSuchObjectException { return Collections.emptyList(); } @Override public List<Partition> listPartitionsPsWithAuth(String catName, String db_name, String tbl_name, List<String> part_vals, short max_parts, String userName, List<String> groupNames) throws MetaException, InvalidObjectException, NoSuchObjectException { return Collections.emptyList(); } @Override public long cleanupEvents() { return 0; } @Override public boolean addToken(String tokenIdentifier, String delegationToken) { return false; } @Override public boolean removeToken(String tokenIdentifier) { return false; } @Override public String getToken(String tokenIdentifier) { return null; } @Override public List<String> getAllTokenIdentifiers() { return Collections.emptyList(); } @Override public int addMasterKey(String key) { return 0; } @Override public void updateMasterKey(Integer seqNo, String key) { } @Override public boolean removeMasterKey(Integer 
keySeq) { return false; } @Override public String[] getMasterKeys() { return new String[0]; } @Override public List<HiveObjectPrivilege> listPrincipalDBGrantsAll( String principalName, PrincipalType principalType) { return Collections.emptyList(); } @Override public List<HiveObjectPrivilege> listPrincipalTableGrantsAll( String principalName, PrincipalType principalType) { return Collections.emptyList(); } @Override public List<HiveObjectPrivilege> listPrincipalPartitionGrantsAll( String principalName, PrincipalType principalType) { return Collections.emptyList(); } @Override public List<HiveObjectPrivilege> listPrincipalTableColumnGrantsAll( String principalName, PrincipalType principalType) { return Collections.emptyList(); } @Override public List<HiveObjectPrivilege> listPrincipalPartitionColumnGrantsAll( String principalName, PrincipalType principalType) { return Collections.emptyList(); } @Override public List<HiveObjectPrivilege> listGlobalGrantsAll() { return Collections.emptyList(); } @Override public List<HiveObjectPrivilege> listDBGrantsAll(String catName, String dbName) { return Collections.emptyList(); } @Override public List<HiveObjectPrivilege> listPartitionColumnGrantsAll(String catName, String dbName, String tableName, String partitionName, String columnName) { return Collections.emptyList(); } @Override public List<HiveObjectPrivilege> listTableGrantsAll(String catName, String dbName, String tableName) { return Collections.emptyList(); } @Override public List<HiveObjectPrivilege> listPartitionGrantsAll(String catName, String dbName, String tableName, String partitionName) { return Collections.emptyList(); } @Override public List<HiveObjectPrivilege> listTableColumnGrantsAll(String catName, String dbName, String tableName, String columnName) { return Collections.emptyList(); } @Override public List<ColumnStatistics> getTableColumnStatistics(String catName, String dbName, String tableName, List<String> colName) throws MetaException, 
NoSuchObjectException { return null; }

  // Column-statistics read/write stubs: no stats exist, deletions report failure,
  // updates return null (meaning "nothing updated").
  @Override public ColumnStatistics getTableColumnStatistics(String catName, String dbName, String tableName, List<String> colName, String engine) throws MetaException, NoSuchObjectException { return null; }
  @Override public ColumnStatistics getTableColumnStatistics( String catName, String dbName, String tableName, List<String> colName, String engine, String writeIdList) throws MetaException, NoSuchObjectException { return null; }
  @Override public boolean deleteTableColumnStatistics(String catName, String dbName, String tableName, String colName, String engine) throws NoSuchObjectException, MetaException, InvalidObjectException { return false; }
  @Override public boolean deletePartitionColumnStatistics(String catName, String dbName, String tableName, String partName, List<String> partVals, String colName, String engine) throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException { return false; }
  @Override public Map<String, String> updateTableColumnStatistics(ColumnStatistics statsObj, String validWriteIds, long writeId) throws NoSuchObjectException, MetaException, InvalidObjectException { return null; }
  @Override public Map<String, String> updatePartitionColumnStatistics(ColumnStatistics statsObj,List<String> partVals, String validWriteIds, long writeId) throws NoSuchObjectException, MetaException, InvalidObjectException { return null; }

  // Schema-version handshake: verification always succeeds, version is unknown.
  @Override public void verifySchema() throws MetaException { }
  @Override public String getMetaStoreSchemaVersion() throws MetaException { return null; }
  @Override public void setMetaStoreSchemaVersion(String version, String comment) throws MetaException { }

  @Override public List<List<ColumnStatistics>> getPartitionColumnStatistics(String catName, String dbName, String tblName, List<String> colNames, List<String> partNames) throws MetaException, NoSuchObjectException { return Collections.emptyList(); }
  @Override public List<ColumnStatistics> getPartitionColumnStatistics(String catName, String dbName, String tblName, List<String> colNames, List<String> partNames, String engine) throws MetaException, NoSuchObjectException { return Collections.emptyList(); }
  @Override public List<ColumnStatistics> getPartitionColumnStatistics( String catName, String dbName, String tblName, List<String> partNames, List<String> colNames, String engine, String writeIdList) throws MetaException, NoSuchObjectException { return Collections.emptyList(); }

  // Partition existence / bulk add / drop: everything "fails" silently.
  @Override public boolean doesPartitionExist(String catName, String dbName, String tableName, List<FieldSchema> partKeys, List<String> partVals) throws MetaException, NoSuchObjectException { return false; }
  @Override public boolean addPartitions(String catName, String dbName, String tblName, List<Partition> parts) throws InvalidObjectException, MetaException { return false; }
  @Override public boolean addPartitions(String catName, String dbName, String tblName, PartitionSpecProxy partitionSpec, boolean ifNotExists) throws InvalidObjectException, MetaException { return false; }
  @Override public void dropPartitions(String catName, String dbName, String tblName, List<String> partNames) { }

  // UDF registry stubs: no functions exist (last signature continues on the next original line).
  @Override public void createFunction(Function func) throws InvalidObjectException, MetaException { }
  @Override public void alterFunction(String catName, String dbName, String funcName, Function newFunction) throws InvalidObjectException, MetaException { }
  @Override public void dropFunction(String catName, String dbName, String funcName) throws MetaException, NoSuchObjectException, InvalidObjectException, InvalidInputException { }
  @Override public Function getFunction(String catName, String dbName, String funcName) throws MetaException { return null; }
  @Override public List<Function> getAllFunctions(String catName) throws MetaException { return Collections.emptyList(); }
  @Override public List<String> getFunctions(String catName, String dbName, String pattern) throws MetaException { return
Collections.emptyList(); }

  // Aggregate statistics: never available.
  @Override public AggrStats get_aggr_stats_for(String catName, String dbName, String tblName, List<String> partNames, List<String> colNames, String engine) throws MetaException { return null; }
  @Override public AggrStats get_aggr_stats_for( String catName, String dbName, String tblName, List<String> partNames, List<String> colNames, String engine, String writeIdList) throws MetaException, NoSuchObjectException { return null; }

  // Notification log: events are accepted and discarded; no ids are tracked.
  @Override public NotificationEventResponse getNextNotification(NotificationEventRequest rqst) { return null; }
  @Override public void addNotificationEvent(NotificationEvent event) throws MetaException { }
  @Override public void cleanNotificationEvents(int olderThan) { }
  @Override public CurrentNotificationEventId getCurrentNotificationEventId() { return null; }
  @Override public NotificationEventsCountResponse getNotificationEventsCount(NotificationEventsCountRequest rqst) { return null; }
  @Override public void flushCache() { }

  // File-metadata cache: explicitly unsupported.
  @Override public ByteBuffer[] getFileMetadata(List<Long> fileIds) { return null; }
  @Override public void putFileMetadata( List<Long> fileIds, List<ByteBuffer> metadata, FileMetadataExprType type) { }
  @Override public boolean isFileMetadataSupported() { return false; }
  @Override public void getFileMetadataByExpr(List<Long> fileIds, FileMetadataExprType type, byte[] expr, ByteBuffer[] metadatas, ByteBuffer[] stripeBitsets, boolean[] eliminated) { }

  // Object counts: the store is always empty.
  @Override public int getTableCount() throws MetaException { return 0; }
  @Override public int getPartitionCount() throws MetaException { return 0; }
  @Override public int getDatabaseCount() throws MetaException { return 0; }
  @Override public FileMetadataHandler getFileMetadataHandler(FileMetadataExprType type) { return null; }

  // Constraint metadata: none defined, creations are no-ops.
  @Override public List<SQLPrimaryKey> getPrimaryKeys(String catName, String db_name, String tbl_name) throws MetaException {
    // TODO Auto-generated method stub
    return null;
  }
  @Override public List<SQLForeignKey> getForeignKeys(String catName, String parent_db_name, String parent_tbl_name, String foreign_db_name, String foreign_tbl_name) throws MetaException {
    // TODO Auto-generated method stub
    return null;
  }
  @Override public List<SQLUniqueConstraint> getUniqueConstraints(String catName, String db_name, String tbl_name) throws MetaException {
    // TODO Auto-generated method stub
    return null;
  }
  @Override public List<SQLNotNullConstraint> getNotNullConstraints(String catName, String db_name, String tbl_name) throws MetaException {
    // TODO Auto-generated method stub
    return null;
  }
  @Override public List<SQLDefaultConstraint> getDefaultConstraints(String catName, String db_name, String tbl_name) throws MetaException {
    // TODO Auto-generated method stub
    return null;
  }
  @Override public List<SQLCheckConstraint> getCheckConstraints(String catName, String db_name, String tbl_name) throws MetaException {
    // TODO Auto-generated method stub
    return null;
  }
  @Override public List<String> createTableWithConstraints(Table tbl, List<SQLPrimaryKey> primaryKeys, List<SQLForeignKey> foreignKeys, List<SQLUniqueConstraint> uniqueConstraints, List<SQLNotNullConstraint> notNullConstraints, List<SQLDefaultConstraint> defaultConstraints, List<SQLCheckConstraint> checkConstraints) throws InvalidObjectException, MetaException {
    // TODO Auto-generated method stub
    return null;
  }
  @Override public void dropConstraint(String catName, String dbName, String tableName, String constraintName, boolean missingOk) throws NoSuchObjectException {
    // TODO Auto-generated method stub
  }
  @Override public List<String> addPrimaryKeys(List<SQLPrimaryKey> pks) throws InvalidObjectException, MetaException {
    // TODO Auto-generated method stub
    return null;
  }
  @Override public List<String> addForeignKeys(List<SQLForeignKey> fks) throws InvalidObjectException, MetaException {
    // TODO Auto-generated method stub
    return null;
  }
  @Override public List<String> addUniqueConstraints(List<SQLUniqueConstraint> uks) throws InvalidObjectException, MetaException {
// TODO Auto-generated method stub
    return null;
  }
  @Override public List<String> addNotNullConstraints(List<SQLNotNullConstraint> nns) throws InvalidObjectException, MetaException {
    // TODO Auto-generated method stub
    return null;
  }
  @Override public List<String> addDefaultConstraints(List<SQLDefaultConstraint> nns) throws InvalidObjectException, MetaException {
    // TODO Auto-generated method stub
    return null;
  }
  @Override public List<String> addCheckConstraints(List<SQLCheckConstraint> nns) throws InvalidObjectException, MetaException {
    // TODO Auto-generated method stub
    return null;
  }

  // Unlike the silent stubs above, this one fails loudly.
  @Override public String getMetastoreDbUuid() throws MetaException { throw new MetaException("Get metastore uuid is not implemented"); }

  // Workload-management resource plans: creations/drops are no-ops, lookups find nothing.
  @Override public void createResourcePlan( WMResourcePlan resourcePlan, String copyFrom, int defaultPoolSize) throws MetaException { }
  @Override public WMFullResourcePlan getResourcePlan(String name, String ns) throws NoSuchObjectException { return null; }
  @Override public List<WMResourcePlan> getAllResourcePlans(String ns) throws MetaException { return null; }
  @Override public WMFullResourcePlan alterResourcePlan( String name, String ns, WMNullableResourcePlan resourcePlan, boolean canActivateDisabled, boolean canDeactivate, boolean isReplace) throws NoSuchObjectException, InvalidOperationException, MetaException { return null; }
  @Override public WMFullResourcePlan getActiveResourcePlan(String ns) throws MetaException { return null; }
  @Override public WMValidateResourcePlanResponse validateResourcePlan(String name, String ns) throws NoSuchObjectException, InvalidObjectException, MetaException { return null; }
  @Override public void dropResourcePlan(String name, String ns) throws NoSuchObjectException, MetaException { }

  // Workload-management triggers, pools and mappings: all no-ops.
  @Override public void createWMTrigger(WMTrigger trigger) throws MetaException { }
  @Override public void alterWMTrigger(WMTrigger trigger) throws NoSuchObjectException, InvalidOperationException, MetaException { }
  @Override public void dropWMTrigger(String resourcePlanName, String triggerName, String ns) throws NoSuchObjectException, MetaException { }
  @Override public List<WMTrigger> getTriggersForResourcePlan(String resourcePlanName, String ns) throws NoSuchObjectException, MetaException { return null; }
  @Override public void createPool(WMPool pool) throws AlreadyExistsException, NoSuchObjectException, InvalidOperationException, MetaException { }
  @Override public void alterPool(WMNullablePool pool, String poolPath) throws AlreadyExistsException, NoSuchObjectException, InvalidOperationException, MetaException { }
  @Override public void dropWMPool(String resourcePlanName, String poolPath, String ns) throws NoSuchObjectException, InvalidOperationException, MetaException { }
  @Override public void createOrUpdateWMMapping(WMMapping mapping, boolean update) throws AlreadyExistsException, NoSuchObjectException, InvalidOperationException, MetaException { }
  @Override public void dropWMMapping(WMMapping mapping) throws NoSuchObjectException, InvalidOperationException, MetaException { }
  @Override public void createWMTriggerToPoolMapping(String resourcePlanName, String triggerName, String poolPath, String ns) throws AlreadyExistsException, NoSuchObjectException, InvalidOperationException, MetaException { }
  @Override public void dropWMTriggerToPoolMapping(String resourcePlanName, String triggerName, String poolPath, String ns) throws NoSuchObjectException, InvalidOperationException, MetaException { }

  @Override public List<MetaStoreServerUtils.ColStatsObjWithSourceInfo> getPartitionColStatsForDatabase(String catName, String dbName) throws MetaException, NoSuchObjectException {
    // TODO Auto-generated method stub
    return null;
  }

  // Schema registry: no-op stubs (last signature continues on the next original line).
  @Override public void createISchema(ISchema schema) throws AlreadyExistsException, MetaException { }
  @Override public void alterISchema(ISchemaName schemaName, ISchema newSchema) throws NoSuchObjectException, MetaException { }
  @Override public ISchema getISchema(ISchemaName schemaName)
throws MetaException { return null; }

  // Schema-version registry: nothing stored, lookups find nothing.
  @Override public void dropISchema(ISchemaName schemaName) throws NoSuchObjectException, MetaException { }
  @Override public void addSchemaVersion(SchemaVersion schemaVersion) throws AlreadyExistsException, InvalidObjectException, NoSuchObjectException, MetaException { }
  @Override public void alterSchemaVersion(SchemaVersionDescriptor version, SchemaVersion newVersion) throws NoSuchObjectException, MetaException { }
  @Override public SchemaVersion getSchemaVersion(SchemaVersionDescriptor version) throws MetaException { return null; }
  @Override public SchemaVersion getLatestSchemaVersion(ISchemaName schemaName) throws MetaException { return null; }
  @Override public List<SchemaVersion> getAllSchemaVersion(ISchemaName schemaName) throws MetaException { return null; }
  @Override public List<SchemaVersion> getSchemaVersionsByColumns(String colName, String colNamespace, String type) throws MetaException { return null; }
  @Override public void dropSchemaVersion(SchemaVersionDescriptor version) throws NoSuchObjectException, MetaException { }
  @Override public SerDeInfo getSerDeInfo(String serDeName) throws MetaException { return null; }
  @Override public void addSerde(SerDeInfo serde) throws AlreadyExistsException, MetaException { }

  // Runtime statistics: accepted and discarded.
  @Override public void addRuntimeStat(RuntimeStat stat) throws MetaException { }
  @Override public List<RuntimeStat> getRuntimeStats(int maxEntries, int maxCreateTime) throws MetaException { return Collections.emptyList(); }
  @Override public int deleteRuntimeStats(int maxRetainSecs) throws MetaException { return 0; }
  @Override public List<TableName> getTableNamesWithStats() throws MetaException, NoSuchObjectException { return null; }
  @Override public List<TableName> getAllTableNamesForStats() throws MetaException, NoSuchObjectException { return null; }
  @Override public Map<String, List<String>> getPartitionColsWithStats(String catName, String dbName, String tableName) throws MetaException, NoSuchObjectException { return null; }

  // Write-notification bookkeeping: no-op / unknown.
  @Override public void cleanWriteNotificationEvents(int olderThan) { }
  @Override public List<WriteEventInfo> getAllWriteEventInfo(long txnId, String dbName, String tableName) throws MetaException { return null; }

  // NOTE(review): the remaining operations fail loudly instead of returning a
  // default — presumably no caller is expected to reach them from tests.
  @Override public List<String> isPartOfMaterializedView(String catName, String dbName, String tblName) { throw new RuntimeException("unimplemented"); }
  @Override public ScheduledQueryPollResponse scheduledQueryPoll(ScheduledQueryPollRequest request) { throw new RuntimeException("unimplemented"); }
  @Override public void scheduledQueryMaintenance(ScheduledQueryMaintenanceRequest request) { }
  @Override public void scheduledQueryProgress(ScheduledQueryProgressInfo info) { }
  @Override public void addReplicationMetrics(ReplicationMetricList replicationMetricList) { throw new RuntimeException("unimplemented"); }
  @Override public ReplicationMetricList getReplicationMetrics(GetReplicationMetricsRequest replicationMetricsRequest) { throw new RuntimeException("unimplemented"); }
  @Override public int deleteReplicationMetrics(int maxRetainSecs) { throw new RuntimeException("unimplemented"); }
  @Override public ScheduledQuery getScheduledQuery(ScheduledQueryKey scheduleKey) { throw new RuntimeException("unimplemented"); }
  @Override public int deleteScheduledExecutions(int maxRetainSecs) { throw new RuntimeException("unimplemented"); }
  @Override public int markScheduledExecutionsTimedOut(int timeoutSecs) throws InvalidOperationException{ throw new RuntimeException("unimplemented"); }
  @Override public void deleteAllPartitionColumnStatistics(TableName tableName, String w) { throw new RuntimeException("unimplemented"); }
}
package eu.yaga.stockanalyzer.service.impl;

import eu.yaga.stockanalyzer.model.FundamentalData;
import eu.yaga.stockanalyzer.model.RateProgressBean;
import eu.yaga.stockanalyzer.model.StockIndex;
import eu.yaga.stockanalyzer.model.eodhd.EodhdQuote;
import eu.yaga.stockanalyzer.model.historicaldata.HistoricalDataQuote;
import eu.yaga.stockanalyzer.service.HistoricalExchangeRateService;
import eu.yaga.stockanalyzer.util.EodhdCode;
import eu.yaga.stockanalyzer.util.EodhdProperties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.client.RestTemplate;
import org.threeten.bp.DateTimeUtils;
import org.threeten.bp.LocalDate;
import org.threeten.bp.ZoneId;
import org.threeten.bp.format.DateTimeFormatter;
import org.threeten.bp.temporal.ChronoUnit;

import java.text.ParseException;
import java.util.*;

/**
 * Implementation of the {@link HistoricalExchangeRateService} for eodhistoricaldata.com.
 *
 * <p>Quote lookups try a list of candidate eodhd codes (exchange suffix variants) and
 * use the first candidate that yields any data. Methods that cannot obtain the data
 * they need return the sentinel value -9999 rather than throwing, matching the
 * service's existing error convention.
 */
public class EodhdHistoricalExchangeRateServiceImpl implements HistoricalExchangeRateService {

    @Autowired
    private EodhdProperties eodhdProperties;

    @Autowired
    private RestTemplate restTemplate;

    private static final Logger log = LoggerFactory.getLogger(EodhdHistoricalExchangeRateServiceImpl.class);

    // DateTimeFormatter is immutable and thread-safe, so one shared instance is fine.
    private DateTimeFormatter dtf = DateTimeFormatter.ofPattern("yyyy-MM-dd");

    private final String BASE_URL = "https://eodhistoricaldata.com/api/eod/";

    /**
     * This method returns historical exchange Rates of the given stock.
     *
     * @param symbol         Symbol of the stock (optionally suffixed ".EXCHANGE")
     * @param dateStringFrom Date of the start of the historical data (yyyy-MM-dd); defaults to one year ago
     * @param dateStringTo   Date of the end of the historical data (yyyy-MM-dd); defaults to today
     * @return Historical Exchange Rates (empty if no candidate code returned data)
     * @throws ParseException declared by the interface; dates are parsed via java.time here
     */
    @Override
    public List<HistoricalDataQuote> getHistoricalExchangeRates(String symbol, String dateStringFrom, String dateStringTo) throws ParseException {
        log.info("Getting HistoricalExchangeRates for: " + symbol + " " + dateStringFrom + " " + dateStringTo);

        // Split "SYMBOL.EXCHANGE" into its parts; the exchange may be absent.
        String[] splitSymbol = symbol.split("\\.");
        String cleanSymbol = splitSymbol[0];
        String exchange = "";
        if (splitSymbol.length > 1) {
            exchange = splitSymbol[1];
        }
        List<EodhdCode> eodhdCodeList = buildEodhdCode(cleanSymbol, exchange);
        log.info("Clean Symbol: " + cleanSymbol);

        // Resolve the date window, applying defaults for missing bounds.
        LocalDate dateTo = LocalDate.now();
        if (dateStringTo != null) {
            dateTo = LocalDate.parse(dateStringTo, DateTimeFormatter.ISO_LOCAL_DATE);
        }
        LocalDate dateFrom = LocalDate.now().minusYears(1);
        if (dateStringFrom != null) {
            dateFrom = LocalDate.parse(dateStringFrom, DateTimeFormatter.ISO_LOCAL_DATE);
        }
        if (dateFrom.equals(dateTo)) {
            throw new RuntimeException("The dates may not be equal!");
        }
        if (dateFrom.isAfter(dateTo)) {
            throw new RuntimeException("The from date has to be before the to date!");
        }

        List<HistoricalDataQuote> quoteList = new ArrayList<>();
        // BUG FIX: build the URL from the resolved dates instead of the raw nullable
        // strings — previously a null dateStringFrom/To put the literal text "null"
        // into the request even though defaults had just been computed above.
        String urlParams = "?period=d&fmt=json&api_token=" + eodhdProperties.getAuth().getToken()
                + "&from=" + dateFrom.format(dtf) + "&to=" + dateTo.format(dtf);

        // Try each candidate code; the first one that yields data wins.
        for (EodhdCode code : eodhdCodeList) {
            EodhdQuote[] quotes = new EodhdQuote[0];
            try {
                quotes = restTemplate.getForObject(BASE_URL + code.getCode() + urlParams, EodhdQuote[].class);
            } catch (Exception e) {
                // Best-effort: a failing candidate is not fatal, the next one is tried.
                log.warn("error retrieving eod data", e);
            }
            // BUG FIX: getForObject may return null for an empty response body;
            // guard before dereferencing to avoid an NPE.
            if (quotes != null && quotes.length > 0) {
                for (EodhdQuote quote : quotes) {
                    quoteList.add(new HistoricalDataQuote(code.getSymbol(), quote.getDate(), quote.getClose()));
                }
                break;
            }
        }
        return quoteList;
    }

    /**
     * Generates a list of eodhdCodes to try.
     *
     * @param cleanSymbol the stocks symbol (without exchange suffix)
     * @param exchange    the symbol of the exchange (may be empty)
     * @return a list of candidate {@link EodhdCode}s, most preferred first
     */
    private List<EodhdCode> buildEodhdCode(String cleanSymbol, String exchange) {
        List<EodhdCode> eodhdCodeList = new ArrayList<>();
        if (cleanSymbol.startsWith("^")) {
            // Index symbols like "^GDAXI" map to eodhd's ".INDX" namespace.
            String cutSymbol = cleanSymbol.replace("^", "");
            eodhdCodeList.add(new EodhdCode(cutSymbol, cutSymbol + ".INDX", null, null));
        } else {
            switch (exchange) {
                case "F":
                case "DE":
                    // German listings: prefer XETRA, fall back to Frankfurt.
                    eodhdCodeList.add(new EodhdCode(cleanSymbol, cleanSymbol + ".XETRA", "EUR", "Close"));
                    eodhdCodeList.add(new EodhdCode(cleanSymbol, cleanSymbol + ".F", "EUR", "Close"));
                    break;
                case "US":
                    eodhdCodeList.add(new EodhdCode(cleanSymbol, cleanSymbol + ".US", "USD", "Close"));
                    break;
                case "AS":
                    eodhdCodeList.add(new EodhdCode(cleanSymbol, cleanSymbol + ".AS", "EUR", "Last"));
                    break;
                default:
                    // Unknown exchange: assume a US listing.
                    eodhdCodeList.add(new EodhdCode(cleanSymbol, cleanSymbol + ".US", "USD", "Close"));
            }
        }
        return eodhdCodeList;
    }

    /**
     * This method returns the stock's reaction to quarterly figures (comparing it to its index).
     *
     * @param fundamentalData of the stock
     * @return the progress difference to the index in percentage points, or -9999 if
     *         required inputs are missing or the dates cannot be processed
     */
    @Override
    public double getReactionToQuarterlyFigures(FundamentalData fundamentalData) {
        try {
            Date dateLegacy = fundamentalData.getLastQuarterlyFigures();
            String symbol = fundamentalData.getSymbol();
            String indexSymbol = fundamentalData.getStockIndex().getSymbol();
            if (dateLegacy == null || symbol == null || indexSymbol == null) {
                return -9999;
            }

            LocalDate date = DateTimeUtils.toInstant(dateLegacy).atZone(ZoneId.systemDefault()).toLocalDate();
            String dateString = date.format(dtf);
            String priorDay = date.minusDays(1).format(dtf);

            List<HistoricalDataQuote> ratesSymbol = getHistoricalExchangeRates(symbol, priorDay, dateString);
            // Widen the window day by day (up to a week) until two quotes are available,
            // e.g. when the figures date falls on a weekend or holiday.
            LocalDate dateTmp = date;
            int cnt = 0;
            while (ratesSymbol.size() < 2 && cnt <= 7) {
                cnt++;
                dateTmp = dateTmp.minusDays(1);
                priorDay = dateTmp.format(dtf);
                ratesSymbol = getHistoricalExchangeRates(symbol, priorDay, dateString);
            }
            if (ratesSymbol.size() < 2) {
                throw new RuntimeException("Unable to get historical exchange rates for " + symbol);
            }

            List<HistoricalDataQuote> ratesIndex = getHistoricalExchangeRates(indexSymbol, priorDay, dateString);
            // BUG FIX: previously a short index result caused an unexplained
            // IndexOutOfBoundsException; fail with the same message style as the
            // symbol-side check above.
            if (ratesIndex.size() < 2) {
                throw new RuntimeException("Unable to get historical exchange rates for " + indexSymbol);
            }

            // calculate data — index 0 is the figures date, index 1 the prior trading day
            // (assumes the quote list is ordered newest-first; TODO confirm eodhd ordering).
            double closeSymbol = ratesSymbol.get(0).getClose();
            double closeSymbolPriorDay = ratesSymbol.get(1).getClose();
            double closeIndex = ratesIndex.get(0).getClose();
            double closeIndexPriorDay = ratesIndex.get(1).getClose();

            double progressSymbol = (1 - closeSymbolPriorDay / closeSymbol) * 100;
            log.info("progressSymbol " + symbol + ": " + progressSymbol);
            double progressIndex = (1 - closeIndexPriorDay / closeIndex) * 100;
            log.info("progressIndex " + indexSymbol + ": " + progressIndex);
            double totalProgress = progressSymbol - progressIndex;
            log.info("totalProgress: " + totalProgress);

            return totalProgress;
        } catch (ParseException e) {
            return -9999;
        }
    }

    /**
     * This method calculates the stock progression within the last 6 months.
     *
     * @param fundamentalData of the stock
     * @return the progression in percent
     */
    @Override
    public double getRateProgress6month(FundamentalData fundamentalData) {
        log.info("getRateProgress6month");
        return getRateProgress(fundamentalData.getSymbol(), 6, ChronoUnit.MONTHS).getProgress();
    }

    /**
     * This method calculates the stock progression within the last 1 year.
     *
     * @param fundamentalData of the stock
     * @return the progression in percent
     */
    @Override
    public double getRateProgress1year(FundamentalData fundamentalData) {
        log.info("getRateProgress1year");
        return getRateProgress(fundamentalData.getSymbol(), 1, ChronoUnit.YEARS).getProgress();
    }

    /**
     * This method calculates the stock progression compared to its index of the last 3 months.
     *
     * @param fundamentalData of the stock
     * @return a list with the progression of the last 3 months (month-over-month,
     *         most recent first), or a single-element list containing -999.0 when
     *         symbol or index are missing
     */
    @Override
    public List<Double> getReversal3Month(FundamentalData fundamentalData) {
        log.info("getReversal3Month");
        String symbol = fundamentalData.getSymbol();
        String stockIndex = fundamentalData.getStockIndex().getSymbol();

        if (symbol != null && stockIndex != null) {
            // Compare at month-end boundaries of the four preceding months.
            LocalDate lastMonth = LocalDate.now().minusMonths(1);
            LocalDate twoMonthAgo = LocalDate.now().minusMonths(2);
            LocalDate threeMonthAgo = LocalDate.now().minusMonths(3);
            LocalDate fourMonthAgo = LocalDate.now().minusMonths(4);
            LocalDate endOfLastMonth = lastMonth.withDayOfMonth(lastMonth.lengthOfMonth());
            LocalDate endOfTwoMonthAgo = twoMonthAgo.withDayOfMonth(twoMonthAgo.lengthOfMonth());
            LocalDate endOfThreeMonthAgo = threeMonthAgo.withDayOfMonth(threeMonthAgo.lengthOfMonth());
            LocalDate endOfFourMonthAgo = fourMonthAgo.withDayOfMonth(fourMonthAgo.lengthOfMonth());

            RateProgressBean symbolProgressLastMonthBean = getRateProgress(symbol, endOfLastMonth, endOfTwoMonthAgo);
            RateProgressBean symbolProgressTwoMonthAgoBean = getRateProgress(symbol, endOfTwoMonthAgo, endOfThreeMonthAgo);
            RateProgressBean symbolProgressThreeMonthAgoBean = getRateProgress(symbol, endOfThreeMonthAgo, endOfFourMonthAgo);

            double symbolProgressLastMonth = symbolProgressLastMonthBean.getProgress();
            double symbolProgressTwoMonthAgo = symbolProgressTwoMonthAgoBean.getProgress();
            double symbolProgressThreeMonthAgo = symbolProgressThreeMonthAgoBean.getProgress();

            // Use the exact trading days resolved for the symbol so index and symbol
            // progress cover identical periods.
            double indexProgressLastMonth = getIndexRateProgress(
                    fundamentalData.getStockIndex(),
                    symbolProgressLastMonthBean.getBaseDateQuote().getDate(),
                    symbolProgressLastMonthBean.getCompareDateQuote().getDate());
            double indexProgressTwoMonthAgo = getIndexRateProgress(
                    fundamentalData.getStockIndex(),
                    symbolProgressTwoMonthAgoBean.getBaseDateQuote().getDate(),
                    symbolProgressTwoMonthAgoBean.getCompareDateQuote().getDate());
            double indexProgressThreeMonthAgo = getIndexRateProgress(
                    fundamentalData.getStockIndex(),
                    symbolProgressThreeMonthAgoBean.getBaseDateQuote().getDate(),
                    symbolProgressThreeMonthAgoBean.getCompareDateQuote().getDate());

            List<Double> reversalList = new ArrayList<>();
            reversalList.add(symbolProgressLastMonth - indexProgressLastMonth);
            reversalList.add(symbolProgressTwoMonthAgo - indexProgressTwoMonthAgo);
            reversalList.add(symbolProgressThreeMonthAgo - indexProgressThreeMonthAgo);
            return reversalList;
        } else {
            List<Double> reversalList = new ArrayList<>();
            reversalList.add(-999.0);
            return reversalList;
        }
    }

    /**
     * returns the rate progress from today
     *
     * @param symbol     the symbol
     * @param amount     the amount of units to be subtracted
     * @param chronoUnit the unit of the amount (days, months, ...)
     * @return the rate progress
     */
    private RateProgressBean getRateProgress(String symbol, int amount, ChronoUnit chronoUnit) {
        LocalDate today = LocalDate.now();
        LocalDate compareDate = today.minus(amount, chronoUnit);
        return getRateProgress(symbol, today, compareDate);
    }

    /**
     * returns the rate progress from the base date
     *
     * @param symbol      the symbol
     * @param baseDate    the reference date
     * @param compareDate the date to compare with
     * @return the rate progress, or null when arguments are missing or no quote
     *         data could be obtained
     */
    private RateProgressBean getRateProgress(String symbol, LocalDate baseDate, LocalDate compareDate) {
        try {
            if (symbol != null && baseDate != null && compareDate != null) {
                // Find the last trading day at or before baseDate, stepping back up to 10 days.
                LocalDate baseDateMinus = baseDate.minusDays(1);
                List<HistoricalDataQuote> ratesToday =
                        getHistoricalExchangeRates(symbol, baseDateMinus.format(dtf), baseDate.format(dtf));
                int cnt = 0;
                while (ratesToday.size() < 1 && cnt <= 10) {
                    cnt++;
                    baseDateMinus = baseDateMinus.minusDays(1);
                    ratesToday = getHistoricalExchangeRates(symbol, baseDateMinus.format(dtf), baseDate.format(dtf));
                }

                // Same search around compareDate.
                LocalDate compareDateMinus = compareDate.minusDays(1);
                List<HistoricalDataQuote> ratesCompareDate =
                        getHistoricalExchangeRates(symbol, compareDateMinus.format(dtf), compareDate.format(dtf));
                cnt = 0;
                while (ratesCompareDate.size() < 1 && cnt <= 10) {
                    cnt++;
                    compareDateMinus = compareDateMinus.minusDays(1);
                    ratesCompareDate = getHistoricalExchangeRates(symbol, compareDateMinus.format(dtf), compareDate.format(dtf));
                }

                // BUG FIX: previously empty results caused an IndexOutOfBoundsException
                // at get(0); fall back to the documented null return instead.
                if (ratesToday.isEmpty() || ratesCompareDate.isEmpty()) {
                    log.warn("no historical exchange rates found for " + symbol);
                    return null;
                }

                double closeToday = ratesToday.get(0).getClose();
                log.info("closeToday: " + closeToday);
                double closeCompareDate = ratesCompareDate.get(0).getClose();
                log.info("closeCompareDate: " + closeCompareDate);

                double rateProgress = (closeToday - closeCompareDate) / closeCompareDate * 100;
                log.info("rateProgress: " + rateProgress);
                return new RateProgressBean(ratesToday.get(0), ratesCompareDate.get(0), rateProgress);
            }
            return null;
        } catch (ParseException e) {
            return null;
        }
    }

    /**
     * returns the index' rate progress from the base date with backup data source
     *
     * @param index             the StockIndex
     * @param baseDateString    the reference date
     * @param compareDateString the date to compare with
     * @return the rate progress, or -9999 when arguments are missing or no quote
     *         data could be obtained
     */
    private double getIndexRateProgress(StockIndex index, String baseDateString, String compareDateString) {
        if (index != null && baseDateString != null && compareDateString != null) {
            List<HistoricalDataQuote> ratesToday = null;
            List<HistoricalDataQuote> ratesCompareDate = null;
            try {
                // NOTE(review): both lookups use the identical (compareDate, baseDate)
                // window and both reads below use index 0, so the computed progress is
                // always 0.0 — confirm which quotes/windows were actually intended
                // before changing the arithmetic.
                ratesToday = getHistoricalExchangeRates(index.getSymbol(), compareDateString, baseDateString);
                ratesCompareDate = getHistoricalExchangeRates(index.getSymbol(), compareDateString, baseDateString);
            } catch (ParseException e) {
                // BUG FIX: was e.printStackTrace() followed by an unguarded get(0) NPE.
                log.warn("unable to retrieve index rates for " + index.getSymbol(), e);
            }
            if (ratesToday == null || ratesToday.isEmpty()
                    || ratesCompareDate == null || ratesCompareDate.isEmpty()) {
                return -9999;
            }

            double closeToday = ratesToday.get(0).getClose();
            log.info("closeToday: " + closeToday);
            double closeCompareDate = ratesCompareDate.get(0).getClose();
            log.info("closeCompareDate: " + closeCompareDate);

            double rateProgress = (closeToday - closeCompareDate) / closeCompareDate * 100;
            log.info("rateProgress: " + rateProgress);
            return rateProgress;
        }
        return -9999;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.internal.cache;

import java.util.AbstractSet;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;

import org.apache.geode.cache.EntryDestroyedException;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.query.internal.QueryExecutionContext;
import org.apache.geode.internal.cache.LocalRegion.IteratorType;
import org.apache.geode.internal.cache.LocalRegion.NonTXEntry;
import org.apache.geode.internal.i18n.LocalizedStrings;

/**
 * Set view of entries.
 *
 * <p>Live, unmodifiable Set view over a region's keys, values or entries
 * (selected by {@link IteratorType}), optionally recursing into sub-regions and
 * optionally surfacing tombstones. Reads go through the region's
 * {@link InternalDataView} — the transactional view when the creating thread had
 * an active transaction, the shared view otherwise.
 */
public class EntriesSet extends AbstractSet {

  final LocalRegion topRegion;
  final boolean recursive;           // also iterate sub-regions when true
  final IteratorType iterType;       // KEYS, ENTRIES, or values (the else-branch below)
  protected final TXStateInterface myTX;   // null when created outside a transaction
  final boolean allowTombstones;
  protected final InternalDataView view;   // data source: TX state or shared view
  final boolean rememberReads;
  private boolean keepSerialized = false;  // values-view: return raw (serialized) values
  protected boolean ignoreCopyOnReadForQuery = false; // values-view: skip copy-on-read for queries

  EntriesSet(LocalRegion region, boolean recursive, IteratorType viewType, boolean allowTombstones) {
    this.topRegion = region;
    this.recursive = recursive;
    this.iterType = viewType;
    // Capture the creating thread's TX (if any); all subsequent reads use it.
    this.myTX = region.getTXState();
    this.view = this.myTX == null ? region.getSharedDataView() : this.myTX;
    this.rememberReads = true;
    this.allowTombstones = allowTombstones;
  }

  /**
   * Verifies the calling thread's transactional context still matches the one this
   * set was created with: the captured TX must still be in progress, and a non-TX
   * set must not be used from inside a transaction.
   */
  protected void checkTX() {
    if (this.myTX != null) {
      if (!myTX.isInProgress()) {
        throw new IllegalStateException(
            LocalizedStrings.LocalRegion_REGION_COLLECTION_WAS_CREATED_WITH_TRANSACTION_0_THAT_IS_NO_LONGER_ACTIVE
                .toLocalizedString(myTX.getTransactionId()));
      }
    } else {
      if (this.topRegion.isTX()) {
        throw new IllegalStateException(
            LocalizedStrings.LocalRegion_NON_TRANSACTIONAL_REGION_COLLECTION_IS_BEING_USED_IN_A_TRANSACTION
                .toLocalizedString(this.topRegion.getTXState().getTransactionId()));
      }
    }
  }

  @Override
  public Iterator<Object> iterator() {
    checkTX();
    return new EntriesIterator();
  }

  /**
   * Look-ahead iterator: {@code nextElem} always holds the next non-null element
   * (or null at exhaustion), so hasNext()/next() never surface entries that
   * resolve to null, invalid or removed values.
   */
  private class EntriesIterator implements Iterator<Object> {

    final List<LocalRegion> regions;   // FIFO queue of sub-regions (recursive mode only)
    final int numSubRegions;
    int regionsIndex;
    LocalRegion currRgn;

    // keep track of look-ahead on hasNext() call, used to filter out null
    // values
    Object nextElem;

    Iterator<?> currItr;
    Collection<?> additionalKeysFromView;  // extra keys supplied by the view; drained after currItr

    /** reusable KeyInfo */
    protected final KeyInfo keyInfo = new KeyInfo(null, null, null);

    @SuppressWarnings("unchecked")
    protected EntriesIterator() {
      if (recursive) {
        // FIFO queue of regions
        this.regions = new ArrayList<LocalRegion>(topRegion.subregions(true));
        this.numSubRegions = this.regions.size();
      } else {
        this.regions = null;
        this.numSubRegions = 0;
      }
      createIterator(topRegion);
      this.nextElem = moveNext();
    }

    public void remove() {
      throw new UnsupportedOperationException(
          LocalizedStrings.LocalRegion_THIS_ITERATOR_DOES_NOT_SUPPORT_MODIFICATION
              .toLocalizedString());
    }

    public boolean hasNext() {
      return (this.nextElem != null);
    }

    public Object next() {
      final Object result = this.nextElem;
      if (result != null) {
        this.nextElem = moveNext();
        return result;
      }
      throw new NoSuchElementException();
    }

    /**
     * Advances to the next element to expose, or null when exhausted. Walks the
     * current region's keys, then any additional keys from the view, then the
     * queued sub-regions.
     */
    private Object moveNext() {
      // keep looping until:
      // we find an element and return it
      // OR we run out of elements and return null
      for (;;) {
        if (this.currItr.hasNext()) {
          final Object currKey = this.currItr.next();
          final Object result;

          this.keyInfo.setKey(currKey);
          // De-duplicate: a key seen via the region iterator must not be
          // yielded again when the view's additional keys are drained.
          if (this.additionalKeysFromView != null) {
            if (currKey instanceof AbstractRegionEntry) {
              this.additionalKeysFromView.remove(((AbstractRegionEntry) currKey).getKey());
            } else {
              this.additionalKeysFromView.remove(currKey);
            }
          }
          if (iterType == IteratorType.KEYS) {
            result = view.getKeyForIterator(this.keyInfo, this.currRgn, rememberReads, allowTombstones);
            if (result != null) {
              return result;
            }
          } else if (iterType == IteratorType.ENTRIES) {
            result = view.getEntryForIterator(this.keyInfo, this.currRgn, rememberReads, allowTombstones);
            if (result != null) {
              return result;
            }
          } else {
            // VALUES view: resolve the entry, then extract the value form requested.
            Region.Entry re = (Region.Entry) view.getEntryForIterator(this.keyInfo, currRgn,
                rememberReads, allowTombstones);
            if (re != null) {
              try {
                if (keepSerialized) {
                  // OFFHEAP: need to either copy into a cd or figure out when
                  // result will be released.
                  result = ((NonTXEntry) re).getRawValue();
                } else if (ignoreCopyOnReadForQuery) {
                  result = ((NonTXEntry) re).getValue(true);
                } else {
                  result = re.getValue();
                }
                if (result != null && !Token.isInvalidOrRemoved(result)) {
                  // fix for bug 34583
                  return result;
                }
                if (result == Token.TOMBSTONE && allowTombstones) {
                  return result;
                }
              } catch (EntryDestroyedException ede) {
                // Fix for bug 43526, caused by fix to 43064
                // Entry is destroyed, continue to the next element.
              }
            }
            // key disappeared or is invalid, go on to next
          }
        } else if (this.additionalKeysFromView != null) {
          // Region keys exhausted: drain the view's remaining (un-deduped) keys.
          this.currItr = this.additionalKeysFromView.iterator();
          this.additionalKeysFromView = null;
        } else if (this.regionsIndex < this.numSubRegions) {
          // advance to next region
          createIterator(this.regions.get(this.regionsIndex));
          ++this.regionsIndex;
        } else {
          return null;
        }
      }
    }

    private void createIterator(final LocalRegion rgn) {
      // TX iterates over KEYS.
      // NonTX iterates over RegionEntry instances
      this.currRgn = rgn;
      this.currItr = view.getRegionKeysForIteration(rgn).iterator();
      this.additionalKeysFromView = view.getAdditionalKeysForIterator(rgn);
    }
  }

  @Override
  public int size() {
    checkTX();
    if (this.iterType == IteratorType.VALUES) {
      // if this is a values-view, then we have to filter out nulls to
      // determine the correct size
      int s = 0;
      for (Iterator<Object> itr = new EntriesIterator(); itr.hasNext(); itr.next()) {
        s++;
      }
      return s;
    } else if (this.recursive) {
      return this.topRegion.allEntriesSize();
    } else {
      return view.entryCount(this.topRegion);
    }
  }

  @Override
  public Object[] toArray() {
    return toArray(null);
  }

  @Override
  public Object[] toArray(final Object[] array) {
    checkTX();
    // Materialize via the filtering iterator so the array matches iteration order.
    final ArrayList<Object> temp = new ArrayList<Object>(this.size());
    final Iterator<Object> iter = new EntriesIterator();
    while (iter.hasNext()) {
      temp.add(iter.next());
    }
    if (array == null) {
      return temp.toArray();
    } else {
      return temp.toArray(array);
    }
  }

  public void setKeepSerialized(boolean keepSerialized) {
    this.keepSerialized = keepSerialized;
  }

  public boolean isKeepSerialized() {
    return this.keepSerialized;
  }

  public void setIgnoreCopyOnReadForQuery(boolean ignoreCopyOnReadForQuery) {
    this.ignoreCopyOnReadForQuery = ignoreCopyOnReadForQuery;
  }

  public boolean isIgnoreCopyOnReadForQuery() {
    return this.ignoreCopyOnReadForQuery;
  }
}
/* * Copyright 2002-2013 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.security.config.method; import static org.springframework.security.config.Elements.*; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.aop.config.AopNamespaceUtils; import org.springframework.aop.framework.ProxyFactoryBean; import org.springframework.aop.target.LazyInitTargetSource; import org.springframework.beans.BeanMetadataElement; import org.springframework.beans.BeansException; import org.springframework.beans.factory.BeanFactory; import org.springframework.beans.factory.BeanFactoryAware; import org.springframework.beans.factory.NoSuchBeanDefinitionException; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.config.BeanReference; import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; import org.springframework.beans.factory.config.RuntimeBeanReference; import org.springframework.beans.factory.parsing.BeanComponentDefinition; import org.springframework.beans.factory.parsing.CompositeComponentDefinition; import org.springframework.beans.factory.support.BeanDefinitionBuilder; import org.springframework.beans.factory.support.BeanDefinitionRegistry; import 
org.springframework.beans.factory.support.BeanDefinitionRegistryPostProcessor; import org.springframework.beans.factory.support.ManagedList; import org.springframework.beans.factory.support.RootBeanDefinition; import org.springframework.beans.factory.xml.BeanDefinitionParser; import org.springframework.beans.factory.xml.ParserContext; import org.springframework.security.access.ConfigAttribute; import org.springframework.security.access.PermissionEvaluator; import org.springframework.security.access.SecurityConfig; import org.springframework.security.access.annotation.Jsr250MethodSecurityMetadataSource; import org.springframework.security.access.annotation.Jsr250Voter; import org.springframework.security.access.annotation.SecuredAnnotationSecurityMetadataSource; import org.springframework.security.access.expression.method.DefaultMethodSecurityExpressionHandler; import org.springframework.security.access.expression.method.ExpressionBasedAnnotationAttributeFactory; import org.springframework.security.access.expression.method.ExpressionBasedPostInvocationAdvice; import org.springframework.security.access.expression.method.ExpressionBasedPreInvocationAdvice; import org.springframework.security.access.expression.method.MethodSecurityExpressionHandler; import org.springframework.security.access.intercept.AfterInvocationProviderManager; import org.springframework.security.access.intercept.aopalliance.MethodSecurityInterceptor; import org.springframework.security.access.intercept.aopalliance.MethodSecurityMetadataSourceAdvisor; import org.springframework.security.access.intercept.aspectj.AspectJMethodSecurityInterceptor; import org.springframework.security.access.method.DelegatingMethodSecurityMetadataSource; import org.springframework.security.access.method.MapBasedMethodSecurityMetadataSource; import org.springframework.security.access.prepost.PostInvocationAdviceProvider; import org.springframework.security.access.prepost.PreInvocationAuthorizationAdviceVoter; import 
org.springframework.security.access.prepost.PrePostAnnotationSecurityMetadataSource; import org.springframework.security.access.vote.AffirmativeBased; import org.springframework.security.access.vote.AuthenticatedVoter; import org.springframework.security.access.vote.RoleVoter; import org.springframework.security.authentication.AuthenticationManager; import org.springframework.security.config.BeanIds; import org.springframework.security.config.Elements; import org.springframework.security.config.authentication.AuthenticationManagerFactoryBean; import org.springframework.security.core.Authentication; import org.springframework.security.core.AuthenticationException; import org.springframework.util.Assert; import org.springframework.util.StringUtils; import org.springframework.util.xml.DomUtils; import org.w3c.dom.Element; /** * Processes the top-level "global-method-security" element. * * @author Ben Alex * @author Luke Taylor * @author Rob Winch * @since 2.0 */ public class GlobalMethodSecurityBeanDefinitionParser implements BeanDefinitionParser { private final Log logger = LogFactory.getLog(getClass()); private static final String ATT_AUTHENTICATION_MANAGER_REF = "authentication-manager-ref"; private static final String ATT_ACCESS = "access"; private static final String ATT_EXPRESSION = "expression"; private static final String ATT_ACCESS_MGR = "access-decision-manager-ref"; private static final String ATT_RUN_AS_MGR = "run-as-manager-ref"; private static final String ATT_USE_JSR250 = "jsr250-annotations"; private static final String ATT_USE_SECURED = "secured-annotations"; private static final String ATT_USE_PREPOST = "pre-post-annotations"; private static final String ATT_REF = "ref"; private static final String ATT_MODE = "mode"; private static final String ATT_ADVICE_ORDER = "order"; private static final String ATT_META_DATA_SOURCE_REF = "metadata-source-ref"; public BeanDefinition parse(Element element, ParserContext pc) { CompositeComponentDefinition 
compositeDef = new CompositeComponentDefinition( element.getTagName(), pc.extractSource(element)); pc.pushContainingComponent(compositeDef); Object source = pc.extractSource(element); // The list of method metadata delegates ManagedList<BeanMetadataElement> delegates = new ManagedList<BeanMetadataElement>(); boolean jsr250Enabled = "enabled".equals(element.getAttribute(ATT_USE_JSR250)); boolean useSecured = "enabled".equals(element.getAttribute(ATT_USE_SECURED)); boolean prePostAnnotationsEnabled = "enabled".equals(element .getAttribute(ATT_USE_PREPOST)); boolean useAspectJ = "aspectj".equals(element.getAttribute(ATT_MODE)); BeanDefinition preInvocationVoter = null; ManagedList<BeanMetadataElement> afterInvocationProviders = new ManagedList<BeanMetadataElement>(); // Check for an external SecurityMetadataSource, which takes priority over other // sources String metaDataSourceId = element.getAttribute(ATT_META_DATA_SOURCE_REF); if (StringUtils.hasText(metaDataSourceId)) { delegates.add(new RuntimeBeanReference(metaDataSourceId)); } if (prePostAnnotationsEnabled) { Element prePostElt = DomUtils.getChildElementByTagName(element, INVOCATION_HANDLING); Element expressionHandlerElt = DomUtils.getChildElementByTagName(element, EXPRESSION_HANDLER); if (prePostElt != null && expressionHandlerElt != null) { pc.getReaderContext().error( INVOCATION_HANDLING + " and " + EXPRESSION_HANDLER + " cannot be used together ", source); } BeanDefinitionBuilder preInvocationVoterBldr = BeanDefinitionBuilder .rootBeanDefinition(PreInvocationAuthorizationAdviceVoter.class); // After-invocation provider to handle post-invocation filtering and // authorization expression annotations. 
BeanDefinitionBuilder afterInvocationBldr = BeanDefinitionBuilder .rootBeanDefinition(PostInvocationAdviceProvider.class); // The metadata source for the security interceptor BeanDefinitionBuilder mds = BeanDefinitionBuilder .rootBeanDefinition(PrePostAnnotationSecurityMetadataSource.class); if (prePostElt != null) { // Customized override of expression handling system String attributeFactoryRef = DomUtils.getChildElementByTagName( prePostElt, INVOCATION_ATTRIBUTE_FACTORY).getAttribute("ref"); String preAdviceRef = DomUtils.getChildElementByTagName(prePostElt, PRE_INVOCATION_ADVICE).getAttribute("ref"); String postAdviceRef = DomUtils.getChildElementByTagName(prePostElt, POST_INVOCATION_ADVICE).getAttribute("ref"); mds.addConstructorArgReference(attributeFactoryRef); preInvocationVoterBldr.addConstructorArgReference(preAdviceRef); afterInvocationBldr.addConstructorArgReference(postAdviceRef); } else { // The default expression-based system String expressionHandlerRef = expressionHandlerElt == null ? 
null : expressionHandlerElt.getAttribute("ref"); if (StringUtils.hasText(expressionHandlerRef)) { logger.info("Using bean '" + expressionHandlerRef + "' as method ExpressionHandler implementation"); RootBeanDefinition lazyInitPP = new RootBeanDefinition( LazyInitBeanDefinitionRegistryPostProcessor.class); lazyInitPP.getConstructorArgumentValues().addGenericArgumentValue( expressionHandlerRef); pc.getReaderContext().registerWithGeneratedName(lazyInitPP); BeanDefinitionBuilder lazyMethodSecurityExpressionHandlerBldr = BeanDefinitionBuilder .rootBeanDefinition(LazyInitTargetSource.class); lazyMethodSecurityExpressionHandlerBldr.addPropertyValue( "targetBeanName", expressionHandlerRef); BeanDefinitionBuilder expressionHandlerProxyBldr = BeanDefinitionBuilder .rootBeanDefinition(ProxyFactoryBean.class); expressionHandlerProxyBldr.addPropertyValue("targetSource", lazyMethodSecurityExpressionHandlerBldr.getBeanDefinition()); expressionHandlerProxyBldr.addPropertyValue("proxyInterfaces", MethodSecurityExpressionHandler.class); expressionHandlerRef = pc.getReaderContext().generateBeanName( expressionHandlerProxyBldr.getBeanDefinition()); pc.registerBeanComponent(new BeanComponentDefinition( expressionHandlerProxyBldr.getBeanDefinition(), expressionHandlerRef)); } else { BeanDefinition expressionHandler = new RootBeanDefinition( DefaultMethodSecurityExpressionHandler.class); expressionHandlerRef = pc.getReaderContext().generateBeanName( expressionHandler); pc.registerBeanComponent(new BeanComponentDefinition( expressionHandler, expressionHandlerRef)); logger.info("Expressions were enabled for method security but no SecurityExpressionHandler was configured. 
" + "All hasPermision() expressions will evaluate to false."); } BeanDefinitionBuilder expressionPreAdviceBldr = BeanDefinitionBuilder .rootBeanDefinition(ExpressionBasedPreInvocationAdvice.class); expressionPreAdviceBldr.addPropertyReference("expressionHandler", expressionHandlerRef); preInvocationVoterBldr.addConstructorArgValue(expressionPreAdviceBldr .getBeanDefinition()); BeanDefinitionBuilder expressionPostAdviceBldr = BeanDefinitionBuilder .rootBeanDefinition(ExpressionBasedPostInvocationAdvice.class); expressionPostAdviceBldr.addConstructorArgReference(expressionHandlerRef); afterInvocationBldr.addConstructorArgValue(expressionPostAdviceBldr .getBeanDefinition()); BeanDefinitionBuilder annotationInvocationFactory = BeanDefinitionBuilder .rootBeanDefinition(ExpressionBasedAnnotationAttributeFactory.class); annotationInvocationFactory .addConstructorArgReference(expressionHandlerRef); mds.addConstructorArgValue(annotationInvocationFactory .getBeanDefinition()); } preInvocationVoter = preInvocationVoterBldr.getBeanDefinition(); afterInvocationProviders.add(afterInvocationBldr.getBeanDefinition()); delegates.add(mds.getBeanDefinition()); } if (useSecured) { delegates.add(BeanDefinitionBuilder.rootBeanDefinition( SecuredAnnotationSecurityMetadataSource.class).getBeanDefinition()); } if (jsr250Enabled) { delegates.add(BeanDefinitionBuilder.rootBeanDefinition( Jsr250MethodSecurityMetadataSource.class).getBeanDefinition()); } // Now create a Map<String, ConfigAttribute> for each <protect-pointcut> // sub-element Map<String, List<ConfigAttribute>> pointcutMap = parseProtectPointcuts(pc, DomUtils.getChildElementsByTagName(element, PROTECT_POINTCUT)); if (pointcutMap.size() > 0) { if (useAspectJ) { pc.getReaderContext().error( "You can't use AspectJ mode with protect-pointcut definitions", source); } // Only add it if there are actually any pointcuts defined. 
BeanDefinition mapBasedMetadataSource = new RootBeanDefinition( MapBasedMethodSecurityMetadataSource.class); BeanReference ref = new RuntimeBeanReference(pc.getReaderContext() .generateBeanName(mapBasedMetadataSource)); delegates.add(ref); pc.registerBeanComponent(new BeanComponentDefinition(mapBasedMetadataSource, ref.getBeanName())); registerProtectPointcutPostProcessor(pc, pointcutMap, ref, source); } BeanReference metadataSource = registerDelegatingMethodSecurityMetadataSource(pc, delegates, source); // Check for additional after-invocation-providers.. List<Element> afterInvocationElts = DomUtils.getChildElementsByTagName(element, Elements.AFTER_INVOCATION_PROVIDER); for (Element elt : afterInvocationElts) { afterInvocationProviders.add(new RuntimeBeanReference(elt .getAttribute(ATT_REF))); } String accessManagerId = element.getAttribute(ATT_ACCESS_MGR); if (!StringUtils.hasText(accessManagerId)) { accessManagerId = registerAccessManager(pc, jsr250Enabled, preInvocationVoter); } String authMgrRef = element.getAttribute(ATT_AUTHENTICATION_MANAGER_REF); String runAsManagerId = element.getAttribute(ATT_RUN_AS_MGR); BeanReference interceptor = registerMethodSecurityInterceptor(pc, authMgrRef, accessManagerId, runAsManagerId, metadataSource, afterInvocationProviders, source, useAspectJ); if (useAspectJ) { BeanDefinitionBuilder aspect = BeanDefinitionBuilder .rootBeanDefinition("org.springframework.security.access.intercept.aspectj.aspect.AnnotationSecurityAspect"); aspect.setFactoryMethod("aspectOf"); aspect.setRole(BeanDefinition.ROLE_INFRASTRUCTURE); aspect.addPropertyValue("securityInterceptor", interceptor); String id = pc.getReaderContext().registerWithGeneratedName( aspect.getBeanDefinition()); pc.registerBeanComponent(new BeanComponentDefinition(aspect .getBeanDefinition(), id)); } else { registerAdvisor(pc, interceptor, metadataSource, source, element.getAttribute(ATT_ADVICE_ORDER)); AopNamespaceUtils.registerAutoProxyCreatorIfNecessary(pc, element); } 
pc.popAndRegisterContainingComponent(); return null; } /** * Register the default AccessDecisionManager. Adds the special JSR 250 voter jsr-250 * is enabled and an expression voter if expression-based access control is enabled. * @return */ @SuppressWarnings({ "unchecked", "rawtypes" }) private String registerAccessManager(ParserContext pc, boolean jsr250Enabled, BeanDefinition expressionVoter) { BeanDefinitionBuilder accessMgrBuilder = BeanDefinitionBuilder .rootBeanDefinition(AffirmativeBased.class); ManagedList voters = new ManagedList(4); if (expressionVoter != null) { voters.add(expressionVoter); } voters.add(new RootBeanDefinition(RoleVoter.class)); voters.add(new RootBeanDefinition(AuthenticatedVoter.class)); if (jsr250Enabled) { voters.add(new RootBeanDefinition(Jsr250Voter.class)); } accessMgrBuilder.addConstructorArgValue(voters); BeanDefinition accessManager = accessMgrBuilder.getBeanDefinition(); String id = pc.getReaderContext().generateBeanName(accessManager); pc.registerBeanComponent(new BeanComponentDefinition(accessManager, id)); return id; } @SuppressWarnings("rawtypes") private BeanReference registerDelegatingMethodSecurityMetadataSource( ParserContext pc, ManagedList delegates, Object source) { RootBeanDefinition delegatingMethodSecurityMetadataSource = new RootBeanDefinition( DelegatingMethodSecurityMetadataSource.class); delegatingMethodSecurityMetadataSource.setSource(source); delegatingMethodSecurityMetadataSource.getConstructorArgumentValues() .addGenericArgumentValue(delegates); String id = pc.getReaderContext().generateBeanName( delegatingMethodSecurityMetadataSource); pc.registerBeanComponent(new BeanComponentDefinition( delegatingMethodSecurityMetadataSource, id)); return new RuntimeBeanReference(id); } private void registerProtectPointcutPostProcessor(ParserContext parserContext, Map<String, List<ConfigAttribute>> pointcutMap, BeanReference mapBasedMethodSecurityMetadataSource, Object source) { RootBeanDefinition ppbp = new 
RootBeanDefinition( ProtectPointcutPostProcessor.class); ppbp.setRole(BeanDefinition.ROLE_INFRASTRUCTURE); ppbp.setSource(source); ppbp.getConstructorArgumentValues().addGenericArgumentValue( mapBasedMethodSecurityMetadataSource); ppbp.getPropertyValues().addPropertyValue("pointcutMap", pointcutMap); parserContext.getReaderContext().registerWithGeneratedName(ppbp); } private Map<String, List<ConfigAttribute>> parseProtectPointcuts( ParserContext parserContext, List<Element> protectPointcutElts) { Map<String, List<ConfigAttribute>> pointcutMap = new LinkedHashMap<String, List<ConfigAttribute>>(); for (Element childElt : protectPointcutElts) { String accessConfig = childElt.getAttribute(ATT_ACCESS); String expression = childElt.getAttribute(ATT_EXPRESSION); if (!StringUtils.hasText(accessConfig)) { parserContext.getReaderContext().error("Access configuration required", parserContext.extractSource(childElt)); } if (!StringUtils.hasText(expression)) { parserContext.getReaderContext().error("Pointcut expression required", parserContext.extractSource(childElt)); } String[] attributeTokens = StringUtils .commaDelimitedListToStringArray(accessConfig); List<ConfigAttribute> attributes = new ArrayList<ConfigAttribute>( attributeTokens.length); for (String token : attributeTokens) { attributes.add(new SecurityConfig(token)); } pointcutMap.put(expression, attributes); } return pointcutMap; } private BeanReference registerMethodSecurityInterceptor(ParserContext pc, String authMgrRef, String accessManagerId, String runAsManagerId, BeanReference metadataSource, List<BeanMetadataElement> afterInvocationProviders, Object source, boolean useAspectJ) { BeanDefinitionBuilder bldr = BeanDefinitionBuilder .rootBeanDefinition(useAspectJ ? 
AspectJMethodSecurityInterceptor.class : MethodSecurityInterceptor.class); bldr.getRawBeanDefinition().setSource(source); bldr.addPropertyReference("accessDecisionManager", accessManagerId); RootBeanDefinition authMgr = new RootBeanDefinition( AuthenticationManagerDelegator.class); authMgr.getConstructorArgumentValues().addGenericArgumentValue(authMgrRef); bldr.addPropertyValue("authenticationManager", authMgr); bldr.addPropertyValue("securityMetadataSource", metadataSource); if (StringUtils.hasText(runAsManagerId)) { bldr.addPropertyReference("runAsManager", runAsManagerId); } if (!afterInvocationProviders.isEmpty()) { BeanDefinition afterInvocationManager; afterInvocationManager = new RootBeanDefinition( AfterInvocationProviderManager.class); afterInvocationManager.getPropertyValues().addPropertyValue("providers", afterInvocationProviders); bldr.addPropertyValue("afterInvocationManager", afterInvocationManager); } BeanDefinition bean = bldr.getBeanDefinition(); String id = pc.getReaderContext().generateBeanName(bean); pc.registerBeanComponent(new BeanComponentDefinition(bean, id)); return new RuntimeBeanReference(id); } private void registerAdvisor(ParserContext parserContext, BeanReference interceptor, BeanReference metadataSource, Object source, String adviceOrder) { if (parserContext.getRegistry().containsBeanDefinition( BeanIds.METHOD_SECURITY_METADATA_SOURCE_ADVISOR)) { parserContext.getReaderContext().error( "Duplicate <global-method-security> detected.", source); } RootBeanDefinition advisor = new RootBeanDefinition( MethodSecurityMetadataSourceAdvisor.class); if (StringUtils.hasText(adviceOrder)) { advisor.getPropertyValues().addPropertyValue("order", adviceOrder); } // advisor must be an infrastructure bean as Spring's // InfrastructureAdvisorAutoProxyCreator will ignore it // otherwise advisor.setRole(BeanDefinition.ROLE_INFRASTRUCTURE); advisor.setSource(source); advisor.getConstructorArgumentValues().addGenericArgumentValue( 
interceptor.getBeanName()); advisor.getConstructorArgumentValues().addGenericArgumentValue(metadataSource); advisor.getConstructorArgumentValues().addGenericArgumentValue( metadataSource.getBeanName()); parserContext.getRegistry().registerBeanDefinition( BeanIds.METHOD_SECURITY_METADATA_SOURCE_ADVISOR, advisor); } /** * Delays the lookup of the AuthenticationManager within MethodSecurityInterceptor, to * prevent issues like SEC-933. * * @author Luke Taylor * @since 3.0 */ static final class AuthenticationManagerDelegator implements AuthenticationManager, BeanFactoryAware { private AuthenticationManager delegate; private final Object delegateMonitor = new Object(); private BeanFactory beanFactory; private final String authMgrBean; AuthenticationManagerDelegator(String authMgrBean) { this.authMgrBean = StringUtils.hasText(authMgrBean) ? authMgrBean : BeanIds.AUTHENTICATION_MANAGER; } public Authentication authenticate(Authentication authentication) throws AuthenticationException { synchronized (delegateMonitor) { if (delegate == null) { Assert.state(beanFactory != null, "BeanFactory must be set to resolve " + authMgrBean); try { delegate = beanFactory.getBean(authMgrBean, AuthenticationManager.class); } catch (NoSuchBeanDefinitionException e) { if (BeanIds.AUTHENTICATION_MANAGER.equals(e.getBeanName())) { throw new NoSuchBeanDefinitionException( BeanIds.AUTHENTICATION_MANAGER, AuthenticationManagerFactoryBean.MISSING_BEAN_ERROR_MESSAGE); } throw e; } } } return delegate.authenticate(authentication); } public void setBeanFactory(BeanFactory beanFactory) throws BeansException { this.beanFactory = beanFactory; } } /** * Delays setting a bean of a given name to be lazyily initialized until after all the * beans are registered. 
* * @author Rob Winch * @since 3.2 */ private static final class LazyInitBeanDefinitionRegistryPostProcessor implements BeanDefinitionRegistryPostProcessor { private final String beanName; private LazyInitBeanDefinitionRegistryPostProcessor(String beanName) { this.beanName = beanName; } public void postProcessBeanDefinitionRegistry(BeanDefinitionRegistry registry) throws BeansException { if (!registry.containsBeanDefinition(beanName)) { return; } BeanDefinition beanDefinition = registry.getBeanDefinition(beanName); beanDefinition.setLazyInit(true); } public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) throws BeansException { } } }
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.sagemaker.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * Details of a provisioned service catalog product. For information about service catalog, see <a * href="https://docs.aws.amazon.com/servicecatalog/latest/adminguide/introduction.html">What is Amazon Web Services * Service Catalog</a>. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/ServiceCatalogProvisionedProductDetails" * target="_top">AWS API Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class ServiceCatalogProvisionedProductDetails implements Serializable, Cloneable, StructuredPojo { /** * <p> * The ID of the provisioned product. * </p> */ private String provisionedProductId; /** * <p> * The current status of the product. * </p> * <ul> * <li> * <p> * <code>AVAILABLE</code> - Stable state, ready to perform any operation. The most recent operation succeeded and * completed. * </p> * </li> * <li> * <p> * <code>UNDER_CHANGE</code> - Transitive state. Operations performed might not have valid results. Wait for an * AVAILABLE status before performing operations. * </p> * </li> * <li> * <p> * <code>TAINTED</code> - Stable state, ready to perform any operation. 
The stack has completed the requested * operation but is not exactly what was requested. For example, a request to update to a new version failed and the * stack rolled back to the current version. * </p> * </li> * <li> * <p> * <code>ERROR</code> - An unexpected error occurred. The provisioned product exists but the stack is not running. * For example, CloudFormation received a parameter value that was not valid and could not launch the stack. * </p> * </li> * <li> * <p> * <code>PLAN_IN_PROGRESS</code> - Transitive state. The plan operations were performed to provision a new product, * but resources have not yet been created. After reviewing the list of resources to be created, execute the plan. * Wait for an AVAILABLE status before performing operations. * </p> * </li> * </ul> */ private String provisionedProductStatusMessage; /** * <p> * The ID of the provisioned product. * </p> * * @param provisionedProductId * The ID of the provisioned product. */ public void setProvisionedProductId(String provisionedProductId) { this.provisionedProductId = provisionedProductId; } /** * <p> * The ID of the provisioned product. * </p> * * @return The ID of the provisioned product. */ public String getProvisionedProductId() { return this.provisionedProductId; } /** * <p> * The ID of the provisioned product. * </p> * * @param provisionedProductId * The ID of the provisioned product. * @return Returns a reference to this object so that method calls can be chained together. */ public ServiceCatalogProvisionedProductDetails withProvisionedProductId(String provisionedProductId) { setProvisionedProductId(provisionedProductId); return this; } /** * <p> * The current status of the product. * </p> * <ul> * <li> * <p> * <code>AVAILABLE</code> - Stable state, ready to perform any operation. The most recent operation succeeded and * completed. * </p> * </li> * <li> * <p> * <code>UNDER_CHANGE</code> - Transitive state. Operations performed might not have valid results. 
Wait for an * AVAILABLE status before performing operations. * </p> * </li> * <li> * <p> * <code>TAINTED</code> - Stable state, ready to perform any operation. The stack has completed the requested * operation but is not exactly what was requested. For example, a request to update to a new version failed and the * stack rolled back to the current version. * </p> * </li> * <li> * <p> * <code>ERROR</code> - An unexpected error occurred. The provisioned product exists but the stack is not running. * For example, CloudFormation received a parameter value that was not valid and could not launch the stack. * </p> * </li> * <li> * <p> * <code>PLAN_IN_PROGRESS</code> - Transitive state. The plan operations were performed to provision a new product, * but resources have not yet been created. After reviewing the list of resources to be created, execute the plan. * Wait for an AVAILABLE status before performing operations. * </p> * </li> * </ul> * * @param provisionedProductStatusMessage * The current status of the product.</p> * <ul> * <li> * <p> * <code>AVAILABLE</code> - Stable state, ready to perform any operation. The most recent operation succeeded * and completed. * </p> * </li> * <li> * <p> * <code>UNDER_CHANGE</code> - Transitive state. Operations performed might not have valid results. Wait for * an AVAILABLE status before performing operations. * </p> * </li> * <li> * <p> * <code>TAINTED</code> - Stable state, ready to perform any operation. The stack has completed the requested * operation but is not exactly what was requested. For example, a request to update to a new version failed * and the stack rolled back to the current version. * </p> * </li> * <li> * <p> * <code>ERROR</code> - An unexpected error occurred. The provisioned product exists but the stack is not * running. For example, CloudFormation received a parameter value that was not valid and could not launch * the stack. * </p> * </li> * <li> * <p> * <code>PLAN_IN_PROGRESS</code> - Transitive state. 
The plan operations were performed to provision a new
     * product, but resources have not yet been created. After reviewing the list of resources to be created,
     * execute the plan. Wait for an AVAILABLE status before performing operations.
     * </p>
     * </li>
     */
    public void setProvisionedProductStatusMessage(String provisionedProductStatusMessage) {
        this.provisionedProductStatusMessage = provisionedProductStatusMessage;
    }

    /**
     * <p>
     * The current status of the product. One of:
     * </p>
     * <ul>
     * <li><code>AVAILABLE</code> - Stable state; the most recent operation succeeded and completed.</li>
     * <li><code>UNDER_CHANGE</code> - Transitive state; operations performed might not have valid results. Wait for an
     * AVAILABLE status before performing operations.</li>
     * <li><code>TAINTED</code> - Stable state; the stack completed the requested operation but is not exactly what was
     * requested (for example, an update to a new version failed and the stack rolled back).</li>
     * <li><code>ERROR</code> - An unexpected error occurred; the provisioned product exists but the stack is not
     * running.</li>
     * <li><code>PLAN_IN_PROGRESS</code> - Transitive state; plan operations were performed but resources have not yet
     * been created. Wait for an AVAILABLE status before performing operations.</li>
     * </ul>
     *
     * @return The current status of the product.
     */
    public String getProvisionedProductStatusMessage() {
        return this.provisionedProductStatusMessage;
    }

    /**
     * <p>
     * Fluent variant of {@link #setProvisionedProductStatusMessage(String)}; see that method (and
     * {@link #getProvisionedProductStatusMessage()}) for the meaning of the status values.
     * </p>
     *
     * @param provisionedProductStatusMessage
     *        The current status of the product.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ServiceCatalogProvisionedProductDetails withProvisionedProductStatusMessage(String provisionedProductStatusMessage) {
        setProvisionedProductStatusMessage(provisionedProductStatusMessage);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getProvisionedProductId() != null) {
            sb.append("ProvisionedProductId: ").append(getProvisionedProductId()).append(",");
        }
        if (getProvisionedProductStatusMessage() != null) {
            sb.append("ProvisionedProductStatusMessage: ").append(getProvisionedProductStatusMessage());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof ServiceCatalogProvisionedProductDetails)) {
            return false;
        }

        ServiceCatalogProvisionedProductDetails that = (ServiceCatalogProvisionedProductDetails) obj;
        String thisId = this.getProvisionedProductId();
        String thatId = that.getProvisionedProductId();

        if (thisId == null ? thatId != null : !thisId.equals(thatId)) {
            return false;
        }

        String thisMsg = this.getProvisionedProductStatusMessage();
        String thatMsg = that.getProvisionedProductStatusMessage();

        if (thisMsg == null ? thatMsg != null : !thisMsg.equals(thatMsg)) {
            return false;
        }
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;

        result = prime * result + (getProvisionedProductId() == null ? 0 : getProvisionedProductId().hashCode());
        result = prime * result + (getProvisionedProductStatusMessage() == null ? 0 : getProvisionedProductStatusMessage().hashCode());
        return result;
    }

    @Override
    public ServiceCatalogProvisionedProductDetails clone() {
        try {
            return (ServiceCatalogProvisionedProductDetails) super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen for a Cloneable class; surface as a programming error if it somehow does.
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.sagemaker.model.transform.ServiceCatalogProvisionedProductDetailsMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
/* * Copyright (c) 2016, Salesforce.com, Inc. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * 3. Neither the name of Salesforce.com nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
*/ package com.salesforce.dva.argus.service.alert.notifier; import com.google.gson.Gson; import com.google.gson.JsonObject; import com.google.inject.Inject; import com.google.inject.Provider; import com.salesforce.dva.argus.entity.Notification; import com.salesforce.dva.argus.entity.Trigger; import com.salesforce.dva.argus.inject.SLF4JTypeListener; import com.salesforce.dva.argus.service.AnnotationService; import com.salesforce.dva.argus.service.AuditService; import com.salesforce.dva.argus.service.MetricService; import com.salesforce.dva.argus.service.alert.DefaultAlertService.NotificationContext; import com.salesforce.dva.argus.system.SystemConfiguration; import com.salesforce.dva.argus.system.SystemException; import java.net.URLEncoder; import java.sql.Date; import java.text.MessageFormat; import java.util.Properties; import javax.persistence.EntityManager; import org.apache.commons.httpclient.HttpClient; import org.apache.commons.httpclient.MultiThreadedHttpConnectionManager; import org.apache.commons.httpclient.methods.PostMethod; import org.apache.commons.httpclient.methods.StringRequestEntity; import org.apache.commons.httpclient.params.HttpConnectionManagerParams; import org.slf4j.Logger; import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; /** * Implementation of notifier interface for notifying GOC++. * * @author Fiaz Hossain (fiaz.hossain@salesforce.com) */ public class GOCNotifier extends AuditNotifier { //~ Instance fields ****************************************************************************************************************************** @SLF4JTypeListener.InjectLogger private Logger _logger; //~ Constructors ********************************************************************************************************************************* /** * Creates a new GOC notifier. * * @param metricService The metric service. Cannot be null. * @param annotationService The annotation service. Cannot be null. 
* @param auditService The audit service. Cannot be null. * @param gocService The GOC service. Cannot be null. * @param config The system configuration. Cannot be null. * @param emf The entity manager factory. Cannot be null. */ @Inject public GOCNotifier(MetricService metricService, AnnotationService annotationService, AuditService auditService, SystemConfiguration config, Provider<EntityManager> emf) { super(metricService, annotationService, auditService, config, emf); requireArgument(config != null, "The configuration cannot be null."); } //~ Methods ************************************************************************************************************************************** private PostMethod getRequestMethod(boolean refresh, String id) { GOCTransport gocTransport = new GOCTransport(); EndpointInfo endpointInfo = gocTransport.getEndpointInfo(_config, _logger, refresh); // Create upsert URI with PATCH method PostMethod post = new PostMethod(String.format("%s/services/data/v25.0/sobjects/SM_Alert__c/%s/%s", endpointInfo.getEndPoint(), GOCData.SM_ALERT_ID__C_FIELD, id)) { @Override public String getName() { return "PATCH"; } }; post.setRequestHeader("Authorization", "Bearer " + endpointInfo.getToken()); return post; } /** * Sends an GOC++ message. * * @param severity The message severity * @param className The alert class name * @param elementName The element/instance name * @param eventName The event name * @param message The message body. * @param lastNotified The last message time. 
(typically current time) */ public void sendMessage(Severity severity, String className, String elementName, String eventName, String message, long lastNotified) { requireArgument(elementName != null && !elementName.isEmpty(), "ElementName cannot be null or empty."); requireArgument(eventName != null && !eventName.isEmpty(), "EventName cannot be null or empty."); if (Boolean.valueOf(_config.getValue(com.salesforce.dva.argus.system.SystemConfiguration.Property.GOC_ENABLED))) { try { GOCDataBuilder builder = new GOCDataBuilder(); builder.withClassName(className).withElementName(elementName).withEventName(eventName).withEventText(message); if (severity == Severity.OK) { builder.withActive(false).withClearedAt(lastNotified); } else { builder.withActive(true).withCreatedAt(lastNotified); } builder.withLastNotifiedAt(lastNotified); GOCData gocData = builder.build(); boolean refresh = false; GOCTransport gocTransport = new GOCTransport(); HttpClient httpclient = gocTransport.getHttpClient(_config); for (int i = 0; i < 2; i++) { PostMethod post = getRequestMethod(refresh, gocData.getsm_Alert_Id__c()); try { post.setRequestEntity(new StringRequestEntity(gocData.toJSON(), "application/json", null)); int respCode = httpclient.executeMethod(post); // Check for success if (respCode == 201 || respCode == 204) { _logger.info("Success - send GOC++ having element '{}' event '{}' severity {}.", elementName, eventName, severity.name()); break; } else if (respCode == 401) { // Indication that the session timedout, Need to refresh and retry refresh = true; } else { _logger.error("Failure - send GOC++ having element '{}' event '{}' severity {}. Response code '{}' response '{}'", elementName, eventName, severity.name(), respCode, post.getResponseBodyAsString()); } } catch (Exception e) { _logger.error("Failure - send GOC++ having element '{}' event '{}' severity {}. 
Exception '{}'", elementName, eventName, severity.name(), e); } finally { post.releaseConnection(); } } } catch (RuntimeException ex) { throw new SystemException("Failed to send an GOC++ notification.", ex); } } else { _logger.info("Sending GOC++ notification is disabled. Not sending message for element '{}' event '{}' severity {}.", elementName, eventName, severity.name()); } } @Override public String getName() { return GOCNotifier.class.getName(); } @Override protected void sendAdditionalNotification(NotificationContext context) { _sendAdditionalNotification(context, NotificationStatus.TRIGGERED); } @Override protected void clearAdditionalNotification(NotificationContext context) { _sendAdditionalNotification(context, NotificationStatus.CLEARED); } /** * Update the state of the notification to indicate whether the triggering condition exists or has been cleared. * * @param context The notification context. Cannot be null. * @param status The notification status. If null, will set the notification severity to <tt>ERROR</tt> */ protected void _sendAdditionalNotification(NotificationContext context, NotificationStatus status) { requireArgument(context != null, "Notification context cannot be null."); super.sendAdditionalNotification(context); Notification notification = null; Trigger trigger = null; for (Notification tempNotification : context.getAlert().getNotifications()) { if (tempNotification.getName().equalsIgnoreCase(context.getNotification().getName())) { notification = tempNotification; break; } } requireArgument(notification != null, "Notification in notification context cannot be null."); for (Trigger tempTrigger : context.getAlert().getTriggers()) { if (tempTrigger.getName().equalsIgnoreCase(context.getTrigger().getName())) { trigger = tempTrigger; break; } } requireArgument(trigger != null, "Trigger in notification context cannot be null."); String body = getGOCMessageBody(notification, trigger, context); Severity sev = status == 
NotificationStatus.CLEARED ? Severity.OK : Severity.ERROR; sendMessage(sev, context.getNotification().getName(), context.getAlert().getName(), context.getTrigger().getName(), body, context.getTriggerFiredTime()); } /** * Returns the goc message body content. * * @param notification The source notification. * @param trigger The source trigger. * @param context The notification context. * * @return The goc++ message body. */ protected String getGOCMessageBody(Notification notification, Trigger trigger, NotificationContext context) { StringBuilder sb = new StringBuilder(); sb.append(MessageFormat.format("Alert {0} was triggered at {1}\n", context.getAlert().getName(), DATE_FORMATTER.get().format(new Date(context.getTriggerFiredTime())))); sb.append(MessageFormat.format("Notification: {0}\n", notification.getName())); sb.append(MessageFormat.format("Triggered by: {0}\n", trigger.getName())); sb.append(MessageFormat.format("Notification is on cooldown until: {0}\n", DATE_FORMATTER.get().format(new Date(context.getCoolDownExpiration())))); sb.append(MessageFormat.format("Evaluated metric expression: {0}\n", context.getAlert().getExpression())); sb.append(MessageFormat.format("Trigger details: {0}\n", getTriggerDetails(trigger))); sb.append(MessageFormat.format("Triggering event value: {0}\n", context.getTriggerEventValue())); sb.append("\n"); for (String metricToAnnotate : notification.getMetricsToAnnotate()) { sb.append(MessageFormat.format("Annotated series for {0}: {1}\n", metricToAnnotate, getMetricUrl(metricToAnnotate, context.getTriggerFiredTime()))); } sb.append("\n"); sb.append(MessageFormat.format("Alert definition: {0}\n", getAlertUrl(notification.getAlert().getId()))); return sb.toString(); } @Override public Properties getNotifierProperties() { Properties notifierProps= super.getNotifierProperties(); for(Property property:Property.values()){ notifierProps.put(property.getName(), property.getDefaultValue()); } return notifierProps; } //~ Enums 
**************************************************************************************************************************************** /** * Sets the severity of the message. * * @author Fiaz Hossain (fiaz.hossain@salesforce.com) */ public enum Severity { OK, WARN, ERROR } /** * Enumerates implementation specific configuration properties. * * @author Tom Valine (tvaline@salesforce.com) */ public enum Property { /** The GOC endpoint. */ GOC_ENDPOINT("notifier.property.goc.endpoint", "https://test.com"), /** The GOC user with which to authenticate. */ GOC_USER("notifier.property.goc.username", "test_user"), /** The GOC password with which to authenticate. */ GOC_PWD("notifier.property.goc.password", "test_password"), /** The GOC proxy host. */ GOC_PROXY_HOST("notifier.property.goc.proxy.host", ""), /** The GOC port. */ GOC_PROXY_PORT("notifier.property.goc.proxy.port", ""), /** The GOC client ID. */ GOC_CLIENT_ID("notifier.property.goc.client.id", "default_client_id"), /** The GOC client secret. */ GOC_CLIENT_SECRET("notifier.property.goc.client.secret", "default_pass"), /** The alert URL template to be included with GOC notifications. */ EMAIL_ALERT_URL_TEMPLATE("notifier.property.goc.alerturl.template", "http://localhost:8080/argus/alertId"), /** The metric URL template to be included with GOC notifications. */ EMAIL_METRIC_URL_TEMPLATE("notifier.property.goc.metricurl.template", "http://localhost:8080/argus/metrics"); private final String _name; private final String _defaultValue; private Property(String name, String defaultValue) { _name = name; _defaultValue = defaultValue; } /** * Returns the property name. * * @return The property name. */ public String getName() { return _name; } /** * Returns the default property value. * * @return The default property value. 
*/ public String getDefaultValue() { return _defaultValue; } } //~ Inner classes ******************************************************************************************************************************** /** * GOCData object to generate JSON. * * @author Fiaz Hossain (fiaz.hossain@salesforce.com) */ public class GOCData { //~ Static fields/initializers ******************************************************************************************************************* private static final String SM_ACTIVE__C_FIELD = "SM_Active__c"; /** * The name of the GOC alert ID field. * @todo Move this to DefaultGOCService. */ public static final String SM_ALERT_ID__C_FIELD = "SM_Alert_Id__c"; private static final String SM_CLASSNAME__C_FIELD = "SM_ClassName__c"; private static final String SM_CLEAREDAT__C_FIELD = "SM_ClearedAt__c"; private static final String SM_CREATEDAT__C_FIELD = "SM_CreatedAt__c"; private static final String SM_ELEMENTNAME__C_FIELD = "SM_ElementName__c"; private static final String SM_EVENTNAME__C_FIELD = "SM_EventName__c"; private static final String SM_EVENTTEXT__C_FIELD = "SM_EventText__c"; private static final String SM_LASTNOTIFIEDAT__C_FIELD = "SM_LastNotifiedAt__c"; private static final String SM_SEVERITY__C_FIELD = "SM_Severity__c"; private static final String SM_SOURCEDOMAIN__C_FIELD = "SM_SourceDomain__c"; private static final String SR_ACTIONABLE__C_FIELD = "SR_Actionable__c"; //~ Instance fields ****************************************************************************************************************************** private final boolean smActivec; // true when alert is active, false when alert is cleared private final String smAlertIdc; // Text(200) (External ID) --> sm_ElementName__c + ALERT_ID_SEPARATOR + sm_EventName__c private final String smClassNamec; // Text(50) private final long smClearedAtc; // Date/Time --> timestamp when the alert cleared, null while alert is still active private final long smCreatedAtc; // Date/Time --> 
timestamp when the alert last became active private final String smElementNamec; // Text(100) --> hostname private final String smEventNamec; // Text(50) private final String smEventTextc; // Long Text Area(32768) private final long smLastNotifiedAtc; // Date/Time --> timestamp private final int smSeverityc; // Number(1, 0) (External ID) --> 0 through 5 private final String smSourceDomainc; private final boolean srActionablec; // Checkbox --> true if SR needs to respond to this alert //~ Constructors ********************************************************************************************************************************* private GOCData(final boolean smActivec, final String smAlertIdc, final String smClassNamec, final long smClearedAtc, final long smCreatedAtc, final String smElementNamec, final String smEventNamec, final String smEventTextc, final long smLastNotifiedAtc, final int smSeverityc, final String smSourceDomainc, final boolean srActionablec) { this.smActivec = smActivec; this.smAlertIdc = smAlertIdc; this.smClassNamec = smClassNamec; this.smClearedAtc = smClearedAtc; this.smCreatedAtc = smCreatedAtc; this.smElementNamec = smElementNamec; this.smEventNamec = smEventNamec; this.smEventTextc = smEventTextc; this.smLastNotifiedAtc = smLastNotifiedAtc; this.smSeverityc = smSeverityc; this.smSourceDomainc = smSourceDomainc; this.srActionablec = srActionablec; } //~ Methods ************************************************************************************************************************************** /** * Returns the GOC alert ID field name. * * @return The GOC alert ID field name. */ public String getsm_Alert_Id__c() { return smAlertIdc; } /** * Convert data to a JSON string. 
* * @return JSON string */ public String toJSON() { JsonObject gocData = new JsonObject(); gocData.addProperty(SM_ACTIVE__C_FIELD, smActivec); /** * SM_ALERT_ID__C_FIELD will be in the URI and should not in sObject data */ gocData.addProperty(SM_CLASSNAME__C_FIELD, smClassNamec); if (smClearedAtc > 0) { gocData.addProperty(SM_CLEAREDAT__C_FIELD, smClearedAtc); } if (smCreatedAtc > 0) { gocData.addProperty(SM_CREATEDAT__C_FIELD, smCreatedAtc); } gocData.addProperty(SM_ELEMENTNAME__C_FIELD, smElementNamec); gocData.addProperty(SM_EVENTNAME__C_FIELD, smEventNamec); gocData.addProperty(SM_EVENTTEXT__C_FIELD, smEventTextc); gocData.addProperty(SM_LASTNOTIFIEDAT__C_FIELD, smLastNotifiedAtc); gocData.addProperty(SM_SEVERITY__C_FIELD, smSeverityc); gocData.addProperty(SM_SOURCEDOMAIN__C_FIELD, smSourceDomainc); //gocData.addProperty(SR_ACTIONABLE__C_FIELD, srActionablec); return gocData.toString(); } //~ Inner Classes ******************************************************************************************************************************** } /** * Utility builder. * * @author Fiaz Hossain (fiaz.hossain@salesforce.com) */ public class GOCDataBuilder { private static final String SM_SOURCE_DOMAIN__C = "Argus"; private static final String ALERT_ID_SEPARATOR = "."; private boolean smActivec; // true when alert is active, false when alert is cleared private String smClassNamec; // Text(50) private long smClearedAtc; // Date/Time --> timestamp when the alert cleared, null while alert is still active private long smCreatedAtc; // Date/Time --> timestamp when the alert last became active private String smElementNamec; // Text(100) --> hostname private String smEventNamec; // Text(50) private String smEventTextc; // Long Text Area(32768) private long smLastNotifiedAtc; // Date/Time --> timestamp private int smSeverityc = 5; // Number(1, 0) (External ID) --> 0 through 5 private boolean srActionablec = false; /** Creates a new GOCDataBuilder object. 
*/ public GOCDataBuilder() { } /** * Specifies the active status. * * @param smActivec True if active. * * @return The updated builder object. */ public GOCDataBuilder withActive(final boolean smActivec) { this.smActivec = smActivec; return this; } /** * Indicates the class name. * * @param smClassNamec The class name. * * @return The updated builder object. */ public GOCDataBuilder withClassName(final String smClassNamec) { this.smClassNamec = smClassNamec; return this; } /** * Specifies the cleared at time. * * @param smClearedAtc The cleared time. * * @return The updated builder object. */ public GOCDataBuilder withClearedAt(final long smClearedAtc) { this.smClearedAtc = smClearedAtc; return this; } /** * Specifies the created at time. * * @param smCreatedAtc The created at time. * * @return The updated builder object. */ public GOCDataBuilder withCreatedAt(final long smCreatedAtc) { this.smCreatedAtc = smCreatedAtc; return this; } /** * Specifies the element name. * * @param smElementNamec The element name. * * @return The updated builder object. */ public GOCDataBuilder withElementName(final String smElementNamec) { this.smElementNamec = smElementNamec; return this; } /** * Specifies the event name. * * @param smEventNamec The event name. * * @return The updated builder object. */ public GOCDataBuilder withEventName(final String smEventNamec) { this.smEventNamec = smEventNamec; return this; } /** * Specifies the event text. * * @param smEventTextc The event text. * * @return The updated builder object. */ public GOCDataBuilder withEventText(final String smEventTextc) { this.smEventTextc = smEventTextc; return this; } /** * Specifies the last notified date. * * @param smLastNotifiedAtc The last notified date. * * @return The updated builder object. */ public GOCDataBuilder withLastNotifiedAt(final long smLastNotifiedAtc) { this.smLastNotifiedAtc = smLastNotifiedAtc; return this; } /** * Specifies the severity. * * @param smSeverityc The severity. 
* * @return The updated builder object. */ public GOCDataBuilder withSeverity(final int smSeverityc) { this.smSeverityc = smSeverityc; return this; } /** * Specifies whether the alert is actionable. * * @param sRActionablec True if actionable. * * @return The updated builder object. */ public GOCDataBuilder withSRActionable(final boolean sRActionablec) { this.srActionablec = sRActionablec; return this; } /** * Create the GOCData object, use defaults where needed. * * @return GOCData created based on builder data */ public GOCData build() { return new GOCData(smActivec, smElementNamec + ALERT_ID_SEPARATOR + smEventNamec, smClassNamec, smClearedAtc, smCreatedAtc, smElementNamec, smEventNamec, smEventTextc, smLastNotifiedAtc, smSeverityc, SM_SOURCE_DOMAIN__C, srActionablec); } } /** * Manage GOC connections, oAuth and timeouts. * * @author Fiaz Hossain (fiaz.hossain@salesforce.com) */ public class GOCTransport { //~ Static fields/initializers ******************************************************************************************************************* private static final String UTF_8 = "UTF-8"; private static final String NO_TOKEN = "NO_TOKEN"; private static final long MIN_SESSION_REFRESH_THRESHOLD_MILLIS = 5 * 60 * 1000; // Wait at least 5 minutes between refresh attemps private static final int CONNECTION_TIMEOUT_MILLIS = 10000; private static final int READ_TIMEOUT_MILLIS = 10000; private volatile EndpointInfo theEndpointInfo = null; private volatile long lastRefresh = 0; private final MultiThreadedHttpConnectionManager theConnectionManager; { theConnectionManager = new MultiThreadedHttpConnectionManager(); HttpConnectionManagerParams params = theConnectionManager.getParams(); params.setConnectionTimeout(CONNECTION_TIMEOUT_MILLIS); params.setSoTimeout(READ_TIMEOUT_MILLIS); } //~ Methods ************************************************************************************************************************************** /** * Get authenticated endpoint and 
token. * * @param config The system configuration. Cannot be null. * @param logger The logger. Cannot be null. * @param refresh - If true get a new token even if one exists. * * @return EndpointInfo - with valid endpoint and token. The token can be a dummy or expired. */ public EndpointInfo getEndpointInfo(SystemConfiguration config, Logger logger, boolean refresh) { if (theEndpointInfo == null || refresh) { updateEndpoint(config, logger, lastRefresh); } return theEndpointInfo; } /** * Get HttpClient with proper proxy and timeout settings. * * @param config The system configuration. Cannot be null. * * @return HttpClient */ public HttpClient getHttpClient(SystemConfiguration config) { HttpClient httpclient = new HttpClient(theConnectionManager); httpclient.getParams().setParameter("http.connection-manager.timeout", 2000L); // Wait for 2 seconds to get a connection from pool String host = config.getValue(Property.GOC_PROXY_HOST.getName(), Property.GOC_PROXY_HOST.getDefaultValue()); if (host != null && host.length() > 0) { httpclient.getHostConfiguration().setProxy(host, Integer.parseInt(config.getValue(Property.GOC_PROXY_PORT.getName(), Property.GOC_PROXY_PORT.getDefaultValue()))); } return httpclient; } /** * Update the global 'theEndpointInfo' state with a valid endpointInfo if login is successful or a dummy value if not successful. * * @param config The system configuration. Cannot be null. * @param logger The logger. Cannot be null. * @param previousRefresh The last refresh time. 
*/ private synchronized void updateEndpoint(SystemConfiguration config, Logger logger, long previousRefresh) { long diff = System.currentTimeMillis() - previousRefresh; if (diff > MIN_SESSION_REFRESH_THRESHOLD_MILLIS) { lastRefresh = System.currentTimeMillis(); PostMethod post = new PostMethod(config.getValue(Property.GOC_ENDPOINT.getName(), Property.GOC_ENDPOINT.getDefaultValue()) + "/services/oauth2/token"); try { post.addParameter("grant_type", "password"); post.addParameter("client_id", URLEncoder.encode(config.getValue(Property.GOC_CLIENT_ID.getName(), Property.GOC_CLIENT_ID.getDefaultValue()), UTF_8)); post.addParameter("client_secret", URLEncoder.encode(config.getValue(Property.GOC_CLIENT_SECRET.getName(), Property.GOC_CLIENT_SECRET.getDefaultValue()), UTF_8)); post.addParameter("username", config.getValue(Property.GOC_USER.getName(), Property.GOC_USER.getDefaultValue())); post.addParameter("password", config.getValue(Property.GOC_PWD.getName(), Property.GOC_PWD.getDefaultValue())); HttpClient httpclient = getHttpClient(config); int respCode = httpclient.executeMethod(post); // Check for success if (respCode == 200) { JsonObject authResponse = new Gson().fromJson(post.getResponseBodyAsString(), JsonObject.class); String endpoint = authResponse.get("instance_url").getAsString(); String token = authResponse.get("access_token").getAsString(); logger.info("Success - getting access_token for endpoint '{}'", endpoint); logger.debug("access_token '{}'", token); theEndpointInfo = new EndpointInfo(endpoint, token); } else { logger.error("Failure - getting oauth2 token, check username/password: '{}'", post.getResponseBodyAsString()); } } catch (Exception e) { logger.error("Failure - exception getting access_token '{}'", e); } finally { if (theEndpointInfo == null) { theEndpointInfo = new EndpointInfo(config.getValue(Property.GOC_ENDPOINT.getName(), Property.GOC_ENDPOINT.getDefaultValue()), NO_TOKEN); } post.releaseConnection(); } } } //~ Inner Classes 
******************************************************************************************************************************** } /** * Utility class for endpoint information. * * @author fiaz.hossain */ public class EndpointInfo { private final String endPoint; private final String token; private EndpointInfo(final String endPoint, final String token) { this.endPoint = endPoint; this.token = token; } /** * Valid endpoint. Either from config or endpont after authentication * * @return endpoint */ public String getEndPoint() { return endPoint; } /** * Token can be either active, expired or a dummy value. * * @return token */ public String getToken() { return token; } } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.analysis.mock; import com.google.common.base.Functions; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.devtools.build.lib.analysis.ConfigurationCollectionFactory; import com.google.devtools.build.lib.analysis.ConfiguredRuleClassProvider; import com.google.devtools.build.lib.analysis.config.ConfigurationFactory; import com.google.devtools.build.lib.analysis.config.ConfigurationFragmentFactory; import com.google.devtools.build.lib.analysis.util.AnalysisMock; import com.google.devtools.build.lib.bazel.rules.BazelConfiguration; import com.google.devtools.build.lib.bazel.rules.BazelConfigurationCollection; import com.google.devtools.build.lib.bazel.rules.python.BazelPythonConfiguration; import com.google.devtools.build.lib.packages.util.BazelMockCcSupport; import com.google.devtools.build.lib.packages.util.MockCcSupport; import com.google.devtools.build.lib.packages.util.MockToolsConfig; import com.google.devtools.build.lib.rules.android.AndroidConfiguration; import com.google.devtools.build.lib.rules.apple.AppleConfiguration; import com.google.devtools.build.lib.rules.cpp.CppConfigurationLoader; import com.google.devtools.build.lib.rules.cpp.FdoSupportFunction; import com.google.devtools.build.lib.rules.cpp.FdoSupportValue; import 
com.google.devtools.build.lib.rules.java.JavaConfigurationLoader;
import com.google.devtools.build.lib.rules.java.JvmConfigurationLoader;
import com.google.devtools.build.lib.rules.objc.J2ObjcConfiguration;
import com.google.devtools.build.lib.rules.objc.ObjcConfigurationLoader;
import com.google.devtools.build.lib.rules.proto.ProtoConfiguration;
import com.google.devtools.build.lib.rules.python.PythonConfigurationLoader;
import com.google.devtools.build.lib.testutil.BuildRuleBuilder;
import com.google.devtools.build.lib.testutil.BuildRuleWithDefaultsBuilder;
import com.google.devtools.build.lib.testutil.TestRuleClassProvider;
import com.google.devtools.build.lib.vfs.FileSystemUtils;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.skyframe.SkyFunction;
import com.google.devtools.build.skyframe.SkyFunctionName;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

/**
 * Bazel flavor of {@link AnalysisMock}: writes the mock @bazel_tools workspace and BUILD files
 * that analysis-phase tests expect, and wires up the Bazel configuration fragment loaders,
 * configuration collection, rule class provider and extra Skyframe functions.
 */
public final class BazelAnalysisMock extends AnalysisMock {

  // Shared singleton; the constructor is private, so this is the only instance.
  public static final AnalysisMock INSTANCE = new BazelAnalysisMock();

  private BazelAnalysisMock() {
  }

  /**
   * Creates the scratch WORKSPACE file plus the mock /bazel_tools_workspace tree (jdk, android,
   * genrule, jarjar, test, python and zip tooling packages) and delegates C++ setup to
   * {@link #ccSupport()}.
   */
  @Override
  public void setupMockClient(MockToolsConfig config) throws IOException {
    String bazelToolWorkspace = config.getPath("/bazel_tools_workspace").getPathString();
    // Top-level WORKSPACE contents: repository and bind() declarations used by the mock rules.
    ArrayList<String> workspaceContents =
        new ArrayList<>(
            ImmutableList.of(
                "local_repository(name = 'bazel_tools', path = '" + bazelToolWorkspace + "')",
                "local_repository(name = 'local_config_xcode', path = '/local_config_xcode')",
                "bind(",
                " name = 'objc_proto_lib',",
                " actual = '//objcproto:ProtocolBuffers_lib',",
                ")",
                "bind(",
                " name = 'objc_protobuf_lib',",
                " actual = '//objcproto:protobuf_lib',",
                ")",
                "bind(name = 'android/sdk', actual='@bazel_tools//tools/android:sdk')",
                "bind(name = 'tools/python', actual='//tools/python')"));

    config.create("/local_config_xcode/BUILD", "xcode_config(name = 'host_xcodes')");
    config.overwrite("WORKSPACE", workspaceContents.toArray(new String[workspaceContents.size()]));
    config.create("/bazel_tools_workspace/WORKSPACE", "workspace(name = 'bazel_tools')");

    // Mock JDK package: a java_toolchain plus the filegroups the java rules reference.
    config.create(
        "/bazel_tools_workspace/tools/jdk/BUILD",
        "package(default_visibility=['//visibility:public'])",
        "java_toolchain(",
        " name = 'toolchain',",
        " encoding = 'UTF-8',",
        " source_version = '8',",
        " target_version = '8',",
        " bootclasspath = [':bootclasspath'],",
        " extclasspath = [':extclasspath'],",
        " javac = [':langtools'],",
        " javabuilder = ['JavaBuilder_deploy.jar'],",
        " header_compiler = ['turbine_deploy.jar'],",
        " singlejar = ['SingleJar_deploy.jar'],",
        " genclass = ['GenClass_deploy.jar'],",
        " ijar = ['ijar'],",
        ")",
        "filegroup(name = 'jdk-null')",
        "filegroup(name = 'jdk-default', srcs = [':java'], path = 'jdk/jre')",
        "filegroup(name = 'jdk', srcs = [':jdk-default', ':jdk-null'])",
        "filegroup(name='langtools', srcs=['jdk/lib/tools.jar'])",
        "filegroup(name='bootclasspath', srcs=['jdk/jre/lib/rt.jar'])",
        "filegroup(name='extdir', srcs=glob(['jdk/jre/lib/ext/*']))",
        // "dummy" is needed so that RedirectChaser stops here
        "filegroup(name='java', srcs = ['jdk/jre/bin/java', 'dummy'])",
        "exports_files(['JavaBuilder_deploy.jar','SingleJar_deploy.jar','TestRunner_deploy.jar',",
        " 'JavaBuilderCanary_deploy.jar', 'ijar', 'GenClass_deploy.jar',",
        " 'turbine_deploy.jar'])");

    // Mock Android tooling, generated by createAndroidBuildContents() below.
    ImmutableList<String> androidBuildContents = createAndroidBuildContents();
    config.create(
        "/bazel_tools_workspace/tools/android/BUILD",
        androidBuildContents.toArray(new String[androidBuildContents.size()]));
    config.create(
        "/bazel_tools_workspace/tools/android/android_sdk_repository_template.bzl",
        "def create_android_sdk_rules(" + "name, build_tools_version, build_tools_directory, api_level):",
        " pass");

    config.create(
        "/bazel_tools_workspace/tools/genrule/BUILD", "exports_files(['genrule-setup.sh'])");
    config.create(
        "/bazel_tools_workspace/third_party/java/jarjar/BUILD",
        "package(default_visibility=['//visibility:public'])",
        "licenses(['notice'])",
        "java_binary(name = 'jarjar_bin',",
        " runtime_deps = [ ':jarjar_import' ],",
        " main_class = 'com.tonicsystems.jarjar.Main')",
        "java_import(name = 'jarjar_import',",
        " jars = [ 'jarjar.jar' ])");
    config.create("/bazel_tools_workspace/tools/test/BUILD",
        "filegroup(name = 'runtime')",
        "filegroup(name = 'coverage_support')",
        "filegroup(name = 'coverage_report_generator', srcs = ['coverage_report_generator.sh'])");
    config.create(
        "/bazel_tools_workspace/tools/python/BUILD",
        "package(default_visibility=['//visibility:public'])",
        "exports_files(['precompile.py'])",
        "sh_binary(name='2to3', srcs=['2to3.sh'])");
    config.create(
        "/bazel_tools_workspace/tools/zip/BUILD",
        "package(default_visibility=['//visibility:public'])",
        "exports_files(['precompile.py'])",
        "cc_binary(name='zipper', srcs=['zip_main.cc'])");
    ccSupport().setup(config);
  }

  /**
   * Builds the BUILD-file lines for the mock //tools/android package: a generated android_sdk
   * rule (plus its dependency rules) followed by stub sh_binary/java_binary/filegroup targets.
   */
  private ImmutableList<String> createAndroidBuildContents() {
    ImmutableList.Builder<String> androidBuildContents = ImmutableList.builder();
    BuildRuleWithDefaultsBuilder ruleBuilder = new BuildRuleWithDefaultsBuilder("android_sdk", "sdk")
        .popuplateAttributes("", false);
    androidBuildContents.add(ruleBuilder.build());
    // Also emit every dependency rule the defaults builder generated for the android_sdk rule.
    for (BuildRuleBuilder generatedRuleBuilder : ruleBuilder.getRulesToGenerate()) {
      androidBuildContents.add(generatedRuleBuilder.build());
    }
    androidBuildContents
        .add("sh_binary(name = 'aar_generator', srcs = ['empty.sh'])")
        .add("sh_binary(name = 'desugar_java8', srcs = ['empty.sh'])")
        .add("filegroup(name = 'desugar_java8_extra_bootclasspath', srcs = ['fake.jar'])")
        .add("sh_binary(name = 'aar_native_libs_zip_creator', srcs = ['empty.sh'])")
        .add("sh_binary(name = 'dexbuilder', srcs = ['empty.sh'])")
        .add("sh_binary(name = 'dexmerger', srcs = ['empty.sh'])")
        .add("sh_binary(name = 'manifest_merger', srcs = ['empty.sh'])")
        .add("sh_binary(name = 'rclass_generator', srcs = ['empty.sh'])")
        .add("sh_binary(name = 'resources_processor', srcs = ['empty.sh'])")
        .add("sh_binary(name = 'resource_merger', srcs = ['empty.sh'])")
        .add("sh_binary(name = 'resource_parser', srcs = ['empty.sh'])")
        .add("sh_binary(name = 'resource_shrinker', srcs = ['empty.sh'])")
        .add("sh_binary(name = 'resource_validator', srcs = ['empty.sh'])")
        .add("sh_binary(name = 'rex_wrapper', srcs = ['empty.sh'])")
        .add("android_library(name = 'incremental_stub_application')")
        .add("android_library(name = 'incremental_split_stub_application')")
        .add("sh_binary(name = 'stubify_manifest', srcs = ['empty.sh'])")
        .add("sh_binary(name = 'merge_dexzips', srcs = ['empty.sh'])")
        .add("sh_binary(name = 'merge_manifests', srcs = ['empty.sh'])")
        .add("sh_binary(name = 'build_split_manifest', srcs = ['empty.sh'])")
        .add("filegroup(name = 'debug_keystore', srcs = ['fake.file'])")
        .add("sh_binary(name = 'shuffle_jars', srcs = ['empty.sh'])")
        .add("sh_binary(name = 'strip_resources', srcs = ['empty.sh'])")
        .add("sh_binary(name = 'build_incremental_dexmanifest', srcs = ['empty.sh'])")
        .add("sh_binary(name = 'incremental_install', srcs = ['empty.sh'])")
        .add("java_binary(name = 'JarFilter',")
        .add(" runtime_deps = [ ':JarFilter_import'],")
        .add(" main_class = 'com.google.devtools.build.android.ideinfo.JarFilter')")
        .add("java_import(name = 'JarFilter_import',")
        .add(" jars = [ 'jar_filter_deploy.jar' ])")
        .add("java_binary(name = 'PackageParser',")
        .add(" runtime_deps = [ ':PackageParser_import'],")
        .add(" main_class = 'com.google.devtools.build.android.ideinfo.PackageParser')")
        .add("java_import(name = 'PackageParser_import',")
        .add(" jars = [ 'package_parser_deploy.jar' ])")
        .add("java_binary(name = 'IdlClass',")
        .add(" runtime_deps = [ ':idlclass_import' ],")
        .add(" main_class = 'com.google.devtools.build.android.idlclass.IdlClass')")
        .add("sh_binary(name = 'zip_manifest_creator', srcs = ['empty.sh'])")
        .add("sh_binary(name = 'aar_embedded_jars_extractor', srcs = ['empty.sh'])")
        .add("java_import(name = 'idlclass_import',")
        .add(" jars = [ 'idlclass.jar' ])");
    return androidBuildContents.build();
  }

  /** Creates an empty jdk.WORKSPACE file (and its parent directories) under the embedded root. */
  @Override
  public void setupMockWorkspaceFiles(Path embeddedBinariesRoot) throws IOException {
    Path jdkWorkspacePath = embeddedBinariesRoot.getRelative("jdk.WORKSPACE");
    FileSystemUtils.createDirectoryAndParents(jdkWorkspacePath.getParentDirectory());
    FileSystemUtils.writeContentAsLatin1(jdkWorkspacePath, "");
  }

  /** Creates a configuration factory using the default set of fragment factories below. */
  @Override
  public ConfigurationFactory createConfigurationFactory() {
    return createConfigurationFactory(getDefaultConfigurationFactories());
  }

  /** Creates a configuration factory over the caller-supplied fragment factories. */
  @Override
  public ConfigurationFactory createConfigurationFactory(
      List<ConfigurationFragmentFactory> configurationFragmentFactories) {
    return new ConfigurationFactory(
        new BazelConfigurationCollection(), configurationFragmentFactories);
  }

  // The configuration fragment loaders Bazel itself registers (C++, Python, JVM/Java,
  // ObjC/Apple/J2ObjC, proto, Android).
  private static List<ConfigurationFragmentFactory> getDefaultConfigurationFactories() {
    return ImmutableList.<ConfigurationFragmentFactory>of(
        new BazelConfiguration.Loader(),
        new CppConfigurationLoader(Functions.<String>identity()),
        new PythonConfigurationLoader(),
        new BazelPythonConfiguration.Loader(),
        new JvmConfigurationLoader(),
        new JavaConfigurationLoader(),
        new ObjcConfigurationLoader(),
        new AppleConfiguration.Loader(),
        new J2ObjcConfiguration.Loader(),
        new ProtoConfiguration.Loader(),
        new AndroidConfiguration.Loader());
  }

  @Override
  public ConfigurationCollectionFactory createConfigurationCollectionFactory() {
    return new BazelConfigurationCollection();
  }

  @Override
  public ConfiguredRuleClassProvider createRuleClassProvider() {
    return TestRuleClassProvider.getRuleClassProvider();
  }

  /** No extra command-line option overrides for the Bazel mock. */
  @Override
  public Collection<String> getOptionOverrides() {
    return ImmutableList.of();
  }

  @Override
  public boolean isThisBazel() {
    return true;
  }

  @Override
  public MockCcSupport ccSupport() {
    return BazelMockCcSupport.INSTANCE;
  }

  /** Extends the inherited Skyframe functions with the FDO support function. */
  @Override
  public ImmutableMap<SkyFunctionName, SkyFunction> getSkyFunctions() {
    ImmutableMap.Builder<SkyFunctionName, SkyFunction> skyFunctions = ImmutableMap.builder();
    skyFunctions.putAll(super.getSkyFunctions());
    skyFunctions.put(FdoSupportValue.SKYFUNCTION, new FdoSupportFunction());
    return skyFunctions.build();
  }
}
/** * Apache License * Version 2.0, January 2004 * http://www.apache.org/licenses/ * * TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION * * 1. Definitions. * * "License" shall mean the terms and conditions for use, reproduction, * and distribution as defined by Sections 1 through 9 of this document. * * "Licensor" shall mean the copyright owner or entity authorized by * the copyright owner that is granting the License. * * "Legal Entity" shall mean the union of the acting entity and all * other entities that control, are controlled by, or are under common * control with that entity. For the purposes of this definition, * "control" means (i) the power, direct or indirect, to cause the * direction or management of such entity, whether by contract or * otherwise, or (ii) ownership of fifty percent (50%) or more of the * outstanding shares, or (iii) beneficial ownership of such entity. * * "You" (or "Your") shall mean an individual or Legal Entity * exercising permissions granted by this License. * * "Source" form shall mean the preferred form for making modifications, * including but not limited to software source code, documentation * source, and configuration files. * * "Object" form shall mean any form resulting from mechanical * transformation or translation of a Source form, including but * not limited to compiled object code, generated documentation, * and conversions to other media types. * * "Work" shall mean the work of authorship, whether in Source or * Object form, made available under the License, as indicated by a * copyright notice that is included in or attached to the work * (an example is provided in the Appendix below). * * "Derivative Works" shall mean any work, whether in Source or Object * form, that is based on (or derived from) the Work and for which the * editorial revisions, annotations, elaborations, or other modifications * represent, as a whole, an original work of authorship. 
For the purposes * of this License, Derivative Works shall not include works that remain * separable from, or merely link (or bind by name) to the interfaces of, * the Work and Derivative Works thereof. * * "Contribution" shall mean any work of authorship, including * the original version of the Work and any modifications or additions * to that Work or Derivative Works thereof, that is intentionally * submitted to Licensor for inclusion in the Work by the copyright owner * or by an individual or Legal Entity authorized to submit on behalf of * the copyright owner. For the purposes of this definition, "submitted" * means any form of electronic, verbal, or written communication sent * to the Licensor or its representatives, including but not limited to * communication on electronic mailing lists, source code control systems, * and issue tracking systems that are managed by, or on behalf of, the * Licensor for the purpose of discussing and improving the Work, but * excluding communication that is conspicuously marked or otherwise * designated in writing by the copyright owner as "Not a Contribution." * * "Contributor" shall mean Licensor and any individual or Legal Entity * on behalf of whom a Contribution has been received by Licensor and * subsequently incorporated within the Work. * * 2. Grant of Copyright License. Subject to the terms and conditions of * this License, each Contributor hereby grants to You a perpetual, * worldwide, non-exclusive, no-charge, royalty-free, irrevocable * copyright license to reproduce, prepare Derivative Works of, * publicly display, publicly perform, sublicense, and distribute the * Work and such Derivative Works in Source or Object form. * * 3. Grant of Patent License. 
Subject to the terms and conditions of * this License, each Contributor hereby grants to You a perpetual, * worldwide, non-exclusive, no-charge, royalty-free, irrevocable * (except as stated in this section) patent license to make, have made, * use, offer to sell, sell, import, and otherwise transfer the Work, * where such license applies only to those patent claims licensable * by such Contributor that are necessarily infringed by their * Contribution(s) alone or by combination of their Contribution(s) * with the Work to which such Contribution(s) was submitted. If You * institute patent litigation against any entity (including a * cross-claim or counterclaim in a lawsuit) alleging that the Work * or a Contribution incorporated within the Work constitutes direct * or contributory patent infringement, then any patent licenses * granted to You under this License for that Work shall terminate * as of the date such litigation is filed. * * 4. Redistribution. You may reproduce and distribute copies of the * Work or Derivative Works thereof in any medium, with or without * modifications, and in Source or Object form, provided that You * meet the following conditions: * * (a) You must give any other recipients of the Work or * Derivative Works a copy of this License; and * * (b) You must cause any modified files to carry prominent notices * stating that You changed the files; and * * (c) You must retain, in the Source form of any Derivative Works * that You distribute, all copyright, patent, trademark, and * attribution notices from the Source form of the Work, * excluding those notices that do not pertain to any part of * the Derivative Works; and * * (d) If the Work includes a "NOTICE" text file as part of its * distribution, then any Derivative Works that You distribute must * include a readable copy of the attribution notices contained * within such NOTICE file, excluding those notices that do not * pertain to any part of the Derivative Works, in at least one * of 
the following places: within a NOTICE text file distributed * as part of the Derivative Works; within the Source form or * documentation, if provided along with the Derivative Works; or, * within a display generated by the Derivative Works, if and * wherever such third-party notices normally appear. The contents * of the NOTICE file are for informational purposes only and * do not modify the License. You may add Your own attribution * notices within Derivative Works that You distribute, alongside * or as an addendum to the NOTICE text from the Work, provided * that such additional attribution notices cannot be construed * as modifying the License. * * You may add Your own copyright statement to Your modifications and * may provide additional or different license terms and conditions * for use, reproduction, or distribution of Your modifications, or * for any such Derivative Works as a whole, provided Your use, * reproduction, and distribution of the Work otherwise complies with * the conditions stated in this License. * * 5. Submission of Contributions. Unless You explicitly state otherwise, * any Contribution intentionally submitted for inclusion in the Work * by You to the Licensor shall be under the terms and conditions of * this License, without any additional terms or conditions. * Notwithstanding the above, nothing herein shall supersede or modify * the terms of any separate license agreement you may have executed * with Licensor regarding such Contributions. * * 6. Trademarks. This License does not grant permission to use the trade * names, trademarks, service marks, or product names of the Licensor, * except as required for reasonable and customary use in describing the * origin of the Work and reproducing the content of the NOTICE file. * * 7. Disclaimer of Warranty. 
Unless required by applicable law or * agreed to in writing, Licensor provides the Work (and each * Contributor provides its Contributions) on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied, including, without limitation, any warranties or conditions * of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A * PARTICULAR PURPOSE. You are solely responsible for determining the * appropriateness of using or redistributing the Work and assume any * risks associated with Your exercise of permissions under this License. * * 8. Limitation of Liability. In no event and under no legal theory, * whether in tort (including negligence), contract, or otherwise, * unless required by applicable law (such as deliberate and grossly * negligent acts) or agreed to in writing, shall any Contributor be * liable to You for damages, including any direct, indirect, special, * incidental, or consequential damages of any character arising as a * result of this License or out of the use or inability to use the * Work (including but not limited to damages for loss of goodwill, * work stoppage, computer failure or malfunction, or any and all * other commercial damages or losses), even if such Contributor * has been advised of the possibility of such damages. * * 9. Accepting Warranty or Additional Liability. While redistributing * the Work or Derivative Works thereof, You may choose to offer, * and charge a fee for, acceptance of support, warranty, indemnity, * or other liability obligations and/or rights consistent with this * License. However, in accepting such obligations, You may act only * on Your own behalf and on Your sole responsibility, not on behalf * of any other Contributor, and only if You agree to indemnify, * defend, and hold each Contributor harmless for any liability * incurred by, or claims asserted against, such Contributor by reason * of your accepting any such warranty or additional liability. 
* * END OF TERMS AND CONDITIONS * * APPENDIX: How to apply the Apache License to your work. * * To apply the Apache License to your work, attach the following * boilerplate notice, with the fields enclosed by brackets "{}" * replaced with your own identifying information. (Don't include * the brackets!) The text should be enclosed in the appropriate * comment syntax for the file format. We also recommend that a * file or class name and description of purpose be included on the * same "printed page" as the copyright notice for easier * identification within third-party archives. * * Copyright {yyyy} {name of copyright owner} * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package com.deleidos.rtws.webapp.gatewayapi.servlet.enunciate; import java.util.Iterator; import java.util.StringTokenizer; import javax.ws.rs.Path; import messages.usage.SystemEventMessage; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.apache.log4j.Logger; import com.deleidos.rtws.apisecurity.auth.ApiAuthUtils; import com.deleidos.rtws.commons.cloud.beans.State; import com.deleidos.rtws.commons.cloud.exception.ServiceException; import com.deleidos.rtws.commons.config.RtwsConfig; import com.deleidos.rtws.commons.config.UserDataProperties; import com.deleidos.rtws.commons.exception.InvalidParameterException; import com.deleidos.rtws.commons.jersey.config.JerseyClientConfig; import com.deleidos.rtws.commons.model.response.ErrorResponse; import com.deleidos.rtws.commons.model.response.PropertiesResponse; import com.deleidos.rtws.commons.model.response.StandardResponse; import com.deleidos.rtws.commons.model.tmsdb.AccountSystem; import com.deleidos.rtws.commons.model.tmsdb.SystemSubnetMapping; import com.deleidos.rtws.webapp.gatewayapi.cache.ClusterCache; import com.deleidos.rtws.webapp.gatewayapi.client.BroadcastCommandClient; import com.deleidos.rtws.webapp.gatewayapi.client.RestClientException; import com.deleidos.rtws.webapp.gatewayapi.client.RestClientManager; import com.deleidos.rtws.webapp.gatewayapi.common.UserDataBuilder; import com.deleidos.rtws.webapp.gatewayapi.dao.AccountSystemDao; import com.deleidos.rtws.webapp.gatewayapi.dao.BusinessMetricsDao; import com.deleidos.rtws.webapp.gatewayapi.dao.EmbeddedH2DataAccessUtility; import com.deleidos.rtws.webapp.gatewayapi.dao.SystemSubnetMappingDao; import com.deleidos.rtws.webapp.gatewayapi.exception.SystemServiceClientException; import com.deleidos.rtws.webapp.gatewayapi.exception.SystemServiceException; import com.deleidos.rtws.webapp.gatewayapi.model.Cluster; import com.deleidos.rtws.webapp.gatewayapi.model.ClusterState; import 
com.deleidos.rtws.webapp.gatewayapi.model.ClusterStatus; import com.deleidos.rtws.webapp.gatewayapi.model.Node; import com.deleidos.rtws.webapp.gatewayapi.util.EndpointUtil; import com.deleidos.rtws.webapp.gatewayapi.util.MasterUtil; import com.deleidos.rtws.webapp.gatewayapi.util.RetrieveHashUtil; import com.deleidos.rtws.webapp.gatewayapi.util.UserDataUtil; import com.deleidos.rtws.webapp.gatewayapi.vpc.SubnetManager; import com.deleidos.rtws.webapp.gatewayapi.worker.action.MonitorAction; import com.deleidos.rtws.webapp.gatewayapi.worker.queue.QueueManager; import com.deleidos.rtws.webapp.gatewayapi.worker.request.AbstractSystemRequest; import com.deleidos.rtws.webapp.gatewayapi.worker.request.CreateNatRequest; import com.deleidos.rtws.webapp.gatewayapi.worker.request.CreateSystemRequest; import com.deleidos.rtws.webapp.gatewayapi.worker.request.DeleteNatRequest; import com.deleidos.rtws.webapp.gatewayapi.worker.request.DeleteSystemRequest; import com.deleidos.rtws.webapp.gatewayapi.worker.request.StartSystemRequest; import com.deleidos.rtws.webapp.gatewayapi.worker.request.StopSystemRequest; import com.deleidos.rtws.webapp.gatewayapi.worker.request.SyncRequest; import com.deleidos.rtws.webapp.gatewayapi.worker.request.UpdateSystemConfigRequest; import com.sun.jersey.api.client.Client; /** * Implementation of System Service. 
*/ @Path("/system") public class SystemServiceImpl implements SystemService { private static final String MANIFEST_DIR = "/mnt/appfs/manifest/"; private static final String DNS_ADDRESS = "127.0.0.1"; private Logger logger = Logger.getLogger(SystemServiceImpl.class); @Override public Cluster getSystem(String domain) throws ServiceException, RestClientException, SystemServiceClientException, InvalidParameterException { ApiAuthUtils.assertTmsOrTenantFromUserData(); return ClusterCache.getInstance().getCluster(domain); } @Override public StandardResponse<?> broadcast(String domain, String jsonDef) { try { AccountSystemDao dao = new AccountSystemDao(EmbeddedH2DataAccessUtility.getInstance()); AccountSystem system = dao.getAccountSystem(domain); String swVersion = system.getSwVersionId(); if (!swVersion.startsWith("rtws-nightly") && swVersion.compareTo("1.1.3") < 0) { throw new SystemServiceException( "Broadcast command functionality is supported on system built with software version 1.1.3+."); } Cluster cluster = ClusterCache.getInstance().getCluster(domain); if (cluster.getStatus().getState() != ClusterState.OK && cluster.getStatus().getState() != ClusterState.Warning) { throw new SystemServiceException(String.format( "Failed to broadcast command because system %s is in an invalid state.", domain)); } String result = BroadcastCommandClient.getInstance().send(domain, jsonDef); JSONObject jsonResponse = JSONObject.fromObject(result); String status = null; if (jsonResponse.has("status")) { status = jsonResponse.getString("status"); } if (status == null || status.equals("Error")) { throw new SystemServiceException(String.format( "Error occurred while broadcasting command to system %s.", domain)); } String token = null; if (jsonResponse.has("message")) { String message = jsonResponse.getString("message"); StringTokenizer st = new StringTokenizer(message); if (st.countTokens() == 0) { throw new SystemServiceException(String.format( "No return token found after sending 
broadcast command to system %s.", domain)); } else if (st.countTokens() == 1) { token = message.trim(); } else { throw new SystemServiceException(message); } } PropertiesResponse response = new PropertiesResponse(); response.setStandardHeaderCode(200); response.setProperty("Status", "true"); response.setProperty("Token", token); return response; } catch (Exception ex) { logger.error("broadcast - Exception: " + ex.getMessage(), ex); ErrorResponse response = new ErrorResponse(); response.setStandardHeaderCode(500); response.setMessage(ex.getMessage()); return response; } } @Override public StandardResponse<?> broadcastStatus(String domain, String jsonDef) { try { AccountSystemDao dao = new AccountSystemDao(EmbeddedH2DataAccessUtility.getInstance()); AccountSystem system = dao.getAccountSystem(domain); String swVersion = system.getSwVersionId(); if (!swVersion.startsWith("rtws-nightly") && swVersion.compareTo("1.1.3") < 0) { throw new SystemServiceException( "Broadcast command functionality is supported on system built with software version 1.1.3+."); } String result = BroadcastCommandClient.getInstance().send(domain, jsonDef); JSONObject jsonResponse = JSONObject.fromObject(result); String status = null; if (jsonResponse.has("status")) { status = jsonResponse.getString("status"); } PropertiesResponse response = new PropertiesResponse(); response.setStandardHeaderCode(200); response.setProperty("Status", status); return response; } catch (Exception ex) { logger.error("broadcastStatus - Exception: " + ex.getMessage(), ex); ErrorResponse response = new ErrorResponse(); response.setStandardHeaderCode(500); response.setMessage(ex.getMessage()); return response; } } @Override public StandardResponse<?> create(String accountId, String domain, String cfgVersion, String swVersion, String accountEmail, String imageId, String keypairName, String instanceType, String securityGroup, String mountDevice, String iaasServiceName, String aZone, String subnetId) { try { 
ApiAuthUtils.assertTmsOrTenantFromUserData(); AccountSystemDao dao = new AccountSystemDao(EmbeddedH2DataAccessUtility.getInstance()); AccountSystem system = dao.getAccountSystem(domain); Node node = RestClientManager.getSystemServiceClient().getNodeWithoutVolumes( system.getMasterInstanceId()); if (node == null || node.getState() == State.Terminated) { String manifestFile = MANIFEST_DIR + domain + "/master.ini"; String storageEndpoint = EndpointUtil.getStorageEndpoint(); String gatewayIpAddress = subnetId != null ? RestClientManager .getServiceInterface().self().getPublicIpAddress() : RestClientManager .getServiceInterface().self().getPrivateIpAddress(); String bucketName = UserDataBuilder.createBucketName(Integer.parseInt(accountId)); String masterSetupKey = RetrieveHashUtil.getHash(String.format("%s%s%s", domain, accountEmail, imageId)); UserDataBuilder masterUserData = new UserDataBuilder(); masterUserData.setAccessKey(UserDataUtil.getAccessKey()) .setSecretKey(UserDataUtil.getSecretKey()).setMountDevice(mountDevice) .setMountMode("s3cmd").setManifestFile(manifestFile) .setSwVersion(swVersion).setDnsAddress(DNS_ADDRESS) .setTenantId(UserDataUtil.getTenantId()).setDomain(domain) .setDomainSuffix(UserDataUtil.getDomainSuffix()).setBucketName(bucketName) .setStorageEndpoint(storageEndpoint).setGatewayIpAddress(gatewayIpAddress) .setSetupKey(masterSetupKey); CreateSystemRequest cRequest = new CreateSystemRequest(); cRequest.setImageId(imageId); cRequest.setInstanceType(instanceType); cRequest.setAzone(aZone); cRequest.setSecurity(securityGroup); cRequest.setKeyPair(keypairName); cRequest.setCfgVersion(cfgVersion); cRequest.setSWVersion(swVersion); cRequest.setDomain(domain); cRequest.setSubnetId(subnetId); cRequest.setIaasServiceName(iaasServiceName); cRequest.setUserData(masterUserData); CreateNatRequest nRequest = null; if (subnetId != null) { nRequest = new CreateNatRequest(); nRequest.setImageId(imageId); nRequest.setDomain(domain); nRequest.setAzone(aZone); 
nRequest.setKeyPair(keypairName); nRequest.setNextRequest(cRequest); String natSetupKey = RetrieveHashUtil.getHash(String.format("%s%s%s", domain, accountEmail, imageId)); UserDataBuilder natUserData = new UserDataBuilder(); natUserData.setAccessKey(UserDataUtil.getAccessKey()) .setSecretKey(UserDataUtil.getSecretKey()).setMountDevice(mountDevice) .setMountMode("s3cmd") .setManifestFile(MANIFEST_DIR + domain + "/nat.ini") .setSwVersion(swVersion).setDnsAddress(DNS_ADDRESS) .setTenantId(UserDataUtil.getTenantId()).setDomain(domain) .setDomainSuffix(UserDataUtil.getDomainSuffix()) .setBucketName(bucketName).setStorageEndpoint(storageEndpoint) .setGatewayIpAddress(gatewayIpAddress).setSetupKey(natSetupKey) .setIsNat(Boolean.TRUE); nRequest.setUserData(natUserData); } AbstractSystemRequest request = (nRequest != null) ? nRequest : cRequest; if (QueueManager.getSystemRequestQueue().addRequest(request)) { Cluster cluster = ClusterCache.getInstance().getCluster(domain); cluster.setStatus(ClusterStatus.createStartingStatus()); ClusterCache.getInstance().clearErrorReport(domain); String tenantName = UserDataProperties.getInstance().getString( UserDataProperties.RTWS_TENANT_ID); BusinessMetricsDao.addEvent(domain, tenantName, SystemEventMessage.CREATED, System.currentTimeMillis()); PropertiesResponse response = new PropertiesResponse(); response.setStandardHeaderCode(200); response.setProperty("Status", "true"); return response; } throw new SystemServiceException("Create system '" + domain + "' is in progress."); } throw new SystemServiceException("System '" + domain + "' already exists."); } catch (Exception ex) { logger.error("create - Exception: " + ex.getMessage(), ex); ErrorResponse response = new ErrorResponse(); response.setStandardHeaderCode(500); response.setMessage(ex.getMessage()); return response; } } @Override public StandardResponse<?> start(String accountId, String domain, String cfgVersion, String swVersion, String keypairName, String aZone, String subnetId) 
{ try { ApiAuthUtils.assertTmsOrTenantFromUserData(); AccountSystemDao dao = new AccountSystemDao(EmbeddedH2DataAccessUtility.getInstance()); AccountSystem system = dao.getAccountSystem(domain); Node node = RestClientManager.getSystemServiceClient().getNodeWithoutVolumes( system.getMasterInstanceId()); if (State.Stopped == node.getState()) { StartSystemRequest sRequest = new StartSystemRequest(); sRequest.setDomain(domain); sRequest.setSWVersion(swVersion); sRequest.setCfgVersion(cfgVersion); sRequest.setSubnetId(subnetId); CreateNatRequest nRequest = null; if (subnetId != null) { nRequest = new CreateNatRequest(); nRequest.setDomain(domain); nRequest.setAzone(aZone); nRequest.setKeyPair(keypairName); SystemSubnetMappingDao subnetDao = new SystemSubnetMappingDao( EmbeddedH2DataAccessUtility.getInstance()); SystemSubnetMapping mapping = subnetDao.get(domain); String natSetupKey = RetrieveHashUtil.getHash(String.format("%s%s%s", domain, swVersion, subnetId)); String userData = mapping.getNatUserData(); userData = UserDataBuilder.modifySWVersion(userData, swVersion); userData = UserDataBuilder.modifySetupKey(userData, natSetupKey); UserDataBuilder natUserData = new UserDataBuilder(userData); nRequest.setUserData(natUserData); nRequest.setNextRequest(sRequest); } AbstractSystemRequest request = (nRequest != null) ? 
nRequest : sRequest; if (QueueManager.getSystemRequestQueue().addRequest(request)) { Cluster cluster = ClusterCache.getInstance().getCluster(domain); cluster.setStatus(ClusterStatus.createStartingStatus()); ClusterCache.getInstance().clearErrorReport(domain); PropertiesResponse response = new PropertiesResponse(); response.setStandardHeaderCode(200); response.setProperty("Status", "true"); return response; } throw new SystemServiceException("Start system '" + domain + "' is in progress."); } throw new SystemServiceException("System '" + domain + "' master instance is not in a STOPPED state."); } catch (Exception ex) { logger.error("start - Exception: " + ex.getMessage(), ex); ErrorResponse response = new ErrorResponse(); response.setStandardHeaderCode(500); response.setMessage(ex.getMessage()); return response; } } @Override public StandardResponse<?> updateConfig(String domain, String cfgVersion) { try { ApiAuthUtils.assertTmsOrTenantFromUserData(); AccountSystemDao dao = new AccountSystemDao(EmbeddedH2DataAccessUtility.getInstance()); AccountSystem system = dao.getAccountSystem(domain); Node node = RestClientManager.getSystemServiceClient().getNodeWithoutVolumes( system.getMasterInstanceId()); if (State.Running == node.getState()) { UpdateSystemConfigRequest updateSysCfgRequest = new UpdateSystemConfigRequest(); updateSysCfgRequest.setDomain(domain); updateSysCfgRequest.setCfgVersion(cfgVersion); SyncRequest syncRequest = new SyncRequest(); syncRequest.setDomain(domain); syncRequest.setNextRequest(updateSysCfgRequest); if (QueueManager.getSystemRequestQueue().addRequest(syncRequest)) { Cluster cluster = ClusterCache.getInstance().getCluster(domain); cluster.setStatus(ClusterStatus.createUpdatingStatus()); ClusterCache.getInstance().clearErrorReport(domain); PropertiesResponse response = new PropertiesResponse(); response.setStandardHeaderCode(200); response.setProperty("Status", "true"); return response; } throw new SystemServiceException("Update system '" + 
domain + "' configuration is in progress."); } throw new SystemServiceException("System '" + domain + "' master instance is not in a RUNNING state."); } catch (Exception ex) { logger.error("updateIngest - Exception: " + ex.getMessage(), ex); ErrorResponse response = new ErrorResponse(); response.setStandardHeaderCode(500); response.setMessage(ex.getMessage()); return response; } } @Override public StandardResponse<?> stop(String accountId, String domain) { try { ApiAuthUtils.assertTmsOrTenantFromUserData(); AccountSystemDao dao = new AccountSystemDao(EmbeddedH2DataAccessUtility.getInstance()); AccountSystem system = dao.getAccountSystem(domain); Node node = RestClientManager.getSystemServiceClient().getNodeWithoutVolumes( system.getMasterInstanceId()); if (State.Running == node.getState()) { StopSystemRequest sRequest = new StopSystemRequest(); sRequest.setDomain(domain); sRequest.setNode(node); SystemSubnetMappingDao subnetDao = new SystemSubnetMappingDao( EmbeddedH2DataAccessUtility.getInstance()); SystemSubnetMapping mapping = subnetDao.get(domain); if (mapping != null && mapping.getNatInstanceId() != null) { DeleteNatRequest dRequest = new DeleteNatRequest(); dRequest.setDomain(domain); sRequest.setNextRequest(dRequest); } if (QueueManager.getSystemRequestQueue().addRequest(sRequest)) { Cluster cluster = ClusterCache.getInstance().getCluster(domain); cluster.setStatus(ClusterStatus.createStoppingStatus()); ClusterCache.getInstance().clearErrorReport(domain); PropertiesResponse response = new PropertiesResponse(); response.setStandardHeaderCode(200); response.setProperty("Status", "true"); return response; } throw new SystemServiceException("Stop system '" + domain + "' is in progress."); } throw new SystemServiceException("System '" + domain + "' master instance is not in a RUNNING state."); } catch (Exception ex) { logger.error("stop - Exception: " + ex.getMessage(), ex); ErrorResponse response = new ErrorResponse(); response.setStandardHeaderCode(500); 
// --- Tail of stop(...)'s catch block; the head of the method is above. ---
response.setMessage(ex.getMessage());
return response;
}
}

/**
 * Deletes a system. Deletion is only permitted when the system is brand new,
 * its master instance no longer exists in the cloud, or it is currently down.
 */
@Override
public StandardResponse<?> delete(String accountId, String domain) {
    try {
        ApiAuthUtils.assertTmsOrTenantFromUserData();
        // Verify system is not running. Do not delete if system is running,
        // otherwise delete everything that can be found for the system.
        AccountSystemDao dao = new AccountSystemDao(EmbeddedH2DataAccessUtility.getInstance());
        AccountSystem system = dao.getAccountSystem(domain);
        // type stays null when deletion must be refused.
        DeleteSystemRequest.Type type = null;
        if (system.getMasterInstanceId() == null) {
            // Never created or euca and its down
            type = DeleteSystemRequest.Type.NEW;
        } else {
            // Could be euca and its up
            Node node = RestClientManager.getSystemServiceClient().getNodeWithoutVolumes(
                    system.getMasterInstanceId());
            if (node != null) {
                // Any in-flight or running state blocks deletion.
                switch (node.getState()) {
                case Pending:
                    break; // do not delete
                case ShuttingDown:
                    break; // do not delete
                case Running:
                    break; // do not delete
                case Stopping:
                    break; // do not delete
                default:
                    type = DeleteSystemRequest.Type.NORMAL;
                    break; // all other cases - delete
                }
            } else {
                // Node IS null here (instance not found in the cloud, probably),
                // even though the database still references it.
                type = DeleteSystemRequest.Type.ERROR;
            }
        }
        // We only allow systems to be deleted if the system is brand new or
        // the master instance doesn't exist in the cloud environment even
        // tho the database says otherwise, or the system is currently down.
        if (type != null) {
            DeleteSystemRequest request = new DeleteSystemRequest();
            request.setAccountId(accountId);
            request.setDomain(domain);
            request.setType(type);
            if (QueueManager.getSystemRequestQueue().addRequest(request)) {
                Cluster cluster = ClusterCache.getInstance().getCluster(domain);
                cluster.setStatus(ClusterStatus.createDeletingStatus());
                ClusterCache.getInstance().clearErrorReport(domain);
                // Record a business-metrics DELETED event for this tenant.
                String tenantName = UserDataProperties.getInstance().getString(
                        UserDataProperties.RTWS_TENANT_ID);
                BusinessMetricsDao.addEvent(domain, tenantName, SystemEventMessage.DELETED,
                        System.currentTimeMillis());
                PropertiesResponse response = new PropertiesResponse();
                response.setStandardHeaderCode(200);
                response.setProperty("Status", "true");
                return response;
            }
            // addRequest() returned false: a request for this domain is already queued.
            throw new SystemServiceException("Delete system '" + domain + "' is in progress.");
        }
        throw new SystemServiceException("Unable to determine the status for system '"
                + domain + "'.");
    } catch (Exception ex) {
        logger.error("delete - Exception: " + ex.getMessage(), ex);
        ErrorResponse response = new ErrorResponse();
        response.setStandardHeaderCode(500);
        response.setMessage(ex.getMessage());
        return response;
    }
}

/**
 * Asks the repository service on the system's master to start a sync; returns the
 * sync token in the "token" response property.
 */
@Override
public StandardResponse<?> syncRepository(String domain) {
    try {
        ApiAuthUtils.assertTmsOrTenantFromUserData();
        String dns = MasterUtil.getDns(domain);
        String token = RestClientManager.getRespositoryApiRestClient().sendSyncMessage(dns);
        PropertiesResponse response = new PropertiesResponse();
        response.setStandardHeaderCode(200);
        response.setProperty("Status", "true");
        response.setProperty("token", token);
        return response;
    } catch (Exception ex) {
        logger.error("syncRepository - Exception: " + ex.getMessage(), ex);
        ErrorResponse response = new ErrorResponse();
        response.setStandardHeaderCode(500);
        response.setMessage(ex.getMessage());
        return response;
    }
}

/**
 * Polls the repository sync status for a previously issued token and copies every
 * field of the returned JSON status object into the response properties.
 * (Body continues on the following lines.)
 */
@Override
public StandardResponse<?> syncRepositoryStatus(String domain, String token) {
    try {
        ApiAuthUtils.assertTmsOrTenantFromUserData();
        String dns =
// --- Tail of syncRepositoryStatus(...); the head of the method is above. ---
MasterUtil.getDns(domain);
String syncStatus = RestClientManager.getRespositoryApiRestClient().getSyncStatus(dns, token);
PropertiesResponse response = new PropertiesResponse();
response.setStandardHeaderCode(200);
response.setProperty("Status", "true");
// Flatten every key/value of the JSON status object into response properties.
JSONObject jsonSyncStatus = JSONObject.fromObject(syncStatus);
@SuppressWarnings("rawtypes")
Iterator it = jsonSyncStatus.keys(); // JSONObject.keys() is a raw Iterator
while (it.hasNext()) {
    String key = (String) it.next();
    String value = jsonSyncStatus.getString(key);
    response.setProperty(key, value);
}
return response;
} catch (Exception ex) {
logger.error("syncRepositoryStatus - Exception: " + ex.getMessage(), ex);
ErrorResponse response = new ErrorResponse();
response.setStandardHeaderCode(500);
response.setMessage(ex.getMessage());
return response;
}
}

/**
 * Records (or replaces) the master instance id stored for the given domain.
 * Note: unlike the other mutators, failures here are not logged before being
 * turned into an ErrorResponse.
 */
@Override
public StandardResponse<?> updateMasterInstanceId(String domain, String instanceId) {
    AccountSystemDao accountDao = new AccountSystemDao(
            EmbeddedH2DataAccessUtility.getInstance());
    try {
        ApiAuthUtils.assertTmsOrTenantFromUserData();
        accountDao.update(domain, instanceId);
        PropertiesResponse response = new PropertiesResponse();
        response.setStandardHeaderCode(200);
        response.setProperty("Status", "true");
        return response;
    } catch (Exception e) {
        ErrorResponse response = new ErrorResponse();
        response.setStandardHeaderCode(500);
        response.setMessage(e.getMessage());
        return response;
    }
}

/**
 * Records an externally reported error for the system in the cluster cache.
 * NOTE(review): no try/catch here -- an auth failure propagates to the caller,
 * unlike most other methods in this class.
 */
@Override
public StandardResponse<?> setSystemError(String domain, String error) {
    ApiAuthUtils.assertTmsOrTenantFromUserData();
    // Set the error in the cache so it will be reported back to MC.
    ClusterCache.getInstance().reportMonitorError(domain, MonitorAction.External, error);
    PropertiesResponse response = new PropertiesResponse();
    response.setStandardHeaderCode(200);
    response.setProperty("Status", "true");
    return response;
}

/** Clears all monitor errors recorded for the given system. */
@Override
public StandardResponse<?> clearSystemError(String domain) {
    ApiAuthUtils.assertTmsOrTenantFromUserData();
    // Clears the error for the given system.
    ClusterCache.getInstance().clearMonitorErrors(domain);
    PropertiesResponse response = new PropertiesResponse();
    response.setStandardHeaderCode(200);
    response.setProperty("Status", "true");
    return response;
}

/**
 * Returns the stored master instance id for the domain, or null when the system
 * is unknown.
 */
@Override
public String getMasterInstanceId(String domain) {
    ApiAuthUtils.assertTmsOrTenantFromUserData();
    AccountSystemDao dao = new AccountSystemDao(EmbeddedH2DataAccessUtility.getInstance());
    AccountSystem sys = dao.getAccountSystem(domain);
    if (sys != null) {
        return sys.getMasterInstanceId();
    }
    return null;
}

/** Returns the subnet mapping row for the domain (may be null). */
@Override
public SystemSubnetMapping getSystemSubnetMapping(String domain) {
    ApiAuthUtils.assertTmsOrTenantFromUserData();
    SystemSubnetMappingDao dao = new SystemSubnetMappingDao(
            EmbeddedH2DataAccessUtility.getInstance());
    return dao.get(domain);
}

/**
 * Returns the NAT node for the domain with its public subnet id filled in, or an
 * empty Node when no NAT instance is mapped.
 */
@Override
public Node getNatInstance(String domain) {
    ApiAuthUtils.assertTmsOrTenantFromUserData();
    SystemSubnetMappingDao dao = new SystemSubnetMappingDao(
            EmbeddedH2DataAccessUtility.getInstance());
    SystemSubnetMapping mapping = dao.get(domain);
    if (mapping != null && mapping.getNatInstanceId() != null) {
        Node nat = RestClientManager.getInstanceServiceClient().getNodeWithoutVolumes(
                mapping.getNatInstanceId());
        nat.setSubnetId(mapping.getPublicSubnetId());
        return nat;
    }
    // No NAT mapped: callers get an empty Node rather than null.
    return new Node();
}

/** Assigns identifying attributes (domain/group/host/name) to the master instance. */
@Override
public StandardResponse<?> setMasterAttributes(String instance, String domain, String group,
        String host, String name) {
    try {
        ApiAuthUtils.assertTmsOrTenantFromUserData();
        RestClientManager.getInstanceServiceClient().assignAttributes(instance, domain,
                group, host, name);
        PropertiesResponse response = new PropertiesResponse();
        response.setStandardHeaderCode(200);
        response.setProperty("Status", "true");
        return response;
    } catch (Exception e) {
        ErrorResponse response = new ErrorResponse();
        response.setStandardHeaderCode(500);
        response.setMessage(e.getMessage());
        return response;
    }
}

/**
 * Assigns a VPC subnet to the domain in the given availability zone; the subnet id
 * is returned in the "SubnetId" response property.
 * (Body continues on the following lines.)
 */
@Override
public StandardResponse<?> assignSubnet(String domain, String azone) {
    try {
// --- Tail of assignSubnet(...); the signature and try are above. ---
ApiAuthUtils.assertTmsOrTenantFromUserData();
String subnetId = SubnetManager.getInstance().assign(domain, azone);
if (subnetId != null) {
    PropertiesResponse response = new PropertiesResponse();
    response.setStandardHeaderCode(200);
    response.setProperty("Status", "true");
    response.setProperty("SubnetId", subnetId);
    return response;
}
throw new SystemServiceException("Fail to assign a vpc subnet for domain '"
        + domain + "'.");
} catch (Exception ex) {
ErrorResponse response = new ErrorResponse();
response.setStandardHeaderCode(500);
response.setMessage(ex.getMessage());
return response;
}
}

/** Releases the VPC subnet previously assigned to the domain. */
@Override
public StandardResponse<?> unassignSubnet(String domain) {
    try {
        ApiAuthUtils.assertTmsOrTenantFromUserData();
        if (SubnetManager.getInstance().unassign(domain)) {
            PropertiesResponse response = new PropertiesResponse();
            response.setStandardHeaderCode(200);
            response.setProperty("Status", "true");
            return response;
        }
        throw new SystemServiceException("Fail to unassign vpc subnet from domain '"
                + domain + "'.");
    } catch (Exception ex) {
        ErrorResponse response = new ErrorResponse();
        response.setStandardHeaderCode(500);
        response.setMessage(ex.getMessage());
        return response;
    }
}

/**
 * Returns the master address (DNS name or private ip) for the domain.
 * NOTE(review): no @Override here, unlike the surrounding methods -- confirm
 * whether this method is part of the service interface.
 */
public String getMasterAddress(String domain) {
    ApiAuthUtils.assertTmsOrTenantFromUserData();
    return MasterUtil.getDns(domain);
}

/**
 * Returns the address (host:port) of the "default" instance for the domain, or
 * null when no default node is found. NOTE(review): also missing @Override.
 */
public String getDefaultAddress(String domain) {
    ApiAuthUtils.assertTmsOrTenantFromUserData();
    if (UserDataUtil.isVpcEnabled()) {
        // This gateway is running inside a vpc, in order
        // to contact the default instance in ec2 we must use
        // the nat at port 443
        return domain + ":"
                + RtwsConfig.getInstance().getInt("rtws.default.webapp.port");// 443
    } else {
        // some euca systems in NOT vpc do not have DNS entries, so we have
        // to get the private ip.
        String master = MasterUtil.getDns(domain);// actually gets the
                                                  // master private ip
        // Query the repository webapp on the master for the full node list and
        // pick the node whose host is "default".
        String baseNodeInfoURI = "%s://%s:%s%s/json/process/retrieve/all";
        String scheme = RtwsConfig.getInstance().getString("rtws.internal.webapp.scheme");
        String port = RtwsConfig.getInstance().getString("rtws.internal.webapp.port");
        String context = RtwsConfig.getInstance()
                .getString("webapp.repository.url.contextPath");
        String requestUrl = String.format(baseNodeInfoURI, scheme, master, port, context);
        Client client = Client.create(JerseyClientConfig.getInstance().getInternalConfig());
        String systemInfo = client.resource(requestUrl).get(String.class);
        JSONObject systemObj = JSONObject.fromObject(systemInfo);
        JSONArray nodes = systemObj.getJSONArray("node");
        for (int i = 0; i < nodes.size(); i++) {
            JSONObject nodeInfo = nodes.getJSONObject(i);
            if (nodeInfo.getString("host").equalsIgnoreCase("default")) {
                return nodeInfo.getString("privateIp") + ":"
                        + RtwsConfig.getInstance().getInt("rtws.internal.webapp.port");// 8443";
            }
        }
        return null; // no default found
    }
}

/** Acquires the repository system lock on the system's master. */
@Override
public StandardResponse<?> getSystemLock(String domain) {
    try {
        ApiAuthUtils.assertTmsOrTenantFromUserData();
        String dns = MasterUtil.getDns(domain);
        RestClientManager.getRespositoryApiRestClient().getSystemLock(dns);
        PropertiesResponse response = new PropertiesResponse();
        response.setStandardHeaderCode(200);
        response.setProperty("Status", "true");
        return response;
    } catch (Exception e) {
        ErrorResponse response = new ErrorResponse();
        response.setStandardHeaderCode(500);
        response.setMessage(e.getMessage());
        return response;
    }
}

/** Releases the repository system lock on the system's master. */
@Override
public StandardResponse<?> releaseSystemLock(String domain) {
    try {
        ApiAuthUtils.assertTmsOrTenantFromUserData();
        String dns = MasterUtil.getDns(domain);
        RestClientManager.getRespositoryApiRestClient().releaseSystemLock(dns);
        PropertiesResponse response = new PropertiesResponse();
        response.setStandardHeaderCode(200);
        response.setProperty("Status", "true");
        return response;
    } catch (Exception e) {
        ErrorResponse response = new ErrorResponse();
        response.setStandardHeaderCode(500);
        response.setMessage(e.getMessage());
        return response;
    }
}
}
package org.maltparserx.core.options; import java.util.SortedMap; import java.util.SortedSet; import java.util.TreeMap; import java.util.TreeSet; import org.maltparserx.core.options.option.Option; /** * An option container stores the option values for one instance usage. For example, a * single malt configuration there will only be one option container, but for an ensemble parser there * could be several option containers. * * There are four types internal option container: * <ul> * <li>SAVEDOPTION, contains option values load from the saved option file. * <li>DEPENDENCIES_RESOLVED, contains option values that overload option values in COMMANDLINE and OPTIONFILE * due to dependencies with other options. * <li>COMMANDLINE, contains option values that are read from the command-line prompt. * <li>OPTIONFILE, contains option values that are read from the option file. * </ul> * <p>These internal option containers have following priority: SAVEDOPTION, DEPENDENCIES_RESOLVED, COMMANDLINE, * OPTIONFILE. If an option cannot be found in the SAVEDOPTION internal option container it will continue to * look in the DEPENDENCIES_RESOLVED internal option container and and so fourth. 
If the option value cannot be * found in none of the internal option container, the option manager uses the default option value provided by * the option description.</p> * * @author Johan Hall * @since 1.0 **/ public class OptionContainer implements Comparable<OptionContainer>{ /* Types of internal option container */ public static final int SAVEDOPTION = 0; public static final int DEPENDENCIES_RESOLVED = 1; public static final int COMMANDLINE = 2; public static final int OPTIONFILE = 3; private int index; private SortedMap<Option,Object> savedOptionMap; private SortedMap<Option,Object> dependenciesResolvedOptionMap; private SortedMap<Option,Object> commandLineOptionMap; private SortedMap<Option,Object> optionFileOptionMap; /** * Creates an option container * * @param index The index of the option container (0..n). */ public OptionContainer(int index) throws OptionException { setIndex(index); savedOptionMap = new TreeMap<Option,Object>(); dependenciesResolvedOptionMap = new TreeMap<Option,Object>(); commandLineOptionMap = new TreeMap<Option,Object>(); optionFileOptionMap = new TreeMap<Option,Object>(); } /** * Adds an option value to an option to one of the internal option container specified by the type. * * @param type the internal option container * @param option the option object * @param value the option value object * @throws OptionException */ public void addOptionValue(int type, Option option, Object value) throws OptionException { if (type == OptionContainer.SAVEDOPTION) { savedOptionMap.put(option, value); } else if (type == OptionContainer.DEPENDENCIES_RESOLVED) { dependenciesResolvedOptionMap.put(option, value); } else if (type == OptionContainer.COMMANDLINE) { commandLineOptionMap.put(option, value); } else if (type == OptionContainer.OPTIONFILE) { optionFileOptionMap.put(option, value); } else { throw new OptionException("Unknown option container type"); } } /** * Returns the option value object for the option. 
It uses the priority amongst the internal * option containers. * * @param option the option object * @return the option value object */ public Object getOptionValue(Option option) { Object value = null; for (int i = OptionContainer.SAVEDOPTION; i <= OptionContainer.OPTIONFILE; i++) { if (i == OptionContainer.SAVEDOPTION) { value = savedOptionMap.get(option); } else if (i == OptionContainer.DEPENDENCIES_RESOLVED) { value = dependenciesResolvedOptionMap.get(option); } else if (i == OptionContainer.COMMANDLINE) { value = commandLineOptionMap.get(option); } else if (i == OptionContainer.OPTIONFILE) { value = optionFileOptionMap.get(option); } if (value != null) { return value; } } return null; } /** * Returns a string representation of the option value for the specified option. It uses the priority * amongst the internal option containers. * * @param option the option object * @return a string representation of the option value */ public String getOptionValueString(Option option) { String value = null; for (int i = OptionContainer.SAVEDOPTION; i <= OptionContainer.OPTIONFILE; i++) { if (i == OptionContainer.SAVEDOPTION) { value = option.getStringRepresentation(savedOptionMap.get(option)); } else if (i == OptionContainer.DEPENDENCIES_RESOLVED) { value = option.getStringRepresentation(dependenciesResolvedOptionMap.get(option)); } else if (i == OptionContainer.COMMANDLINE) { value = option.getStringRepresentation(commandLineOptionMap.get(option)); } else if (i == OptionContainer.OPTIONFILE) { value = option.getStringRepresentation(optionFileOptionMap.get(option)); } if (value != null) { return value; } } return null; } /** * Returns true if the option is present in the specified internal option container, otherwise false. 
* * @param type the internal option container * @param option the option object * @return true if the option is present in the specified internal option container, otherwise false * @throws OptionException */ public boolean contains(int type, Option option) throws OptionException { if (type == OptionContainer.SAVEDOPTION) { return savedOptionMap.containsValue(option); } else if (type == OptionContainer.DEPENDENCIES_RESOLVED) { return dependenciesResolvedOptionMap.containsValue(option); } else if (type == OptionContainer.COMMANDLINE) { return commandLineOptionMap.containsValue(option); } else if (type == OptionContainer.OPTIONFILE) { return optionFileOptionMap.containsValue(option); } else { throw new OptionException("Unknown option container type"); } } /** * Returns the number of option values amongst all internal option containers. * * @return the number of option values amongst all internal option containers */ public int getNumberOfOptionValues() { SortedSet<Option> union = new TreeSet<Option>(savedOptionMap.keySet()); union.addAll(dependenciesResolvedOptionMap.keySet()); union.addAll(commandLineOptionMap.keySet()); union.addAll(optionFileOptionMap.keySet()); return union.size(); } /** * Returns the option container index. * * @return the option container index */ public int getIndex() { return index; } /** * Sets the option container index, if the index is great than or equal 0. * @param index the option container index * @throws OptionException */ private void setIndex(int index) throws OptionException { if (index < 0) { throw new OptionException("The option container index must be an integer value great than or equal 0. 
"); } this.index = index; } public int compareTo(OptionContainer that) { final int BEFORE = -1; final int EQUAL = 0; final int AFTER = 1; if (this == that) return EQUAL; if (this.index < that.index) return BEFORE; if (this.index > that.index) return AFTER; return EQUAL; } /* (non-Javadoc) * @see java.lang.Object#toString() */ public String toString() { final StringBuilder sb = new StringBuilder(); SortedSet<Option> union = new TreeSet<Option>(savedOptionMap.keySet()); union.addAll(dependenciesResolvedOptionMap.keySet()); union.addAll(commandLineOptionMap.keySet()); union.addAll(optionFileOptionMap.keySet()); for (Option option : union) { Object value = null; for (int i = OptionContainer.SAVEDOPTION; i <= OptionContainer.OPTIONFILE; i++) { if (i == OptionContainer.SAVEDOPTION) { value = savedOptionMap.get(option); } else if (i == OptionContainer.DEPENDENCIES_RESOLVED) { value = dependenciesResolvedOptionMap.get(option); } else if (i == OptionContainer.COMMANDLINE) { value = commandLineOptionMap.get(option); } else if (i == OptionContainer.OPTIONFILE) { value = optionFileOptionMap.get(option); } if (value != null) { break; } } sb.append(option.getGroup().getName()+"\t"+option.getName()+"\t"+value+"\n"); } return sb.toString(); } }
package me.lsdo.processing; import java.util.*; enum PanelLayout { _2, _6, _8, _10, _13, _24 } public class LayoutUtil { /** * Axial coordinates are a coordinate system referring to vertices of a triangular grid. The * grid is oriented such that triangle edges are horizontal (coincident with the x-axis) and at * +/-60 degrees. For a grid coordinate (u, v), u refers to coordinates increasing from the * origin in the line 60 degrees counter-clockwise from the +x axis, while v refers to * increasing along the x-axis. */ static double SQRT_3 = Math.sqrt(3.); static double PI = Math.PI; // Convenience methods to make vector math easier. Input arguments are treated as constants. // Create a new vector (x, y) static public PVector2 V(double x, double y) { return new PVector2((float)x, (float)y); } // Clone a vector public static PVector2 V(PVector2 v) { return V(v.x, v.y); } // Return a + b static public PVector2 Vadd(PVector2 a, PVector2 b) { return PVector2.add(a, b); } // Return a - b public static PVector2 Vsub(PVector2 a, PVector2 b) { return Vadd(a, Vmult(b, -1.)); } // Return k * a public static PVector2 Vmult(PVector2 v, double k) { return PVector2.mult(v, (float)k); } // Return v rotated counter-clockwise by theta radians public static PVector2 Vrot(PVector2 v, double theta) { PVector2 rot = V(v); rot.rotate((float)theta); return rot; } // Compute a basis transformation for vector p, where u is the transformation result of basis vector U (1, 0), // and v is the transformation of basis V (0, 1) public static PVector2 basisTransform(PVector2 p, PVector2 U, PVector2 V) { return Vadd(Vmult(U, p.x), Vmult(V, p.y)); } // Spacing between points of a triangular grid of size 'n' where distance between edge points on opposing // panels is 'k' times the distance between adjacent points on the same panel. public static double pixelSpacing(int n) { // Even spacing across the panel gap. 
.5*SQRT_3 may be a better choice as it makes the density more // consistent across the gap (the gaps jump out less), but makes a bunch of other logic more complicated. double k = 1.; return 1. / (n - 1 + k * SQRT_3); } static interface Transform { public PVector2 transform(PVector2 p); } // Convert a set of points in bulk according to some transformation function. public static ArrayList<PVector2> transform(ArrayList<PVector2> points, Transform tx) { ArrayList<PVector2> transformed = new ArrayList<PVector2>(); for (PVector2 p : points) { transformed.add(tx.transform(p)); } return transformed; } // Transformation that translates a point by 'offset' public static Transform translate(final PVector2 offset) { return new Transform() { public PVector2 transform(PVector2 p) { return Vadd(p, offset); } }; } // Evenly fill a triangle with a grid of points of size n. The triangle filled is an equilateral triangle // with points at (0, 0), (1, 0), and (.5, sqrt(3)/2). Returns a list of points traversed in a boustrophedon // manner, starting near the origin, proceeding left/right, then upward. The point near (0, 0) will thus be // known as the 'entry' point, and the top-most point as the 'exit' point. public static ArrayList<TriCoord> fillTriangle(int n) { ArrayList<TriCoord> coords = new ArrayList<TriCoord>(); for (int row = 0; row < n; row++) { boolean reversed = (row % 2 == 1); int width = n - row; for (int col = 0; col < width; col++) { int c = (reversed ? width - 1 - col : col); TriCoord tc = TriCoord.fromParts(TriCoord.CoordType.PIXEL, TriCoord.Axis.U, row, TriCoord.Axis.V, c, TriCoord.PanelOrientation.A, n); coords.add(tc); } } return coords; } // Fill a triangle using the sizing and entry/exit semantics from above, where the triangle's origin is // the axial UV coordinate 'entry' and rotated clockwise by angle 60deg * rot public static ArrayList<DomeCoord> fillTriangle(final PVector2 entry, final int rot, int n) { // TODO can these be derived from first principles? 
int[][] offsets = {{0, 0, -1}, {-1, 0, -1}, {-1, 0, 0}, {-1, -1, 0}, {0, -1, 0}, {0, -1, -1}}; int u0 = (int)entry.x; int v0 = (int)entry.y; int[] o = offsets[MathUtil.mod(rot, 6)]; TriCoord panel = TriCoord.fromParts(TriCoord.CoordType.PANEL, TriCoord.Axis.U, u0 + o[0], TriCoord.Axis.V, v0 + o[1], MathUtil.mod(rot, 2) == 0 ? TriCoord.PanelOrientation.A : TriCoord.PanelOrientation.B, 0); ArrayList<DomeCoord> coords = new ArrayList<DomeCoord>(); for (TriCoord c : fillTriangle(n)) { coords.add(new DomeCoord(panel, c.rotate(rot))); } return coords; } // Get the exit point for a triangle fill public static PVector2 exitPointForFill(PVector2 entry, int rot) { return axialNeighbor(entry, rot - 1); } public static ArrayList<DomeCoord> fillFan(int orientation, int segments, int pixels) { return fillFan(orientation, segments, pixels, V(0, 0)); } // Fill a fan of triangles proceeding in a clockwise fashion until a complete hexagon whose perimeter // intersects the origin is filled. 'segments' is the number of triangular segments to fill (up to 6). // 'pixels' is the fill density within each triangle. 'orientation' is the initial orientation in // which the long axis of the hexagon follows the angle specified by 'rot' semantics above. public static ArrayList<DomeCoord> fillFan(int orientation, int segments, int pixels, PVector2 entry) { ArrayList<DomeCoord> points = new ArrayList<DomeCoord>(); int rot = orientation; for (int i = 0; i < segments; i++) { points.addAll(fillTriangle(entry, rot, pixels)); entry = exitPointForFill(entry, rot); rot += 1; } return points; } // Convert tri-grid u/v/w coordinates to cartesian x/y coordinates. Points are placed such that spacing // between two adjacent points will match the spacing between an edge point and the opposing point of a // neighboring triangle. 
public static PVector2 coordToXy(DomeCoord c) {
    double spacing = pixelSpacing(c.pixel.panel_length);
    PVector2 root = c.panel.toV();
    PVector2 px = c.pixel.toV();
    PVector2 offset = V(1/SQRT_3, 1/SQRT_3);
    if (c.panel.getOrientation() == TriCoord.PanelOrientation.B) {
        // 'B'-oriented panels are mirrored: shift the root corner and flip both
        // the per-pixel coordinate and the intra-cell offset.
        root = Vadd(root, V(1, 1));
        px = Vsub(px, V(c.pixel.panel_length - 1, c.pixel.panel_length - 1));
        offset = Vmult(offset, -1);
    }
    return axialToXy(Vadd(root, Vmult(Vadd(px, offset), spacing)));
}

// All metadata associated with a particular layout of panels.
public static abstract class PanelConfig {
    double radius;    // Max radius of panel configuration, in panel lengths
    int[] arms;       // Number of panels per fadecandy 'arm'
    PVector2 origin;  // Center the layout on this point (in UV coordinates)
    double theta;     // Rotate the layout counter-clockwise by this many degrees

    public PanelConfig(int num_panels, double radius, int[] arms, PVector2 origin, double theta) {
        this.radius = radius;
        this.arms = arms;
        this.origin = origin;
        this.theta = theta;
        // Sanity check: the arm sizes must account for every panel.
        int panel_count = 0;
        for (int n : arms) {
            panel_count += n;
        }
        assert num_panels == panel_count;
    }

    // Fill the me.lsdo configuration with pixels
    abstract ArrayList<DomeCoord> fill(int n);

    // Convert the filled grid points to a mapping of grid points to their actual positions.
    HashMap<DomeCoord, PVector2> coordsToXy(ArrayList<DomeCoord> coords) {
        HashMap<DomeCoord, PVector2> coordToPoint = new HashMap<DomeCoord, PVector2>();
        PVector2 offset = axialToXy(origin);
        for (DomeCoord c : coords) {
            // Re-center on 'origin', then rotate the whole layout by 'theta'.
            PVector2 p = Vrot(Vsub(coordToXy(c), offset), Math.toRadians(theta));
            coordToPoint.put(c, p);
        }
        return coordToPoint;
    }
}

// Note: this layout is off-center.
public static PanelConfig _2 = new PanelConfig(2, 2./3.*SQRT_3, new int[] {2}, V(1/3., 1/3.), 0.) {
    ArrayList<DomeCoord> fill(int n) {
        return fillFan(0, 2, n);
    }
};

// 6-panel layout: one 4-segment fan plus one 2-segment fan sharing an entry point.
public static PanelConfig _6 = new PanelConfig(6, 1., new int[] {4, 2}, V(0, 0.), 0.) {
    ArrayList<DomeCoord> fill(int n) {
        ArrayList<DomeCoord> points = new ArrayList<DomeCoord>();
        points.addAll(fillFan(4, 4, n, V(-1, 1)));
        points.addAll(fillFan(5, 2, n, V(-1, 1)));
        return points;
    }
};

// 8-panel layout: two 4-segment fans.
public static PanelConfig _8 = new PanelConfig(8, 1.5, new int[] {4, 4}, V(0, 0.5), 0.) {
    ArrayList<DomeCoord> fill(int n) {
        ArrayList<DomeCoord> points = new ArrayList<DomeCoord>();
        points.addAll(fillFan(1, 4, n, V(1, 0)));
        points.addAll(fillFan(1, 4, n, V(1, -1)));
        return points;
    }
};

// 10-panel layout: two 4-segment fans plus a 2-segment fan.
public static PanelConfig _10 = new PanelConfig(10, 1.5, new int[] {4, 4, 2}, V(0, 0.5), 0.) {
    ArrayList<DomeCoord> fill(int n) {
        ArrayList<DomeCoord> points = new ArrayList<DomeCoord>();
        points.addAll(fillFan(1, 4, n, V(1, 0)));
        points.addAll(fillFan(2, 4, n, V(1, 0)));
        points.addAll(fillFan(3, 2, n, V(1, 0)));
        return points;
    }
};

// 13-panel layout: three 4-segment fans plus one lone triangle in the middle.
public static PanelConfig _13 = new PanelConfig(13,
    Math.sqrt(7/3.), // just trust me
    new int[] {4, 4, 4, 1}, V(1/3., 1/3.), 0.) {
    ArrayList<DomeCoord> fill(int n) {
        final PVector2[] entries = {V(1, 0), V(0, 1), V(0, 0)};
        ArrayList<DomeCoord> points = new ArrayList<DomeCoord>();
        for (int i = 0; i < 3; i++) {
            points.addAll(fillFan(2*i+1, 4, n, entries[i]));
        }
        points.addAll(fillTriangle(V(0, 0), 0, n));
        return points;
    }
};

// 24-panel layout: six 4-segment fans, one per orientation, forming a full ring.
public static PanelConfig _24 = new PanelConfig(24, 2., new int[] {4, 4, 4, 4, 4, 4}, V(0, 0), 0.) {
    ArrayList<DomeCoord> fill(int n) {
        ArrayList<DomeCoord> points = new ArrayList<DomeCoord>();
        for (int i = 0; i < 6; i++) {
            points.addAll(fillFan(i, 4, n));
        }
        return points;
    }
};

// Map a panel count to its PanelLayout enum value; throws for unsupported counts.
public static PanelLayout getPanelLayoutForNumPanels(int numPanels) {
    switch (numPanels) {
    case 2:
        return PanelLayout._2;
    case 6:
        return PanelLayout._6;
    case 8:
        return PanelLayout._8;
    case 10:
        return PanelLayout._10;
    case 13:
        return PanelLayout._13;
    case 24:
        return PanelLayout._24;
    default:
        throw new RuntimeException(String.format("no defined layout for %d panels", numPanels));
    }
}

// Map a PanelLayout enum value to its PanelConfig instance.
public static PanelConfig getPanelConfig(PanelLayout config) {
    switch (config) {
    case _2:
        return _2;
    case _6:
        return _6;
    case _8:
        return _8;
    case _10:
        return _10;
    case _13:
        return _13;
    case _24:
        return _24;
    default:
        throw new RuntimeException();
    }
}

// Convert a 2-vector of (U, V) coordinates from the axial coordinate scheme into (x, y) cartesian coordinates
public static PVector2 axialToXy(PVector2 p) {
    // Axial basis vectors: U points 60deg up-right, V points along +x.
    PVector2 U = V(.5, .5 * SQRT_3);
    PVector2 V = V(1., 0.);
    return basisTransform(p, U, V);
}

// Convert (x, y) coordinate p to screen pixel coordinates where top-left is pixel (0, 0) and bottom-right is
// pixel (width, height). 'span' is the size of the viewport in world coordinates, where size means width if horizSpan is
// true and height if horizSpan is false. World origin is in the center of the viewport.
public static PVector2 xyToScreen(PVector2 p, int width, int height, double span, boolean horizSpan) {
    double scale = span / (horizSpan ? width : height);
    // Negative y-basis flips the axis: screen y grows downward.
    PVector2 U = V(1. / scale, 0);
    PVector2 V = V(0, -1. / scale);
    PVector2 offset = Vmult(V(width, height), .5);
    return Vadd(basisTransform(p, U, V), offset);
}

// Like xyToScreen, but with independent horizontal and vertical world-coordinate spans.
public static PVector2 xyToScreenAsym(PVector2 p, int width, int height, double hspan, double vspan) {
    PVector2 U = V(width / hspan, 0);
    PVector2 V = V(0, -height / vspan);
    PVector2 offset = Vmult(V(width, height), .5);
    return Vadd(basisTransform(p, U, V), offset);
}

// xyToScreen with a fixed world span of 2 (i.e. coordinates normalized to [-1, 1] horizontally).
public static PVector2 normalizedXyToScreen(PVector2 p, int width, int height) {
    return xyToScreen(p, width, height, 2., true);
}

// Inverse of xyToScreen
public static PVector2 screenToXy(PVector2 p, int width, int height, double span, boolean horizSpan) {
    double scale = span / (horizSpan ? width : height);
    PVector2 U = V(scale, 0);
    PVector2 V = V(0, -scale);
    PVector2 offset = Vmult(V(width, height), .5);
    return basisTransform(Vsub(p, offset), U, V);
}

// Convert (x, y) coordinate to polar coordinates (radius, theta [counter-clockwise])
public static PVector2 xyToPolar(PVector2 p) {
    return V(p.mag(), Math.atan2(p.y, p.x));
}

// Convert polar coordinates (radius, theta [counter-clockwise]) to cartesian (x, y)
public static PVector2 polarToXy(PVector2 p) {
    double r = p.x;
    double theta = p.y;
    return Vrot(V(r, 0), theta);
}

// Return the adjacent axial coordinate moving from 'p' in direction 'rot'
public static PVector2 axialNeighbor(PVector2 p, int rot) {
    int axis = MathUtil.mod(rot, 3);
    // 'hemi' selects which half of the six directions we are in, flipping the step signs.
    boolean hemi = (MathUtil.mod(rot, 6) < 3);
    int du = (axis == 0 ? 0 : (hemi ? -1 : 1));
    int dv = (axis == 2 ? 0 : (hemi ? 1 : -1));
    return V(p.x + du, p.y + dv);
}

// Return whether two axial coordinates are adjacent lattice points
public static boolean axialCoordsAdjacent(PVector2 a, PVector2 b) {
    int du = (int)a.x - (int)b.x;
    int dv = (int)a.y - (int)b.y;
    // Neighbors differ by at most 1 on each axis; 'du != dv' excludes both
    // the same point (0,0) and the non-adjacent diagonals (1,1)/(-1,-1).
    return (du >= -1 && du <= 1 && dv >= -1 && dv <= 1 && du != dv);
}

// Number of pixels in a single panel of size n
public static int pixelsPerPanel(int n) {
    // Triangular number: rows of n, n-1, ..., 1 pixels.
    return n * (n + 1) / 2;
}
}
/* * Copyright 2014-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.apple; import com.dd.plist.NSArray; import com.dd.plist.NSNumber; import com.dd.plist.NSObject; import com.dd.plist.NSString; import com.facebook.buck.apple.platform_type.ApplePlatformType; import com.facebook.buck.apple.toolchain.AppleCxxPlatform; import com.facebook.buck.apple.toolchain.ApplePlatform; import com.facebook.buck.apple.toolchain.AppleSdk; import com.facebook.buck.apple.toolchain.CodeSignIdentity; import com.facebook.buck.apple.toolchain.CodeSignIdentityStore; import com.facebook.buck.apple.toolchain.ProvisioningProfileMetadata; import com.facebook.buck.apple.toolchain.ProvisioningProfileStore; import com.facebook.buck.core.build.buildable.context.BuildableContext; import com.facebook.buck.core.build.context.BuildContext; import com.facebook.buck.core.exceptions.HumanReadableException; import com.facebook.buck.core.model.BuildTarget; import com.facebook.buck.core.model.impl.BuildTargetPaths; import com.facebook.buck.core.rulekey.AddToRuleKey; import com.facebook.buck.core.rules.ActionGraphBuilder; import com.facebook.buck.core.rules.BuildRule; import com.facebook.buck.core.rules.BuildRuleParams; import com.facebook.buck.core.rules.SourcePathRuleFinder; import com.facebook.buck.core.rules.attr.HasRuntimeDeps; import com.facebook.buck.core.rules.impl.AbstractBuildRuleWithDeclaredAndExtraDeps; import com.facebook.buck.core.rules.tool.BinaryBuildRule; 
import com.facebook.buck.core.sourcepath.ExplicitBuildTargetSourcePath; import com.facebook.buck.core.sourcepath.PathSourcePath; import com.facebook.buck.core.sourcepath.SourcePath; import com.facebook.buck.core.sourcepath.resolver.SourcePathResolver; import com.facebook.buck.core.toolchain.tool.Tool; import com.facebook.buck.core.toolchain.tool.impl.CommandTool; import com.facebook.buck.core.util.log.Logger; import com.facebook.buck.cxx.CxxPreprocessorInput; import com.facebook.buck.cxx.HasAppleDebugSymbolDeps; import com.facebook.buck.cxx.NativeTestable; import com.facebook.buck.cxx.toolchain.CxxPlatform; import com.facebook.buck.file.WriteFile; import com.facebook.buck.io.BuildCellRelativePath; import com.facebook.buck.io.file.MorePaths; import com.facebook.buck.io.filesystem.ProjectFilesystem; import com.facebook.buck.rules.args.SourcePathArg; import com.facebook.buck.step.Step; import com.facebook.buck.step.fs.CopyStep; import com.facebook.buck.step.fs.FindAndReplaceStep; import com.facebook.buck.step.fs.MakeCleanDirectoryStep; import com.facebook.buck.step.fs.MkdirStep; import com.facebook.buck.step.fs.MoveStep; import com.facebook.buck.step.fs.RmStep; import com.facebook.buck.step.fs.WriteFileStep; import com.facebook.buck.util.types.Either; import com.google.common.base.Joiner; import com.google.common.base.Preconditions; import com.google.common.base.Suppliers; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedSet; import com.google.common.collect.Iterables; import com.google.common.hash.HashCode; import com.google.common.io.Files; import com.google.common.util.concurrent.Futures; import java.nio.file.Path; import java.nio.file.Paths; import java.time.Duration; import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.Optional; import 
java.util.Set;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * Creates a bundle: a directory containing files and subdirectories, described by an Info.plist.
 */
public class AppleBundle extends AbstractBuildRuleWithDeclaredAndExtraDeps
    implements NativeTestable, BuildRuleWithBinary, HasRuntimeDeps, BinaryBuildRule {

  private static final Logger LOG = Logger.get(AppleBundle.class);

  // Info.plist substitution key used to locate the entitlements file for code signing.
  public static final String CODE_SIGN_ENTITLEMENTS = "CODE_SIGN_ENTITLEMENTS";

  private static final String FRAMEWORK_EXTENSION =
      AppleBundleExtension.FRAMEWORK.toFileExtension();
  // Output file names written into the bundle when dry-run code signing is enabled.
  private static final String PP_DRY_RUN_RESULT_FILE = "BUCK_pp_dry_run.plist";
  private static final String CODE_SIGN_DRY_RUN_ARGS_FILE = "BUCK_code_sign_args.plist";
  private static final String CODE_SIGN_DRY_RUN_ENTITLEMENTS_FILE =
      "BUCK_code_sign_entitlements.plist";

  @AddToRuleKey private final String extension;

  @AddToRuleKey private final Optional<String> productName;

  @AddToRuleKey private final SourcePath infoPlist;

  @AddToRuleKey private final ImmutableMap<String, String> infoPlistSubstitutions;

  @AddToRuleKey private final Optional<SourcePath> entitlementsFile;

  @AddToRuleKey private final Optional<BuildRule> binary;

  @AddToRuleKey private final Optional<AppleDsym> appleDsym;

  @AddToRuleKey private final ImmutableSet<BuildRule> extraBinaries;

  @AddToRuleKey private final AppleBundleDestinations destinations;

  @AddToRuleKey private final AppleBundleResources resources;

  @AddToRuleKey private final Set<SourcePath> frameworks;

  @AddToRuleKey private final Tool ibtool;

  @AddToRuleKey private final ImmutableSortedSet<BuildTarget> tests;

  @AddToRuleKey private final ApplePlatform platform;

  @AddToRuleKey private final String sdkName;

  @AddToRuleKey private final String sdkVersion;

  @AddToRuleKey private final ProvisioningProfileStore provisioningProfileStore;

  @AddToRuleKey private final Supplier<ImmutableList<CodeSignIdentity>> codeSignIdentitiesSupplier;

  @AddToRuleKey
  private final Optional<Tool> codesignAllocatePath;

  @AddToRuleKey private final Tool codesign;

  @AddToRuleKey private final Optional<Tool> swiftStdlibTool;

  @AddToRuleKey private final boolean dryRunCodeSigning;

  @AddToRuleKey private final ImmutableList<String> codesignFlags;

  @AddToRuleKey private final Optional<String> codesignIdentitySubjectName;

  // Need to use String here as RuleKeyBuilder requires that paths exist to compute hashes.
  @AddToRuleKey private final ImmutableMap<SourcePath, String> extensionBundlePaths;

  @AddToRuleKey private final boolean copySwiftStdlibToFrameworks;

  // The following fields are deliberately NOT part of the rule key.
  private final Optional<AppleAssetCatalog> assetCatalog;
  private final Optional<CoreDataModel> coreDataModel;
  private final Optional<SceneKitAssets> sceneKitAssets;
  private final Optional<String> platformBuildVersion;
  private final Optional<String> xcodeVersion;
  private final Optional<String> xcodeBuildVersion;
  private final Path sdkPath;

  private final String minOSVersion;
  private final String binaryName;
  private final Path bundleRoot;
  private final Path binaryPath;
  private final Path bundleBinaryPath;
  private final boolean ibtoolModuleFlag;
  private final ImmutableList<String> ibtoolFlags;

  private final boolean hasBinary;
  private final boolean cacheable;
  private final boolean verifyResources;

  private final Duration codesignTimeout;

  private static final ImmutableList<String> BASE_IBTOOL_FLAGS =
      ImmutableList.of(
          "--output-format", "human-readable-text", "--notices", "--warnings", "--errors");

  /**
   * Constructor; derives most per-bundle paths and toolchain handles from the given
   * {@code appleCxxPlatform} and stores the remaining arguments verbatim.
   */
  AppleBundle(
      BuildTarget buildTarget,
      ProjectFilesystem projectFilesystem,
      BuildRuleParams params,
      ActionGraphBuilder graphBuilder,
      Either<AppleBundleExtension, String> extension,
      Optional<String> productName,
      SourcePath infoPlist,
      Map<String, String> infoPlistSubstitutions,
      Optional<BuildRule> binary,
      Optional<AppleDsym> appleDsym,
      ImmutableSet<BuildRule> extraBinaries,
      AppleBundleDestinations destinations,
      AppleBundleResources resources,
      ImmutableMap<SourcePath, String> extensionBundlePaths,
      Set<SourcePath> frameworks,
      AppleCxxPlatform appleCxxPlatform,
      Optional<AppleAssetCatalog> assetCatalog,
      Optional<CoreDataModel> coreDataModel,
      Optional<SceneKitAssets> sceneKitAssets,
      Set<BuildTarget> tests,
      CodeSignIdentityStore codeSignIdentityStore,
      ProvisioningProfileStore provisioningProfileStore,
      boolean dryRunCodeSigning,
      boolean cacheable,
      boolean verifyResources,
      ImmutableList<String> codesignFlags,
      Optional<String> codesignIdentity,
      Optional<Boolean> ibtoolModuleFlag,
      ImmutableList<String> ibtoolFlags,
      Duration codesignTimeout,
      boolean copySwiftStdlibToFrameworks) {
    super(buildTarget, projectFilesystem, params);
    // Known bundle extensions arrive as the enum (left side); custom ones as a raw string.
    this.extension =
        extension.isLeft() ? extension.getLeft().toFileExtension() : extension.getRight();
    this.productName = productName;
    this.infoPlist = infoPlist;
    this.infoPlistSubstitutions = ImmutableMap.copyOf(infoPlistSubstitutions);
    this.binary = binary;
    // If the wrapped binary declares an entitlements file via metadata, pick it up here.
    Optional<SourcePath> entitlementsFile = Optional.empty();
    if (binary.isPresent()) {
      Optional<HasEntitlementsFile> hasEntitlementsFile =
          graphBuilder.requireMetadata(binary.get().getBuildTarget(), HasEntitlementsFile.class);
      if (hasEntitlementsFile.isPresent()) {
        entitlementsFile = hasEntitlementsFile.get().getEntitlementsFile();
      }
    }
    this.entitlementsFile = entitlementsFile;
    this.appleDsym = appleDsym;
    this.extraBinaries = extraBinaries;
    this.destinations = destinations;
    this.resources = resources;
    this.extensionBundlePaths = extensionBundlePaths;
    this.frameworks = frameworks;
    this.ibtool = appleCxxPlatform.getIbtool();
    this.assetCatalog = assetCatalog;
    this.coreDataModel = coreDataModel;
    this.sceneKitAssets = sceneKitAssets;
    this.binaryName = getBinaryName(getBuildTarget(), this.productName);
    this.bundleRoot =
        getBundleRoot(getProjectFilesystem(), getBuildTarget(), this.binaryName, this.extension);
    this.binaryPath = this.destinations.getExecutablesPath().resolve(this.binaryName);
    this.tests = ImmutableSortedSet.copyOf(tests);
    AppleSdk sdk = appleCxxPlatform.getAppleSdk();
    this.platform = sdk.getApplePlatform();
    this.sdkName = sdk.getName();
    this.sdkPath = appleCxxPlatform.getAppleSdkPaths().getSdkPath();
    this.sdkVersion = sdk.getVersion();
    this.minOSVersion = appleCxxPlatform.getMinVersion();
    this.platformBuildVersion = appleCxxPlatform.getBuildVersion();
    this.xcodeBuildVersion = appleCxxPlatform.getXcodeBuildVersion();
    this.xcodeVersion = appleCxxPlatform.getXcodeVersion();
    this.dryRunCodeSigning = dryRunCodeSigning;
    this.cacheable = cacheable;
    this.verifyResources = verifyResources;
    this.codesignFlags = codesignFlags;
    this.codesignIdentitySubjectName = codesignIdentity;

    this.ibtoolModuleFlag = ibtoolModuleFlag.orElse(false);
    this.ibtoolFlags = ibtoolFlags;

    bundleBinaryPath = bundleRoot.resolve(binaryPath);
    hasBinary = binary.isPresent() && binary.get().getSourcePathToOutput() != null;

    // Only keep the (rule-key-affecting) provisioning/identity state when real signing is
    // actually required; otherwise substitute cheap empty placeholders.
    // needCodeSign()/adHocCodeSignIsSufficient() are defined elsewhere in this class.
    if (needCodeSign() && !adHocCodeSignIsSufficient()) {
      this.provisioningProfileStore = provisioningProfileStore;
      this.codeSignIdentitiesSupplier = codeSignIdentityStore.getIdentitiesSupplier();
    } else {
      this.provisioningProfileStore = ProvisioningProfileStore.empty();
      this.codeSignIdentitiesSupplier = Suppliers.ofInstance(ImmutableList.of());
    }
    this.codesignAllocatePath = appleCxxPlatform.getCodesignAllocate();
    this.codesign =
        appleCxxPlatform.getCodesignProvider().resolve(graphBuilder);
    this.swiftStdlibTool =
        appleCxxPlatform.getSwiftPlatform().isPresent()
            ? appleCxxPlatform.getSwiftPlatform().get().getSwiftStdlibTool()
            : Optional.empty();
    this.codesignTimeout = codesignTimeout;
    this.copySwiftStdlibToFrameworks = copySwiftStdlibToFrameworks;
  }

  /** The bundle's binary name: the explicit product name if given, else the target's short name. */
  public static String getBinaryName(BuildTarget buildTarget, Optional<String> productName) {
    return productName.orElse(buildTarget.getShortName());
  }

  /** Root output directory of the bundle, e.g. {@code buck-out/gen/.../<name>.<extension>}. */
  public static Path getBundleRoot(
      ProjectFilesystem filesystem, BuildTarget buildTarget, String binaryName, String extension) {
    return BuildTargetPaths.getGenPath(filesystem, buildTarget, "%s")
        .resolve(binaryName + "." + extension);
  }

  public String getExtension() {
    return extension;
  }

  @Override
  public SourcePath getSourcePathToOutput() {
    return ExplicitBuildTargetSourcePath.of(getBuildTarget(), bundleRoot);
  }

  public Path getInfoPlistPath() {
    return getMetadataPath().resolve("Info.plist");
  }

  public Path getUnzippedOutputFilePathToBinary() {
    return this.binaryPath;
  }

  // Directory inside the bundle holding Info.plist / PkgInfo.
  private Path getMetadataPath() {
    return bundleRoot.resolve(destinations.getMetadataPath());
  }

  public String getPlatformName() {
    return platform.getName();
  }

  public Optional<BuildRule> getBinary() {
    return binary;
  }

  public Optional<AppleDsym> getAppleDsym() {
    return appleDsym;
  }

  /** True when this is a .app bundle built from the legacy-watch flavored binary. */
  public boolean isLegacyWatchApp() {
    return extension.equals(AppleBundleExtension.APP.toFileExtension())
        && binary.isPresent()
        && binary
            .get()
            .getBuildTarget()
            .getFlavors()
            .contains(AppleBinaryDescription.LEGACY_WATCH_FLAVOR);
  }

  /**
   * Assembles the bundle: clean the output dir, process Info.plist, copy the binary/dSYM,
   * resources, extension bundles and frameworks, then (optionally) code-sign everything.
   */
  @Override
  public ImmutableList<Step> getBuildSteps(
      BuildContext context, BuildableContext buildableContext) {
    ImmutableList.Builder<Step> stepsBuilder = ImmutableList.builder();

    // Start from an empty bundle directory.
    stepsBuilder.addAll(
        MakeCleanDirectoryStep.of(
            BuildCellRelativePath.fromCellRelativePath(
                context.getBuildCellRootPath(), getProjectFilesystem(), bundleRoot)));

    Path resourcesDestinationPath = bundleRoot.resolve(this.destinations.getResourcesPath());

    // Copy the compiled asset catalog output into the resources directory, if any.
    if (assetCatalog.isPresent()) {
      stepsBuilder.add(
          MkdirStep.of(
              BuildCellRelativePath.fromCellRelativePath(
                  context.getBuildCellRootPath(), getProjectFilesystem(),
                  resourcesDestinationPath)));
      Path bundleDir = assetCatalog.get().getOutputDir();
      stepsBuilder.add(
          CopyStep.forDirectory(
              getProjectFilesystem(),
              bundleDir,
              resourcesDestinationPath,
              CopyStep.DirectoryMode.CONTENTS_ONLY));
    }

    // Copy the compiled Core Data model output, if any.
    if (coreDataModel.isPresent()) {
      stepsBuilder.add(
          MkdirStep.of(
              BuildCellRelativePath.fromCellRelativePath(
                  context.getBuildCellRootPath(), getProjectFilesystem(),
                  resourcesDestinationPath)));
      stepsBuilder.add(
          CopyStep.forDirectory(
              getProjectFilesystem(),
              context
                  .getSourcePathResolver()
                  .getRelativePath(coreDataModel.get().getSourcePathToOutput()),
              resourcesDestinationPath,
              CopyStep.DirectoryMode.CONTENTS_ONLY));
    }

    // Copy compiled SceneKit assets, if any.
    if (sceneKitAssets.isPresent()) {
      stepsBuilder.add(
          MkdirStep.of(
              BuildCellRelativePath.fromCellRelativePath(
                  context.getBuildCellRootPath(), getProjectFilesystem(),
                  resourcesDestinationPath)));
      stepsBuilder.add(
          CopyStep.forDirectory(
              getProjectFilesystem(),
              context
                  .getSourcePathResolver()
                  .getRelativePath(sceneKitAssets.get().getSourcePathToOutput()),
              resourcesDestinationPath,
              CopyStep.DirectoryMode.CONTENTS_ONLY));
    }

    Path metadataPath = getMetadataPath();

    Path infoPlistInputPath = context.getSourcePathResolver().getAbsolutePath(infoPlist);
    Path infoPlistSubstitutionTempPath =
        BuildTargetPaths.getScratchPath(getProjectFilesystem(), getBuildTarget(), "%s.plist");
    Path infoPlistOutputPath = metadataPath.resolve("Info.plist");

    stepsBuilder.add(
        MkdirStep.of(
            BuildCellRelativePath.fromCellRelativePath(
                context.getBuildCellRootPath(), getProjectFilesystem(), metadataPath)));

    if (needsPkgInfoFile()) {
      // TODO(bhamiltoncx): This is only appropriate for .app bundles.
      stepsBuilder.add(
          new WriteFileStep(
              getProjectFilesystem(),
              "APPLWRUN",
              metadataPath.resolve("PkgInfo"),
              /* executable */ false));
    }

    // Expand $-style variables in the source Info.plist, then merge/override keys and emit the
    // final plist in binary format.
    stepsBuilder.add(
        MkdirStep.of(
            BuildCellRelativePath.fromCellRelativePath(
                context.getBuildCellRootPath(),
                getProjectFilesystem(),
                infoPlistSubstitutionTempPath.getParent())),
        new FindAndReplaceStep(
            getProjectFilesystem(),
            infoPlistInputPath,
            infoPlistSubstitutionTempPath,
            InfoPlistSubstitution.createVariableExpansionFunction(
                withDefaults(
                    infoPlistSubstitutions,
                    ImmutableMap.of(
                        "EXECUTABLE_NAME", binaryName,
                        "PRODUCT_NAME", binaryName)))),
        new PlistProcessStep(
            getProjectFilesystem(),
            infoPlistSubstitutionTempPath,
            assetCatalog.map(AppleAssetCatalog::getOutputPlist),
            infoPlistOutputPath,
            getInfoPlistAdditionalKeys(),
            getInfoPlistOverrideKeys(),
            PlistProcessStep.OutputFormat.BINARY));

    if (hasBinary) {
      appendCopyBinarySteps(stepsBuilder, context);
      appendCopyDsymStep(stepsBuilder, buildableContext, context);
    }

    // Copy all declared resources into the bundle.
    if (!Iterables.isEmpty(
        Iterables.concat(
            resources.getResourceDirs(),
            resources.getDirsContainingResourceDirs(),
            resources.getResourceFiles()))) {
      if (verifyResources) {
        verifyResourceConflicts(resources, context.getSourcePathResolver());
      }
      stepsBuilder.add(
          MkdirStep.of(
              BuildCellRelativePath.fromCellRelativePath(
                  context.getBuildCellRootPath(), getProjectFilesystem(),
                  resourcesDestinationPath)));
      for (SourcePath dir : resources.getResourceDirs()) {
        // Whole directories are copied including the directory itself.
        stepsBuilder.add(
            CopyStep.forDirectory(
                getProjectFilesystem(),
                context.getSourcePathResolver().getAbsolutePath(dir),
                resourcesDestinationPath,
                CopyStep.DirectoryMode.DIRECTORY_AND_CONTENTS));
      }
      for (SourcePath dir : resources.getDirsContainingResourceDirs()) {
        // Only the contents of these directories are copied.
        stepsBuilder.add(
            CopyStep.forDirectory(
                getProjectFilesystem(),
                context.getSourcePathResolver().getAbsolutePath(dir),
                resourcesDestinationPath,
                CopyStep.DirectoryMode.CONTENTS_ONLY));
      }
      for (SourcePath file : resources.getResourceFiles()) {
        Path resolvedFilePath = context.getSourcePathResolver().getAbsolutePath(file);
        Path destinationPath = resourcesDestinationPath.resolve(resolvedFilePath.getFileName());
        // Individual files may need processing (e.g. ibtool); defined elsewhere in this class.
        addResourceProcessingSteps(
            context.getSourcePathResolver(), resolvedFilePath, destinationPath, stepsBuilder);
      }
    }

    ImmutableList.Builder<Path> codeSignOnCopyPathsBuilder = ImmutableList.builder();

    addStepsToCopyExtensionBundlesDependencies(context, stepsBuilder, codeSignOnCopyPathsBuilder);

    // Localized variant files must live in a '<locale>.lproj' directory, which is recreated
    // inside the bundle's resources directory.
    for (SourcePath variantSourcePath : resources.getResourceVariantFiles()) {
      Path variantFilePath = context.getSourcePathResolver().getAbsolutePath(variantSourcePath);

      Path variantDirectory = variantFilePath.getParent();
      if (variantDirectory == null || !variantDirectory.toString().endsWith(".lproj")) {
        throw new HumanReadableException(
            "Variant files have to be in a directory with name ending in '.lproj', "
                + "but '%s' is not.",
            variantFilePath);
      }

      Path bundleVariantDestinationPath =
          resourcesDestinationPath.resolve(variantDirectory.getFileName());
      stepsBuilder.add(
          MkdirStep.of(
              BuildCellRelativePath.fromCellRelativePath(
                  context.getBuildCellRootPath(), getProjectFilesystem(),
                  bundleVariantDestinationPath)));

      Path destinationPath = bundleVariantDestinationPath.resolve(variantFilePath.getFileName());
      addResourceProcessingSteps(
          context.getSourcePathResolver(), variantFilePath, destinationPath, stepsBuilder);
    }

    // Copy embedded frameworks; each copy is also queued for code-sign-on-copy.
    if (!frameworks.isEmpty()) {
      Path frameworksDestinationPath = bundleRoot.resolve(this.destinations.getFrameworksPath());
      stepsBuilder.add(
          MkdirStep.of(
              BuildCellRelativePath.fromCellRelativePath(
                  context.getBuildCellRootPath(), getProjectFilesystem(),
                  frameworksDestinationPath)));
      for (SourcePath framework : frameworks) {
        Path srcPath = context.getSourcePathResolver().getAbsolutePath(framework);
        stepsBuilder.add(
            CopyStep.forDirectory(
                getProjectFilesystem(),
                srcPath,
                frameworksDestinationPath,
                CopyStep.DirectoryMode.DIRECTORY_AND_CONTENTS));
        codeSignOnCopyPathsBuilder.add(frameworksDestinationPath.resolve(srcPath.getFileName()));
      }
    }

    if (needCodeSign()) {
      Optional<Path> signingEntitlementsTempPath;
      Supplier<CodeSignIdentity> codeSignIdentitySupplier;

      if (adHocCodeSignIsSufficient()) {
        // Ad-hoc signing: no entitlements, fixed (possibly named) ad-hoc identity.
        signingEntitlementsTempPath = Optional.empty();
        CodeSignIdentity identity =
            codesignIdentitySubjectName
                .map(id -> CodeSignIdentity.ofAdhocSignedWithSubjectCommonName(id))
                .orElse(CodeSignIdentity.AD_HOC);
        codeSignIdentitySupplier = () -> identity;
      } else {
        // Copy the .mobileprovision file if the platform requires it, and sign the executable.
        Optional<Path> entitlementsPlist = Optional.empty();

        // Try to use the entitlements file specified in the bundle's binary first.
        entitlementsPlist =
            entitlementsFile.map(p -> context.getSourcePathResolver().getAbsolutePath(p));

        // Fall back to getting CODE_SIGN_ENTITLEMENTS from info_plist_substitutions.
        if (!entitlementsPlist.isPresent()) {
          Path srcRoot =
              getProjectFilesystem().getRootPath().resolve(getBuildTarget().getBasePath());
          Optional<String> entitlementsPlistString =
              InfoPlistSubstitution.getVariableExpansionForPlatform(
                  CODE_SIGN_ENTITLEMENTS,
                  platform.getName(),
                  withDefaults(
                      infoPlistSubstitutions,
                      ImmutableMap.of(
                          "SOURCE_ROOT", srcRoot.toString(),
                          "SRCROOT", srcRoot.toString())));
          entitlementsPlist =
              entitlementsPlistString.map(
                  entitlementsPlistName -> {
                    // The entitlements plist itself may contain $-variables; expand them into
                    // a scratch copy before use.
                    ProjectFilesystem filesystem = getProjectFilesystem();
                    Path originalEntitlementsPlist =
                        srcRoot.resolve(Paths.get(entitlementsPlistName));
                    Path entitlementsPlistWithSubstitutions =
                        BuildTargetPaths.getScratchPath(
                            filesystem, getBuildTarget(), "%s-Entitlements.plist");
                    stepsBuilder.add(
                        new FindAndReplaceStep(
                            filesystem,
                            originalEntitlementsPlist,
                            entitlementsPlistWithSubstitutions,
                            InfoPlistSubstitution.createVariableExpansionFunction(
                                infoPlistSubstitutions)));
                    return filesystem.resolve(entitlementsPlistWithSubstitutions);
                  });
        }

        signingEntitlementsTempPath =
            Optional.of(
                BuildTargetPaths.getScratchPath(
                    getProjectFilesystem(), getBuildTarget(), "%s.xcent"));

        Path dryRunResultPath = bundleRoot.resolve(PP_DRY_RUN_RESULT_FILE);

        ProvisioningProfileCopyStep provisioningProfileCopyStep =
            new ProvisioningProfileCopyStep(
                getProjectFilesystem(),
                infoPlistOutputPath,
                platform,
                Optional.empty(), // Provisioning profile UUID -- find automatically.
                entitlementsPlist,
                provisioningProfileStore,
                resourcesDestinationPath.resolve("embedded.mobileprovision"),
                dryRunCodeSigning
                    ? bundleRoot.resolve(CODE_SIGN_DRY_RUN_ENTITLEMENTS_FILE)
                    : signingEntitlementsTempPath.get(),
                codeSignIdentitiesSupplier,
                dryRunCodeSigning ? Optional.of(dryRunResultPath) : Optional.empty());
        stepsBuilder.add(provisioningProfileCopyStep);

        // Lazily pick a signing identity matching the selected provisioning profile's
        // certificate fingerprints; evaluated only after the copy step has run.
        codeSignIdentitySupplier =
            () -> {
              // Using getUnchecked here because the previous step should already throw if exception
              // occurred, and this supplier would never be evaluated.
              Optional<ProvisioningProfileMetadata> selectedProfile =
                  Futures.getUnchecked(
                      provisioningProfileCopyStep.getSelectedProvisioningProfileFuture());

              if (!selectedProfile.isPresent()) {
                // This should only happen in dry-run codesign mode (since otherwise an exception
                // would have been thrown already.) Still, we need to return *something*.
                Preconditions.checkState(dryRunCodeSigning);
                return CodeSignIdentity.AD_HOC;
              }

              ImmutableSet<HashCode> fingerprints =
                  selectedProfile.get().getDeveloperCertificateFingerprints();
              if (fingerprints.isEmpty()) {
                // No constraints, pick an arbitrary identity.
                // If no identities are available, use an ad-hoc identity.
                return Iterables.getFirst(
                    codeSignIdentitiesSupplier.get(), CodeSignIdentity.AD_HOC);
              }
              for (CodeSignIdentity identity : codeSignIdentitiesSupplier.get()) {
                if (identity.getFingerprint().isPresent()
                    && fingerprints.contains(identity.getFingerprint().get())) {
                  return identity;
                }
              }

              throw new HumanReadableException(
                  "No code sign identity available for provisioning profile: %s\n"
                      + "Profile requires an identity with one of the following SHA1 fingerprints "
                      + "available in your keychain: \n %s",
                  selectedProfile.get().getProfilePath(),
                  Joiner.on("\n ").join(fingerprints));
            };
      }

      addSwiftStdlibStepIfNeeded(
          context.getSourcePathResolver(),
          bundleRoot.resolve(destinations.getFrameworksPath()),
          dryRunCodeSigning ? Optional.empty() : Optional.of(codeSignIdentitySupplier),
          stepsBuilder,
          false /* is for packaging? */);

      for (BuildRule extraBinary : extraBinaries) {
        Path outputPath = getBundleBinaryPathForBuildRule(extraBinary);
        codeSignOnCopyPathsBuilder.add(outputPath);
      }

      // Sign nested artifacts (frameworks, extension bundles, extra binaries) before signing
      // the bundle itself.
      for (Path codeSignOnCopyPath : codeSignOnCopyPathsBuilder.build()) {
        stepsBuilder.add(
            new CodeSignStep(
                getProjectFilesystem(),
                context.getSourcePathResolver(),
                codeSignOnCopyPath,
                Optional.empty(),
                codeSignIdentitySupplier,
                codesign,
                codesignAllocatePath,
                dryRunCodeSigning
                    ? Optional.of(codeSignOnCopyPath.resolve(CODE_SIGN_DRY_RUN_ARGS_FILE))
                    : Optional.empty(),
                codesignFlags,
                codesignTimeout));
      }

      stepsBuilder.add(
          new CodeSignStep(
              getProjectFilesystem(),
              context.getSourcePathResolver(),
              bundleRoot,
              signingEntitlementsTempPath,
              codeSignIdentitySupplier,
              codesign,
              codesignAllocatePath,
              dryRunCodeSigning
                  ? Optional.of(bundleRoot.resolve(CODE_SIGN_DRY_RUN_ARGS_FILE))
                  : Optional.empty(),
              codesignFlags,
              codesignTimeout));
    } else {
      addSwiftStdlibStepIfNeeded(
          context.getSourcePathResolver(),
          bundleRoot.resolve(destinations.getFrameworksPath()),
          Optional.empty(),
          stepsBuilder,
          false /* is for packaging? */);
    }

    // Ensure the bundle directory is archived so we can fetch it later.
    buildableContext.recordArtifact(
        context.getSourcePathResolver().getRelativePath(getSourcePathToOutput()));

    return stepsBuilder.build();
  }

  /** Throws if two resources would land at the same file name inside the bundle. */
  private void verifyResourceConflicts(
      AppleBundleResources resources, SourcePathResolver resolver) {
    // Ensure there are no resources that will overwrite each other
    // TODO: handle ResourceDirsContainingResourceDirs
    Set<Path> resourcePaths = new HashSet<>();
    for (SourcePath path :
        Iterables.concat(resources.getResourceDirs(), resources.getResourceFiles())) {
      Path pathInBundle = resolver.getRelativePath(path).getFileName();
      if (resourcePaths.contains(pathInBundle)) {
        throw new HumanReadableException(
            "Bundle contains multiple resources with path %s", pathInBundle);
      } else {
        resourcePaths.add(pathInBundle);
      }
    }
  }

  // All bundle types except XPC services and QuickLook generators get a PkgInfo file.
  private boolean needsPkgInfoFile() {
    return !(extension.equals(AppleBundleExtension.XPC.toFileExtension())
        || extension.equals(AppleBundleExtension.QLGENERATOR.toFileExtension()));
  }

  /** Adds steps copying the main binary and every extra binary into the bundle. */
  private void appendCopyBinarySteps(
      ImmutableList.Builder<Step> stepsBuilder, BuildContext context) {
    Preconditions.checkArgument(hasBinary);

    Path binaryOutputPath =
        context
            .getSourcePathResolver()
            .getAbsolutePath(Objects.requireNonNull(binary.get().getSourcePathToOutput()));

    ImmutableMap.Builder<Path, Path> binariesBuilder = ImmutableMap.builder();
    binariesBuilder.put(bundleBinaryPath, binaryOutputPath);

    for (BuildRule extraBinary : extraBinaries) {
      Path outputPath =
          context.getSourcePathResolver().getRelativePath(extraBinary.getSourcePathToOutput());
      Path bundlePath = getBundleBinaryPathForBuildRule(extraBinary);
      binariesBuilder.put(bundlePath, outputPath);
    }

    copyBinariesIntoBundle(stepsBuilder, context, binariesBuilder.build());
    copyAnotherCopyOfWatchKitStub(stepsBuilder, context, binaryOutputPath);
  }

  // In-bundle executable path for a rule, named after its unflavored target.
  private Path getBundleBinaryPathForBuildRule(BuildRule buildRule) {
    BuildTarget unflavoredTarget = buildRule.getBuildTarget().withFlavors();
    String binaryName = getBinaryName(unflavoredTarget, Optional.empty());
    Path pathRelativeToBundleRoot = destinations.getExecutablesPath().resolve(binaryName);
    return bundleRoot.resolve(pathRelativeToBundleRoot);
  }

  /**
   * @param binariesMap A map from destination to source. Destination is deliberately used as a
   *     key to prevent multiple sources overwriting the same destination.
   */
  private void copyBinariesIntoBundle(
      ImmutableList.Builder<Step> stepsBuilder,
      BuildContext context,
      ImmutableMap<Path, Path> binariesMap) {
    stepsBuilder.add(
        MkdirStep.of(
            BuildCellRelativePath.fromCellRelativePath(
                context.getBuildCellRootPath(),
                getProjectFilesystem(),
                bundleRoot.resolve(this.destinations.getExecutablesPath()))));
    binariesMap.forEach(
        (binaryBundlePath, binaryOutputPath) -> {
          stepsBuilder.add(
              CopyStep.forFile(getProjectFilesystem(), binaryOutputPath, binaryBundlePath));
        });
  }

  // For watch apps whose binary is a stub (a WriteFile rule), copy the stub again into
  // _WatchKitStub/WK inside the bundle.
  private void copyAnotherCopyOfWatchKitStub(
      ImmutableList.Builder<Step> stepsBuilder, BuildContext context, Path binaryOutputPath) {
    if ((isLegacyWatchApp() || platform.getName().contains("watch"))
        && binary.get() instanceof WriteFile) {
      Path watchKitStubDir = bundleRoot.resolve("_WatchKitStub");
      stepsBuilder.add(
          MkdirStep.of(
              BuildCellRelativePath.fromCellRelativePath(
                  context.getBuildCellRootPath(), getProjectFilesystem(), watchKitStubDir)),
          CopyStep.forFile(
              getProjectFilesystem(), binaryOutputPath, watchKitStubDir.resolve("WK")));
    }
  }

  /** Copies the dSYM bundle (if present) next to the app bundle and renames it to match. */
  private void appendCopyDsymStep(
      ImmutableList.Builder<Step> stepsBuilder,
      BuildableContext buildableContext,
      BuildContext buildContext) {
    if (appleDsym.isPresent()) {
      stepsBuilder.add(
          CopyStep.forDirectory(
              getProjectFilesystem(),
              buildContext
                  .getSourcePathResolver()
                  .getAbsolutePath(appleDsym.get().getSourcePathToOutput()),
              bundleRoot.getParent(),
              CopyStep.DirectoryMode.DIRECTORY_AND_CONTENTS));
      appendDsymRenameStepToMatchBundleName(stepsBuilder, buildableContext, buildContext);
    }
  }

  private void appendDsymRenameStepToMatchBundleName(
      ImmutableList.Builder<Step> stepsBuilder,
      BuildableContext buildableContext,
      BuildContext buildContext) {
Preconditions.checkArgument(hasBinary && appleDsym.isPresent()); // rename dSYM bundle to match bundle name Path dsymPath = buildContext .getSourcePathResolver() .getRelativePath(appleDsym.get().getSourcePathToOutput()); Path dsymSourcePath = bundleRoot.getParent().resolve(dsymPath.getFileName()); Path dsymDestinationPath = bundleRoot .getParent() .resolve(bundleRoot.getFileName() + "." + AppleBundleExtension.DSYM.toFileExtension()); stepsBuilder.add( RmStep.of( BuildCellRelativePath.fromCellRelativePath( buildContext.getBuildCellRootPath(), getProjectFilesystem(), dsymDestinationPath)) .withRecursive(true)); stepsBuilder.add(new MoveStep(getProjectFilesystem(), dsymSourcePath, dsymDestinationPath)); String dwarfFilename = AppleDsym.getDwarfFilenameForDsymTarget(appleDsym.get().getBuildTarget()); // rename DWARF file inside dSYM bundle to match bundle name Path dwarfFolder = dsymDestinationPath.resolve(AppleDsym.DSYM_DWARF_FILE_FOLDER); Path dwarfSourcePath = dwarfFolder.resolve(dwarfFilename); Path dwarfDestinationPath = dwarfFolder.resolve(MorePaths.getNameWithoutExtension(bundleRoot)); stepsBuilder.add(new MoveStep(getProjectFilesystem(), dwarfSourcePath, dwarfDestinationPath)); // record dSYM so we can fetch it from cache buildableContext.recordArtifact(dsymDestinationPath); } private void addStepsToCopyExtensionBundlesDependencies( BuildContext context, ImmutableList.Builder<Step> stepsBuilder, ImmutableList.Builder<Path> codeSignOnCopyPathsBuilder) { for (Map.Entry<SourcePath, String> entry : extensionBundlePaths.entrySet()) { Path srcPath = context.getSourcePathResolver().getAbsolutePath(entry.getKey()); Path destPath = bundleRoot.resolve(entry.getValue()); stepsBuilder.add( MkdirStep.of( BuildCellRelativePath.fromCellRelativePath( context.getBuildCellRootPath(), getProjectFilesystem(), destPath))); stepsBuilder.add( CopyStep.forDirectory( getProjectFilesystem(), srcPath, destPath, CopyStep.DirectoryMode.DIRECTORY_AND_CONTENTS)); if 
(srcPath.toString().endsWith("." + FRAMEWORK_EXTENSION)) { codeSignOnCopyPathsBuilder.add(destPath.resolve(srcPath.getFileName())); } } } public static ImmutableMap<String, String> withDefaults( ImmutableMap<String, String> map, ImmutableMap<String, String> defaults) { ImmutableMap.Builder<String, String> builder = ImmutableMap.<String, String>builder().putAll(map); for (ImmutableMap.Entry<String, String> entry : defaults.entrySet()) { if (!map.containsKey(entry.getKey())) { builder = builder.put(entry.getKey(), entry.getValue()); } } return builder.build(); } private boolean needsLSRequiresIPhoneOSInfoPlistKeyOnMac() { return !extension.equals(AppleBundleExtension.XPC.toFileExtension()); } private ImmutableMap<String, NSObject> getInfoPlistOverrideKeys() { ImmutableMap.Builder<String, NSObject> keys = ImmutableMap.builder(); if (platform.getType() == ApplePlatformType.MAC) { if (needsLSRequiresIPhoneOSInfoPlistKeyOnMac()) { keys.put("LSRequiresIPhoneOS", new NSNumber(false)); } } else if (!platform.getType().isWatch() && !isLegacyWatchApp()) { keys.put("LSRequiresIPhoneOS", new NSNumber(true)); } return keys.build(); } private boolean needsAppInfoPlistKeysOnMac() { // XPC bundles on macOS don't require app-specific keys // (which also confuses Finder in displaying the XPC bundles as apps) return !extension.equals(AppleBundleExtension.XPC.toFileExtension()); } private ImmutableMap<String, NSObject> getInfoPlistAdditionalKeys() { ImmutableMap.Builder<String, NSObject> keys = ImmutableMap.builder(); switch (platform.getType()) { case MAC: if (needsAppInfoPlistKeysOnMac()) { keys.put("NSHighResolutionCapable", new NSNumber(true)); keys.put("NSSupportsAutomaticGraphicsSwitching", new NSNumber(true)); } keys.put("CFBundleSupportedPlatforms", new NSArray(new NSString("MacOSX"))); break; case IOS_DEVICE: keys.put("CFBundleSupportedPlatforms", new NSArray(new NSString("iPhoneOS"))); break; case IOS_SIMULATOR: keys.put("CFBundleSupportedPlatforms", new NSArray(new 
NSString("iPhoneSimulator"))); break; case WATCH_DEVICE: if (!isLegacyWatchApp()) { keys.put("CFBundleSupportedPlatforms", new NSArray(new NSString("WatchOS"))); } break; case WATCH_SIMULATOR: if (!isLegacyWatchApp()) { keys.put("CFBundleSupportedPlatforms", new NSArray(new NSString("WatchSimulator"))); } break; case TV_DEVICE: case TV_SIMULATOR: case UNKNOWN: break; } keys.put("DTPlatformName", new NSString(platform.getName())); keys.put("DTPlatformVersion", new NSString(sdkVersion)); keys.put("DTSDKName", new NSString(sdkName + sdkVersion)); keys.put("MinimumOSVersion", new NSString(minOSVersion)); if (platformBuildVersion.isPresent()) { keys.put("DTPlatformBuild", new NSString(platformBuildVersion.get())); keys.put("DTSDKBuild", new NSString(platformBuildVersion.get())); } if (xcodeBuildVersion.isPresent()) { keys.put("DTXcodeBuild", new NSString(xcodeBuildVersion.get())); } if (xcodeVersion.isPresent()) { keys.put("DTXcode", new NSString(xcodeVersion.get())); } return keys.build(); } public void addSwiftStdlibStepIfNeeded( SourcePathResolver resolver, Path destinationPath, Optional<Supplier<CodeSignIdentity>> codeSignIdentitySupplier, ImmutableList.Builder<Step> stepsBuilder, boolean isForPackaging) { // It's apparently safe to run this even on a non-swift bundle (in that case, no libs // are copied over). 
boolean shouldCopySwiftStdlib = !extension.equals(AppleBundleExtension.APPEX.toFileExtension()) && (!extension.equals(AppleBundleExtension.FRAMEWORK.toFileExtension()) || copySwiftStdlibToFrameworks); if (swiftStdlibTool.isPresent() && shouldCopySwiftStdlib) { ImmutableList.Builder<String> swiftStdlibCommand = ImmutableList.builder(); swiftStdlibCommand.addAll(swiftStdlibTool.get().getCommandPrefix(resolver)); swiftStdlibCommand.add( "--scan-executable", bundleBinaryPath.toString(), "--scan-folder", bundleRoot.resolve(this.destinations.getFrameworksPath()).toString(), "--scan-folder", bundleRoot.resolve(destinations.getPlugInsPath()).toString()); String tempDirPattern = isForPackaging ? "__swift_packaging_temp__%s" : "__swift_temp__%s"; stepsBuilder.add( new SwiftStdlibStep( getProjectFilesystem().getRootPath(), BuildTargetPaths.getScratchPath( getProjectFilesystem(), getBuildTarget(), tempDirPattern), this.sdkPath, destinationPath, swiftStdlibCommand.build(), codeSignIdentitySupplier)); } } private void addStoryboardProcessingSteps( SourcePathResolver resolver, Path sourcePath, Path destinationPath, ImmutableList.Builder<Step> stepsBuilder) { ImmutableList<String> modifiedFlags = ImmutableList.<String>builder().addAll(BASE_IBTOOL_FLAGS).addAll(ibtoolFlags).build(); if (platform.getName().contains("watch") || isLegacyWatchApp()) { LOG.debug( "Compiling storyboard %s to storyboardc %s and linking", sourcePath, destinationPath); Path compiledStoryboardPath = BuildTargetPaths.getScratchPath( getProjectFilesystem(), getBuildTarget(), "%s.storyboardc"); stepsBuilder.add( new IbtoolStep( getProjectFilesystem(), ibtool.getEnvironment(resolver), ibtool.getCommandPrefix(resolver), ibtoolModuleFlag ? 
Optional.of(binaryName) : Optional.empty(), ImmutableList.<String>builder() .addAll(modifiedFlags) .add("--target-device", "watch", "--compile") .build(), sourcePath, compiledStoryboardPath)); stepsBuilder.add( new IbtoolStep( getProjectFilesystem(), ibtool.getEnvironment(resolver), ibtool.getCommandPrefix(resolver), ibtoolModuleFlag ? Optional.of(binaryName) : Optional.empty(), ImmutableList.<String>builder() .addAll(modifiedFlags) .add("--target-device", "watch", "--link") .build(), compiledStoryboardPath, destinationPath.getParent())); } else { LOG.debug("Compiling storyboard %s to storyboardc %s", sourcePath, destinationPath); String compiledStoryboardFilename = Files.getNameWithoutExtension(destinationPath.toString()) + ".storyboardc"; Path compiledStoryboardPath = destinationPath.getParent().resolve(compiledStoryboardFilename); stepsBuilder.add( new IbtoolStep( getProjectFilesystem(), ibtool.getEnvironment(resolver), ibtool.getCommandPrefix(resolver), ibtoolModuleFlag ? Optional.of(binaryName) : Optional.empty(), ImmutableList.<String>builder().addAll(modifiedFlags).add("--compile").build(), sourcePath, compiledStoryboardPath)); } } private void addResourceProcessingSteps( SourcePathResolver resolver, Path sourcePath, Path destinationPath, ImmutableList.Builder<Step> stepsBuilder) { String sourcePathExtension = Files.getFileExtension(sourcePath.toString()).toLowerCase(Locale.US); switch (sourcePathExtension) { case "plist": case "stringsdict": LOG.debug("Converting plist %s to binary plist %s", sourcePath, destinationPath); stepsBuilder.add( new PlistProcessStep( getProjectFilesystem(), sourcePath, Optional.empty(), destinationPath, ImmutableMap.of(), ImmutableMap.of(), PlistProcessStep.OutputFormat.BINARY)); break; case "storyboard": addStoryboardProcessingSteps(resolver, sourcePath, destinationPath, stepsBuilder); break; case "xib": String compiledNibFilename = Files.getNameWithoutExtension(destinationPath.toString()) + ".nib"; Path compiledNibPath = 
destinationPath.getParent().resolve(compiledNibFilename); LOG.debug("Compiling XIB %s to NIB %s", sourcePath, destinationPath); stepsBuilder.add( new IbtoolStep( getProjectFilesystem(), ibtool.getEnvironment(resolver), ibtool.getCommandPrefix(resolver), ibtoolModuleFlag ? Optional.of(binaryName) : Optional.empty(), ImmutableList.<String>builder() .addAll(BASE_IBTOOL_FLAGS) .addAll(ibtoolFlags) .addAll(ImmutableList.of("--compile")) .build(), sourcePath, compiledNibPath)); break; default: stepsBuilder.add(CopyStep.forFile(getProjectFilesystem(), sourcePath, destinationPath)); break; } } @Override public boolean isTestedBy(BuildTarget testRule) { if (tests.contains(testRule)) { return true; } if (binary.isPresent()) { BuildRule binaryRule = binary.get(); if (binaryRule instanceof NativeTestable) { return ((NativeTestable) binaryRule).isTestedBy(testRule); } } return false; } @Override public CxxPreprocessorInput getPrivateCxxPreprocessorInput( CxxPlatform cxxPlatform, ActionGraphBuilder graphBuilder) { if (binary.isPresent()) { BuildRule binaryRule = binary.get(); if (binaryRule instanceof NativeTestable) { return ((NativeTestable) binaryRule) .getPrivateCxxPreprocessorInput(cxxPlatform, graphBuilder); } } return CxxPreprocessorInput.of(); } private boolean adHocCodeSignIsSufficient() { return ApplePlatform.adHocCodeSignIsSufficient(platform.getName()); } // .framework bundles will be code-signed when they're copied into the containing bundle. private boolean needCodeSign() { return binary.isPresent() && ApplePlatform.needsCodeSign(platform.getName()) && !extension.equals(FRAMEWORK_EXTENSION); } @Override public BuildRule getBinaryBuildRule() { return binary.get(); } @Override public Stream<BuildTarget> getRuntimeDeps(SourcePathRuleFinder ruleFinder) { // When "running" an app bundle, ensure debug symbols are available. 
if (binary.get() instanceof HasAppleDebugSymbolDeps) { List<BuildRule> symbolDeps = ((HasAppleDebugSymbolDeps) binary.get()) .getAppleDebugSymbolDeps() .collect(Collectors.toList()); if (!symbolDeps.isEmpty()) { return Stream.concat(Stream.of(binary.get()), symbolDeps.stream()) .map(BuildRule::getBuildTarget); } } return Stream.empty(); } @Override public boolean isCacheable() { return cacheable; } @Override public Tool getExecutableCommand() { return new CommandTool.Builder() .addArg(SourcePathArg.of(PathSourcePath.of(getProjectFilesystem(), bundleBinaryPath))) .build(); } }
package cn.howardliu.gear.commons.utils; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.Collections; import java.util.HashMap; import java.util.Hashtable; import java.util.Map; /** * <br>created at 17-5-9 * * @author liuxh * @version 1.0.0 * @since 1.0.0 */ public final class IntrospectionUtils { private static final Logger logger = LoggerFactory.getLogger(IntrospectionUtils.class); private static final Map<Class<?>, Method[]> objectMethods = Collections.synchronizedMap(new HashMap<Class<?>, Method[]>()); /** * Find a method with the right name if found, call the method (if param is * int or boolean we'll convert value to the right type before) - that means * you can have setDebug(1). * * @param o the object to set a property on * @param name the property name * @param value the property value * @return {@code true} if operation was successfully */ public static boolean setProperty(Object o, String name, String value) { return setProperty(o, name, value, true); } public static boolean setProperty(Object o, String name, String value, boolean invokeSetProperty) { if (logger.isDebugEnabled()) { logger.debug("IntrospectionUtils: setProperty({}, {}, {}, {})", o, name, value, invokeSetProperty); } String setter = "set" + capitalize(name); try { Method[] methods = findMethods(o.getClass()); Method setPropertyMethodVoid = null; Method setPropertyMethodBool = null; for (Method method : methods) { Class<?>[] parameterTypes = method.getParameterTypes(); if (setter.equals(method.getName()) && parameterTypes.length == 1 && "java.lang.String".equals(parameterTypes[0].getName())) { method.invoke(o, value); return true; } } // try a setFoo(int) or setFoo(boolean) for (Method method : methods) { boolean ok = true; if (setter.equals(method.getName()) && method.getParameterTypes().length == 1) { Class<?> paramType = 
method.getParameterTypes()[0]; Object[] params = new Object[1]; switch (paramType.getName()) { case "java.lang.Integer": case "int": { try { params[0] = Integer.valueOf(value); } catch (NumberFormatException e) { ok = false; } break; } case "java.lang.Long": case "long": { try { params[0] = Long.valueOf(value); } catch (NumberFormatException e) { ok = false; } break; } case "java.lang.Boolean": case "boolean": { params[0] = Boolean.valueOf(value); break; } default: { if (logger.isDebugEnabled()) { logger.debug("IntrospectionUtils: Unknown type {}", paramType.getName()); } } } if (ok) { method.invoke(o, params); return true; } } if ("setProperty".equals(method.getName())) { if (method.getReturnType() == Boolean.TYPE) { setPropertyMethodBool = method; } else { setPropertyMethodVoid = method; } } } // no setXXX found, try a setProperty("name", "value") if (invokeSetProperty && (setPropertyMethodBool != null || setPropertyMethodVoid != null)) { Object[] params = new Object[2]; params[0] = name; params[1] = value; if (setPropertyMethodBool != null) { try { return (Boolean) setPropertyMethodBool.invoke(o, params); } catch (IllegalArgumentException e) { if (setPropertyMethodVoid != null) { setPropertyMethodVoid.invoke(o, params); return true; } else { throw e; } } } else { setPropertyMethodVoid.invoke(o, params); return true; } } } catch (IllegalArgumentException e) { logger.warn("IAE {} {} {}", o, name, value, e); } catch (IllegalAccessException e) { logger.warn("IntrospectionUtils: IllegalAccessException for {} {}={}", o, name, value, e); } catch (InvocationTargetException e) { logger.warn("IntrospectionUtils: InvocationTargetException for {} {}={}", o, name, value, e); } return false; } public static Object getProperty(Object o, String name) { String getter = "get" + capitalize(name); String isGetter = "is" + capitalize(name); try { Method[] methods = findMethods(o.getClass()); Method getPropertyMethod = null; for (Method method : methods) { Class<?>[] parameterTypes = 
method.getParameterTypes(); if (getter.equals(method.getName()) && parameterTypes.length == 0) { return method.invoke(o, (Object[]) null); } if (isGetter.equals(method.getName()) && parameterTypes.length == 0) { return method.invoke(o, (Object[]) null); } if ("getProperty".equals(method.getName())) { getPropertyMethod = method; } } // no getXXX found, try a getProperty("name") if (getPropertyMethod != null) { Object[] params = new Object[1]; params[0] = name; return getPropertyMethod.invoke(o, params); } } catch (IllegalArgumentException ex2) { logger.warn("IAE {} {}", o, name, ex2); } catch (IllegalAccessException e) { logger.warn("IntrospectionUtils: IllegalAccessException for {} {}", o, name, e); } catch (InvocationTargetException e) { if (e.getCause() instanceof NullPointerException) { return null; } logger.warn("IntrospectionUtils: InvocationTargetException for {} {}", o, name, e); } return null; } /** * Replace ${NAME} with the property value. * * @param value the value * @param staticProp replacement properties * @param dynamicProp replacement properties * @return the replacement value */ public static String replaceProperties(String value, Hashtable<Object, Object> staticProp, PropertySource dynamicProp[]) { if (value.indexOf('$') < 0) { return value; } StringBuilder sb = new StringBuilder(); int prev = 0; int pos; while ((pos = value.indexOf('$', prev)) >= 0) { if (pos > 0) { sb.append(value.substring(prev, pos)); } if (pos == (value.length() - 1)) { sb.append('$'); prev = pos + 1; } else if (value.charAt(pos + 1) != '{') { sb.append('$'); prev = pos + 1; } else { int endName = value.indexOf('}', pos); if (endName < 0) { sb.append(value.substring(pos)); prev = value.length(); continue; } String n = value.substring(pos + 2, endName); String v = null; if (staticProp != null) { v = (String) staticProp.get(n); } if (v == null && dynamicProp != null) { for (PropertySource source : dynamicProp) { v = source.getProperty(n); if (v != null) { break; } } } if (v == 
null) { v = "${" + n + "}"; } sb.append(v); prev = endName + 1; } } if (prev < value.length()) { sb.append(value.substring(prev)); } return sb.toString(); } /** * Reverse of {@link java.beans.Introspector#decapitalize(java.lang.String)}. * * @param name The string to be capitalized. * @return The capitalized version of the string. */ public static String capitalize(String name) { if (name == null || name.length() == 0) { return name; } char chars[] = name.toCharArray(); chars[0] = Character.toUpperCase(chars[0]); return new String(chars); } public static void clear() { objectMethods.clear(); } public static Method[] findMethods(Class<?> c) { Method[] methods = objectMethods.get(c); if (methods != null) { return methods; } methods = c.getMethods(); objectMethods.put(c, methods); return methods; } public static Method findMethod(Class<?> c, String methodName, Class<?>[] params) { Method[] methods = findMethods(c); if (methods == null) { return null; } for (Method method : methods) { if (method.getName().equals(methodName)) { Class<?>[] parameterTypes = method.getParameterTypes(); if ((parameterTypes == null || parameterTypes.length == 0) && (params == null || params.length == 0)) { return method; } if (!(parameterTypes != null && params != null && params.length == parameterTypes.length)) { continue; } boolean found = true; for (int i = 0; i < params.length; i++) { if (params[i] != parameterTypes[i]) { found = false; break; } } if (found) { return method; } } } return null; } public static Object callMethod1(Object target, String methodN, Object param1, String typeParam1, ClassLoader cl) throws Exception { if (target == null || param1 == null) { throw new IllegalArgumentException("IntrospectionUtils: Assert: Illegal params " + target + " " + param1); } if (logger.isDebugEnabled()) { logger.debug("IntrospectionUtils: callMethod {} {} {}", target.getClass().getName(), param1.getClass().getName(), typeParam1); } Class<?>[] params = new Class[1]; if (typeParam1 == null) { 
params[0] = param1.getClass(); } else { params[0] = cl.loadClass(typeParam1); } Method m = findMethod(target.getClass(), methodN, params); if (m == null) { throw new NoSuchMethodException(target.getClass().getName() + " " + methodN); } return m.invoke(target, param1); } public static Object callMethodN(Object target, String methodN, Object[] params, Class<?>[] typeParams) throws Exception { Method m = findMethod(target.getClass(), methodN, typeParams); if (m == null) { if (logger.isDebugEnabled()) { logger.debug("IntrospectionUtils: Cannot find method {} in {} class {}", methodN, target, target.getClass()); } return null; } Object result = m.invoke(target, params); if (logger.isDebugEnabled()) { logger.debug("IntrospectionUtils: {}.{}({})", target.getClass().getName(), methodN, StringUtils.join(params, ",")); } return result; } public static Object callMethodN(Object target, String methodN, Object[] params, String[] signature) throws Exception { assert signature != null; Class<?>[] typeParams = new Class[signature.length]; for (int i = 0; i < signature.length; i++) { typeParams[i] = Class.forName(signature[i]); } return callMethodN(target, methodN, params, typeParams); } public static <T> T convert(String object, Class<T> paramType) { Object result = null; switch (paramType.getName()) { case "java.lang.String": { result = object; break; } case "java.lang.Integer": case "int": { try { result = Integer.valueOf(object); } catch (NumberFormatException ignore) { } break; } case "java.lang.Boolean": case "boolean": { result = Boolean.valueOf(object); break; } default: { if (logger.isDebugEnabled()) { logger.debug("IntrospectionUtils: Unknown type " + paramType.getName()); } } } if (result == null) { throw new IllegalArgumentException("Cannot convert argument " + object + " to type " + paramType.getName()); } //noinspection unchecked return (T) result; } public static interface PropertySource { public String getProperty(String key); } }
package com.example.msf.msf.Fragments.Admissions; import android.app.ProgressDialog; import android.content.Context; import android.os.Bundle; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentManager; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.Button; import android.widget.TextView; import android.widget.Toast; import com.example.msf.msf.API.Auth; import com.example.msf.msf.API.BusProvider; import com.example.msf.msf.API.Communicator; import com.example.msf.msf.API.Models.Admission; import com.example.msf.msf.API.ErrorEvent; import com.example.msf.msf.API.Interface; import com.example.msf.msf.API.ServerEvent; import com.example.msf.msf.Utils.DataAdapter; import com.example.msf.msf.HomeActivity; import com.example.msf.msf.LoginActivity; import com.example.msf.msf.R; import com.squareup.otto.Subscribe; import org.json.JSONException; import org.json.JSONObject; import retrofit.Callback; import retrofit.RetrofitError; import retrofit.client.Response; import retrofit.mime.TypedByteArray; /** * A simple {@link Fragment} subclass. * Activities that contain this fragment must implement the * {@link AdmissionInfoFragment.OnFragmentInteractionListener} interface * to handle interaction events. * Use the {@link AdmissionInfoFragment#newInstance} factory method to * create an instance of this fragment. */ public class AdmissionInfoFragment extends Fragment { // TODO: Rename parameter arguments, choose names that match // the fragment initialization parameters, e.g. 
ARG_ITEM_NUMBER private static final String ARG_PARAM1 = "param1"; private String id; private final String TAG = this.getClass().getSimpleName(); Button edit, delete; TextView patientName, healthCentre, admissionDate, dischargeDate, notes; private Communicator communicator; // Progress Dialog Object ProgressDialog prgDialog; public static String PATIENTINFOFILE = "Patients"; private OnFragmentInteractionListener mListener; public AdmissionInfoFragment() { // Required empty public constructor } /** * Use this factory method to create a new instance of * this fragment using the provided parameters. * * @param param1 Parameter 1. * @return A new instance of fragment AdmissionInfoFragment. */ // TODO: Rename and change types and number of parameters public static AdmissionInfoFragment newInstance(String param1) { AdmissionInfoFragment fragment = new AdmissionInfoFragment(); Bundle args = new Bundle(); args.putString(ARG_PARAM1, param1); fragment.setArguments(args); return fragment; } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); if (getArguments() != null) { id = getArguments().getString(ARG_PARAM1); } } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { HomeActivity.navItemIndex = 6; // Inflate the layout for this fragment View view = inflater.inflate(R.layout.fragment_admission_info, container, false); patientName = (TextView) view.findViewById(R.id.patientNameTV); healthCentre = (TextView) view.findViewById(R.id.health_centreTV); admissionDate = (TextView) view.findViewById(R.id.admissionDateTV); dischargeDate = (TextView) view.findViewById(R.id.dischargeDateTV); notes = (TextView) view.findViewById(R.id.notesTV); communicator = new Communicator(); Log.d(TAG, id); // Instantiate Progress Dialog object prgDialog = new ProgressDialog(AdmissionInfoFragment.this.getActivity()); // Set Progress Dialog Text prgDialog.setMessage("Please wait..."); // Set Cancelable 
as False prgDialog.setCancelable(false); onButtonPressed(id); admissionGet(Long.parseLong(id)); edit = (Button) view.findViewById(R.id.editButton); editListener(); return view; } public void admissionGet(long admissionID){ prgDialog.show(); Interface communicatorInterface = Auth.getInterface(LoginActivity.username, LoginActivity.password); Callback<Admission> callback = new Callback<Admission>() { @Override public void success(Admission serverResponse, Response response2) { String resp = new String(((TypedByteArray) response2.getBody()).getBytes()); try{ JSONObject jsonObject = new JSONObject(resp); String pName = DataAdapter.patientInfo(Long.parseLong(jsonObject.getString("patient")), getActivity()); patientName.setText(pName); Log.d(TAG, "patientName "+jsonObject.getString("patient")); notes.setText(jsonObject.getString("notes")); healthCentre.setText(jsonObject.getString("health_centre")); admissionDate.setText(jsonObject.getString("admission_date")); if (!jsonObject.getString("discharge_date").equals("null")) { dischargeDate.setText(jsonObject.getString("discharge_date")); } } catch (JSONException e){ System.out.print("unsuccessful"); } } @Override public void failure(RetrofitError error) { if(error != null ){ Log.e(TAG, error.getMessage()); error.printStackTrace(); } } }; communicatorInterface.getAdmission(admissionID,callback); prgDialog.hide(); } public void editListener() { edit.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Log.e(TAG, id.toString()); String[] admissionInfo = {patientName.getText().toString(), healthCentre.getText().toString(), admissionDate.getText().toString(), dischargeDate.getText().toString(), notes.getText().toString(),id}; UpdateAdmissionFragment updateAdmissionFragment = new UpdateAdmissionFragment().newInstance(admissionInfo); FragmentManager manager = getActivity().getSupportFragmentManager(); manager.beginTransaction() .replace(R.id.rel_layout_for_frag, updateAdmissionFragment, 
updateAdmissionFragment.getTag()) .addToBackStack(null) .commit(); } }); } public void onButtonPressed(String data) { if (mListener != null) { mListener.onFragmentInteraction(data); } } @Override public void onAttach(Context context) { super.onAttach(context); if (context instanceof OnFragmentInteractionListener) { mListener = (OnFragmentInteractionListener) context; } else { throw new RuntimeException(context.toString() + " must implement OnFragmentInteractionListener"); } } @Override public void onDetach() { super.onDetach(); mListener = null; } public interface OnFragmentInteractionListener { void onFragmentInteraction(String data); } @Override public void onResume(){ super.onResume(); BusProvider.getInstance().register(this); } @Override public void onPause(){ super.onPause(); BusProvider.getInstance().unregister(this); } @Subscribe public void onServerEvent(ServerEvent serverEvent){ prgDialog.hide(); Toast.makeText(AdmissionInfoFragment.this.getActivity(), "You have successfully deleted a hospital admission", Toast.LENGTH_SHORT).show(); FragmentManager manager = getActivity().getSupportFragmentManager(); manager.popBackStackImmediate(); } @Subscribe public void onErrorEvent(ErrorEvent errorEvent){ prgDialog.hide(); Toast.makeText(AdmissionInfoFragment.this.getActivity(), "" + errorEvent.getErrorMsg(), Toast.LENGTH_SHORT).show(); } }
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: BseRoomInfo.proto
//
// NOTE(review): protoc-generated code. Do not hand-edit; change BseRoomInfo.proto
// and regenerate instead. Message: BseRoomInfo { required int32 roomID = 1;
// required int32 roomMode = 2; }

package com.xinqihd.sns.gameserver.proto;

public final class XinqiBseRoomInfo {
  private XinqiBseRoomInfo() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  public interface BseRoomInfoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required int32 roomID = 1;
    boolean hasRoomID();
    int getRoomID();

    // required int32 roomMode = 2;
    boolean hasRoomMode();
    int getRoomMode();
  }
  public static final class BseRoomInfo extends
      com.google.protobuf.GeneratedMessage
      implements BseRoomInfoOrBuilder {
    // Use BseRoomInfo.newBuilder() to construct.
    private BseRoomInfo(Builder builder) {
      super(builder);
    }
    private BseRoomInfo(boolean noInit) {}

    private static final BseRoomInfo defaultInstance;
    public static BseRoomInfo getDefaultInstance() {
      return defaultInstance;
    }

    public BseRoomInfo getDefaultInstanceForType() {
      return defaultInstance;
    }

    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.xinqihd.sns.gameserver.proto.XinqiBseRoomInfo.internal_static_com_xinqihd_sns_gameserver_proto_BseRoomInfo_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.xinqihd.sns.gameserver.proto.XinqiBseRoomInfo.internal_static_com_xinqihd_sns_gameserver_proto_BseRoomInfo_fieldAccessorTable;
    }

    private int bitField0_;
    // required int32 roomID = 1;
    public static final int ROOMID_FIELD_NUMBER = 1;
    private int roomID_;
    public boolean hasRoomID() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public int getRoomID() {
      return roomID_;
    }

    // required int32 roomMode = 2;
    public static final int ROOMMODE_FIELD_NUMBER = 2;
    private int roomMode_;
    public boolean hasRoomMode() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    public int getRoomMode() {
      return roomMode_;
    }

    private void initFields() {
      roomID_ = 0;
      roomMode_ = 0;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasRoomID()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasRoomMode()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeInt32(1, roomID_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeInt32(2, roomMode_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(1, roomID_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(2, roomMode_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    public static com.xinqihd.sns.gameserver.proto.XinqiBseRoomInfo.BseRoomInfo parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBseRoomInfo.BseRoomInfo parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBseRoomInfo.BseRoomInfo parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBseRoomInfo.BseRoomInfo parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBseRoomInfo.BseRoomInfo parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBseRoomInfo.BseRoomInfo parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBseRoomInfo.BseRoomInfo parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBseRoomInfo.BseRoomInfo parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBseRoomInfo.BseRoomInfo parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBseRoomInfo.BseRoomInfo parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(com.xinqihd.sns.gameserver.proto.XinqiBseRoomInfo.BseRoomInfo prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements com.xinqihd.sns.gameserver.proto.XinqiBseRoomInfo.BseRoomInfoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return com.xinqihd.sns.gameserver.proto.XinqiBseRoomInfo.internal_static_com_xinqihd_sns_gameserver_proto_BseRoomInfo_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return com.xinqihd.sns.gameserver.proto.XinqiBseRoomInfo.internal_static_com_xinqihd_sns_gameserver_proto_BseRoomInfo_fieldAccessorTable;
      }

      // Construct using com.xinqihd.sns.gameserver.proto.XinqiBseRoomInfo.BseRoomInfo.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        roomID_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        roomMode_ = 0;
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return com.xinqihd.sns.gameserver.proto.XinqiBseRoomInfo.BseRoomInfo.getDescriptor();
      }

      public com.xinqihd.sns.gameserver.proto.XinqiBseRoomInfo.BseRoomInfo getDefaultInstanceForType() {
        return com.xinqihd.sns.gameserver.proto.XinqiBseRoomInfo.BseRoomInfo.getDefaultInstance();
      }

      public com.xinqihd.sns.gameserver.proto.XinqiBseRoomInfo.BseRoomInfo build() {
        com.xinqihd.sns.gameserver.proto.XinqiBseRoomInfo.BseRoomInfo result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      private com.xinqihd.sns.gameserver.proto.XinqiBseRoomInfo.BseRoomInfo buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        com.xinqihd.sns.gameserver.proto.XinqiBseRoomInfo.BseRoomInfo result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }

      public com.xinqihd.sns.gameserver.proto.XinqiBseRoomInfo.BseRoomInfo buildPartial() {
        com.xinqihd.sns.gameserver.proto.XinqiBseRoomInfo.BseRoomInfo result = new com.xinqihd.sns.gameserver.proto.XinqiBseRoomInfo.BseRoomInfo(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.roomID_ = roomID_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.roomMode_ = roomMode_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof com.xinqihd.sns.gameserver.proto.XinqiBseRoomInfo.BseRoomInfo) {
          return mergeFrom((com.xinqihd.sns.gameserver.proto.XinqiBseRoomInfo.BseRoomInfo)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(com.xinqihd.sns.gameserver.proto.XinqiBseRoomInfo.BseRoomInfo other) {
        if (other == com.xinqihd.sns.gameserver.proto.XinqiBseRoomInfo.BseRoomInfo.getDefaultInstance()) return this;
        if (other.hasRoomID()) {
          setRoomID(other.getRoomID());
        }
        if (other.hasRoomMode()) {
          setRoomMode(other.getRoomMode());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        if (!hasRoomID()) {
          return false;
        }
        if (!hasRoomMode()) {
          return false;
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              roomID_ = input.readInt32();
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              roomMode_ = input.readInt32();
              break;
            }
          }
        }
      }

      private int bitField0_;

      // required int32 roomID = 1;
      private int roomID_ ;
      public boolean hasRoomID() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public int getRoomID() {
        return roomID_;
      }
      public Builder setRoomID(int value) {
        bitField0_ |= 0x00000001;
        roomID_ = value;
        onChanged();
        return this;
      }
      public Builder clearRoomID() {
        bitField0_ = (bitField0_ & ~0x00000001);
        roomID_ = 0;
        onChanged();
        return this;
      }

      // required int32 roomMode = 2;
      private int roomMode_ ;
      public boolean hasRoomMode() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      public int getRoomMode() {
        return roomMode_;
      }
      public Builder setRoomMode(int value) {
        bitField0_ |= 0x00000002;
        roomMode_ = value;
        onChanged();
        return this;
      }
      public Builder clearRoomMode() {
        bitField0_ = (bitField0_ & ~0x00000002);
        roomMode_ = 0;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:com.xinqihd.sns.gameserver.proto.BseRoomInfo)
    }

    static {
      defaultInstance = new BseRoomInfo(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:com.xinqihd.sns.gameserver.proto.BseRoomInfo)
  }

  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_com_xinqihd_sns_gameserver_proto_BseRoomInfo_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_com_xinqihd_sns_gameserver_proto_BseRoomInfo_fieldAccessorTable;

  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    java.lang.String[] descriptorData = {
      "\n\021BseRoomInfo.proto\022 com.xinqihd.sns.gam" +
      "eserver.proto\"/\n\013BseRoomInfo\022\016\n\006roomID\030\001" +
      " \002(\005\022\020\n\010roomMode\030\002 \002(\005B\022B\020XinqiBseRoomIn" +
      "fo"
    };
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          internal_static_com_xinqihd_sns_gameserver_proto_BseRoomInfo_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_com_xinqihd_sns_gameserver_proto_BseRoomInfo_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_com_xinqihd_sns_gameserver_proto_BseRoomInfo_descriptor,
              new java.lang.String[] { "RoomID", "RoomMode", },
              com.xinqihd.sns.gameserver.proto.XinqiBseRoomInfo.BseRoomInfo.class,
              com.xinqihd.sns.gameserver.proto.XinqiBseRoomInfo.BseRoomInfo.Builder.class);
          return null;
        }
      };
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
        }, assigner);
  }

  // @@protoc_insertion_point(outer_class_scope)
}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.http; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.Serializable; import java.io.UnsupportedEncodingException; import java.net.URI; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import org.apache.camel.CamelExchangeException; import org.apache.camel.Exchange; import org.apache.camel.Message; import org.apache.camel.RuntimeCamelException; import org.apache.camel.component.file.GenericFile; import org.apache.camel.converter.stream.CachedOutputStream; import org.apache.camel.http.common.HttpConstants; import org.apache.camel.http.common.HttpHelper; import org.apache.camel.http.common.HttpOperationFailedException; import org.apache.camel.http.common.HttpProtocolHeaderFilterStrategy; import org.apache.camel.impl.DefaultProducer; import org.apache.camel.spi.HeaderFilterStrategy; import org.apache.camel.util.ExchangeHelper; import org.apache.camel.util.GZIPHelper; import org.apache.camel.util.IOHelper; import org.apache.camel.util.MessageHelper; import org.apache.camel.util.ObjectHelper; import 
org.apache.camel.util.URISupport;
import org.apache.camel.util.UnsafeUriCharactersEncoder;
import org.apache.commons.httpclient.Header;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpMethod;
import org.apache.commons.httpclient.HttpVersion;
import org.apache.commons.httpclient.methods.ByteArrayRequestEntity;
import org.apache.commons.httpclient.methods.EntityEnclosingMethod;
import org.apache.commons.httpclient.methods.FileRequestEntity;
import org.apache.commons.httpclient.methods.InputStreamRequestEntity;
import org.apache.commons.httpclient.methods.RequestEntity;
import org.apache.commons.httpclient.methods.StringRequestEntity;
import org.apache.commons.httpclient.params.HttpMethodParams;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Camel producer that sends the Exchange over HTTP using Commons HttpClient 3.x
 * and maps the HTTP response (or failure) back onto the Exchange.
 *
 * @version
 */
public class HttpProducer extends DefaultProducer {
    private static final Logger LOG = LoggerFactory.getLogger(HttpProducer.class);
    private HttpClient httpClient;
    // Snapshots of endpoint options taken at construction time.
    private boolean throwException;
    private boolean transferException;
    private HeaderFilterStrategy httpProtocolHeaderFilterStrategy = new HttpProtocolHeaderFilterStrategy();

    public HttpProducer(HttpEndpoint endpoint) {
        super(endpoint);
        this.httpClient = endpoint.createHttpClient();
        this.throwException = endpoint.isThrowExceptionOnFailure();
        this.transferException = endpoint.isTransferException();
    }

    /**
     * Builds the HTTP request from the IN message, executes it, and either
     * populates the OUT message or throws an HttpOperationFailedException
     * (depending on the throwExceptionOnFailure endpoint option).
     */
    public void process(Exchange exchange) throws Exception {
        // if we bridge endpoint then we need to skip matching headers with the HTTP_QUERY to avoid sending
        // duplicated headers to the receiver, so use this skipRequestHeaders as the list of headers to skip
        Map<String, Object> skipRequestHeaders = null;
        if (getEndpoint().isBridgeEndpoint()) {
            exchange.setProperty(Exchange.SKIP_GZIP_ENCODING, Boolean.TRUE);
            String queryString = exchange.getIn().getHeader(Exchange.HTTP_QUERY, String.class);
            if (queryString != null) {
                skipRequestHeaders = URISupport.parseQuery(queryString, false, true);
            }
            // Need to remove the Host key as it should be not used
            exchange.getIn().getHeaders().remove("host");
        }
        HttpMethod method = createMethod(exchange);
        Message in = exchange.getIn();

        String httpProtocolVersion = in.getHeader(Exchange.HTTP_PROTOCOL_VERSION, String.class);
        if (httpProtocolVersion != null) {
            // set the HTTP protocol version
            HttpMethodParams params = method.getParams();
            params.setVersion(HttpVersion.parse(httpProtocolVersion));
        }

        HeaderFilterStrategy strategy = getEndpoint().getHeaderFilterStrategy();

        // propagate headers as HTTP headers
        for (Map.Entry<String, Object> entry : in.getHeaders().entrySet()) {
            String key = entry.getKey();
            Object headerValue = in.getHeader(key);

            if (headerValue != null) {
                // use an iterator as there can be multiple values. (must not use a delimiter, and allow empty values)
                final Iterator<?> it = ObjectHelper.createIterator(headerValue, null, true);

                // the value to add as request header
                final List<String> values = new ArrayList<String>();

                // if its a multi value then check each value if we can add it and for multi values they
                // should be combined into a single value
                while (it.hasNext()) {
                    String value = exchange.getContext().getTypeConverter().convertTo(String.class, it.next());

                    // we should not add headers for the parameters in the uri if we bridge the endpoint
                    // as then we would duplicate headers on both the endpoint uri, and in HTTP headers as well
                    if (skipRequestHeaders != null && skipRequestHeaders.containsKey(key)) {
                        continue;
                    }
                    if (value != null && strategy != null && !strategy.applyFilterToCamelHeaders(key, value, exchange)) {
                        values.add(value);
                    }
                }

                // add the value(s) as a http request header
                if (values.size() > 0) {
                    // use the default toString of a ArrayList to create in the form [xxx, yyy]
                    // if multi valued, for a single value, then just output the value as is
                    String s = values.size() > 1 ? values.toString() : values.get(0);
                    method.addRequestHeader(key, s);
                }
            }
        }

        // lets store the result in the output message.
        try {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Executing http {} method: {}", method.getName(), method.getURI().toString());
            }
            int responseCode = executeMethod(method);
            LOG.debug("Http responseCode: {}", responseCode);

            if (!throwException) {
                // if we do not use failed exception then populate response for all response codes
                populateResponse(exchange, method, in, strategy, responseCode);
            } else {
                boolean ok = HttpHelper.isStatusCodeOk(responseCode, getEndpoint().getOkStatusCodeRange());
                if (ok) {
                    // only populate response for OK response
                    populateResponse(exchange, method, in, strategy, responseCode);
                } else {
                    // operation failed so populate exception to throw
                    throw populateHttpOperationFailedException(exchange, method, responseCode);
                }
            }
        } finally {
            method.releaseConnection();
        }
    }

    @Override
    public HttpEndpoint getEndpoint() {
        return (HttpEndpoint) super.getEndpoint();
    }

    /**
     * Copies status line, body and (filtered) response headers from the executed
     * method onto the Exchange's OUT message.
     */
    protected void populateResponse(Exchange exchange, HttpMethod method, Message in,
            HeaderFilterStrategy strategy, int responseCode) throws IOException, ClassNotFoundException {
        //We just make the out message is not create when extractResponseBody throws exception,
        Object response = extractResponseBody(method, exchange, getEndpoint().isIgnoreResponseBody());
        Message answer = exchange.getOut();

        answer.setHeader(Exchange.HTTP_RESPONSE_CODE, responseCode);
        answer.setHeader(Exchange.HTTP_RESPONSE_TEXT, method.getStatusText());
        answer.setBody(response);

        // propagate HTTP response headers
        Header[] headers = method.getResponseHeaders();
        for (Header header : headers) {
            String name = header.getName();
            String value = header.getValue();
            if (name.toLowerCase().equals("content-type")) {
                name = Exchange.CONTENT_TYPE;
                exchange.setProperty(Exchange.CHARSET_NAME, IOHelper.getCharsetNameFromContentType(value));
            }
            // use http helper to extract parameter value as it may contain multiple values
            Object extracted = HttpHelper.extractHttpParameterValue(value);
            if (strategy != null && !strategy.applyFilterToExternalHeaders(name, extracted, exchange)) {
                HttpHelper.appendHeader(answer.getHeaders(), name, extracted);
            }
        }

        // endpoint might be configured to copy headers from in to out
        // to avoid overriding existing headers with old values just
        // filter the http protocol headers
        if (getEndpoint().isCopyHeaders()) {
            MessageHelper.copyHeaders(exchange.getIn(), answer, httpProtocolHeaderFilterStrategy, false);
        }
    }

    /**
     * Builds the exception representing a failed (non-OK) HTTP response. If the
     * body is a transferred (serialized) exception, that exception is returned as-is.
     */
    protected Exception populateHttpOperationFailedException(Exchange exchange, HttpMethod method,
            int responseCode) throws IOException, ClassNotFoundException {
        Exception answer;
        String uri = method.getURI().toString();
        String statusText = method.getStatusLine() != null ? method.getStatusLine().getReasonPhrase() : null;
        Map<String, String> headers = extractResponseHeaders(method.getResponseHeaders());

        Object responseBody = extractResponseBody(method, exchange, getEndpoint().isIgnoreResponseBody());
        if (transferException && responseBody != null && responseBody instanceof Exception) {
            // if the response was a serialized exception then use that
            return (Exception) responseBody;
        }

        // make a defensive copy of the response body in the exception so its detached from the cache
        String copy = null;
        if (responseBody != null) {
            copy = exchange.getContext().getTypeConverter().convertTo(String.class, exchange, responseBody);
        }

        if (responseCode >= 300 && responseCode < 400) {
            String redirectLocation;
            Header locationHeader = method.getResponseHeader("location");
            if (locationHeader != null) {
                redirectLocation = locationHeader.getValue();
                answer = new HttpOperationFailedException(uri, responseCode, statusText, redirectLocation, headers, copy);
            } else {
                // no redirect location
                answer = new HttpOperationFailedException(uri, responseCode, statusText, null, headers, copy);
            }
        } else {
            // internal server error (error code 500)
            answer = new HttpOperationFailedException(uri, responseCode, statusText, null, headers, copy);
        }

        return answer;
    }

    /**
     * Strategy when executing the method (calling the remote server).
     *
     * @param method the method to execute
     * @return the response code
     * @throws IOException can be thrown
     */
    protected int executeMethod(HttpMethod method) throws IOException {
        return httpClient.executeMethod(method);
    }

    /**
     * Extracts the response headers
     *
     * @param responseHeaders the headers
     * @return the extracted headers or <tt>null</tt> if no headers existed
     */
    protected static Map<String, String> extractResponseHeaders(Header[] responseHeaders) {
        if (responseHeaders == null || responseHeaders.length == 0) {
            return null;
        }

        Map<String, String> answer = new HashMap<String, String>();
        for (Header header : responseHeaders) {
            answer.put(header.getName(), header.getValue());
        }

        return answer;
    }

    /**
     * Extracts the response from the method as a InputStream.
     *
     * @param method the method that was executed
     * @param ignoreResponseBody if it is true, camel don't read the response and cached the input stream
     * @return the response either as a stream, or as a deserialized java object
     * @throws IOException can be thrown
     */
    protected Object extractResponseBody(HttpMethod method, Exchange exchange,
            boolean ignoreResponseBody) throws IOException, ClassNotFoundException {
        InputStream is = method.getResponseBodyAsStream();
        if (is == null) {
            return null;
        }

        Header header = method.getResponseHeader(Exchange.CONTENT_ENCODING);
        String contentEncoding = header != null ? header.getValue() : null;

        if (!exchange.getProperty(Exchange.SKIP_GZIP_ENCODING, Boolean.FALSE, Boolean.class)) {
            is = GZIPHelper.uncompressGzip(contentEncoding, is);
        }
        // Honor the character encoding
        String contentType = null;
        header = method.getResponseHeader("content-type");
        if (header != null) {
            contentType = header.getValue();
            // find the charset and set it to the Exchange
            HttpHelper.setCharsetFromContentType(contentType, exchange);
        }
        // if content type is a serialized java object then de-serialize it back to a Java object
        if (contentType != null && contentType.equals(HttpConstants.CONTENT_TYPE_JAVA_SERIALIZED_OBJECT)) {
            // only deserialize java if allowed
            if (getEndpoint().getComponent().isAllowJavaSerializedObject() || getEndpoint().isTransferException()) {
                return HttpHelper.deserializeJavaObjectFromStream(is, exchange.getContext());
            } else {
                // empty response
                return null;
            }
        } else {
            InputStream response = null;
            if (!ignoreResponseBody) {
                response = doExtractResponseBodyAsStream(is, exchange);
            }
            return response;
        }
    }

    private static InputStream doExtractResponseBodyAsStream(InputStream is, Exchange exchange) throws IOException {
        // As httpclient is using a AutoCloseInputStream, it will be closed when the connection is closed
        // we need to cache the stream for it.
        CachedOutputStream cos = null;
        try {
            // This CachedOutputStream will not be closed when the exchange is onCompletion
            cos = new CachedOutputStream(exchange, false);
            IOHelper.copy(is, cos);
            // When the InputStream is closed, the CachedOutputStream will be closed
            return cos.getWrappedInputStream();
        } catch (IOException ex) {
            // try to close the CachedOutputStream when we get the IOException
            try {
                cos.close();
            } catch (IOException ignore) {
                //do nothing here
            }
            throw ex;
        } finally {
            IOHelper.close(is, "Extracting response body", LOG);
        }
    }

    /**
     * Creates the HttpMethod to use to call the remote server, either its GET or POST.
     *
     * @param exchange the exchange
     * @return the created method as either GET or POST
     * @throws CamelExchangeException is thrown if error creating RequestEntity
     */
    @SuppressWarnings("deprecation")
    protected HttpMethod createMethod(Exchange exchange) throws Exception {
        // creating the url to use takes 2-steps
        String url = HttpHelper.createURL(exchange, getEndpoint());
        URI uri = HttpHelper.createURI(exchange, url, getEndpoint());
        // get the url and query string from the uri
        url = uri.toASCIIString();
        String queryString = uri.getRawQuery();

        // execute any custom url rewrite
        String rewriteUrl = HttpHelper.urlRewrite(exchange, url, getEndpoint(), this);
        if (rewriteUrl != null) {
            // update url and query string from the rewritten url
            url = rewriteUrl;
            uri = new URI(url);
            // use raw query to have uri decimal encoded which http client requires
            queryString = uri.getRawQuery();
        }

        // remove query string as http client does not accept that
        if (url.indexOf('?') != -1) {
            url = url.substring(0, url.indexOf('?'));
        }

        // create http holder objects for the request
        RequestEntity requestEntity = createRequestEntity(exchange);
        String methodName = HttpHelper.createMethod(exchange, getEndpoint(), requestEntity != null).name();
        HttpMethods methodsToUse = HttpMethods.valueOf(methodName);
        HttpMethod method = methodsToUse.createMethod(url);
        if (queryString != null) {
            // need to encode query string
            queryString = UnsafeUriCharactersEncoder.encode(queryString);
            method.setQueryString(queryString);
        }

        LOG.trace("Using URL: {} with method: {}", url, method);

        if (methodsToUse.isEntityEnclosing()) {
            ((EntityEnclosingMethod) method).setRequestEntity(requestEntity);
            if (requestEntity != null && requestEntity.getContentType() == null) {
                LOG.debug("No Content-Type provided for URL: {} with exchange: {}", url, exchange);
            }
        }

        // there must be a host on the method
        if (method.getHostConfiguration().getHost() == null) {
            throw new IllegalArgumentException("Invalid uri: " + url
                + ". If you are forwarding/bridging http endpoints, then enable the bridgeEndpoint option on the endpoint: " + getEndpoint());
        }

        return method;
    }

    /**
     * Creates a holder object for the data to send to the remote server.
     *
     * @param exchange the exchange with the IN message with data to send
     * @return the data holder
     * @throws CamelExchangeException is thrown if error creating RequestEntity
     */
    protected RequestEntity createRequestEntity(Exchange exchange) throws CamelExchangeException {
        Message in = exchange.getIn();
        if (in.getBody() == null) {
            return null;
        }

        RequestEntity answer = in.getBody(RequestEntity.class);
        if (answer == null) {
            try {
                Object data = in.getBody();
                if (data != null) {
                    String contentType = ExchangeHelper.getContentType(exchange);

                    if (contentType != null && HttpConstants.CONTENT_TYPE_JAVA_SERIALIZED_OBJECT.equals(contentType)) {
                        if (!getEndpoint().getComponent().isAllowJavaSerializedObject()) {
                            throw new CamelExchangeException("Content-type " + HttpConstants.CONTENT_TYPE_JAVA_SERIALIZED_OBJECT + " is not allowed", exchange);
                        }
                        // serialized java object
                        Serializable obj = in.getMandatoryBody(Serializable.class);
                        // write object to output stream
                        ByteArrayOutputStream bos = new ByteArrayOutputStream();
                        HttpHelper.writeObjectToStream(bos, obj);
                        answer = new ByteArrayRequestEntity(bos.toByteArray(), HttpConstants.CONTENT_TYPE_JAVA_SERIALIZED_OBJECT);
                        IOHelper.close(bos);
                    } else if (data instanceof File || data instanceof GenericFile) {
                        // file based (could potentially also be a FTP file etc)
                        File file = in.getBody(File.class);
                        if (file != null) {
                            answer = new FileRequestEntity(file, contentType);
                        }
                    } else if (data instanceof String) {
                        // be a bit careful with String as any type can most likely be converted to String
                        // so we only do an instanceof check and accept String if the body is really a String
                        // do not fallback to use the default charset as it can influence the request
                        // (for example application/x-www-form-urlencoded forms being sent)
                        String charset = IOHelper.getCharsetName(exchange, false);
                        answer = new StringRequestEntity((String) data, contentType, charset);
                    }
                    // fallback as input stream
                    if (answer == null) {
                        // force the body as an input stream since this is the fallback
                        InputStream is = in.getMandatoryBody(InputStream.class);
                        answer = new InputStreamRequestEntity(is, contentType);
                    }
                }
            } catch (UnsupportedEncodingException e) {
                throw new CamelExchangeException("Error creating RequestEntity from message body", exchange, e);
            } catch (IOException e) {
                throw new CamelExchangeException("Error serializing message body", exchange, e);
            }
        }
        return answer;
    }

    public HttpClient getHttpClient() {
        return httpClient;
    }

    public void setHttpClient(HttpClient httpClient) {
        this.httpClient = httpClient;
    }
}
/*
 * TouchImageView.java
 * By: Michael Ortiz
 * Updated By: Patrick Lackemacher
 * Updated By: Babay88
 * Updated By: @ipsilondev
 * Updated By: hank-cp
 * Updated By: singpolyma
 * -------------------
 * Extends Android ImageView to include pinch zooming, panning, fling and double tap zoom.
 *
 * GitHub: https://github.com/MikeOrtiz/TouchImageView/blob/master/src/com/ortiz/touch/TouchImageView.java
 *
 * LICENSE COPY
 *
 * Copyright (c) 2012 Michael Ortiz
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy of this software
 * and associated documentation files (the "Software"), to deal in the Software without restriction,
 * including without limitation the rights to use, copy, modify, merge, publish, distribute,
 * sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all copies or
 * substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
 * BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT.
 IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE */

package de.e621.rebane.components;

import android.annotation.TargetApi;
import android.content.Context;
import android.content.res.Configuration;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.PointF;
import android.graphics.RectF;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.os.Build;
import android.os.Build.VERSION;
import android.os.Build.VERSION_CODES;
import android.os.Bundle;
import android.os.Parcelable;
import android.util.AttributeSet;
import android.util.Log;
import android.view.GestureDetector;
import android.view.MotionEvent;
import android.view.ScaleGestureDetector;
import android.view.View;
import android.view.animation.AccelerateDecelerateInterpolator;
import android.widget.OverScroller;
import android.widget.Scroller;

/**
 * ImageView subclass adding pinch zoom, pan, fling and double-tap zoom on top of
 * the image-loading behaviour inherited from {@link WebImageView}.
 */
public class TouchImageView extends WebImageView {

    // Log tag used by printMatrixInfo().
    private static final String DEBUG = "DEBUG";

    //
    // SuperMin and SuperMax multipliers. Determine how much the image can be
    // zoomed below or above the zoom boundaries, before animating back to the
    // min/max zoom boundary.
    //
    private static final float SUPER_MIN_MULTIPLIER = .75f;
    private static final float SUPER_MAX_MULTIPLIER = 1.25f;

    //
    // Scale of image ranges from minScale to maxScale, where minScale == 1
    // when the image is stretched to fit view.
    //
    private float normalizedScale;

    //
    // Matrix applied to image. MSCALE_X and MSCALE_Y should always be equal.
    // MTRANS_X and MTRANS_Y are the other values used. prevMatrix is the matrix
    // saved prior to the screen rotating.
    //
    private Matrix matrix, prevMatrix;

    // Current interaction mode of the view.
    private enum State { NONE, DRAG, ZOOM, FLING, ANIMATE_ZOOM }
    private State state;

    private float minScale;
    private float maxScale;
    private float superMinScale;
    private float superMaxScale;
    // Scratch buffer for reading/writing the 9 matrix values.
    private float[] m;

    private Context context;
    private Fling fling;

    private ScaleType mScaleType;

    private boolean imageRenderedAtLeastOnce;
    private boolean onDrawReady;

    // Zoom requested before the first onDraw(); applied once the view is measured.
    private ZoomVariables delayedZoomVariables;

    //
    // Size of view and previous view size (ie before rotation)
    //
    private int viewWidth, viewHeight, prevViewWidth, prevViewHeight;

    //
    // Size of image when it is stretched to fit view. Before and After rotation.
    //
    private float matchViewWidth, matchViewHeight, prevMatchViewWidth, prevMatchViewHeight;

    private ScaleGestureDetector mScaleDetector;
    private GestureDetector mGestureDetector;
    private GestureDetector.OnDoubleTapListener doubleTapListener = null;
    private OnTouchListener userTouchListener = null;
    private OnTouchImageViewListener touchImageViewListener = null;

    public TouchImageView(Context context) {
        super(context);
        sharedConstructing(context);
    }

    public TouchImageView(Context context, AttributeSet attrs) {
        super(context, attrs);
        sharedConstructing(context);
    }

    public TouchImageView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        sharedConstructing(context);
    }

    /**
     * Common initialisation for all constructors: installs the gesture detectors,
     * resets the matrices and zoom bounds, and claims the internal touch listener slot.
     */
    private void sharedConstructing(Context context) {
        super.setClickable(true);
        this.context = context;
        mScaleDetector = new ScaleGestureDetector(context, new ScaleListener());
        mGestureDetector = new GestureDetector(context, new GestureListener());
        matrix = new Matrix();
        prevMatrix = new Matrix();
        m = new float[9];
        normalizedScale = 1;
        if (mScaleType == null) {
            mScaleType = ScaleType.FIT_CENTER;
        }
        minScale = 1;
        maxScale = 3;
        superMinScale = SUPER_MIN_MULTIPLIER * minScale;
        superMaxScale = SUPER_MAX_MULTIPLIER * maxScale;
        setImageMatrix(matrix);
        setScaleType(ScaleType.MATRIX);
        setState(State.NONE);
        onDrawReady = false;
        super.setOnTouchListener(new PrivateOnTouchListener());
    }

    /**
     * Stores the caller's listener; events are forwarded to it from the internal
     * PrivateOnTouchListener after zoom/pan handling.
     */
    @Override
    public void setOnTouchListener(View.OnTouchListener l) {
        userTouchListener = l;
    }

    public void setOnTouchImageViewListener(OnTouchImageViewListener l) {
        touchImageViewListener = l;
    }

    public void setOnDoubleTapListener(GestureDetector.OnDoubleTapListener l) {
        doubleTapListener = l;
    }

    @Override
    public void setImageResource(int resId) {
        super.setImageResource(resId);
        savePreviousImageValues();
        fitImageToView();
    }

    @Override
    public void setImageBitmap(Bitmap bm) {
        super.setImageBitmap(bm);
        savePreviousImageValues();
        fitImageToView();
    }

    @Override
    public void setImageDrawable(Drawable drawable) {
        super.setImageDrawable(drawable);
        savePreviousImageValues();
        fitImageToView();
    }

    @Override
    public void setImageURI(Uri uri) {
        super.setImageURI(uri);
        savePreviousImageValues();
        fitImageToView();
    }

    @Override
    public void setScaleType(ScaleType type) {
        if (type == ScaleType.FIT_START || type == ScaleType.FIT_END) {
            throw new UnsupportedOperationException("TouchImageView does not support FIT_START or FIT_END");
        }
        if (type == ScaleType.MATRIX) {
            // MATRIX is used internally for rendering; the user-visible scale type stays in mScaleType.
            super.setScaleType(ScaleType.MATRIX);
        } else {
            mScaleType = type;
            if (onDrawReady) {
                //
                // If the image is already rendered, scaleType has been called programmatically
                // and the TouchImageView should be updated with the new scaleType.
                //
                setZoom(this);
            }
        }
    }

    @Override
    public ScaleType getScaleType() {
        return mScaleType;
    }

    /**
     * Returns false if image is in initial, unzoomed state. True, otherwise.
     * @return true if image is zoomed
     */
    public boolean isZoomed() {
        return normalizedScale != 1;
    }

    /**
     * Return a Rect representing the zoomed image.
     * @return rect representing zoomed image
     */
    public RectF getZoomedRect() {
        if (mScaleType == ScaleType.FIT_XY) {
            throw new UnsupportedOperationException("getZoomedRect() not supported with FIT_XY");
        }
        PointF topLeft = transformCoordTouchToBitmap(0, 0, true);
        PointF bottomRight = transformCoordTouchToBitmap(viewWidth, viewHeight, true);

        // Normalise both corners to fractions of the drawable's intrinsic size.
        float w = getDrawable().getIntrinsicWidth();
        float h = getDrawable().getIntrinsicHeight();
        return new RectF(topLeft.x / w, topLeft.y / h, bottomRight.x / w, bottomRight.y / h);
    }

    /**
     * Save the current matrix and view dimensions
     * in the prevMatrix and prevView variables.
     */
    private void savePreviousImageValues() {
        if (matrix != null && viewHeight != 0 && viewWidth != 0) {
            matrix.getValues(m);
            prevMatrix.setValues(m);
            prevMatchViewHeight = matchViewHeight;
            prevMatchViewWidth = matchViewWidth;
            prevViewHeight = viewHeight;
            prevViewWidth = viewWidth;
        }
    }

    @Override
    public Parcelable onSaveInstanceState() {
        // Persist the zoom state across configuration changes / process death.
        Bundle bundle = new Bundle();
        bundle.putParcelable("instanceState", super.onSaveInstanceState());
        bundle.putFloat("saveScale", normalizedScale);
        bundle.putFloat("matchViewHeight", matchViewHeight);
        bundle.putFloat("matchViewWidth", matchViewWidth);
        bundle.putInt("viewWidth", viewWidth);
        bundle.putInt("viewHeight", viewHeight);
        matrix.getValues(m);
        bundle.putFloatArray("matrix", m);
        bundle.putBoolean("imageRendered", imageRenderedAtLeastOnce);
        return bundle;
    }

    @Override
    public void onRestoreInstanceState(Parcelable state) {
        if (state instanceof Bundle) {
            Bundle bundle = (Bundle) state;
            normalizedScale = bundle.getFloat("saveScale");
            m = bundle.getFloatArray("matrix");
            // Restored values land in the prev* fields; fitImageToView() re-derives
            // the live matrix from them once the view is measured again.
            prevMatrix.setValues(m);
            prevMatchViewHeight = bundle.getFloat("matchViewHeight");
            prevMatchViewWidth = bundle.getFloat("matchViewWidth");
            prevViewHeight = bundle.getInt("viewHeight");
            prevViewWidth = bundle.getInt("viewWidth");
            imageRenderedAtLeastOnce = bundle.getBoolean("imageRendered");
            super.onRestoreInstanceState(bundle.getParcelable("instanceState"));
            return;
        }

        super.onRestoreInstanceState(state);
    }

    @Override
    protected void onDraw(Canvas canvas) {
        onDrawReady = true;
        imageRenderedAtLeastOnce = true;
        if (delayedZoomVariables != null) {
            // Apply a zoom that was requested before the view was measured.
            setZoom(delayedZoomVariables.scale, delayedZoomVariables.focusX, delayedZoomVariables.focusY, delayedZoomVariables.scaleType);
            delayedZoomVariables = null;
        }
        super.onDraw(canvas);
    }

    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        savePreviousImageValues();
    }

    /**
     * Get the max zoom multiplier.
     * @return max zoom multiplier.
     */
    public float getMaxZoom() {
        return maxScale;
    }

    /**
     * Set the max zoom multiplier. Default value: 3.
     * @param max max zoom multiplier.
     */
    public void setMaxZoom(float max) {
        maxScale = max;
        superMaxScale = SUPER_MAX_MULTIPLIER * maxScale;
    }

    /**
     * Get the min zoom multiplier.
     * @return min zoom multiplier.
     */
    public float getMinZoom() {
        return minScale;
    }

    /**
     * Get the current zoom. This is the zoom relative to the initial
     * scale, not the original resource.
     * @return current zoom multiplier.
     */
    public float getCurrentZoom() {
        return normalizedScale;
    }

    /**
     * Set the min zoom multiplier. Default value: 1.
     * @param min min zoom multiplier.
     */
    public void setMinZoom(float min) {
        minScale = min;
        superMinScale = SUPER_MIN_MULTIPLIER * minScale;
    }

    /**
     * Reset zoom and translation to initial state.
     */
    public void resetZoom() {
        normalizedScale = 1;
        fitImageToView();
    }

    /**
     * Set zoom to the specified scale. Image will be centered by default.
     * @param scale
     */
    public void setZoom(float scale) {
        setZoom(scale, 0.5f, 0.5f);
    }

    /**
     * Set zoom to the specified scale. Image will be centered around the point
     * (focusX, focusY). These floats range from 0 to 1 and denote the focus point
     * as a fraction from the left and top of the view. For example, the top left
     * corner of the image would be (0, 0). And the bottom right corner would be (1, 1).
     * @param scale
     * @param focusX
     * @param focusY
     */
    public void setZoom(float scale, float focusX, float focusY) {
        setZoom(scale, focusX, focusY, mScaleType);
    }

    /**
     * Set zoom to the specified scale. Image will be centered around the point
     * (focusX, focusY). These floats range from 0 to 1 and denote the focus point
     * as a fraction from the left and top of the view. For example, the top left
     * corner of the image would be (0, 0). And the bottom right corner would be (1, 1).
     * @param scale
     * @param focusX
     * @param focusY
     * @param scaleType
     */
    public void setZoom(float scale, float focusX, float focusY, ScaleType scaleType) {
        //
        // setZoom can be called before the image is on the screen, but at this point,
        // image and view sizes have not yet been calculated in onMeasure. Thus, we should
        // delay calling setZoom until the view has been measured.
        //
        if (!onDrawReady) {
            delayedZoomVariables = new ZoomVariables(scale, focusX, focusY, scaleType);
            return;
        }

        if (scaleType != mScaleType) {
            setScaleType(scaleType);
        }
        resetZoom();
        scaleImage(scale, viewWidth / 2, viewHeight / 2, true);
        // Translate so the (focusX, focusY) fraction of the image lands at the view centre.
        matrix.getValues(m);
        m[Matrix.MTRANS_X] = -((focusX * getImageWidth()) - (viewWidth * 0.5f));
        m[Matrix.MTRANS_Y] = -((focusY * getImageHeight()) - (viewHeight * 0.5f));
        matrix.setValues(m);
        fixTrans();
        setImageMatrix(matrix);
    }

    /**
     * Set zoom parameters equal to another TouchImageView. Including scale, position,
     * and ScaleType.
     * @param img TouchImageView whose zoom state is copied
     */
    public void setZoom(TouchImageView img) {
        PointF center = img.getScrollPosition();
        setZoom(img.getCurrentZoom(), center.x, center.y, img.getScaleType());
    }

    /**
     * Return the point at the center of the zoomed image. The PointF coordinates range
     * in value between 0 and 1 and the focus point is denoted as a fraction from the left
     * and top of the view. For example, the top left corner of the image would be (0, 0).
     * And the bottom right corner would be (1, 1).
     * @return PointF representing the scroll position of the zoomed image.
     */
    public PointF getScrollPosition() {
        Drawable drawable = getDrawable();
        if (drawable == null) {
            return null;
        }
        int drawableWidth = drawable.getIntrinsicWidth();
        int drawableHeight = drawable.getIntrinsicHeight();

        // Centre of the view expressed in bitmap coordinates, then normalised to [0, 1].
        PointF point = transformCoordTouchToBitmap(viewWidth / 2, viewHeight / 2, true);
        point.x /= drawableWidth;
        point.y /= drawableHeight;
        return point;
    }

    /**
     * Set the focus point of the zoomed image. The focus points are denoted as a fraction from the
     * left and top of the view. The focus points can range in value between 0 and 1.
     * @param focusX
     * @param focusY
     */
    public void setScrollPosition(float focusX, float focusY) {
        setZoom(normalizedScale, focusX, focusY);
    }

    /**
     * Performs boundary checking and fixes the image matrix if it
     * is out of bounds.
     */
    private void fixTrans() {
        matrix.getValues(m);
        float transX = m[Matrix.MTRANS_X];
        float transY = m[Matrix.MTRANS_Y];

        float fixTransX = getFixTrans(transX, viewWidth, getImageWidth());
        float fixTransY = getFixTrans(transY, viewHeight, getImageHeight());

        if (fixTransX != 0 || fixTransY != 0) {
            matrix.postTranslate(fixTransX, fixTransY);
        }
    }

    /**
     * When transitioning from zooming from focus to zoom from center (or vice versa)
     * the image can become unaligned within the view. This is apparent when zooming
     * quickly. When the content size is less than the view size, the content will often
     * be centered incorrectly within the view. fixScaleTrans first calls fixTrans() and
     * then makes sure the image is centered correctly within the view.
     */
    private void fixScaleTrans() {
        fixTrans();
        matrix.getValues(m);
        // Centre the image along any axis where it is smaller than the view.
        if (getImageWidth() < viewWidth) {
            m[Matrix.MTRANS_X] = (viewWidth - getImageWidth()) / 2;
        }

        if (getImageHeight() < viewHeight) {
            m[Matrix.MTRANS_Y] = (viewHeight - getImageHeight()) / 2;
        }
        matrix.setValues(m);
    }

    /**
     * Compute the correction needed to bring a translation back inside the legal
     * range for the given view/content sizes; 0 means the translation is already legal.
     */
    private float getFixTrans(float trans, float viewSize, float contentSize) {
        float minTrans, maxTrans;

        if (contentSize <= viewSize) {
            minTrans = 0;
            maxTrans = viewSize - contentSize;
        } else {
            minTrans = viewSize - contentSize;
            maxTrans = 0;
        }

        if (trans < minTrans)
            return -trans + minTrans;
        if (trans > maxTrans)
            return -trans + maxTrans;
        return 0;
    }

    // Dragging is a no-op along an axis where the content fits inside the view.
    private float getFixDragTrans(float delta, float viewSize, float contentSize) {
        if (contentSize <= viewSize) {
            return 0;
        }
        return delta;
    }

    // Current on-screen image size = fitted ("match") size times the zoom factor.
    private float getImageWidth() {
        return matchViewWidth * normalizedScale;
    }

    private float getImageHeight() {
        return matchViewHeight * normalizedScale;
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        Drawable drawable = getDrawable();
        if (drawable == null || drawable.getIntrinsicWidth() == 0 || drawable.getIntrinsicHeight() == 0) {
            // No drawable yet: measure as 0x0 rather than dividing by zero later.
            setMeasuredDimension(0, 0);
            return;
        }

        int drawableWidth = drawable.getIntrinsicWidth();
        int drawableHeight = drawable.getIntrinsicHeight();
        int widthSize = MeasureSpec.getSize(widthMeasureSpec);
        int widthMode = MeasureSpec.getMode(widthMeasureSpec);
        int heightSize = MeasureSpec.getSize(heightMeasureSpec);
        int heightMode = MeasureSpec.getMode(heightMeasureSpec);
        viewWidth = setViewSize(widthMode, widthSize, drawableWidth);
        viewHeight = setViewSize(heightMode, heightSize, drawableHeight);

        //
        // Set view dimensions
        //
        setMeasuredDimension(viewWidth, viewHeight);

        //
        // Fit content within view
        //
        fitImageToView();
    }

    /**
     * If the normalizedScale is equal to 1, then the image is made to fit the screen. Otherwise,
     * it is made to fit the screen according to the dimensions of the previous image matrix. This
     * allows the image to maintain its zoom after rotation.
     */
    private void fitImageToView() {
        Drawable drawable = getDrawable();
        if (drawable == null || drawable.getIntrinsicWidth() == 0 || drawable.getIntrinsicHeight() == 0) {
            return;
        }
        if (matrix == null || prevMatrix == null) {
            return;
        }

        int drawableWidth = drawable.getIntrinsicWidth();
        int drawableHeight = drawable.getIntrinsicHeight();

        //
        // Scale image for view
        //
        float scaleX = (float) viewWidth / drawableWidth;
        float scaleY = (float) viewHeight / drawableHeight;

        switch (mScaleType) {
            case CENTER:
                scaleX = scaleY = 1;
                break;

            case CENTER_INSIDE:
                scaleX = scaleY = Math.min(1, Math.min(scaleX, scaleY));
                // NOTE(review): no break here — falls through into FIT_CENTER. The second
                // min() is a no-op once scaleX == scaleY, so behaviour matches upstream,
                // but confirm the fall-through is intentional before adding a break.

            case FIT_CENTER:
                scaleX = scaleY = Math.min(scaleX, scaleY);
                break;

            case CENTER_CROP:
                scaleX = scaleY = Math.max(scaleX, scaleY);
                break;

            case FIT_XY:
                break;

            default:
                //
                // FIT_START and FIT_END not supported
                //
                throw new UnsupportedOperationException("TouchImageView does not support FIT_START or FIT_END");
        }

        //
        // Center the image
        //
        float redundantXSpace = viewWidth - (scaleX * drawableWidth);
        float redundantYSpace = viewHeight - (scaleY * drawableHeight);
        matchViewWidth = viewWidth - redundantXSpace;
        matchViewHeight = viewHeight - redundantYSpace;
        if (!isZoomed() && !imageRenderedAtLeastOnce) {
            //
            // Stretch and center image to fit view
            //
            matrix.setScale(scaleX, scaleY);
            matrix.postTranslate(redundantXSpace / 2, redundantYSpace / 2);
            normalizedScale = 1;
        } else {
            //
            // These values should never be 0 or we will set viewWidth and viewHeight
            // to NaN in translateMatrixAfterRotate. To avoid this, call savePreviousImageValues
            // to set them equal to the current values.
            //
            if (prevMatchViewWidth == 0 || prevMatchViewHeight == 0) {
                savePreviousImageValues();
            }

            prevMatrix.getValues(m);

            //
            // Rescale Matrix after rotation
            //
            m[Matrix.MSCALE_X] = matchViewWidth / drawableWidth * normalizedScale;
            m[Matrix.MSCALE_Y] = matchViewHeight / drawableHeight * normalizedScale;

            //
            // TransX and TransY from previous matrix
            //
            float transX = m[Matrix.MTRANS_X];
            float transY = m[Matrix.MTRANS_Y];

            //
            // Width
            //
            float prevActualWidth = prevMatchViewWidth * normalizedScale;
            float actualWidth = getImageWidth();
            translateMatrixAfterRotate(Matrix.MTRANS_X, transX, prevActualWidth, actualWidth, prevViewWidth, viewWidth, drawableWidth);

            //
            // Height
            //
            float prevActualHeight = prevMatchViewHeight * normalizedScale;
            float actualHeight = getImageHeight();
            translateMatrixAfterRotate(Matrix.MTRANS_Y, transY, prevActualHeight, actualHeight, prevViewHeight, viewHeight, drawableHeight);

            //
            // Set the matrix to the adjusted scale and translate values.
            //
            matrix.setValues(m);
        }
        fixTrans();
        setImageMatrix(matrix);
    }

    /**
     * Set view dimensions based on layout params
     *
     * @param mode MeasureSpec mode (EXACTLY, AT_MOST, UNSPECIFIED)
     * @param size size offered by the parent for this axis
     * @param drawableWidth intrinsic drawable size on this axis
     * @return resolved view size for this axis
     */
    private int setViewSize(int mode, int size, int drawableWidth) {
        int viewSize;
        switch (mode) {
            case MeasureSpec.EXACTLY:
                viewSize = size;
                break;

            case MeasureSpec.AT_MOST:
                viewSize = Math.min(drawableWidth, size);
                break;

            case MeasureSpec.UNSPECIFIED:
                viewSize = drawableWidth;
                break;

            default:
                viewSize = size;
                break;
        }
        return viewSize;
    }

    /**
     * After rotating, the matrix needs to be translated. This function finds the area of image
     * which was previously centered and adjusts translations so that is again the center, post-rotation.
     *
     * @param axis Matrix.MTRANS_X or Matrix.MTRANS_Y
     * @param trans the value of trans in that axis before the rotation
     * @param prevImageSize the width/height of the image before the rotation
     * @param imageSize width/height of the image after rotation
     * @param prevViewSize width/height of view before rotation
     * @param viewSize width/height of view after rotation
     * @param drawableSize width/height of drawable
     */
    private void translateMatrixAfterRotate(int axis, float trans, float prevImageSize, float imageSize, int prevViewSize, int viewSize, int drawableSize) {
        if (imageSize < viewSize) {
            //
            // The width/height of image is less than the view's width/height. Center it.
            //
            // NOTE(review): MSCALE_X is used for both axes; valid only while this class keeps
            // MSCALE_X == MSCALE_Y (see the matrix field comment) — confirm before reusing.
            m[axis] = (viewSize - (drawableSize * m[Matrix.MSCALE_X])) * 0.5f;
        } else if (trans > 0) {
            //
            // The image is larger than the view, but was not before rotation. Center it.
            //
            m[axis] = -((imageSize - viewSize) * 0.5f);
        } else {
            //
            // Find the area of the image which was previously centered in the view. Determine its distance
            // from the left/top side of the view as a fraction of the entire image's width/height. Use that percentage
            // to calculate the trans in the new view width/height.
            //
            float percentage = (Math.abs(trans) + (0.5f * prevViewSize)) / prevImageSize;
            m[axis] = -((percentage * imageSize) - (viewSize * 0.5f));
        }
    }

    private void setState(State state) {
        this.state = state;
    }

    // Pre-ICS helper kept for source compatibility with older callers.
    public boolean canScrollHorizontallyFroyo(int direction) {
        return canScrollHorizontally(direction);
    }

    @Override
    public boolean canScrollHorizontally(int direction) {
        matrix.getValues(m);
        float x = m[Matrix.MTRANS_X];

        if (getImageWidth() < viewWidth) {
            // Image fits inside the view: nothing to scroll.
            return false;
        } else if (x >= -1 && direction < 0) {
            // Already at the left edge (1px tolerance).
            return false;
        } else if (Math.abs(x) + viewWidth + 1 >= getImageWidth() && direction > 0) {
            // Already at the right edge (1px tolerance).
            return false;
        }

        return true;
    }

    /**
     * Gesture Listener detects a single click or long click and passes that on
     * to the view's listener.
     * @author Ortiz
     */
    private class GestureListener extends GestureDetector.SimpleOnGestureListener {

        @Override
        public boolean onSingleTapConfirmed(MotionEvent e) {
            if (doubleTapListener != null) {
                return doubleTapListener.onSingleTapConfirmed(e);
            }
            return performClick();
        }

        @Override
        public void onLongPress(MotionEvent e) {
            performLongClick();
        }

        @Override
        public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) {
            if (fling != null) {
                //
                // If a previous fling is still active, it should be cancelled so that two flings
                // are not run simultaneously.
                //
                fling.cancelFling();
            }
            fling = new Fling((int) velocityX, (int) velocityY);
            compatPostOnAnimation(fling);
            return super.onFling(e1, e2, velocityX, velocityY);
        }

        @Override
        public boolean onDoubleTap(MotionEvent e) {
            boolean consumed = false;
            if (doubleTapListener != null) {
                consumed = doubleTapListener.onDoubleTap(e);
            }
            if (state == State.NONE) {
                // Toggle between min and max zoom, animated around the tap point.
                float targetZoom = (normalizedScale == minScale) ? maxScale : minScale;
                DoubleTapZoom doubleTap = new DoubleTapZoom(targetZoom, e.getX(), e.getY(), false);
                compatPostOnAnimation(doubleTap);
                consumed = true;
            }
            return consumed;
        }

        @Override
        public boolean onDoubleTapEvent(MotionEvent e) {
            if (doubleTapListener != null) {
                return doubleTapListener.onDoubleTapEvent(e);
            }
            return false;
        }
    }

    // Callback fired whenever the image is moved or zoomed by any means.
    public interface OnTouchImageViewListener {
        void onMove();
    }

    /**
     * Responsible for all touch events. Handles the heavy lifting of drag and also sends
     * touch events to Scale Detector and Gesture Detector.
     * @author Ortiz
     */
    private class PrivateOnTouchListener implements OnTouchListener {

        //
        // Remember last point position for dragging
        //
        private PointF last = new PointF();

        @Override
        public boolean onTouch(View v, MotionEvent event) {
            // Both detectors always see the event; drag handling below runs alongside them.
            mScaleDetector.onTouchEvent(event);
            mGestureDetector.onTouchEvent(event);
            PointF curr = new PointF(event.getX(), event.getY());

            if (state == State.NONE || state == State.DRAG || state == State.FLING) {
                switch (event.getAction()) {
                    case MotionEvent.ACTION_DOWN:
                        last.set(curr);
                        if (fling != null)
                            fling.cancelFling();
                        setState(State.DRAG);
                        break;

                    case MotionEvent.ACTION_MOVE:
                        if (state == State.DRAG) {
                            float deltaX = curr.x - last.x;
                            float deltaY = curr.y - last.y;
                            // Clamp drag deltas so the image cannot be dragged off-axis
                            // when it already fits the view in that dimension.
                            float fixTransX = getFixDragTrans(deltaX, viewWidth, getImageWidth());
                            float fixTransY = getFixDragTrans(deltaY, viewHeight, getImageHeight());
                            matrix.postTranslate(fixTransX, fixTransY);
                            fixTrans();
                            last.set(curr.x, curr.y);
                        }
                        break;

                    case MotionEvent.ACTION_UP:
                    case MotionEvent.ACTION_POINTER_UP:
                        setState(State.NONE);
                        break;
                }
            }

            setImageMatrix(matrix);

            //
            // User-defined OnTouchListener
            //
            if (userTouchListener != null) {
                userTouchListener.onTouch(v, event);
            }

            //
            // OnTouchImageViewListener is set: TouchImageView dragged by user.
            //
            if (touchImageViewListener != null) {
                touchImageViewListener.onMove();
            }

            //
            // indicate event was handled
            //
            return true;
        }
    }

    /**
     * ScaleListener detects user two finger scaling and scales image.
     * @author Ortiz
     */
    private class ScaleListener extends ScaleGestureDetector.SimpleOnScaleGestureListener {

        @Override
        public boolean onScaleBegin(ScaleGestureDetector detector) {
            setState(State.ZOOM);
            return true;
        }

        @Override
        public boolean onScale(ScaleGestureDetector detector) {
            scaleImage(detector.getScaleFactor(), detector.getFocusX(), detector.getFocusY(), true);

            //
            // OnTouchImageViewListener is set: TouchImageView pinch zoomed by user.
            //
            if (touchImageViewListener != null) {
                touchImageViewListener.onMove();
            }
            return true;
        }

        @Override
        public void onScaleEnd(ScaleGestureDetector detector) {
            super.onScaleEnd(detector);
            setState(State.NONE);
            boolean animateToZoomBoundary = false;
            float targetZoom = normalizedScale;
            if (normalizedScale > maxScale) {
                targetZoom = maxScale;
                animateToZoomBoundary = true;
            } else if (normalizedScale < minScale) {
                targetZoom = minScale;
                animateToZoomBoundary = true;
            }

            if (animateToZoomBoundary) {
                // Over/under-zoomed past the regular bounds: snap back with an animation.
                DoubleTapZoom doubleTap = new DoubleTapZoom(targetZoom, viewWidth / 2, viewHeight / 2, true);
                compatPostOnAnimation(doubleTap);
            }
        }
    }

    /**
     * Apply deltaScale about the given focus point, clamping normalizedScale to the
     * regular or "super" zoom range depending on stretchImageToSuper.
     */
    private void scaleImage(double deltaScale, float focusX, float focusY, boolean stretchImageToSuper) {

        float lowerScale, upperScale;
        if (stretchImageToSuper) {
            lowerScale = superMinScale;
            upperScale = superMaxScale;
        } else {
            lowerScale = minScale;
            upperScale = maxScale;
        }

        float origScale = normalizedScale;
        normalizedScale *= deltaScale;
        if (normalizedScale > upperScale) {
            normalizedScale = upperScale;
            // Recompute the delta so the matrix lands exactly on the clamped scale.
            deltaScale = upperScale / origScale;
        } else if (normalizedScale < lowerScale) {
            normalizedScale = lowerScale;
            deltaScale = lowerScale / origScale;
        }

        matrix.postScale((float) deltaScale, (float) deltaScale, focusX, focusY);
        fixScaleTrans();
    }

    /**
     * DoubleTapZoom calls a series of runnables which apply
     * an animated zoom in/out graphic to the image.
     * @author Ortiz
     */
    private class DoubleTapZoom implements Runnable {

        private long startTime;
        // Total duration of the zoom animation in milliseconds.
        private static final float ZOOM_TIME = 500;
        private float startZoom, targetZoom;
        // Focus point of the zoom, in bitmap coordinates.
        private float bitmapX, bitmapY;
        private boolean stretchImageToSuper;
        private AccelerateDecelerateInterpolator interpolator = new AccelerateDecelerateInterpolator();
        private PointF startTouch;
        private PointF endTouch;

        DoubleTapZoom(float targetZoom, float focusX, float focusY, boolean stretchImageToSuper) {
            setState(State.ANIMATE_ZOOM);
            startTime = System.currentTimeMillis();
            this.startZoom = normalizedScale;
            this.targetZoom = targetZoom;
            this.stretchImageToSuper = stretchImageToSuper;
            PointF bitmapPoint = transformCoordTouchToBitmap(focusX, focusY, false);
            this.bitmapX = bitmapPoint.x;
            this.bitmapY = bitmapPoint.y;

            //
            // Used for translating image during scaling
            //
            startTouch = transformCoordBitmapToTouch(bitmapX, bitmapY);
            endTouch = new PointF(viewWidth / 2, viewHeight / 2);
        }

        @Override
        public void run() {
            // One animation frame: scale toward the target, drift the focus point
            // toward the view centre, then repost until t reaches 1.
            float t = interpolate();
            double deltaScale = calculateDeltaScale(t);
            scaleImage(deltaScale, bitmapX, bitmapY, stretchImageToSuper);
            translateImageToCenterTouchPosition(t);
            fixScaleTrans();
            setImageMatrix(matrix);

            //
            // OnTouchImageViewListener is set: double tap runnable updates listener
            // with every frame.
            //
            if (touchImageViewListener != null) {
                touchImageViewListener.onMove();
            }

            if (t < 1f) {
                //
                // We haven't finished zooming
                //
                compatPostOnAnimation(this);
            } else {
                //
                // Finished zooming
                //
                setState(State.NONE);
            }
        }

        /**
         * Interpolate between where the image should start and end in order to translate
         * the image so that the point that is touched is what ends up centered at the end
         * of the zoom.
         * @param t animation progress in [0, 1]
         */
        private void translateImageToCenterTouchPosition(float t) {
            float targetX = startTouch.x + t * (endTouch.x - startTouch.x);
            float targetY = startTouch.y + t * (endTouch.y - startTouch.y);
            PointF curr = transformCoordBitmapToTouch(bitmapX, bitmapY);
            matrix.postTranslate(targetX - curr.x, targetY - curr.y);
        }

        /**
         * Use interpolator to get t
         * @return interpolated animation progress in [0, 1]
         */
        private float interpolate() {
            long currTime = System.currentTimeMillis();
            float elapsed = (currTime - startTime) / ZOOM_TIME;
            elapsed = Math.min(1f, elapsed);
            return interpolator.getInterpolation(elapsed);
        }

        /**
         * Interpolate the current targeted zoom and get the delta
         * from the current zoom.
         * @param t animation progress in [0, 1]
         * @return multiplicative scale delta that moves normalizedScale to the interpolated zoom
         */
        private double calculateDeltaScale(float t) {
            double zoom = startZoom + t * (targetZoom - startZoom);
            return zoom / normalizedScale;
        }
    }

    /**
     * This function will transform the coordinates in the touch event to the coordinate
     * system of the drawable that the imageview contain
     * @param x x-coordinate of touch event
     * @param y y-coordinate of touch event
     * @param clipToBitmap Touch event may occur within view, but outside image content. True, to clip return value
     * to the bounds of the bitmap size.
     * @return Coordinates of the point touched, in the coordinate system of the original drawable.
     */
    private PointF transformCoordTouchToBitmap(float x, float y, boolean clipToBitmap) {
        matrix.getValues(m);
        float origW = getDrawable().getIntrinsicWidth();
        float origH = getDrawable().getIntrinsicHeight();
        float transX = m[Matrix.MTRANS_X];
        float transY = m[Matrix.MTRANS_Y];
        float finalX = ((x - transX) * origW) / getImageWidth();
        float finalY = ((y - transY) * origH) / getImageHeight();

        if (clipToBitmap) {
            finalX = Math.min(Math.max(finalX, 0), origW);
            finalY = Math.min(Math.max(finalY, 0), origH);
        }

        return new PointF(finalX , finalY);
    }

    /**
     * Inverse of transformCoordTouchToBitmap. This function will transform the coordinates in the
     * drawable's coordinate system to the view's coordinate system.
* @param bx x-coordinate in original bitmap coordinate system * @param by y-coordinate in original bitmap coordinate system * @return Coordinates of the point in the view's coordinate system. */ private PointF transformCoordBitmapToTouch(float bx, float by) { matrix.getValues(m); float origW = getDrawable().getIntrinsicWidth(); float origH = getDrawable().getIntrinsicHeight(); float px = bx / origW; float py = by / origH; float finalX = m[Matrix.MTRANS_X] + getImageWidth() * px; float finalY = m[Matrix.MTRANS_Y] + getImageHeight() * py; return new PointF(finalX , finalY); } /** * Fling launches sequential runnables which apply * the fling graphic to the image. The values for the translation * are interpolated by the Scroller. * @author Ortiz * */ private class Fling implements Runnable { CompatScroller scroller; int currX, currY; Fling(int velocityX, int velocityY) { setState(State.FLING); scroller = new CompatScroller(context); matrix.getValues(m); int startX = (int) m[Matrix.MTRANS_X]; int startY = (int) m[Matrix.MTRANS_Y]; int minX, maxX, minY, maxY; if (getImageWidth() > viewWidth) { minX = viewWidth - (int) getImageWidth(); maxX = 0; } else { minX = maxX = startX; } if (getImageHeight() > viewHeight) { minY = viewHeight - (int) getImageHeight(); maxY = 0; } else { minY = maxY = startY; } scroller.fling(startX, startY, velocityX, velocityY, minX, maxX, minY, maxY); currX = startX; currY = startY; } public void cancelFling() { if (scroller != null) { setState(State.NONE); scroller.forceFinished(true); } } @Override public void run() { // // OnTouchImageViewListener is set: TouchImageView listener has been flung by user. // Listener runnable updated with each frame of fling animation. 
// if (touchImageViewListener != null) { touchImageViewListener.onMove(); } if (scroller.isFinished()) { scroller = null; return; } if (scroller.computeScrollOffset()) { int newX = scroller.getCurrX(); int newY = scroller.getCurrY(); int transX = newX - currX; int transY = newY - currY; currX = newX; currY = newY; matrix.postTranslate(transX, transY); fixTrans(); setImageMatrix(matrix); compatPostOnAnimation(this); } } } @TargetApi(Build.VERSION_CODES.GINGERBREAD) private class CompatScroller { Scroller scroller; OverScroller overScroller; boolean isPreGingerbread; public CompatScroller(Context context) { if (VERSION.SDK_INT < VERSION_CODES.GINGERBREAD) { isPreGingerbread = true; scroller = new Scroller(context); } else { isPreGingerbread = false; overScroller = new OverScroller(context); } } public void fling(int startX, int startY, int velocityX, int velocityY, int minX, int maxX, int minY, int maxY) { if (isPreGingerbread) { scroller.fling(startX, startY, velocityX, velocityY, minX, maxX, minY, maxY); } else { overScroller.fling(startX, startY, velocityX, velocityY, minX, maxX, minY, maxY); } } public void forceFinished(boolean finished) { if (isPreGingerbread) { scroller.forceFinished(finished); } else { overScroller.forceFinished(finished); } } public boolean isFinished() { if (isPreGingerbread) { return scroller.isFinished(); } else { return overScroller.isFinished(); } } public boolean computeScrollOffset() { if (isPreGingerbread) { return scroller.computeScrollOffset(); } else { overScroller.computeScrollOffset(); return overScroller.computeScrollOffset(); } } public int getCurrX() { if (isPreGingerbread) { return scroller.getCurrX(); } else { return overScroller.getCurrX(); } } public int getCurrY() { if (isPreGingerbread) { return scroller.getCurrY(); } else { return overScroller.getCurrY(); } } } @TargetApi(Build.VERSION_CODES.JELLY_BEAN) private void compatPostOnAnimation(Runnable runnable) { if (VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN) { 
postOnAnimation(runnable); } else { postDelayed(runnable, 1000/60); } } private class ZoomVariables { public float scale; public float focusX; public float focusY; public ScaleType scaleType; public ZoomVariables(float scale, float focusX, float focusY, ScaleType scaleType) { this.scale = scale; this.focusX = focusX; this.focusY = focusY; this.scaleType = scaleType; } } private void printMatrixInfo() { float[] n = new float[9]; matrix.getValues(n); Log.d(DEBUG, "Scale: " + n[Matrix.MSCALE_X] + " TransX: " + n[Matrix.MTRANS_X] + " TransY: " + n[Matrix.MTRANS_Y]); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package mj.ocraptor.extraction.tika.parser.chm.lzx;

import java.math.BigInteger;
import java.util.Arrays;

import mj.ocraptor.extraction.tika.parser.chm.core.ChmCommons;

import org.apache.tika.exception.TikaException;

/**
 * A stateful reader over a byte array holding one CHM section, used by the
 * LZX decompressor. It supports byte-wise reads ({@link #getByte()}),
 * multi-byte integer unmarshalling via {@link #getBigInteger(int)}, and a
 * bit-level view ({@link #getSyncBits(int)} / {@link #getDesyncBits(int, int)})
 * backed by a 32-bit buffer that is refilled 16 bits at a time.
 *
 * <p>NOTE(review): this class is NOT thread-safe — the read position and the
 * bit buffer are mutable instance state.
 */
public class ChmSection {
  // Raw bytes of the section being consumed.
  private byte[] data;
  // Read position: index of the next byte to consume from data ("kiks" in the original).
  private int swath;// kiks
  // Number of valid bits currently held in the bit buffer ("remains" in the original).
  private int total;// remains
  // Bit accumulator for getSyncBits/getDesyncBits ("val" in the original).
  private int buffer;// val

  /**
   * Wraps the given byte array; the array is kept by reference, not copied.
   *
   * @param data section bytes, must not be null
   * @throws TikaException if data is null
   */
  public ChmSection(byte[] data) throws TikaException {
    ChmCommons.assertByteArrayNotNull(data);
    setData(data);
  }

  /* Utilities */

  /**
   * Reverses the given array IN PLACE and returns the same array instance.
   *
   * @param toBeReversed array to reverse, must not be null
   * @return the same array, now reversed
   * @throws TikaException if the array is null
   */
  public byte[] reverseByteOrder(byte[] toBeReversed) throws TikaException {
    ChmCommons.assertByteArrayNotNull(toBeReversed);
    ChmCommons.reverse(toBeReversed);
    return toBeReversed;
  }

  /**
   * Tests bit number {@code i} of the bit buffer, counted from the most
   * significant end of the currently buffered bits (i = 1 is the next bit
   * that would be consumed).
   *
   * @param i 1-based bit position within the buffered bits
   * @return 1 if the bit is set, 0 otherwise
   */
  public int checkBit(int i) {
    return ((getBuffer() & (1 << (getTotal() - i))) == 0) ? 0 : 1;
  }

  /**
   * Reads and consumes {@code bit} bits from the bit stream.
   *
   * @param bit number of bits to read (and remove)
   * @return the bits read, right-aligned in an int
   */
  public int getSyncBits(int bit) {
    return getDesyncBits(bit, bit);
  }

  /**
   * Peeks {@code bit} bits from the bit stream while consuming only
   * {@code removeBit} bits (peek when removeBit &lt; bit). The buffer is
   * refilled 16 bits at a time; note the two data bytes are swapped on
   * refill (low byte first, then high byte shifted left by 8), matching the
   * LZX little-endian 16-bit word layout.
   *
   * @param bit number of bits to return
   * @param removeBit number of bits to actually consume
   * @return the {@code bit} most significant buffered bits, right-aligned
   */
  public int getDesyncBits(int bit, int removeBit) {
    while (getTotal() < 16) {
      setBuffer((getBuffer() << 16) + unmarshalUByte() + (unmarshalUByte() << 8));
      setTotal(getTotal() + 16);
    }
    int tmp = (getBuffer() >>> (getTotal() - bit));
    setTotal(getTotal() - removeBit);
    // Clear the consumed high bits so only getTotal() bits remain in the buffer.
    setBuffer(getBuffer() - ((getBuffer() >>> getTotal()) << getTotal()));
    return tmp;
  }

  /**
   * Reads the next byte as an unsigned value (0..255).
   *
   * @return the next byte, zero-extended; 0 when past the end of data
   */
  public int unmarshalUByte() {
    return getByte() & 255;
  }

  /**
   * Reads the next (signed) byte and advances the read position.
   * Returns 0 once the end of the data is reached (no exception is thrown).
   *
   * @return next byte, or 0 at end of data
   */
  public byte getByte() {
    if (getSwath() < getData().length) {
      setSwath(getSwath() + 1);
      return getData()[getSwath() - 1];
    } else
      return 0;
  }

  /**
   * @return number of bytes remaining to be read
   */
  public int getLeft() {
    return (getData().length - getSwath());
  }

  /** @return the underlying byte array (by reference) */
  public byte[] getData() {
    return data;
  }

  /**
   * Reads up to {@code i} bytes, interpreting them as a LITTLE-endian
   * unsigned-style integer: the bytes are copied in reverse order before
   * being handed to BigInteger (which expects big-endian). If fewer than
   * {@code i} bytes remain, only the remaining bytes are used.
   *
   * @param i number of bytes to read
   * @return the decoded value; BigInteger.ZERO when data is null
   */
  public BigInteger getBigInteger(int i) {
    if (getData() == null)
      return BigInteger.ZERO;
    if (getData().length - getSwath() < i)
      i = getData().length - getSwath();
    byte[] tmp = new byte[i];
    for (int j = i - 1; j >= 0; j--) {
      tmp[i - j - 1] = getData()[getSwath() + j];
    }
    setSwath(getSwath() + i);
    return new BigInteger(tmp);
  }

  /**
   * Converts a string to bytes by casting each char to a byte.
   * NOTE(review): chars above 0xFF are silently truncated by the cast —
   * only safe for ASCII input, as the name suggests.
   *
   * @param s input string
   * @return one byte per character
   */
  public byte[] stringToAsciiBytes(String s) {
    char[] c = s.toCharArray();
    byte[] byteval = new byte[c.length];
    for (int i = 0; i < c.length; i++)
      byteval[i] = (byte) c[i];
    return byteval;
  }

  /** Reads an 8-byte little-endian value. @return the decoded value */
  public BigInteger unmarshalUlong() {
    return getBigInteger(8);
  }

  /** Reads a 4-byte little-endian value widened to long. @return the decoded value */
  public long unmarshalUInt() {
    return getBigInteger(4).longValue();
  }

  /** Reads a 4-byte little-endian value narrowed to int. @return the decoded value */
  public int unmarshalInt() {
    return getBigInteger(4).intValue();
  }

  /**
   * Reads {@code i} raw bytes in order and advances the read position.
   * NOTE(review): for i == 0 this returns a 1-element zero array rather than
   * an empty array — callers appear to depend on this quirk; confirm before
   * changing.
   *
   * @param i number of bytes to read
   * @return the bytes read
   */
  public byte[] unmarshalBytes(int i) {
    if (i == 0)
      return new byte[1];
    byte[] t = new byte[i];
    for (int j = 0; j < i; j++)
      t[j] = getData()[j + getSwath()];
    setSwath(getSwath() + i);
    return t;
  }

  /**
   * Decodes a CHM ENCINT: a big-endian variable-length integer carrying
   * 7 data bits per byte, where a set high bit (byte &lt; 0) marks a
   * continuation byte. The final byte (high bit clear) is folded in after
   * the loop.
   *
   * @return the decoded non-negative value
   */
  public BigInteger getEncint() {
    byte ob;
    BigInteger bi = BigInteger.ZERO;
    byte[] nb = new byte[1];
    while ((ob = this.getByte()) < 0) {
      nb[0] = (byte) ((ob & 0x7f));
      bi = bi.shiftLeft(7).add(new BigInteger(nb));
    }
    nb[0] = (byte) ((ob & 0x7f));
    bi = bi.shiftLeft(7).add(new BigInteger(nb));
    return bi;
  }

  /**
   * Decodes one UTF-8 encoded character from the stream. A negative lead
   * byte signals a multi-byte sequence; the length is derived from the run
   * of high bits in the lead byte, then the continuation bytes' low 6 bits
   * are accumulated.
   *
   * @return the decoded character
   */
  public char unmarshalUtfChar() {
    byte ob;
    int i = 1;
    byte[] ba;
    ob = this.getByte();
    if (ob < 0) {
      i = 2;
      while ((ob << (24 + i)) < 0)
        i++;
    }
    ba = new byte[i];
    ba[0] = ob;
    int j = 1;
    while (j < i) {
      ba[j] = this.getByte();
      j++;
    }
    i = ba.length;
    if (i == 1)
      return (char) ba[0];
    else {
      int n;
      n = ba[0] & 15; // 00001111b, keeps the payload bits of the lead byte
      j = 1;
      while (j < i)
        n = (n << 6) + (ba[j++] & 63);// 00111111b, low 6 bits of each continuation byte
      return (char) n;
    }
  }

  private void setData(byte[] data) {
    this.data = data;
  }

  /** @return current read position (index of the next byte to consume) */
  public int getSwath() {
    return swath;
  }

  public void setSwath(int swath) {
    this.swath = swath;
  }

  /** @return number of bits currently buffered for bit-level reads */
  public int getTotal() {
    return total;
  }

  public void setTotal(int total) {
    this.total = total;
  }

  private int getBuffer() {
    return buffer;
  }

  private void setBuffer(int buffer) {
    this.buffer = buffer;
  }

  /**
   * Ad-hoc manual test: reverses a small array in place and prints
   * before/after.
   *
   * @param args unused
   * @throws TikaException never, for this fixed input
   */
  public static void main(String[] args) throws TikaException {
    byte[] array = { 4, 78, -67, 90, 1, -33 };
    ChmSection chmSection = new ChmSection(array);
    System.out.println("before " + Arrays.toString(array));
    System.out.println("after " + Arrays.toString(chmSection.reverseByteOrder(array)));
  }
}
/* First created by JCasGen Mon Mar 03 11:40:36 CST 2014 */
package org.apache.ctakes.typesystem.type.temporary.assertion;

import org.apache.uima.jcas.JCas;
import org.apache.uima.jcas.JCasRegistry;
import org.apache.uima.cas.impl.CASImpl;
import org.apache.uima.cas.impl.FSGenerator;
import org.apache.uima.cas.FeatureStructure;
import org.apache.uima.cas.impl.TypeImpl;
import org.apache.uima.cas.Type;
import org.apache.uima.cas.impl.FeatureImpl;
import org.apache.uima.cas.Feature;
import org.apache.uima.jcas.tcas.Annotation_Type;

/** A cue phrase indicating potential negation, uncertainty, or conditional
 * Updated by JCasGen Fri Mar 07 16:43:05 CST 2014
 * NOTE(review): this file is JCasGen-GENERATED. Do not hand-edit; change the
 * UIMA type system descriptor and regenerate instead, or edits will be lost.
 * @generated */
public class AssertionCuePhraseAnnotation_Type extends Annotation_Type {
  /** @generated
   * @return the generator for this type
   */
  @Override
  protected FSGenerator getFSGenerator() {return fsGenerator;}
  /** @generated */
  private final FSGenerator fsGenerator = new FSGenerator() {
    public FeatureStructure createFS(int addr, CASImpl cas) {
      if (AssertionCuePhraseAnnotation_Type.this.useExistingInstance) {
        // Return eq fs instance if already created
        FeatureStructure fs = AssertionCuePhraseAnnotation_Type.this.jcas.getJfsFromCaddr(addr);
        if (null == fs) {
          fs = new AssertionCuePhraseAnnotation(addr, AssertionCuePhraseAnnotation_Type.this);
          AssertionCuePhraseAnnotation_Type.this.jcas.putJfsFromCaddr(addr, fs);
          return fs;
        }
        return fs;
      } else return new AssertionCuePhraseAnnotation(addr, AssertionCuePhraseAnnotation_Type.this);
    }
  };
  /** @generated */
  @SuppressWarnings ("hiding")
  public final static int typeIndexID = AssertionCuePhraseAnnotation.typeIndexID;
  /** @generated
     @modifiable */
  @SuppressWarnings ("hiding")
  public final static boolean featOkTst = JCasRegistry.getFeatOkTst("org.apache.ctakes.typesystem.type.temporary.assertion.AssertionCuePhraseAnnotation");

  /** @generated */
  final Feature casFeat_id;
  /** @generated */
  final int     casFeatCode_id;
  /** @generated
   * @param addr low level Feature Structure reference
   * @return the feature value
   */
  public int getId(int addr) {
        if (featOkTst && casFeat_id == null)
      jcas.throwFeatMissing("id", "org.apache.ctakes.typesystem.type.temporary.assertion.AssertionCuePhraseAnnotation");
    return ll_cas.ll_getIntValue(addr, casFeatCode_id);
  }
  /** @generated
   * @param addr low level Feature Structure reference
   * @param v value to set
   */
  public void setId(int addr, int v) {
        if (featOkTst && casFeat_id == null)
      jcas.throwFeatMissing("id", "org.apache.ctakes.typesystem.type.temporary.assertion.AssertionCuePhraseAnnotation");
    ll_cas.ll_setIntValue(addr, casFeatCode_id, v);}

  /** @generated */
  final Feature casFeat_cuePhraseCategory;
  /** @generated */
  final int     casFeatCode_cuePhraseCategory;
  /** @generated
   * @param addr low level Feature Structure reference
   * @return the feature value
   */
  public String getCuePhraseCategory(int addr) {
        if (featOkTst && casFeat_cuePhraseCategory == null)
      jcas.throwFeatMissing("cuePhraseCategory", "org.apache.ctakes.typesystem.type.temporary.assertion.AssertionCuePhraseAnnotation");
    return ll_cas.ll_getStringValue(addr, casFeatCode_cuePhraseCategory);
  }
  /** @generated
   * @param addr low level Feature Structure reference
   * @param v value to set
   */
  public void setCuePhraseCategory(int addr, String v) {
        if (featOkTst && casFeat_cuePhraseCategory == null)
      jcas.throwFeatMissing("cuePhraseCategory", "org.apache.ctakes.typesystem.type.temporary.assertion.AssertionCuePhraseAnnotation");
    ll_cas.ll_setStringValue(addr, casFeatCode_cuePhraseCategory, v);}

  /** @generated */
  final Feature casFeat_cuePhraseAssertionFamily;
  /** @generated */
  final int     casFeatCode_cuePhraseAssertionFamily;
  /** @generated
   * @param addr low level Feature Structure reference
   * @return the feature value
   */
  public String getCuePhraseAssertionFamily(int addr) {
        if (featOkTst && casFeat_cuePhraseAssertionFamily == null)
      jcas.throwFeatMissing("cuePhraseAssertionFamily", "org.apache.ctakes.typesystem.type.temporary.assertion.AssertionCuePhraseAnnotation");
    return ll_cas.ll_getStringValue(addr, casFeatCode_cuePhraseAssertionFamily);
  }
  /** @generated
   * @param addr low level Feature Structure reference
   * @param v value to set
   */
  public void setCuePhraseAssertionFamily(int addr, String v) {
        if (featOkTst && casFeat_cuePhraseAssertionFamily == null)
      jcas.throwFeatMissing("cuePhraseAssertionFamily", "org.apache.ctakes.typesystem.type.temporary.assertion.AssertionCuePhraseAnnotation");
    ll_cas.ll_setStringValue(addr, casFeatCode_cuePhraseAssertionFamily, v);}

  /** @generated */
  final Feature casFeat_cuePhraseFirstWord;
  /** @generated */
  final int     casFeatCode_cuePhraseFirstWord;
  /** @generated
   * @param addr low level Feature Structure reference
   * @return the feature value
   */
  public String getCuePhraseFirstWord(int addr) {
        if (featOkTst && casFeat_cuePhraseFirstWord == null)
      jcas.throwFeatMissing("cuePhraseFirstWord", "org.apache.ctakes.typesystem.type.temporary.assertion.AssertionCuePhraseAnnotation");
    return ll_cas.ll_getStringValue(addr, casFeatCode_cuePhraseFirstWord);
  }
  /** @generated
   * @param addr low level Feature Structure reference
   * @param v value to set
   */
  public void setCuePhraseFirstWord(int addr, String v) {
        if (featOkTst && casFeat_cuePhraseFirstWord == null)
      jcas.throwFeatMissing("cuePhraseFirstWord", "org.apache.ctakes.typesystem.type.temporary.assertion.AssertionCuePhraseAnnotation");
    ll_cas.ll_setStringValue(addr, casFeatCode_cuePhraseFirstWord, v);}

  /** @generated */
  final Feature casFeat_cuePhrase;
  /** @generated */
  final int     casFeatCode_cuePhrase;
  /** @generated
   * @param addr low level Feature Structure reference
   * @return the feature value
   */
  public String getCuePhrase(int addr) {
        if (featOkTst && casFeat_cuePhrase == null)
      jcas.throwFeatMissing("cuePhrase", "org.apache.ctakes.typesystem.type.temporary.assertion.AssertionCuePhraseAnnotation");
    return ll_cas.ll_getStringValue(addr, casFeatCode_cuePhrase);
  }
  /** @generated
   * @param addr low level Feature Structure reference
   * @param v value to set
   */
  public void setCuePhrase(int addr, String v) {
        if (featOkTst && casFeat_cuePhrase == null)
      jcas.throwFeatMissing("cuePhrase", "org.apache.ctakes.typesystem.type.temporary.assertion.AssertionCuePhraseAnnotation");
    ll_cas.ll_setStringValue(addr, casFeatCode_cuePhrase, v);}

  /** initialize variables to correspond with Cas Type and Features
   * @generated
   * @param jcas JCas
   * @param casType Type
   */
  public AssertionCuePhraseAnnotation_Type(JCas jcas, Type casType) {
    super(jcas, casType);
    casImpl.getFSClassRegistry().addGeneratorForType((TypeImpl)this.casType, getFSGenerator());

    casFeat_id = jcas.getRequiredFeatureDE(casType, "id", "uima.cas.Integer", featOkTst);
    casFeatCode_id  = (null == casFeat_id) ? JCas.INVALID_FEATURE_CODE : ((FeatureImpl)casFeat_id).getCode();

    casFeat_cuePhraseCategory = jcas.getRequiredFeatureDE(casType, "cuePhraseCategory", "uima.cas.String", featOkTst);
    casFeatCode_cuePhraseCategory  = (null == casFeat_cuePhraseCategory) ? JCas.INVALID_FEATURE_CODE : ((FeatureImpl)casFeat_cuePhraseCategory).getCode();

    casFeat_cuePhraseAssertionFamily = jcas.getRequiredFeatureDE(casType, "cuePhraseAssertionFamily", "uima.cas.String", featOkTst);
    casFeatCode_cuePhraseAssertionFamily  = (null == casFeat_cuePhraseAssertionFamily) ? JCas.INVALID_FEATURE_CODE : ((FeatureImpl)casFeat_cuePhraseAssertionFamily).getCode();

    casFeat_cuePhraseFirstWord = jcas.getRequiredFeatureDE(casType, "cuePhraseFirstWord", "uima.cas.String", featOkTst);
    casFeatCode_cuePhraseFirstWord  = (null == casFeat_cuePhraseFirstWord) ? JCas.INVALID_FEATURE_CODE : ((FeatureImpl)casFeat_cuePhraseFirstWord).getCode();

    casFeat_cuePhrase = jcas.getRequiredFeatureDE(casType, "cuePhrase", "uima.cas.String", featOkTst);
    casFeatCode_cuePhrase  = (null == casFeat_cuePhrase) ? JCas.INVALID_FEATURE_CODE : ((FeatureImpl)casFeat_cuePhrase).getCode();

  }
}
/*
 *  Licensed to the Apache Software Foundation (ASF) under one
 *  or more contributor license agreements.  See the NOTICE file
 *  distributed with this work for additional information
 *  regarding copyright ownership.  The ASF licenses this file
 *  to you under the Apache License, Version 2.0 (the
 *  "License"); you may not use this file except in compliance
 *  with the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing,
 *  software distributed under the License is distributed on an
 *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 *  KIND, either express or implied.  See the License for the
 *  specific language governing permissions and limitations
 *  under the License.
 */
package org.codehaus.groovy.classgen.asm.sc;

import org.codehaus.groovy.ast.ClassHelper;
import org.codehaus.groovy.ast.ClassNode;
import org.codehaus.groovy.ast.FieldNode;
import org.codehaus.groovy.ast.MethodNode;
import org.codehaus.groovy.ast.Parameter;
import org.codehaus.groovy.ast.PropertyNode;
import org.codehaus.groovy.ast.expr.ArgumentListExpression;
import org.codehaus.groovy.ast.expr.AttributeExpression;
import org.codehaus.groovy.ast.expr.BinaryExpression;
import org.codehaus.groovy.ast.expr.CastExpression;
import org.codehaus.groovy.ast.expr.ClassExpression;
import org.codehaus.groovy.ast.expr.ConstantExpression;
import org.codehaus.groovy.ast.expr.ConstructorCallExpression;
import org.codehaus.groovy.ast.expr.DeclarationExpression;
import org.codehaus.groovy.ast.expr.Expression;
import org.codehaus.groovy.ast.expr.LambdaExpression;
import org.codehaus.groovy.ast.expr.MethodCallExpression;
import org.codehaus.groovy.ast.expr.PropertyExpression;
import org.codehaus.groovy.ast.expr.VariableExpression;
import org.codehaus.groovy.ast.stmt.EmptyStatement;
import org.codehaus.groovy.ast.stmt.ExpressionStatement;
import org.codehaus.groovy.ast.stmt.ForStatement;
import org.codehaus.groovy.ast.tools.WideningCategories;
import org.codehaus.groovy.classgen.asm.BinaryExpressionMultiTypeDispatcher;
import org.codehaus.groovy.classgen.asm.BinaryExpressionWriter;
import org.codehaus.groovy.classgen.asm.BytecodeHelper;
import org.codehaus.groovy.classgen.asm.CompileStack;
import org.codehaus.groovy.classgen.asm.OperandStack;
import org.codehaus.groovy.classgen.asm.TypeChooser;
import org.codehaus.groovy.classgen.asm.VariableSlotLoader;
import org.codehaus.groovy.classgen.asm.WriterController;
import org.codehaus.groovy.syntax.Token;
import org.codehaus.groovy.transform.sc.StaticCompilationMetadataKeys;
import org.codehaus.groovy.transform.sc.StaticCompilationVisitor;
import org.codehaus.groovy.transform.stc.StaticTypeCheckingSupport;
import org.codehaus.groovy.transform.stc.StaticTypeCheckingVisitor;
import org.codehaus.groovy.transform.stc.StaticTypesMarker;
import org.objectweb.asm.Label;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;

import java.lang.reflect.Modifier;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicInteger;

import static org.apache.groovy.util.BeanUtils.capitalize;
import static org.codehaus.groovy.ast.ClassHelper.CLOSURE_TYPE;
import static org.codehaus.groovy.ast.ClassHelper.char_TYPE;
import static org.codehaus.groovy.ast.ClassHelper.double_TYPE;
import static org.codehaus.groovy.ast.ClassHelper.float_TYPE;
import static org.codehaus.groovy.ast.ClassHelper.long_TYPE;
import static org.codehaus.groovy.transform.sc.StaticCompilationVisitor.ARRAYLIST_ADD_METHOD;
import static org.codehaus.groovy.transform.sc.StaticCompilationVisitor.ARRAYLIST_CLASSNODE;
import static org.codehaus.groovy.transform.sc.StaticCompilationVisitor.ARRAYLIST_CONSTRUCTOR;
import static org.codehaus.groovy.transform.stc.StaticTypesMarker.INFERRED_LAMBDA_TYPE;
import static org.codehaus.groovy.transform.stc.StaticTypesMarker.INFERRED_TYPE;

/**
 * A specialized version of the multi type binary expression dispatcher which is aware of static compilation.
 * It is able to generate optimized bytecode for some operations using JVM instructions when available.
 *
 * <p>NOTE(review): methods here emit JVM bytecode via ASM and do manual
 * operand-stack bookkeeping ({@code operandStack.pop()/remove(...)}); the
 * statement order is load-bearing — do not reorder emission calls.
 *
 * @author Cedric Champeau
 * @author Jochen Theodorou
 */
public class StaticTypesBinaryExpressionMultiTypeDispatcher extends BinaryExpressionMultiTypeDispatcher implements Opcodes {

    // Monotonic counter used to generate unique label/variable names per rewrite.
    private final AtomicInteger labelCounter = new AtomicInteger();

    // Closure#getThisObject(), used to resolve the owner when compiling inside a closure.
    private static final MethodNode CLOSURE_GETTHISOBJECT_METHOD = CLOSURE_TYPE.getMethod("getThisObject", Parameter.EMPTY_ARRAY);

    public StaticTypesBinaryExpressionMultiTypeDispatcher(WriterController wc) {
        super(wc);
    }

    /**
     * Writes the bytecode for a pre/post increment or decrement ("next"/"previous").
     * If the type checker resolved a direct method target, that method is called;
     * otherwise, for primitive numeric/char operands, a constant-1 add/sub is
     * emitted directly as JVM instructions; anything else falls back to the
     * dynamic superclass implementation.
     */
    @Override
    protected void writePostOrPrefixMethod(int op, String method, Expression expression, Expression orig) {
        MethodNode mn = orig.getNodeMetaData(StaticTypesMarker.DIRECT_METHOD_CALL_TARGET);
        WriterController controller = getController();
        OperandStack operandStack = controller.getOperandStack();
        if (mn!=null) {
            operandStack.pop();
            MethodCallExpression call = new MethodCallExpression(
                    expression,
                    method,
                    ArgumentListExpression.EMPTY_ARGUMENTS
            );
            call.setMethodTarget(mn);
            call.visit(controller.getAcg());
            return;
        }
        ClassNode top = operandStack.getTopOperand();
        if (ClassHelper.isPrimitiveType(top) && (ClassHelper.isNumberType(top)||char_TYPE.equals(top))) {
            MethodVisitor mv = controller.getMethodVisitor();
            // push the constant 1 of the matching primitive width, then add or subtract it
            visitInsnByType(top, mv, ICONST_1, LCONST_1, FCONST_1, DCONST_1);
            if ("next".equals(method)) {
                visitInsnByType(top, mv, IADD, LADD, FADD, DADD);
            } else {
                visitInsnByType(top, mv, ISUB, LSUB, FSUB, DSUB);
            }
            return;
        }
        super.writePostOrPrefixMethod(op, method, expression, orig);
    }

    /**
     * Emits one of four given opcodes depending on the primitive category of
     * {@code top}: int-category/char, long, float or double. Emits nothing for
     * other types.
     */
    private static void visitInsnByType(ClassNode top, MethodVisitor mv, int iInsn, int lInsn, int fInsn, int dInsn) {
        if (WideningCategories.isIntCategory(top) || char_TYPE.equals(top)) {
            mv.visitInsn(iInsn);
        } else if (long_TYPE.equals(top)) {
            mv.visitInsn(lInsn);
        } else if (float_TYPE.equals(top)) {
            mv.visitInsn(fInsn);
        } else if (double_TYPE.equals(top)) {
            mv.visitInsn(dInsn);
        }
    }

    /**
     * Handles assignment ("=") expressions. For plain property assignments it
     * first tries a statically-compiled setter/field write via
     * {@link #makeSetProperty}; for declarations with a lambda RHS it propagates
     * the inferred LHS type onto the lambda; spread-safe assignments on the LHS
     * are rewritten (GROOVY-5620) before falling back to the superclass.
     */
    @Override
    public void evaluateEqual(final BinaryExpression expression, final boolean defineVariable) {
        Expression leftExpression = expression.getLeftExpression();
        if (!defineVariable) {
            if (leftExpression instanceof PropertyExpression) {
                PropertyExpression pexp = (PropertyExpression) leftExpression;
                if (makeSetProperty(
                        pexp.getObjectExpression(),
                        pexp.getProperty(),
                        expression.getRightExpression(),
                        pexp.isSafe(),
                        pexp.isSpreadSafe(),
                        pexp.isImplicitThis(),
                        pexp instanceof AttributeExpression)) return;
            }
        } else {
            Expression rightExpression = expression.getRightExpression();
            if (rightExpression instanceof LambdaExpression) {
                rightExpression.putNodeMetaData(INFERRED_LAMBDA_TYPE, leftExpression.getNodeMetaData(INFERRED_TYPE));
            }
        }
        // GROOVY-5620: Spread safe/Null safe operator on LHS is not supported
        if (leftExpression instanceof PropertyExpression
                && ((PropertyExpression) leftExpression).isSpreadSafe()
                && StaticTypeCheckingSupport.isAssignment(expression.getOperation().getType())) {
            // rewrite it so that it can be statically compiled
            transformSpreadOnLHS(expression);
            return;
        }
        super.evaluateEqual(expression, defineVariable);
    }

    /**
     * Rewrites {@code receiver*.prop = value} into the statically-compilable
     * equivalent of: {@code def r = new ArrayList(); if (receiver != null) for (e in receiver) r.add(e.prop = value); r}
     * and emits it directly (null receiver yields the empty list).
     */
    private void transformSpreadOnLHS(BinaryExpression origin) {
        PropertyExpression spreadExpression = (PropertyExpression) origin.getLeftExpression();
        Expression value = origin.getRightExpression();
        WriterController controller = getController();
        MethodVisitor mv = controller.getMethodVisitor();
        CompileStack compileStack = controller.getCompileStack();
        TypeChooser typeChooser = controller.getTypeChooser();
        OperandStack operandStack = controller.getOperandStack();
        ClassNode classNode = controller.getClassNode();
        int counter = labelCounter.incrementAndGet();
        Expression receiver = spreadExpression.getObjectExpression();

        // create an empty arraylist
        VariableExpression result = new VariableExpression(
                this.getClass().getSimpleName()+"$spreadresult" + counter,
                ARRAYLIST_CLASSNODE
        );
        ConstructorCallExpression cce = new ConstructorCallExpression(ARRAYLIST_CLASSNODE, ArgumentListExpression.EMPTY_ARGUMENTS);
        cce.setNodeMetaData(StaticTypesMarker.DIRECT_METHOD_CALL_TARGET, ARRAYLIST_CONSTRUCTOR);
        DeclarationExpression declr = new DeclarationExpression(
                result,
                Token.newSymbol("=", spreadExpression.getLineNumber(), spreadExpression.getColumnNumber()),
                cce
        );
        declr.visit(controller.getAcg());
        // if (receiver != null)
        receiver.visit(controller.getAcg());
        Label ifnull = compileStack.createLocalLabel("ifnull_" + counter);
        mv.visitJumpInsn(IFNULL, ifnull);
        operandStack.remove(1); // receiver consumed by if()
        Label nonull = compileStack.createLocalLabel("nonull_" + counter);
        mv.visitLabel(nonull);
        ClassNode componentType = StaticTypeCheckingVisitor.inferLoopElementType(typeChooser.resolveType(receiver, classNode));
        Parameter iterator = new Parameter(componentType, "for$it$" + counter);
        VariableExpression iteratorAsVar = new VariableExpression(iterator);
        // preserve attribute (.@) vs. property access semantics from the original LHS
        PropertyExpression pexp = spreadExpression instanceof AttributeExpression ?
                new AttributeExpression(iteratorAsVar, spreadExpression.getProperty(), true):
                new PropertyExpression(iteratorAsVar, spreadExpression.getProperty(), true);
        pexp.setImplicitThis(spreadExpression.isImplicitThis());
        pexp.setSourcePosition(spreadExpression);
        BinaryExpression assignment = new BinaryExpression(
                pexp,
                origin.getOperation(),
                value
        );
        MethodCallExpression add = new MethodCallExpression(
                result,
                "add",
                assignment
        );
        add.setMethodTarget(ARRAYLIST_ADD_METHOD);
        // for (e in receiver) { result.add(e?.method(arguments)) }
        ForStatement stmt = new ForStatement(
                iterator,
                receiver,
                new ExpressionStatement(add)
        );
        stmt.visit(controller.getAcg());
        // else { empty list }
        mv.visitLabel(ifnull);
        // end of if/else
        // return result list
        result.visit(controller.getAcg());
    }

    /**
     * Attempts to compile a property assignment statically. Tries, in order:
     * a direct field write (for attribute access or this-receivers with a
     * declared field, subject to visibility checks), a setter call (creating a
     * synthetic setter node for non-final properties with no explicit setter),
     * and finally a private-field mutator bridge method.
     *
     * @return true if bytecode was emitted; false to let the caller fall back
     */
    private boolean makeSetProperty(final Expression receiver, final Expression message, final Expression arguments, final boolean safe, final boolean spreadSafe, final boolean implicitThis, final boolean isAttribute) {
        WriterController controller = getController();
        TypeChooser typeChooser = controller.getTypeChooser();
        ClassNode receiverType = typeChooser.resolveType(receiver, controller.getClassNode());
        String property = message.getText();
        boolean isThisExpression = receiver instanceof VariableExpression && ((VariableExpression) receiver).isThisExpression();
        if (isAttribute
                || (isThisExpression &&
                    receiverType.getDeclaredField(property)!=null)) {
            // walk up the hierarchy to locate the declaring class of the field
            ClassNode current = receiverType;
            FieldNode fn = null;
            while (fn==null && current!=null) {
                fn = current.getDeclaredField(property);
                if (fn==null){
                    current = current.getSuperClass();
                }
            }
            if (fn!=null && receiverType!=current && !fn.isPublic()) {
                // check that direct access is allowed
                if (!fn.isProtected()) {
                    return false;
                }
                if (!Objects.equals(receiverType.getPackageName(), current.getPackageName())) {
                    return false;
                }
                OperandStack operandStack = controller.getOperandStack();
                MethodVisitor mv = controller.getMethodVisitor();
                if (!fn.isStatic()) {
                    receiver.visit(controller.getAcg());
                }
                arguments.visit(controller.getAcg());
                operandStack.doGroovyCast(fn.getOriginType());
                mv.visitFieldInsn(fn.isStatic() ? PUTSTATIC : PUTFIELD,
                        BytecodeHelper.getClassInternalName(fn.getOwner()),
                        property,
                        BytecodeHelper.getTypeDescription(fn.getOriginType()));
                operandStack.remove(fn.isStatic()?1:2);
                return true;
            }
        }
        if (!isAttribute) {
            String setter = "set" + capitalize(property);
            MethodNode setterMethod = receiverType.getSetterMethod(setter, false);
            ClassNode declaringClass = setterMethod!=null?setterMethod.getDeclaringClass():null;
            if (isThisExpression && declaringClass!=null && declaringClass.equals(controller.getClassNode())) {
                // this.x = ... shouldn't use a setter if in the same class
                setterMethod = null;
            } else if (setterMethod == null) {
                // attempt a synthetic setter for a non-final property without an explicit one
                PropertyNode propertyNode = receiverType.getProperty(property);
                if (propertyNode != null) {
                    int mods = propertyNode.getModifiers();
                    if (!Modifier.isFinal(mods)) {
                        setterMethod = new MethodNode(
                                setter,
                                ACC_PUBLIC,
                                ClassHelper.VOID_TYPE,
                                new Parameter[]{new Parameter(propertyNode.getOriginType(), "value")},
                                ClassNode.EMPTY_ARRAY,
                                EmptyStatement.INSTANCE
                        );
                        setterMethod.setDeclaringClass(receiverType);
                    }
                }
            }
            if (setterMethod != null) {
                Expression call = StaticPropertyAccessHelper.transformToSetterCall(
                        receiver,
                        setterMethod,
                        arguments,
                        implicitThis,
                        safe,
                        spreadSafe,
                        true, // to be replaced with a proper test whether a return value should be used or not
                        message
                );
                call.visit(controller.getAcg());
                return true;
            }
            if (isThisExpression && !controller.isInClosure()) {
                receiverType = controller.getClassNode();
            }
            if (makeSetPrivateFieldWithBridgeMethod(receiver, receiverType, property, arguments, safe, spreadSafe, implicitThis)) return true;
        }
        return false;
    }

    /**
     * Emits a call to a compiler-generated mutator bridge method when writing a
     * private field of another class in the same outer-class nest. For implicit
     * this-receivers in inner classes/closures, recurses on the outer class
     * (resolving the owner via Closure#getThisObject when inside a closure).
     *
     * @return true if bytecode was emitted; false otherwise
     */
    @SuppressWarnings("unchecked")
    private boolean makeSetPrivateFieldWithBridgeMethod(final Expression receiver, final ClassNode receiverType, final String fieldName, final Expression arguments, final boolean safe, final boolean spreadSafe, final boolean implicitThis) {
        WriterController controller = getController();
        FieldNode field = receiverType.getField(fieldName);
        ClassNode outerClass = receiverType.getOuterClass();
        if (field == null && implicitThis && outerClass != null && !receiverType.isStaticClass()) {
            Expression pexp;
            if (controller.isInClosure()) {
                MethodCallExpression mce = new MethodCallExpression(
                    new VariableExpression("this"),
                    "getThisObject",
                    ArgumentListExpression.EMPTY_ARGUMENTS
                );
                mce.putNodeMetaData(StaticTypesMarker.INFERRED_TYPE, controller.getOutermostClass());
                mce.setImplicitThis(true);
                mce.setMethodTarget(CLOSURE_GETTHISOBJECT_METHOD);
                pexp = new CastExpression(controller.getOutermostClass(),mce);
            } else {
                pexp = new PropertyExpression(
                    new ClassExpression(outerClass),
                    "this"
                );
                ((PropertyExpression)pexp).setImplicitThis(true);
            }
            pexp.putNodeMetaData(StaticTypesMarker.INFERRED_TYPE, outerClass);
            pexp.setSourcePosition(receiver);
            return makeSetPrivateFieldWithBridgeMethod(pexp, outerClass, fieldName, arguments, safe, spreadSafe, true);
        }
        ClassNode classNode = controller.getClassNode();
        if (field != null && Modifier.isPrivate(field.getModifiers())
            && (StaticInvocationWriter.isPrivateBridgeMethodsCallAllowed(receiverType, classNode) || StaticInvocationWriter.isPrivateBridgeMethodsCallAllowed(classNode,receiverType))
            && !receiverType.equals(classNode)) {
            Map<String, MethodNode> mutators = receiverType.redirect().getNodeMetaData(StaticCompilationMetadataKeys.PRIVATE_FIELDS_MUTATORS);
            if (mutators != null) {
                MethodNode methodNode = mutators.get(fieldName);
                if (methodNode != null) {
                    // static mutators take a null placeholder in place of the receiver argument
                    MethodCallExpression mce = new MethodCallExpression(receiver, methodNode.getName(),
                        new ArgumentListExpression(field.isStatic()?new ConstantExpression(null):receiver, arguments));
                    mce.setMethodTarget(methodNode);
                    mce.setSafe(safe);
                    mce.setSpreadSafe(spreadSafe);
                    mce.setImplicitThis(implicitThis);
                    mce.visit(controller.getAcg());
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Compiles {@code receiver[index] = value}. Primitive array stores with a
     * matching writer are emitted directly; everything else is rewritten as a
     * {@code putAt(index, value)} method call (re-type-checked), after which the
     * RHS is reloaded so the assignment expression itself yields a value.
     */
    @Override
    protected void assignToArray(Expression parent, Expression receiver, Expression index, Expression rhsValueLoader, boolean safe) {
        ClassNode current = getController().getClassNode();
        ClassNode arrayType = getController().getTypeChooser().resolveType(receiver, current);
        ClassNode arrayComponentType = arrayType.getComponentType();
        int operationType = getOperandType(arrayComponentType);
        BinaryExpressionWriter bew = binExpWriter[operationType];

        if (bew.arraySet(true) && arrayType.isArray() && !safe) {
            super.assignToArray(parent, receiver, index, rhsValueLoader, safe);
        } else {
            // This code path is needed because ACG creates array access expressions
            WriterController controller = getController();
            StaticTypeCheckingVisitor visitor = new StaticCompilationVisitor(controller.getSourceUnit(), controller.getClassNode());
            // let's replace this assignment to a subscript operator with a
            // method call
            // e.g. x[5] = 10
            // -> (x, [], 5), =, 10
            // -> methodCall(x, "putAt", [5, 10])
            ArgumentListExpression ae = new ArgumentListExpression(index, rhsValueLoader);
            if (rhsValueLoader instanceof VariableSlotLoader && parent instanceof BinaryExpression) {
                // GROOVY-6061: propagate the inferred type of the parent assignment onto the slot loader
                rhsValueLoader.putNodeMetaData(StaticTypesMarker.INFERRED_TYPE, controller.getTypeChooser().resolveType(parent, controller.getClassNode()));
            }
            MethodCallExpression mce = new MethodCallExpression(
                    receiver,
                    "putAt",
                    ae
            );
            mce.setSafe(safe);
            mce.setSourcePosition(parent);
            visitor.visitMethodCallExpression(mce);
            OperandStack operandStack = controller.getOperandStack();
            int height = operandStack.getStackLength();
            mce.visit(controller.getAcg());
            operandStack.pop();
            operandStack.remove(operandStack.getStackLength()-height);

            // return value of assignment
            rhsValueLoader.visit(controller.getAcg());
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sysml.hops.rewrite; import java.util.ArrayList; import org.apache.sysml.api.DMLScript; import org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM; import org.apache.sysml.conf.ConfigurationManager; import org.apache.sysml.hops.DataOp; import org.apache.sysml.hops.FunctionOp; import org.apache.sysml.hops.Hop; import org.apache.sysml.hops.OptimizerUtils; import org.apache.sysml.hops.Hop.FileFormatTypes; import org.apache.sysml.hops.HopsException; import org.apache.sysml.parser.Expression.DataType; /** * Rule: BlockSizeAndReblock. For all statement blocks, determine * "optimal" block size, and place reblock Hops. For now, we just * use BlockSize 1K x 1K and do reblock after Persistent Reads and * before Persistent Writes. 
*/
public class RewriteBlockSizeAndReblock extends HopRewriteRule
{
    /**
     * Applies the blocksize/reblock rewrite to every root of the given HOP DAG list.
     *
     * @param roots roots of the HOP DAG, may be null (returns null)
     * @param state rewrite status; records the configured blocksize when reblock applies
     * @return the same list of roots, rewritten in place
     * @throws HopsException if an unexpected non-scalar Data HOP is encountered
     */
    @Override
    public ArrayList<Hop> rewriteHopDAGs(ArrayList<Hop> roots, ProgramRewriteStatus state)
        throws HopsException
    {
        if( roots == null )
            return null;

        //maintain rewrite status
        if( isReblockValid() )
            state.setBlocksize(ConfigurationManager.getBlocksize());

        //perform reblock and blocksize rewrite
        for( Hop h : roots )
            rule_BlockSizeAndReblock(h, ConfigurationManager.getBlocksize());

        return roots;
    }

    /**
     * Applies the blocksize/reblock rewrite to a single HOP DAG root
     * (e.g., a predicate DAG).
     *
     * @param root root of the HOP DAG, may be null (returns null)
     * @param state rewrite status; records the configured blocksize when reblock applies
     * @return the same root, rewritten in place
     * @throws HopsException if an unexpected non-scalar Data HOP is encountered
     */
    @Override
    public Hop rewriteHopDAG(Hop root, ProgramRewriteStatus state)
        throws HopsException
    {
        if( root == null )
            return null;

        //maintain rewrite status
        if( isReblockValid() )
            state.setBlocksize(ConfigurationManager.getBlocksize());

        //perform reblock and blocksize rewrite
        rule_BlockSizeAndReblock(root, ConfigurationManager.getBlocksize());

        return root;
    }

    /**
     * Recursive bottom-up pass: first processes all unvisited inputs, then decides
     * for the current hop whether a reblock is required and which row/column block
     * sizes to propagate. Marks the hop visited on exit, so shared sub-DAGs are
     * processed only once.
     */
    private void rule_BlockSizeAndReblock(Hop hop, final int blocksize)
        throws HopsException
    {
        // Go to the source(s) of the DAG
        for (Hop hi : hop.getInput()) {
            if (!hi.isVisited())
                rule_BlockSizeAndReblock(hi, blocksize);
        }

        boolean canReblock = isReblockValid();

        if (hop instanceof DataOp)
        {
            DataOp dop = (DataOp) hop;

            // if block size does not match
            // (for frames, text/csv persistent reads in spark mode also require reblock)
            if( canReblock &&
                ( (dop.getDataType() == DataType.MATRIX && (dop.getRowsInBlock() != blocksize || dop.getColsInBlock() != blocksize))
                ||(dop.getDataType() == DataType.FRAME && OptimizerUtils.isSparkExecutionMode() && (dop.getInputFormatType()==FileFormatTypes.TEXT
                || dop.getInputFormatType()==FileFormatTypes.CSV))) )
            {
                if( dop.getDataOpType() == DataOp.DataOpTypes.PERSISTENTREAD)
                {
                    // insert reblock after the hop
                    dop.setRequiresReblock(true);
                    dop.setOutputBlocksizes(blocksize, blocksize);
                }
                else if( dop.getDataOpType() == DataOp.DataOpTypes.PERSISTENTWRITE )
                {
                    if (dop.getRowsInBlock() == -1 && dop.getColsInBlock() == -1)
                    {
                        // if this dataop is for cell output, then no reblock is needed
                        // as (A) all jobtypes can produce block2cell and cell2cell and
                        // (B) we don't generate an explicit instruction for it (the info
                        // is conveyed through OutputInfo.
                    }
                    else if (dop.getInput().get(0).requiresReblock() && dop.getInput().get(0).getParent().size() == 1)
                    {
                        // if a reblock is feeding into this, then use it if this is
                        // the only parent, otherwise new Reblock
                        dop.getInput().get(0).setOutputBlocksizes(dop.getRowsInBlock(),dop.getColsInBlock());
                    }
                    else
                    {
                        // insert reblock after the hop
                        dop.setRequiresReblock(true);
                        dop.setOutputBlocksizes(blocksize, blocksize);
                    }
                }
                else if (dop.getDataOpType() == DataOp.DataOpTypes.TRANSIENTWRITE
                        || dop.getDataOpType() == DataOp.DataOpTypes.TRANSIENTREAD)
                {
                    if ( DMLScript.rtplatform == RUNTIME_PLATFORM.SINGLE_NODE )
                    {
                        // simply copy the values from its input
                        dop.setRowsInBlock(hop.getInput().get(0).getRowsInBlock());
                        dop.setColsInBlock(hop.getInput().get(0).getColsInBlock());
                    }
                    else
                    {
                        // by default, all transient reads and writes are in blocked format
                        dop.setRowsInBlock(blocksize);
                        dop.setColsInBlock(blocksize);
                    }
                }
                else
                {
                    throw new HopsException(hop.printErrorLocation() + "unexpected non-scalar Data HOP in reblock.\n");
                }
            }
        }
        else //NO DATAOP
        {
            // TODO: following two lines are commented, and the subsequent hack is used instead!
            //set_rows_per_block(GLOBAL_BLOCKSIZE);
            //set_cols_per_block(GLOBAL_BLOCKSIZE);

            /*
             * Handle hops whose output dimensions are unknown!
             *
             * Constraint C1:
             * Currently, only ctable() and groupedAggregate() fall into this category.
             * The MR jobs for both these functions run in "cell" mode and hence make their
             * blocking dimensions to (-1,-1).
             *
             * Constraint C2:
             * Blocking dimensions are not applicable for hops that produce scalars.
             * CMCOV and GroupedAgg jobs always run in "cell" mode, and hence they
             * produce output in cell format.
             *
             * Constraint C3:
             * Remaining hops will get their blocking dimensions from their input hops.
             */

            if ( hop.requiresReblock() )
            {
                hop.setRowsInBlock(blocksize);
                hop.setColsInBlock(blocksize);
            }

            // Constraint C1:

            // Constraint C2:
            else if ( hop.getDataType() == DataType.SCALAR )
            {
                hop.setRowsInBlock(-1);
                hop.setColsInBlock(-1);
            }

            // Constraint C3:
            else
            {
                if ( !canReblock )
                {
                    hop.setRowsInBlock(-1);
                    hop.setColsInBlock(-1);
                }
                else
                {
                    hop.setRowsInBlock(blocksize);
                    hop.setColsInBlock(blocksize);

                    // Functions may return multiple outputs, as defined in array of outputs in FunctionOp.
                    // Reblock properties need to be set for each output.
                    if ( hop instanceof FunctionOp )
                    {
                        FunctionOp fop = (FunctionOp) hop;
                        if ( fop.getOutputs() != null)
                        {
                            for(Hop out : fop.getOutputs())
                            {
                                out.setRowsInBlock(blocksize);
                                out.setColsInBlock(blocksize);
                            }
                        }
                    }
                }

                // if any input is not blocked then the output of current Hop should not be blocked
                for ( Hop h : hop.getInput() )
                {
                    if ( h.getDataType() == DataType.MATRIX && h.getRowsInBlock() == -1 && h.getColsInBlock() == -1 )
                    {
                        hop.setRowsInBlock(-1);
                        hop.setColsInBlock(-1);
                        break;
                    }
                }
            }
        }

        // mark visited so shared sub-DAGs are rewritten only once
        hop.setVisited();
    }

    // Reblock (and hence blocksize propagation) only applies outside single-node execution.
    private static boolean isReblockValid() {
        return ( DMLScript.rtplatform != RUNTIME_PLATFORM.SINGLE_NODE);
    }
}
package pv;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

import pv.util.Objects;

import com.google.common.base.Function;

/**
 * The pv class provides a collection of common utility methods.
 */
@SuppressWarnings("unchecked")
public class pv {

    /** Identity function: returns its argument unchanged. */
    public static final Function<?,?> identity = new Function<Object,Object>() {
        public Object apply(Object from) { return from; }
    };

    /** Orders {@link Number} instances by their double value. */
    public static final Comparator<Number> numberOrder = new Comparator<Number>() {
        public int compare(Number a, Number b) {
            double x = a.doubleValue(), y = b.doubleValue();
            return (x < y ? -1 : x > y ? 1 : 0);
        }
    };

    /**
     * Returns the values [start, end) in increments of step. A negative step
     * iterates downward from start (exclusive of end).
     *
     * @throws IllegalArgumentException if step is zero (would loop forever)
     */
    public static List<Double> range(double start, double end, double step) {
        if (step == 0) throw new IllegalArgumentException("step must not be zero");
        List<Double> list = (List<Double>) Objects.List.get();
        if (step > 0) {
            for (double x = start; x < end; x += step) list.add(x);
        } else {
            for (double x = start; x > end; x += step) list.add(x);
        }
        return list;
    }

    /** Returns a list containing n back-to-back copies of the input list. */
    public static <T> List<T> repeat(List<T> list, int n) {
        List<T> l = (List<T>) Objects.List.get();
        for (int i=0; i<n; ++i) l.addAll(list);
        return l;
    }

    /** Concatenates the given lists, in order, into a single list. */
    public static <T> List<T> blend(List<T>... lists) {
        List<T> l = (List<T>) Objects.List.get();
        for (int i=0; i<lists.length; ++i) l.addAll(lists[i]);
        return l;
    }

    /**
     * Applies f to each element of list, returning the list of results.
     * A null function is treated as the identity.
     */
    public static <F,T> List<T> map(Iterable<F> list, Function<? super F,T> f) {
        if (f == null) f = (Function<? super F,T>) identity;
        List<T> l = (List<T>) Objects.List.get();
        for (F obj : list) { l.add(f.apply(obj)); }
        return l;
    }

    /**
     * Maps each element through f and divides by the total, so the result sums
     * to 1. Note: if the mapped values sum to zero the entries are NaN/Infinite.
     */
    public static <F> List<Double> normalize(List<F> list, Function<? super F,Double> f) {
        List<Double> norm = map(list, f);
        double sum = sum(norm);
        for (int i=0; i<norm.size(); ++i) { norm.set(i, norm.get(i) / sum); }
        return norm;
    }

    /** Returns the sum of the numbers, as a double (0 for an empty iterable). */
    public static double sum(Iterable<? extends Number> list) {
        double sum = 0;
        for (Number x : list) { sum += x.doubleValue(); }
        return sum;
    }

    /** Returns the sum of f applied to each element, as a double. */
    public static <F> double sum(Iterable<F> list, Function<? super F,? extends Number> f) {
        double sum = 0;
        for (F obj : list) { sum += f.apply(obj).doubleValue(); }
        return sum;
    }

    /** Returns the maximum element, or null for an empty iterable. */
    public static <T extends Comparable> T max(Iterable<T> list) {
        T max = null;
        for (T obj : list) {
            if (max == null || obj.compareTo(max) > 0) { max = obj; }
        }
        return max;
    }

    /** Returns the maximum of f applied to each element, or null if empty. */
    public static <F,T extends Comparable> T max(Iterable<F> list, Function<F,T> f) {
        T max = null;
        for (F from : list) {
            T obj = f.apply(from);
            if (max == null || obj.compareTo(max) > 0) { max = obj; }
        }
        return max;
    }

    /** Returns the index of the maximum element, or -1 for an empty iterable. */
    public static <T extends Comparable> int maxIndex(Iterable<T> list) {
        T max = null;
        int i = 0, maxi = -1;
        for (T obj : list) {
            if (max == null || obj.compareTo(max) > 0) { max = obj; maxi = i; }
            ++i;
        }
        return maxi;
    }

    /** Returns the index of the element maximizing f, or -1 if empty. */
    public static <F,T extends Comparable> int maxIndex(Iterable<F> list, Function<F,T> f) {
        T max = null;
        int i = 0, maxi = -1;
        for (F from : list) {
            T obj = f.apply(from);
            if (max == null || obj.compareTo(max) > 0) { max = obj; maxi = i; }
            ++i;
        }
        return maxi;
    }

    /** Returns the minimum element, or null for an empty iterable. */
    public static <T extends Comparable> T min(Iterable<T> list) {
        T min = null;
        for (T obj : list) {
            if (min == null || obj.compareTo(min) < 0) { min = obj; }
        }
        return min;
    }

    /** Returns the minimum of f applied to each element, or null if empty. */
    public static <F,T extends Comparable> T min(Iterable<F> list, Function<F,T> f) {
        T min = null;
        for (F from : list) {
            T obj = f.apply(from);
            if (min == null || obj.compareTo(min) < 0) { min = obj; }
        }
        return min;
    }

    /** Returns the index of the minimum element, or -1 for an empty iterable. */
    public static <T extends Comparable> int minIndex(Iterable<T> list) {
        T min = null;
        int i = 0, mini = -1;
        for (T obj : list) {
            if (min == null || obj.compareTo(min) < 0) { min = obj; mini = i; }
            ++i;
        }
        return mini;
    }

    /** Returns the index of the element minimizing f, or -1 if empty. */
    public static <F,T extends Comparable> int minIndex(Iterable<F> list, Function<F,T> f) {
        T min = null;
        int i = 0, mini = -1;
        for (F from : list) {
            T obj = f.apply(from);
            if (min == null || obj.compareTo(min) < 0) { min = obj; mini = i; }
            ++i;
        }
        return mini;
    }

    /** Returns the arithmetic mean. NaN for an empty collection (0/0). */
    public static double mean(Collection<Double> col) {
        return sum(col) / col.size();
    }

    /** Returns the arithmetic mean of f applied to each element. */
    public static <F> double mean(Collection<F> col, Function<? super F,Double> f) {
        return sum(col, f) / col.size();
    }

    /**
     * Returns the upper median of f applied to each element.
     * Throws IndexOutOfBoundsException for an empty collection.
     */
    public static <F,T extends Comparable> T median(Collection<F> col, Function<? super F,T> f) {
        List<T> list = map(col, f);
        Collections.sort(list);
        return list.get(list.size()/2);
    }

//    public static <F> Double median(Collection<F> col, Function<? super F,Number> f) {
//        List<Number> list = map(col, f); Collections.sort(list, numberOrder);
//        int i = list.size() / 2;
//        if (list.size() % 2 > 0) return list.get(i).doubleValue();
//        return 0.5 * (list.get(i-1).doubleValue() + list.get(i).doubleValue());
//    }

    /**
     * Returns list reordered by the given indices: result[i] = list[indices[i]].
     * Fixed: previously used List.set on an empty ArrayList (capacity != size),
     * which always threw IndexOutOfBoundsException; elements are now appended.
     */
    public static <F> List<F> permute(List<F> list, List<? extends Number> indices) {
        List<F> l = new ArrayList<F>(indices.size());
        for (int i=0; i<indices.size(); ++i) {
            l.add(list.get(indices.get(i).intValue()));
        }
        return l;
    }

    /**
     * Returns f applied to list reordered by indices: result[i] = f(list[indices[i]]).
     * Fixed: same List.set-on-empty-list bug as the other permute overload.
     */
    public static <F,T> List<T> permute(List<F> list, List<? extends Number> indices, Function<? super F,T> f) {
        List<T> l = new ArrayList<T>(indices.size());
        for (int i=0; i<indices.size(); ++i) {
            l.add(f.apply(list.get(indices.get(i).intValue())));
        }
        return l;
    }

    // --

    /** Returns the logarithm of x with base b. */
    public static double log(double x, double b) {
        return Math.log(x) / Math.log(b);
    }

    /** Sign-preserving log: 0 maps to 0, negative x to -log(-x, b). */
    public static double logSymmetric(double x, double b) {
        return (x == 0) ? 0 : ((x < 0) ? -log(-x, b) : log(x, b));
    }

    /** Sign-preserving log adjusted to be continuous near zero. */
    public static double logAdjusted(double x, double b) {
        boolean negative = x < 0;
        if (x < b) x += (b - x) / b;
        return negative ? -log(x, b) : log(x, b);
    }

    /** Rounds x down to the nearest power of b (sign-preserving). */
    public static double logFloor(double x, double b) {
        return (x > 0)
            ? Math.pow(b, Math.floor(log(x, b)))
            : -Math.pow(b, -Math.floor(-log(-x, b)));
    }

    /** Rounds x up to the nearest power of b (sign-preserving). */
    public static double logCeil(double x, double b) {
        return (x > 0)
            ? Math.pow(b, Math.ceil(pv.log(x, b)))
            : -Math.pow(b, -Math.ceil(-pv.log(-x, b)));
    }
}
import java.io.*;
import java.util.*;

import org.openstates.bulkdata.LoadBulkData;
import org.openstates.data.Bill;
import org.openstates.data.Committee;
import org.openstates.data.Legislator;
import org.openstates.model.Bills;
import org.openstates.model.Committees;
import org.openstates.model.Legislators;

/**
 * Computes a Legislative Effectiveness Score (LES) per legislator from
 * OpenStates bulk data: bill progress counts per sponsor, committee office
 * scores, and a final tab-separated report on stdout.
 */
public class CompLES {

    /**
     * Per-legislator accumulator: bill progress counts, committee role counts,
     * office score, and the final LES value.
     */
    static class AuthorStats {
        public AuthorStats() {
            // billData[category][progress]; zero-initialized explicitly
            billData = new int[3][];
            for ( int i=0; i<3; ++i ) {
                billData[i] = new int[4];
                for ( int j=0;j<4;++j) {
                    billData[i][j] = 0;
                }
            }
        }
        // int billIntroducedCount = 0;
        // int billOtherChamberCount = 0;
        // int billPassedCount = 0;
        // int billChapteredCount = 0;
        // [category 0..2][progress 0..3] counts; categories/progress codes are
        // assigned in determineBillProgress / TestAction.testAction
        int billData[][];
        int cmember = 0;   // committee "member" role count
        int cvchair = 0;   // committee vice-chair role count
        int cchair = 0;    // committee chair role count
        int leader = 0;    // leadership position count (currently never incremented here)
        int officeScore = -1; // -1 = no office held; see determineOfficeScores
        double les = 0.0;
    }

    // bill ids belonging to the topics of interest; populated by
    // buildcurrentTopics (defined elsewhere in this file)
    private static TreeSet<String> currentTopics;

    /**
     * Entry point: loads one state's bulk data (MS in this configuration; other
     * states are kept as commented-out alternatives), accumulates per-sponsor
     * bill progress, computes office scores and LES, and prints a TSV report.
     */
    public static void main(String[] args) throws Exception {
        // TestAction/MSTestAction select the state-specific bill-id and
        // action-text matching rules; must agree with the zip loaded below.
        TestAction testAction = new MSTestAction();
        buildcurrentTopics(testAction);

        // new LoadBulkData().loadCurrentTerm( "2013-10-07-ca-json.zip", "2013", TimeZone.getTimeZone("GMT-08:00") );
        // new LoadBulkData().loadCurrentTerm( "2013-10-09-mo-json.zip", "2013", TimeZone.getTimeZone("GMT-06:00") );
        // new LoadBulkData().loadCurrentTerm( "2013-10-08-tx-json.zip", "83", TimeZone.getTimeZone("GMT-06:00") );
        // new LoadBulkData().loadCurrentTerm( "2013-10-08-ny-json.zip", "2013", TimeZone.getTimeZone("GMT-05:00") );
        new LoadBulkData().loadCurrentTerm( "2013-10-07-ms-json.zip", "2013", TimeZone.getTimeZone("GMT-05:00") );
        // new LoadBulkData().loadCurrentTerm( "2013-10-07-md-json.zip", "2013", TimeZone.getTimeZone("GMT-05:00") );
        // new LoadBulkData().loadCurrentTerm( "2013-10-08-pa-json.zip", "2013", TimeZone.getTimeZone("GMT-05:00") );
        // new LoadBulkData().loadCurrentTerm( "2013-11-01-nj-json.zip", "215", TimeZone.getTimeZone("GMT-05:00") );
        // new LoadBulkData().loadCurrentTerm( "2013-10-08-va-json.zip", "2013", TimeZone.getTimeZone("GMT-05:00") );
        // new LoadBulkData().loadCurrentTerm( "2013-10-07-tn-json.zip", "108", TimeZone.getTimeZone("GMT-05:00") );
        // new LoadBulkData().loadCurrentTerm( "2013-10-07-la-json.zip", "2013", TimeZone.getTimeZone("GMT-05:00") );
        // new LoadBulkData().loadCurrentTerm( "2013-11-01-mn-json.zip", "2013", TimeZone.getTimeZone("GMT-05:00") );
        // new LoadBulkData().loadCurrentTerm( "2013-10-09-hi-json.zip", "2013", TimeZone.getTimeZone("GMT-05:00") );
        // new LoadBulkData().loadCurrentTerm( "2013-10-07-az-json.zip", "51st-1st", TimeZone.getTimeZone("GMT-07:00") );
        // new LoadBulkData().loadCurrentTerm( "2013-10-08-nc-json.zip", "2013", TimeZone.getTimeZone("GMT-07:00") );
        // new LoadBulkData().loadCurrentTerm( "2013-10-09-ma-json.zip", "187th", TimeZone.getTimeZone("GMT-05:00") );
        // new LoadBulkData().loadCurrentTerm( "2013-10-09-ok-json.zip", "2013", TimeZone.getTimeZone("GMT-06:00") );
        // new LoadBulkData().loadCurrentTerm( "2013-10-07-ar-json.zip", "2013", TimeZone.getTimeZone("GMT-06:00") );
        // new LoadBulkData().loadCurrentTerm( "2013-10-07-ga-json.zip", "2013", TimeZone.getTimeZone("GMT-06:00") );

        TreeMap<Legislator, AuthorStats> legislatorStats = readLegislators();
        determineOfficeScores(legislatorStats);

        // accumulate bill progress for every primary sponsor of every bill
        ArrayList<Bill.Sponsor> sponsors = new ArrayList<Bill.Sponsor>();
        Collection<Bill> bills = Bills.values();
        for ( Bill bill: bills ) {
            // System.out.println(bill.bill_id+"---------------------------------------");
            sponsors.clear();
            determinePrincipalSponsors(bill, sponsors);
            for ( Bill.Sponsor sponsor: sponsors ) {
                Legislator legislator = null;
                AuthorStats sponsorStats = null;
                if ( sponsor != null && sponsor.leg_id != null ) {
                    legislator = Legislators.get(sponsor.leg_id);
                    if ( legislator != null ) sponsorStats = legislatorStats.get(legislator);
                }
                if ( sponsorStats != null ) determineBillProgress(bill, sponsorStats, testAction);
            }
            if ( sponsors.size() == 0 ) System.out.println("Principal Sponsor Not Found:" + bill.bill_id );
        }

        computeLES(legislatorStats);

        // TSV header: the three repeated BILLSINT..BILLSCHAP groups correspond
        // to the three bill categories of AuthorStats.billData
        System.out.print( "NAME" + "\t" + "CHAMBER" + "\t" + "DISTRICT" + "\t" + "PARTY" + "\t" + "OFFICE" + "\t");
        System.out.print( "BILLSINT" + "\t" + "BILLSOC" + "\t" + "BILLSPASSED" + "\t" + "BILLSCHAP" + "\t" );
        System.out.print( "BILLSINT" + "\t" + "BILLSOC" + "\t" + "BILLSPASSED" + "\t" + "BILLSCHAP" + "\t" );
        System.out.print( "BILLSINT" + "\t" + "BILLSOC" + "\t" + "BILLSPASSED" + "\t" + "BILLSCHAP" + "\t" );
        System.out.println( "LES");
        for ( Legislator legislator: legislatorStats.keySet() ) {
            AuthorStats sponsorStats = legislatorStats.get(legislator);
            System.out.print( legislator.full_name + "\t" + legislator.chamber + "\t" + legislator.district + "\t" + legislator.party + "\t" + sponsorStats.officeScore + "\t" );
            System.out.print( sponsorStats.billData[0][0] + "\t" + sponsorStats.billData[0][1] + "\t" + sponsorStats.billData[0][2] + "\t" + sponsorStats.billData[0][3] + "\t");
            System.out.print( sponsorStats.billData[1][0] + "\t" + sponsorStats.billData[1][1] + "\t" + sponsorStats.billData[1][2] + "\t" + sponsorStats.billData[1][3] + "\t");
            System.out.print( sponsorStats.billData[2][0] + "\t" + sponsorStats.billData[2][1] + "\t" + sponsorStats.billData[2][2] + "\t" + sponsorStats.billData[2][3] + "\t");
            System.out.println( sponsorStats.les );
        }
    }

    /* code that works for ca, but not sure about anywhere else ..
// here, legislator.fullName can be really a committee name if ( authorStats == null && sponsor != null ) { String committeId = null; committeId = Committees.findCommitteeKey(sponsor.name, bill.chamber); if ( committeId != null ) { Committee committee = Committees.get(committeId); if ( committee != null ) { legislator = determineChair(committee); if ( legislator != null ) { authorStats = authorSuccess.get( legislator ); cFlag = true; } } } } */ private static void determinePrincipalSponsors(Bill bill, ArrayList<Bill.Sponsor> sponsors) { for ( Bill.Sponsor sponsor: bill.sponsors ) { if ( sponsor.type.toLowerCase().equals("primary") ) sponsors.add(sponsor); } } private static void determineBillProgress(Bill bill, AuthorStats sponsorStats, TestAction testAction) { int cat; // default resolution if ( testAction.testId(bill.bill_id) == true ) { if ( currentTopics.contains(bill.bill_id) ) { System.out.println("Topic: " + bill.bill_id); cat = 2; } else cat = 1; } else cat = 0; List<MyAction> actions = new ArrayList<MyAction>(); for ( Bill.Action action: bill.actions ) { actions.add(new MyAction(action)); } Collections.sort(actions); int progress = 0; for ( MyAction myAction: actions ) { String act = myAction.action.action.toLowerCase(); // if ( bill.bill_id.contains("SR") ) System.out.println(bill.bill_id + ":" + bill.chamber+":"+act); int tprog = testAction.testAction(bill.chamber, act); if ( tprog >= 0 ) progress = tprog; } sponsorStats.billData[cat][progress]++; } static class GATestAction implements TestAction { @Override public boolean testId(String bill_id) { if ( bill_id.contains("HB") || bill_id.contains("SB") ) return true; return false; } @Override public int testAction(String chamber, String act) { if (chamber.equals("lower") && act.contains("senate read and referred") ) return 1; else if (chamber.equals("upper") && act.contains("house first readers") ) return 1; else if (act.contains("house sent to governor") ) return 2; else if (chamber.equals("lower") && 
act.contains("read and adopted") ) return 3;
            else if (chamber.equals("upper") && act.contains("read and adopted") ) return 3;
            else if ( act.contains("signed by governor") ) return 3;
            return -1;
        }
        @Override
        public String getState() { return "GA"; }
    }

    /** Arkansas-specific bill-id and action-text matching rules. */
    static class ARTestAction implements TestAction {
        @Override
        public boolean testId(String bill_id) {
            if ( bill_id.contains("HB") || bill_id.contains("SB") ) return true;
            return false;
        }
        @Override
        public int testAction(String chamber, String act) {
            if (chamber.equals("lower") && act.contains("transmitted to the senate") ) return 1;
            else if (chamber.equals("upper") && act.contains("transmitted to the house") ) return 1;
            // NOTE(review): the next branch duplicates the previous condition and is unreachable
            else if (chamber.equals("upper") && act.contains("transmitted to the house") ) return 1;
            else if (act.contains("correctly enrolled and ordered transmitted to the governor's office.") ) return 2;
            else if (chamber.equals("lower") && act.contains("read and adopted") ) return 3;
            else if (chamber.equals("upper") && act.contains("read the third time and adopted.") ) return 3;
            else if ( act.contains("is now act ") ) return 3;
            return -1;
        }
        @Override
        public String getState() { return "AR"; }
    }

    /** Oklahoma-specific bill-id and action-text matching rules. */
    static class OKTestAction implements TestAction {
        @Override
        public boolean testId(String bill_id) {
            if ( bill_id.contains("HB") || bill_id.contains("SB") ) return true;
            return false;
        }
        @Override
        public int testAction(String chamber, String act) {
            if (chamber.equals("lower") && act.contains("engrossed, signed, to senate") ) return 1;
            else if (chamber.equals("upper") && act.contains("engrossed to house") ) return 1;
            else if (act.contains("sent to governor") ) return 2;
            else if (chamber.equals("lower") && act.contains("enrolled, signed, filed with secretary of state") ) return 3;
            else if (chamber.equals("upper") && act.contains("enrolled, filed with secretary of state") ) return 3;
            else if ( act.contains("approved by governor") ) return 3;
            return -1;
        }
        @Override
        public String getState() { return "OK"; }
    }

    /** Massachusetts-specific bill-id and action-text matching rules. */
    static class MATestAction implements TestAction {
        @Override
        public boolean testId(String bill_id) {
            if ( bill_id.contains("H ") || bill_id.contains("S ") ) return true;
            return false;
        }
        @Override
        public int testAction(String chamber, String act) {
            if (chamber.equals("lower") && act.contains("senate concurred") ) return 1;
            else if (chamber.equals("upper") && act.contains("house concurred") ) return 1;
            else if (act.contains("enacted and laid before the governor") ) return 2;
            else if ( act.contains("signed by the governor") ) return 3;
            return -1;
        }
        @Override
        public String getState() { return "MA"; }
    }

    /** North Carolina-specific bill-id and action-text matching rules. */
    static class NCTestAction implements TestAction {
        @Override
        public boolean testId(String bill_id) {
            if ( bill_id.contains("HB") || bill_id.contains("SB") ) return true;
            return false;
        }
        @Override
        public int testAction(String chamber, String act) {
            if (chamber.equals("lower") && act.contains("rec from house") ) return 1;
            else if (chamber.equals("upper") && act.contains("rec from senate") ) return 1;
            else if (act.contains("ratified") ) return 2;
            else if (chamber.equals("lower") && act.contains("adopted") ) return 3;
            else if (chamber.equals("upper") && act.contains("adopted") ) return 3;
            else if ( act.contains("signed by gov.") ) return 3;
            return -1;
        }
        @Override
        public String getState() { return "NC"; }
    }

    /** Arizona-specific bill-id and action-text matching rules. */
    static class AZTestAction implements TestAction {
        @Override
        public boolean testId(String bill_id) {
            if ( bill_id.contains("HB") || bill_id.contains("SB") ) return true;
            return false;
        }
        @Override
        public int testAction(String chamber, String act) {
            if (chamber.equals("lower") && act.contains("transmit to house") ) return 1;
            else if (chamber.equals("upper") && act.contains("transmitted to house") ) return 1;
            else if (act.contains("transmitted to governor") ) return 2;
            else if (act.contains("enrolled to governor") ) return 2;
            else if (act.contains("resolution adopted in final form") ) return 3;
            else if (act.contains("transmitted to secretary of state") ) return 3;
            else if ( act.equals("signed") ) return 3;
            return -1;
        }
        @Override
        public String getState() { return "AZ"; }
    }

    /** Minnesota: only id matching; no action rules implemented yet. */
    static class MNTestAction implements TestAction {
        @Override
        public boolean testId(String bill_id) {
            if ( bill_id.contains("HF") || bill_id.contains("SF") ) return true;
            return false;
        }
        @Override
        public int testAction(String chamber, String act) {
            return -1;
        }
        @Override
        public String getState() { return "MN"; }
    }

    /** Hawaii-specific bill-id and action-text matching rules. */
    static class HITestAction implements TestAction {
        @Override
        public boolean testId(String bill_id) {
            if ( bill_id.contains("HB") || bill_id.contains("SB") ) return true;
            return false;
        }
        @Override
        public int testAction(String chamber, String act) {
            if (chamber.equals("lower") && act.contains("transmitted to senate") ) return 1;
            else if (chamber.equals("upper") && act.contains("transmitted to house") ) return 1;
            else if (act.contains("transmitted to governor") ) return 2;
            else if (act.contains("enrolled to governor") ) return 2;
            else if (act.contains("resolution adopted in final form") ) return 3;
            else if (act.contains("certified copies of resolutions sent") ) return 3;
            else if ( act.contains("act ") ) return 3;
            return -1;
        }
        @Override
        public String getState() { return "HI"; }
    }

    /** Louisiana-specific bill-id and action-text matching rules. */
    static class LATestAction implements TestAction {
        @Override
        public boolean testId(String bill_id) {
            if ( bill_id.contains("HB") || bill_id.contains("SB") ) return true;
            return false;
        }
        @Override
        public int testAction(String chamber, String act) {
            // if ( bill.bill_id.contains("SCR") ) System.out.println(bill.bill_id + ":" + bill.chamber+":"+act);
            if (chamber.equals("lower") && act.contains("received in the senate.") ) return 1;
            else if (chamber.equals("lower") && act.contains("enrolled and signed by the speaker of the house.") ) return 1;
            else if (chamber.equals("upper") && act.contains("received in the house from the senate") ) return 1;
            else if (chamber.equals("upper") && act.contains("ordered sent to the house.") ) return 1;
            else if (act.contains("sent to the governor") ) return 2;
            else if (act.contains("sent to the secretary of state by the secretary") ) return 3;
            else if (act.contains("taken by the clerk of the house and presented to the secretary of state") ) return 3;
            else if ( act.contains("becomes act no.") ) return 3;
            return -1;
        }
        @Override
        public String getState() { return "LA"; }
    }

    /** Tennessee-specific bill-id and action-text matching rules. */
    static class TNTestAction implements TestAction {
        @Override
        public boolean testId(String bill_id) {
            if ( bill_id.contains("HB") || bill_id.contains("SB")) return true;
            return false;
        }
        @Override
        public int testAction(String chamber, String act) {
            if (chamber.equals("lower") && act.contains("ready for transmission to sen") ) return 1;
            else if (chamber.equals("upper") && act.contains("ready for transmission to house") ) return 1;
            else if (act.contains("transmitted to gov. for action") ) return 2;
            else if (act.contains("adopted as am., ayes ") ) return 3;
            else if (act.contains("adopted, ayes ") ) return 3;
            else if ( act.contains("signed by governor") ) return 3;
            return -1;
        }
        @Override
        public String getState() { return "TN"; }
    }

    /** Virginia-specific bill-id and action-text matching rules. */
    static class VATestAction implements TestAction {
        @Override
        public boolean testId(String bill_id) {
            if ( bill_id.contains("HB") || bill_id.contains("SB")) return true;
            return false;
        }
        @Override
        public int testAction(String chamber, String act) {
            if (chamber.equals("lower") && act.contains("passed house") ) return 1;
            else if (chamber.equals("upper") && act.contains("passed senate") ) return 1;
            else if (act.contains("enrolled") ) return 2;
            else if ( act.contains("enacted, chapter") ) return 3;
            else if (chamber.equals("lower") && act.contains("agreed to by house") ) return 3;
            else if (chamber.equals("upper") && act.contains("agreed to by senate") ) return 3;
            else if ( act.contains("approved by governor") ) return 3;
            return -1;
        }
        @Override
        public String getState() { return "VA"; }
    }

    /** New Jersey-specific bill-id and action-text matching rules. */
    static class NJTestAction implements TestAction {
        @Override
        public boolean testId(String bill_id) {
            if ( bill_id.contains("A ") || bill_id.contains("S ")) return true;
            return false;
        }
        @Override
        public int testAction(String chamber, String act) {
            if (chamber.equals("lower") && act.contains("received in the senate") ) return 1;
            else if (chamber.equals("upper") && act.contains("received in the assembly") ) return 1;
            else if (act.contains("passed both houses") ) return 2;
            else if ( act.contains("approved p.") ) return 3;
            else if ( act.contains("filed with secretary of state") ) return 3;
            return -1;
        }
        @Override
        public String getState() { return "NJ"; }
    }

    /** Pennsylvania-specific bill-id and action-text matching rules. */
    static class PATestAction implements TestAction {
        @Override
        public String getState() { return "PA"; }
        @Override
        public boolean testId(String bill_id) {
            if ( bill_id.contains("HB") || bill_id.contains("SB")) return true;
            return false;
        }
        @Override
        public int testAction(String chamber, String act) {
            if (chamber.equals("lower") && act.contains("laid on the table") ) return 1;
            else if (chamber.equals("upper") && act.contains("laid on the table") ) return 1;
            else if (act.contains("presented to the governor") ) return 2;
            else if ( act.contains("approved by the governor") ) return 3;
            return -1;
        }
    }

    /** Maryland-specific bill-id and action-text matching rules. */
    static class MDTestAction implements TestAction {
        @Override
        public String getState() { return "MD"; }
        @Override
        public boolean testId(String bill_id) {
            if ( bill_id.contains("HB") || bill_id.contains("SB")) return true;
            return false;
        }
        @Override
        public int testAction(String chamber, String act) {
            if (chamber.equals("lower") && act.contains("first reading senate rules") ) return 1;
            else if (chamber.equals("upper") && act.contains("first reading") && !act.contains("first reading senate rules")) return 1;
            else if (act.contains("passed enrolled") ) return 2;
            else if (act.contains("returned passed") ) return 2;
            else if ( act.contains("approved by the governor") ) return 3;
            return -1;
        }
    }

    /** Mississippi-specific bill-id and action-text matching rules. */
    static class MSTestAction implements TestAction {
        @Override
        public String getState() { return "MS"; }
        @Override
        public boolean testId(String bill_id) {
            if ( bill_id.contains("HB") || bill_id.contains("SB")) return true;
            return false;
        }
        @Override
        public int testAction(String chamber, String act) {
            if (chamber.equals("lower") && act.contains("transmitted to senate") ) return 1;
            else if (chamber.equals("upper") && act.contains("transmitted to house") ) return 1;
            else if (act.contains("enrolled bill signed") ) return 2;
            else if ( act.contains("approved by governor") ) return 3;
            return -1;
        }
    }

    /** Missouri-specific bill-id and action-text matching rules. */
    static class MOTestAction implements TestAction {
        @Override
        public String getState() { return "MO"; }
        @Override
        public boolean testId(String bill_id) {
            if ( bill_id.contains("HB") || bill_id.contains("SB")) return true;
            return false;
        }
        @Override
        public int testAction(String chamber, String act) {
            if (chamber.equals("lower") && act.contains("reported to the senate") ) return 1;
            else if (chamber.equals("upper") && act.contains("reported to the assembly") ) return 1;
            else if (act.contains("truly agreed to and finally passed") ) return 2;
            else if ( act.contains("approved by governor") ) return 3;
            else if ( act.contains("signed by governor") ) return 3;
            return -1;
        }
    }

    /** Texas-specific bill-id and action-text matching rules. */
    static class TXTestAction implements TestAction {
        @Override
        public String getState() { return "TX"; }
        @Override
        public boolean testId(String bill_id) {
            if ( bill_id.contains("HB") || bill_id.contains("SB")) return true;
            return false;
        }
        @Override
        public int testAction(String chamber, String act) {
            if (chamber.equals("lower") && act.contains("received from the house") ) return 1;
            else if (chamber.equals("upper") && act.contains("received from the senate") ) return 1;
            else if (act.contains("sent to the governor") ) return 2;
            else if ( act.contains("signed by the governor") ) return 3;
            return -1;
        }
    }

    /** New York-specific bill-id and action-text matching rules. */
    static class NYTestAction implements TestAction {
        @Override
        public String getState() { return "NY"; }
        @Override
        public boolean testId(String bill_id) {
            if ( bill_id.contains("A ") || bill_id.contains("S ")) return true;
            return false;
        }
        @Override
        public int testAction(String chamber, String act) {
            if (chamber.equals("lower") && act.contains("delivered to senate") ) return 1;
            else if (chamber.equals("upper") && act.contains("delivered to assembly") ) return 1;
            else if (act.contains("delivered to governor") ) return 2;
            else if ( act.contains("signed chap.") ) return 3;
            return -1;
        }
    }

    /** California-specific bill-id and action-text matching rules. */
    static class CATestAction implements TestAction {
        @Override
        public String getState() { return "CA"; }
        @Override
        public boolean testId(String bill_id) {
            if ( bill_id.contains("SB") || bill_id.contains("AB") || bill_id.contains("SBX1") || bill_id.contains("ABX1")) return true;
            return false;
        }
        @Override
        public int testAction(String chamber, String act) {
            if (chamber.equals("lower") && act.contains("to the senate") ) return 1;
            else if (chamber.equals("lower") && act.contains("in senate") ) return 1;
            else if (chamber.equals("upper") && act.contains("to the assembly") ) return 1;
            else if (chamber.equals("upper") && act.contains("in assembly") ) return 1;
            else if (act.contains("to engrossing and enrolling") ) return 2;
            else if (act.contains("enrolled and presented to the governor") ) return 2;
            else if ( act.contains("approved by the governor") ) return 3;
            else if ( act.contains("chaptered by secretary of state") ) return 3;
            return -1;
        }
    }

    /** Debug helper: dumps every action of a bill to stdout. */
    private static void printAllActions(Bill bill) {
        for ( Bill.Action action: bill.actions ) {
            System.out.println(action);
        }
    }

    /** Builds a fresh AuthorStats entry for every known legislator. */
    private static TreeMap<Legislator, AuthorStats> readLegislators() throws Exception {
        TreeMap<Legislator, AuthorStats> legislators = new TreeMap<>();
        for ( Legislator legislator: Legislators.values()) {
            legislators.put(legislator, new AuthorStats());
        }
        return legislators;
    }

    /**
     *
     * Legislative Influence: Toward Theory Development through Causal Analysis
     * Author(s): Katherine Meyer
     * Source: Legislative Studies Quarterly, Vol. 5, No. 4 (Nov., 1980), pp.
563-585 * Published * * It assigned the following values to positions: Party Leader * or Whip = 5; Committee Chair and Vice Chair simultaneously on different * committees = 4; Committee Chair only = 3; two or more Committee Vice * Chairs = 2; Committee Vice Chair only = 1; and Member only = 0. * * Added -1 if no office held * */ private static void determineOfficeScores(TreeMap<Legislator, AuthorStats> authorSuccess) { for ( Committee committee: Committees.values() ) { for ( Committee.Member member: committee.members ) { Legislator legislator = null; if ( member.leg_id != null ) legislator = Legislators.get(member.leg_id); if ( legislator != null ) { AuthorStats successStat = authorSuccess.get(legislator); String role = member.role.toLowerCase(); if ( role.contains("member")) { successStat.cmember++; } else if ( role.contains("vice")) { successStat.cvchair++; } else if ( role.contains("chair") ) { successStat.cchair++; // } else { // assume it's a leadership position? // System.out.println("Leader Role???:" + legislator + ":" + role); // successStat.leader++; } } } } // check for (Legislator legislator: authorSuccess.keySet() ) { AuthorStats successStat = authorSuccess.get(legislator); if ( successStat.cmember > 0 ) successStat.officeScore = 0; if ( successStat.cvchair == 1 ) successStat.officeScore = 1; if ( successStat.cvchair > 1 ) successStat.officeScore = 2; if ( successStat.cchair == 1 ) successStat.officeScore = 3; if ( successStat.cchair > 0 && successStat.cvchair > 0 ) successStat.officeScore = 4; if ( successStat.leader > 0 ) successStat.officeScore = 5; /* for ( Legislator.Role role: legislator.roles ) { String type = role.type.toLowerCase(); if ( !(type.contains("member") || type.contains("vice chair") || type.contains("chair")) ) { System.out.println("Presumed leadership?:" + role); successStat.officeScore = 5; } } */ } } public static void computeLES(TreeMap<Legislator, AuthorStats> legislators) { // Map<String, Double> computeLES = (Map<String, 
Double>)computePad.get(LES); // ArrayList<Long> lidsAll = makeRList(); double LESMult = new Double(legislators.size()/4.0); double[][] denomArray = new double[3][4]; denomArray[0][0] = totalFrom(legislators, 0, 0); denomArray[0][1] = totalFrom(legislators, 0, 1); denomArray[0][2] = totalFrom(legislators, 0, 2); denomArray[0][3] = totalFrom(legislators, 0, 3); denomArray[1][0] = totalFrom(legislators, 1, 0); denomArray[1][1] = totalFrom(legislators, 1, 1); denomArray[1][2] = totalFrom(legislators, 1, 2); denomArray[1][3] = totalFrom(legislators, 1, 3); denomArray[2][0] = totalFrom(legislators, 2, 0); denomArray[2][1] = totalFrom(legislators, 2, 1); denomArray[2][2] = totalFrom(legislators, 2, 2); denomArray[2][3] = totalFrom(legislators, 2, 3); // make the array inverse cumulative across rows for ( int j=0; j < 3; ++j ) { for ( int i=0; i < 4; ++i ) { double sum = 0.0; for ( int i2=i; i2 < 4; ++i2 ) { sum += denomArray[j][i2]; } denomArray[j][i] = sum; } } double billsMult = 5.0; double topicMult = 10.0; double[] denom = new double[4]; denom[0] = denomArray[0][0] + (billsMult * denomArray[1][0]) + (topicMult * denomArray[2][0]); denom[1] = denomArray[0][1] + (billsMult * denomArray[1][1]) + (topicMult * denomArray[2][1]); denom[2] = denomArray[0][2] + (billsMult * denomArray[1][2]) + (topicMult * denomArray[2][2]); denom[3] = denomArray[0][3] + (billsMult * denomArray[1][3]) + (topicMult * denomArray[2][3]); double[][] legArray = new double[3][4]; for ( Legislator key: legislators.keySet()) { AuthorStats stats = legislators.get(key); for ( int i=0; i < 4; ++i ) { for ( int j=0; j < 3; ++j ) { legArray[j][i] = stats.billData[j][i]; } } // make the array inverse cumulative across rows for ( int j=0; j < 3; ++j ) { for ( int i=0; i < 4; ++i ) { double sum = 0.0; for ( int i2=i; i2 < 4; ++i2 ) { sum += legArray[j][i2]; } legArray[j][i] = sum; } } double[] num = new double[4]; num[0] = legArray[0][0] + (billsMult * legArray[1][0]) + (topicMult * legArray[2][0]); num[1] = 
legArray[0][1] + (billsMult * legArray[1][1]) + (topicMult * legArray[2][1]); num[2] = legArray[0][2] + (billsMult * legArray[1][2]) + (topicMult * legArray[2][2]); num[3] = legArray[0][3] + (billsMult * legArray[1][3]) + (topicMult * legArray[2][3]); double partIntroduced = num[0] / denom[0]; double partOtherChamber = num[1] / denom[1]; double partPassed = num[2] / denom[2]; double partChaptered = num[3] / denom[3]; double LES = (partIntroduced + partOtherChamber + partPassed + partChaptered) * LESMult; stats.les = LES; } } private static double totalFrom( TreeMap<Legislator, AuthorStats> legislators, int row, int col) { double ret = 0.0; for ( Legislator key: legislators.keySet()) { AuthorStats stats = legislators.get(key); ret = ret + stats.billData[row][col]; // for ( int i=col; i<4; ++i ) { // ret = ret + stats.billData[row][i]; // } } return ret; } private static void buildcurrentTopics(TestAction testAction) throws Exception { currentTopics = new TreeSet<String>(); InputStream is = CompLES.class.getResourceAsStream("/" + testAction.getState() + "TopicBills2013.txt"); InputStreamReader isr = new InputStreamReader(is, "ASCII"); BufferedReader br = new BufferedReader(isr); String line; while ( (line = br.readLine()) != null ) { currentTopics.add(line); } is.close(); // System.out.println(currentTopics); } interface TestAction { public String getState(); public boolean testId(String bill_id); public int testAction(String chamber, String act); } static class MyAction implements Comparable<MyAction> { public Bill.Action action; public MyAction(Bill.Action action) { this.action = action; } @Override public int compareTo(MyAction o) { return action.date.compareTo(o.action.date); } } }
package io.prometheus.client;

import io.prometheus.client.exemplars.Exemplar;

import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;

/**
 * A collector for a set of metrics.
 * <p>
 * Normal users should use {@link Gauge}, {@link Counter}, {@link Summary} and {@link Histogram}.
 * <p>
 * Subclassing Collector is for advanced uses, such as proxying metrics from another monitoring system.
 * It is the responsibility of subclasses to ensure they produce valid metrics.
 * @see <a href="http://prometheus.io/docs/instrumenting/exposition_formats/">Exposition formats</a>.
 */
public abstract class Collector {
  /**
   * Return all metrics of this Collector.
   */
  public abstract List<MetricFamilySamples> collect();

  /**
   * Like {@link #collect()}, but the result should only contain {@code MetricFamilySamples} where
   * {@code sampleNameFilter.test(name)} is {@code true} for at least one Sample name.
   * <p>
   * The default implementation first collects all {@code MetricFamilySamples} and then discards the ones
   * where {@code sampleNameFilter.test(name)} returns {@code false} for all names in
   * {@link MetricFamilySamples#getNames()}.
   * To improve performance, collector implementations should override this method to prevent
   * {@code MetricFamilySamples} from being collected if they will be discarded anyways.
   * See {@code ThreadExports} for an example.
   * <p>
   * Note that the resulting List may contain {@code MetricFamilySamples} where some Sample names return
   * {@code true} for {@code sampleNameFilter.test(name)} but some Sample names return {@code false}.
   * This is ok, because before we produce the output format we will call
   * {@link MetricFamilySamples#filter(Predicate)} to strip all Samples where {@code sampleNameFilter.test(name)}
   * returns {@code false}.
   *
   * @param sampleNameFilter may be {@code null}, indicating that all metrics should be collected.
   */
  public List<MetricFamilySamples> collect(Predicate<String> sampleNameFilter) {
    List<MetricFamilySamples> all = collect();
    if (sampleNameFilter == null) {
      return all;
    }
    List<MetricFamilySamples> remaining = new ArrayList<MetricFamilySamples>(all.size());
    for (MetricFamilySamples mfs : all) {
      // Keep the family if ANY of its potential sample names passes the filter;
      // per-sample stripping happens later in MetricFamilySamples.filter().
      for (String name : mfs.getNames()) {
        if (sampleNameFilter.test(name)) {
          remaining.add(mfs);
          break;
        }
      }
    }
    return remaining;
  }

  public enum Type {
    UNKNOWN, // This is untyped in Prometheus text format.
    COUNTER,
    GAUGE,
    STATE_SET,
    INFO,
    HISTOGRAM,
    GAUGE_HISTOGRAM,
    SUMMARY,
  }

  /**
   * A metric, and all of its samples.
   */
  static public class MetricFamilySamples {
    public final String name;
    public final String unit;
    public final Type type;
    public final String help;
    public final List<Sample> samples; // this list is modified when samples are added/removed.

    public MetricFamilySamples(String name, Type type, String help, List<Sample> samples) {
      this(name, "", type, help, samples);
    }

    /**
     * @param name metric name; for counters it may be passed with or without the
     *             {@code _total} suffix — the suffix is normalized below.
     * @param unit must be empty or a suffix of {@code name} (separated by {@code _}).
     * @throws IllegalArgumentException if the unit is inconsistent with the name,
     *         or if a unit is given for a type (INFO, STATE_SET) that cannot have one.
     */
    public MetricFamilySamples(String name, String unit, Type type, String help, List<Sample> samples) {
      if (!unit.isEmpty() && !name.endsWith("_" + unit)) {
        throw new IllegalArgumentException("Metric's unit is not the suffix of the metric name: " + name);
      }
      if ((type == Type.INFO || type == Type.STATE_SET) && !unit.isEmpty()) {
        throw new IllegalArgumentException("Metric is of a type that cannot have a unit: " + name);
      }
      List<Sample> mungedSamples = samples;
      // Deal with _total from pre-OM automatically: store the family name
      // WITHOUT the suffix, and rename bare-named counter samples to
      // name_total so the exposition output is OpenMetrics-compatible.
      if (type == Type.COUNTER) {
        if (name.endsWith("_total")) {
          name = name.substring(0, name.length() - 6);
        }
        String withTotal = name + "_total";
        mungedSamples = new ArrayList<Sample>(samples.size());
        for (Sample s : samples) {
          String n = s.name;
          if (name.equals(n)) {
            n = withTotal;
          }
          mungedSamples.add(new Sample(n, s.labelNames, s.labelValues, s.value, s.exemplar, s.timestampMs));
        }
      }
      this.name = name;
      this.unit = unit;
      this.type = type;
      this.help = help;
      this.samples = mungedSamples;
    }

    /**
     * @param sampleNameFilter may be {@code null} indicating that the result contains the complete list of samples.
     * @return A new MetricFamilySamples containing only the Samples matching the {@code sampleNameFilter},
     *         or {@code null} if no Sample matches.
     */
    public MetricFamilySamples filter(Predicate<String> sampleNameFilter) {
      if (sampleNameFilter == null) {
        return this;
      }
      List<Sample> remainingSamples = new ArrayList<Sample>(samples.size());
      for (Sample sample : samples) {
        if (sampleNameFilter.test(sample.name)) {
          remainingSamples.add(sample);
        }
      }
      if (remainingSamples.isEmpty()) {
        return null;
      }
      return new MetricFamilySamples(name, unit, type, help, remainingSamples);
    }

    /**
     * List of names that are reserved for Samples in these MetricsFamilySamples.
     * <p>
     * This is used in two places:
     * <ol>
     * <li>To check potential name collisions in {@link CollectorRegistry#register(Collector)}.
     * <li>To check if a collector may contain metrics matching the metric name filter
     *     in {@link Collector#collect(Predicate)}.
     * </ol>
     * Note that {@code getNames()} always includes the name without suffix, even though some
     * metrics types (like Counter) will not have a Sample with that name.
     * The reason is that the name without suffix is used in the metadata comments ({@code # TYPE}, {@code # UNIT},
     * {@code # HELP}), and as this name <a href="https://github.com/prometheus/common/issues/319">must be unique</a>
     * we include the name without suffix here as well.
     */
    public String[] getNames() {
      switch (type) {
        case COUNTER:
          return new String[]{
              name + "_total",
              name + "_created",
              name
          };
        case SUMMARY:
          return new String[]{
              name + "_count",
              name + "_sum",
              name + "_created",
              name
          };
        case HISTOGRAM:
          return new String[]{
              name + "_count",
              name + "_sum",
              name + "_bucket",
              name + "_created",
              name
          };
        case GAUGE_HISTOGRAM:
          return new String[]{
              name + "_gcount",
              name + "_gsum",
              name + "_bucket",
              name
          };
        case INFO:
          return new String[]{
              name + "_info",
              name
          };
        default:
          return new String[]{name};
      }
    }

    @Override
    public boolean equals(Object obj) {
      if (!(obj instanceof MetricFamilySamples)) {
        return false;
      }
      MetricFamilySamples other = (MetricFamilySamples) obj;
      return other.name.equals(name)
          && other.unit.equals(unit)
          && other.type.equals(type)
          && other.help.equals(help)
          && other.samples.equals(samples);
    }

    @Override
    public int hashCode() {
      int hash = 1;
      hash = 37 * hash + name.hashCode();
      hash = 37 * hash + unit.hashCode();
      hash = 37 * hash + type.hashCode();
      hash = 37 * hash + help.hashCode();
      hash = 37 * hash + samples.hashCode();
      return hash;
    }

    @Override
    public String toString() {
      return "Name: " + name + " Unit:" + unit + " Type: " + type + " Help: " + help +
          " Samples: " + samples;
    }

    /**
     * A single Sample, with a unique name and set of labels.
     */
    public static class Sample {
      public final String name;
      public final List<String> labelNames;
      public final List<String> labelValues; // Must have same length as labelNames.
      public final double value;
      public final Exemplar exemplar;
      public final Long timestampMs; // It's an epoch format with milliseconds value included (this field is subject to change).

      public Sample(String name, List<String> labelNames, List<String> labelValues, double value,
                    Exemplar exemplar, Long timestampMs) {
        this.name = name;
        this.labelNames = labelNames;
        this.labelValues = labelValues;
        this.value = value;
        this.exemplar = exemplar;
        this.timestampMs = timestampMs;
      }

      public Sample(String name, List<String> labelNames, List<String> labelValues, double value, Long timestampMs) {
        this(name, labelNames, labelValues, value, null, timestampMs);
      }

      public Sample(String name, List<String> labelNames, List<String> labelValues, double value, Exemplar exemplar) {
        this(name, labelNames, labelValues, value, exemplar, null);
      }

      public Sample(String name, List<String> labelNames, List<String> labelValues, double value) {
        this(name, labelNames, labelValues, value, null, null);
      }

      @Override
      public boolean equals(Object obj) {
        if (!(obj instanceof Sample)) {
          return false;
        }
        Sample other = (Sample) obj;
        return other.name.equals(name)
            && other.labelNames.equals(labelNames)
            && other.labelValues.equals(labelValues)
            && other.value == value
            && (exemplar == null && other.exemplar == null
                || other.exemplar != null && other.exemplar.equals(exemplar))
            && (timestampMs == null && other.timestampMs == null
                || other.timestampMs != null && other.timestampMs.equals(timestampMs));
      }

      @Override
      public int hashCode() {
        int hash = 1;
        hash = 37 * hash + name.hashCode();
        hash = 37 * hash + labelNames.hashCode();
        hash = 37 * hash + labelValues.hashCode();
        long d = Double.doubleToLongBits(value);
        hash = 37 * hash + (int) (d ^ (d >>> 32));
        if (timestampMs != null) {
          hash = 37 * hash + timestampMs.hashCode();
        }
        if (exemplar != null) {
          // BUGFIX: was `hash = 37 * exemplar.hashCode();`, which discarded the
          // accumulated hash and made all exemplar-bearing samples hash purely
          // on the exemplar, violating the equals/hashCode contract in spirit.
          hash = 37 * hash + exemplar.hashCode();
        }
        return hash;
      }

      @Override
      public String toString() {
        return "Name: " + name + " LabelNames: " + labelNames + " labelValues: " + labelValues +
            " Value: " + value + " TimestampMs: " + timestampMs;
      }
    }
  }

  /**
   * Register the Collector with the default registry.
   */
  public <T extends Collector> T register() {
    return register(CollectorRegistry.defaultRegistry);
  }

  /**
   * Register the Collector with the given registry.
   */
  @SuppressWarnings("unchecked") // cast to the caller-chosen subtype is safe: the receiver is returned unchanged.
  public <T extends Collector> T register(CollectorRegistry registry) {
    registry.register(this);
    return (T) this;
  }

  public interface Describable {
    /**
     * Provide a list of metric families this Collector is expected to return.
     *
     * These should exclude the samples. This is used by the registry to
     * detect collisions and duplicate registrations.
     *
     * Usually custom collectors do not have to implement Describable. If
     * Describable is not implemented and the CollectorRegistry was created
     * with auto describe enabled (which is the case for the default registry)
     * then {@link #collect} will be called at registration time instead of
     * describe. If this could cause problems, either implement a proper
     * describe, or if that's not practical have describe return an empty
     * list.
     */
    List<MetricFamilySamples> describe();
  }

  /* Various utility functions for implementing Collectors. */

  /**
   * Number of nanoseconds in a second.
   */
  public static final double NANOSECONDS_PER_SECOND = 1E9;
  /**
   * Number of milliseconds in a second.
   */
  public static final double MILLISECONDS_PER_SECOND = 1E3;

  private static final Pattern METRIC_NAME_RE = Pattern.compile("[a-zA-Z_:][a-zA-Z0-9_:]*");
  private static final Pattern METRIC_LABEL_NAME_RE = Pattern.compile("[a-zA-Z_][a-zA-Z0-9_]*");
  private static final Pattern RESERVED_METRIC_LABEL_NAME_RE = Pattern.compile("__.*");

  /**
   * Throw an exception if the metric name is invalid.
   */
  protected static void checkMetricName(String name) {
    if (!METRIC_NAME_RE.matcher(name).matches()) {
      throw new IllegalArgumentException("Invalid metric name: " + name);
    }
  }

  private static final Pattern SANITIZE_PREFIX_PATTERN = Pattern.compile("^[^a-zA-Z_:]");
  private static final Pattern SANITIZE_BODY_PATTERN = Pattern.compile("[^a-zA-Z0-9_:]");

  /**
   * Sanitize metric name: replace an invalid leading character and every
   * invalid body character with {@code _}.
   */
  public static String sanitizeMetricName(String metricName) {
    return SANITIZE_BODY_PATTERN.matcher(
        SANITIZE_PREFIX_PATTERN.matcher(metricName).replaceFirst("_")
    ).replaceAll("_");
  }

  /**
   * Throw an exception if the metric label name is invalid.
   */
  protected static void checkMetricLabelName(String name) {
    if (!METRIC_LABEL_NAME_RE.matcher(name).matches()) {
      throw new IllegalArgumentException("Invalid metric label name: " + name);
    }
    if (RESERVED_METRIC_LABEL_NAME_RE.matcher(name).matches()) {
      throw new IllegalArgumentException("Invalid metric label name, reserved for internal use: " + name);
    }
  }

  /**
   * Convert a double to its string representation in Go.
   */
  public static String doubleToGoString(double d) {
    if (d == Double.POSITIVE_INFINITY) {
      return "+Inf";
    }
    if (d == Double.NEGATIVE_INFINITY) {
      return "-Inf";
    }
    return Double.toString(d);
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.runtime.operators;

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.functions.GroupCombineFunction;
import org.apache.flink.api.common.typeutils.TypeComparator;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.common.typeutils.base.DoubleSerializer;
import org.apache.flink.api.common.typeutils.base.IntComparator;
import org.apache.flink.api.common.typeutils.base.IntSerializer;
import org.apache.flink.api.common.typeutils.base.StringSerializer;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.typeutils.runtime.TupleComparator;
import org.apache.flink.api.java.typeutils.runtime.TupleSerializer;
import org.apache.flink.runtime.operators.testutils.UnaryOperatorTestBase;
import org.apache.flink.runtime.operators.testutils.UniformIntTupleGenerator;
import org.apache.flink.runtime.operators.testutils.UnionIterator;
import org.apache.flink.util.Collector;
import org.apache.flink.util.MutableObjectIterator;

import org.junit.Test;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Random;

import static org.junit.Assert.*;

/**
 * Test that checks how the combiner handles very large records that are too large to be written
 * into a fresh sort buffer.
 */
public class CombinerOversizedRecordsTest
        extends UnaryOperatorTestBase<
                GroupCombineFunction<
                        Tuple3<Integer, Integer, String>, Tuple3<Integer, Double, String>>,
                Tuple3<Integer, Integer, String>,
                Tuple3<Integer, Double, String>> {

    // Memory budget for the combiner; deliberately small so that the ~10 MB
    // string records below cannot fit into a fresh sort buffer.
    private static final long COMBINE_MEM = 3 * 1024 * 1024;

    // Fraction of the task's managed memory that COMBINE_MEM represents.
    private final double combine_frac;

    // Collects the combiner's emitted records for the assertions below.
    private final ArrayList<Tuple3<Integer, Double, String>> outList =
            new ArrayList<Tuple3<Integer, Double, String>>();

    // Serializer for the (key, value, payload) input records.
    private final TypeSerializer<Tuple3<Integer, Integer, String>> serializer =
            new TupleSerializer<Tuple3<Integer, Integer, String>>(
                    (Class<Tuple3<Integer, Integer, String>>) (Class<?>) Tuple3.class,
                    new TypeSerializer<?>[] {
                        IntSerializer.INSTANCE, IntSerializer.INSTANCE, StringSerializer.INSTANCE
                    });

    // Serializer for the combined (key, sum, payload) output records.
    private final TypeSerializer<Tuple3<Integer, Double, String>> outSerializer =
            new TupleSerializer<Tuple3<Integer, Double, String>>(
                    (Class<Tuple3<Integer, Double, String>>) (Class<?>) Tuple3.class,
                    new TypeSerializer<?>[] {
                        IntSerializer.INSTANCE, DoubleSerializer.INSTANCE, StringSerializer.INSTANCE
                    });

    // Grouping comparator: group by the first (int) field only.
    private final TypeComparator<Tuple3<Integer, Integer, String>> comparator =
            new TupleComparator<Tuple3<Integer, Integer, String>>(
                    new int[] {0},
                    new TypeComparator<?>[] {new IntComparator(true)},
                    new TypeSerializer<?>[] {IntSerializer.INSTANCE});

    // ------------------------------------------------------------------------

    public CombinerOversizedRecordsTest(ExecutionConfig config) {
        super(config, COMBINE_MEM, 0);
        combine_frac = (double) COMBINE_MEM / getMemoryManager().getMemorySize();
    }

    // Drives the combiner with three ~10 MB records interleaved with normal
    // ones and expects all three to be reported as oversized while the normal
    // records are still combined.
    @Test
    public void testOversizedRecordCombineTask() {
        try {
            final int keyCnt = 100;
            final int valCnt = 20;

            // create a long heavy string payload (~10 million random chars)
            StringBuilder bld = new StringBuilder(10 * 1024 * 1024);
            Random rnd = new Random();

            for (int i = 0; i < 10000000; i++) {
                bld.append((char) (rnd.nextInt(26) + 'a'));
            }

            String longString = bld.toString();
            bld = null; // release the builder so the test itself doesn't run out of heap

            // construct the input as a union of
            // 1) long string
            // 2) some random values
            // 3) long string
            // 4) random values
            // 5) long string

            // random values 1
            MutableObjectIterator<Tuple2<Integer, Integer>> gen1 =
                    new UniformIntTupleGenerator(keyCnt, valCnt, false);

            // random values 2
            MutableObjectIterator<Tuple2<Integer, Integer>> gen2 =
                    new UniformIntTupleGenerator(keyCnt, valCnt, false);

            @SuppressWarnings("unchecked")
            MutableObjectIterator<Tuple3<Integer, Integer, String>> input =
                    new UnionIterator<Tuple3<Integer, Integer, String>>(
                            new SingleValueIterator<Tuple3<Integer, Integer, String>>(
                                    new Tuple3<Integer, Integer, String>(-1, -1, longString)),
                            new StringIteratorDecorator(gen1),
                            new SingleValueIterator<Tuple3<Integer, Integer, String>>(
                                    new Tuple3<Integer, Integer, String>(-1, -1, longString)),
                            new StringIteratorDecorator(gen2),
                            new SingleValueIterator<Tuple3<Integer, Integer, String>>(
                                    new Tuple3<Integer, Integer, String>(-1, -1, longString)));

            setInput(input, serializer);
            addDriverComparator(this.comparator);
            addDriverComparator(this.comparator);
            setOutput(this.outList, this.outSerializer);

            getTaskConfig().setDriverStrategy(DriverStrategy.SORTED_GROUP_COMBINE);
            getTaskConfig().setRelativeMemoryDriver(combine_frac);
            getTaskConfig().setFilehandlesDriver(2);

            GroupReduceCombineDriver<
                            Tuple3<Integer, Integer, String>, Tuple3<Integer, Double, String>>
                    testTask =
                            new GroupReduceCombineDriver<
                                    Tuple3<Integer, Integer, String>,
                                    Tuple3<Integer, Double, String>>();

            testDriver(testTask, TestCombiner.class);

            // all three oversized records must have been detected
            assertEquals(3, testTask.getOversizedRecordCount());
            // each generator run produces keyCnt groups; depending on whether the
            // two runs were combined together, keyCnt or 2*keyCnt combined records
            // appear, plus the 3 oversized pass-through records
            assertTrue(keyCnt + 3 == outList.size() || 2 * keyCnt + 3 == outList.size());
        } catch (Exception e) {
            e.printStackTrace();
            fail(e.getMessage());
        }
    }

    // ------------------------------------------------------------------------

    // Sums the int values per key; forwards the last-seen string payload.
    public static final class TestCombiner
            implements GroupCombineFunction<
                    Tuple3<Integer, Integer, String>, Tuple3<Integer, Double, String>> {

        private static final long serialVersionUID = 1L;

        @Override
        public void combine(
                Iterable<Tuple3<Integer, Integer, String>> values,
                Collector<Tuple3<Integer, Double, String>> out) {
            int key = 0;
            int sum = 0;
            String someString = null;

            for (Tuple3<Integer, Integer, String> next : values) {
                key = next.f0;
                sum += next.f1;
                someString = next.f2;
            }

            out.collect(new Tuple3<Integer, Double, String>(key, (double) sum, someString));
        }
    }

    // ------------------------------------------------------------------------

    // Adapts a Tuple2 generator into the Tuple3 input shape by appending a
    // fixed short string payload.
    private static class StringIteratorDecorator
            implements MutableObjectIterator<Tuple3<Integer, Integer, String>> {

        private final MutableObjectIterator<Tuple2<Integer, Integer>> input;

        private StringIteratorDecorator(MutableObjectIterator<Tuple2<Integer, Integer>> input) {
            this.input = input;
        }

        @Override
        public Tuple3<Integer, Integer, String> next(Tuple3<Integer, Integer, String> reuse)
                throws IOException {
            Tuple2<Integer, Integer> next = input.next();
            if (next == null) {
                return null;
            } else {
                reuse.f0 = next.f0;
                reuse.f1 = next.f1;
                reuse.f2 = "test string";
                return reuse;
            }
        }

        @Override
        public Tuple3<Integer, Integer, String> next() throws IOException {
            Tuple2<Integer, Integer> next = input.next();
            if (next == null) {
                return null;
            } else {
                return new Tuple3<Integer, Integer, String>(next.f0, next.f1, "test string");
            }
        }
    }

    // ------------------------------------------------------------------------

    // An iterator that yields exactly one element, then is exhausted.
    private static class SingleValueIterator<T> implements MutableObjectIterator<T> {

        private final T value;

        private boolean pending = true;

        private SingleValueIterator(T value) {
            this.value = value;
        }

        @Override
        public T next(T reuse) {
            return next();
        }

        @Override
        public T next() {
            if (pending) {
                pending = false;
                return value;
            } else {
                return null;
            }
        }
    }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.trino.plugin.hive; import com.google.common.collect.AbstractIterator; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Ordering; import io.airlift.concurrent.BoundedExecutor; import io.airlift.stats.CounterStat; import io.airlift.units.DataSize; import io.trino.plugin.hive.authentication.HiveIdentity; import io.trino.plugin.hive.metastore.Column; import io.trino.plugin.hive.metastore.Partition; import io.trino.plugin.hive.metastore.SemiTransactionalHiveMetastore; import io.trino.plugin.hive.metastore.Table; import io.trino.plugin.hive.util.HiveBucketing.HiveBucketFilter; import io.trino.spi.TrinoException; import io.trino.spi.VersionEmbedder; import io.trino.spi.connector.ConnectorSession; import io.trino.spi.connector.ConnectorSplitManager; import io.trino.spi.connector.ConnectorSplitSource; import io.trino.spi.connector.ConnectorTableHandle; import io.trino.spi.connector.ConnectorTransactionHandle; import io.trino.spi.connector.DynamicFilter; import io.trino.spi.connector.FixedSplitSource; import io.trino.spi.connector.SchemaTableName; import io.trino.spi.connector.TableNotFoundException; import io.trino.spi.type.TypeManager; import org.weakref.jmx.Managed; import org.weakref.jmx.Nested; import javax.annotation.Nullable; import javax.inject.Inject; import java.util.Iterator; import java.util.List; 
import java.util.Map; import java.util.Optional; import java.util.concurrent.Executor; import java.util.concurrent.ExecutorService; import java.util.concurrent.RejectedExecutionException; import java.util.function.Function; import static com.google.common.base.MoreObjects.firstNonNull; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Strings.isNullOrEmpty; import static com.google.common.collect.Iterables.concat; import static com.google.common.collect.Iterables.getOnlyElement; import static com.google.common.collect.Iterables.transform; import static io.trino.plugin.hive.BackgroundHiveSplitLoader.BucketSplitInfo.createBucketSplitInfo; import static io.trino.plugin.hive.HiveErrorCode.HIVE_INVALID_METADATA; import static io.trino.plugin.hive.HiveErrorCode.HIVE_PARTITION_DROPPED_DURING_QUERY; import static io.trino.plugin.hive.HiveErrorCode.HIVE_PARTITION_SCHEMA_MISMATCH; import static io.trino.plugin.hive.HivePartition.UNPARTITIONED_ID; import static io.trino.plugin.hive.HiveSessionProperties.getDynamicFilteringProbeBlockingTimeout; import static io.trino.plugin.hive.HiveSessionProperties.isIgnoreAbsentPartitions; import static io.trino.plugin.hive.HiveSessionProperties.isOptimizeSymlinkListing; import static io.trino.plugin.hive.HiveSessionProperties.isUseOrcColumnNames; import static io.trino.plugin.hive.HiveSessionProperties.isUseParquetColumnNames; import static io.trino.plugin.hive.HiveStorageFormat.getHiveStorageFormat; import static io.trino.plugin.hive.TableToPartitionMapping.mapColumnsByIndex; import static io.trino.plugin.hive.metastore.MetastoreUtil.getProtectMode; import static io.trino.plugin.hive.metastore.MetastoreUtil.makePartitionName; import static io.trino.plugin.hive.metastore.MetastoreUtil.verifyOnline; import static io.trino.plugin.hive.util.HiveCoercionPolicy.canCoerce; import static io.trino.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR; import static 
io.trino.spi.StandardErrorCode.SERVER_SHUTTING_DOWN;
import static io.trino.spi.connector.ConnectorSplitManager.SplitSchedulingStrategy.GROUPED_SCHEDULING;
import static java.lang.Math.min;
import static java.lang.String.format;
import static java.util.Locale.ENGLISH;
import static java.util.Objects.requireNonNull;
import static org.apache.hadoop.hive.ql.io.AcidUtils.isTransactionalTable;

/**
 * Produces {@link ConnectorSplitSource}s for Hive tables: resolves the table
 * and its partitions from the metastore and hands them to a background split
 * loader that enumerates the underlying files.
 */
public class HiveSplitManager
        implements ConnectorSplitManager
{
    // Table-parameter keys that mark a table as offline / unreadable.
    public static final String PRESTO_OFFLINE = "presto_offline";
    public static final String OBJECT_NOT_READABLE = "object_not_readable";

    private final Function<HiveTransactionHandle, SemiTransactionalHiveMetastore> metastoreProvider;
    private final HivePartitionManager partitionManager;
    private final NamenodeStats namenodeStats;
    private final HdfsEnvironment hdfsEnvironment;
    private final DirectoryLister directoryLister;
    private final Executor executor;
    private final int maxOutstandingSplits;
    private final DataSize maxOutstandingSplitsSize;
    private final int minPartitionBatchSize;
    private final int maxPartitionBatchSize;
    private final int maxInitialSplits;
    private final int splitLoaderConcurrency;
    private final int maxSplitsPerSecond;
    private final boolean recursiveDfsWalkerEnabled;
    private final CounterStat highMemorySplitSourceCounter;
    private final TypeManager typeManager;

    /**
     * Injected constructor: unpacks the relevant settings from {@link HiveConfig}
     * and wraps the executor so submitted work carries the server version.
     */
    @Inject
    public HiveSplitManager(
            HiveConfig hiveConfig,
            Function<HiveTransactionHandle, SemiTransactionalHiveMetastore> metastoreProvider,
            HivePartitionManager partitionManager,
            NamenodeStats namenodeStats,
            HdfsEnvironment hdfsEnvironment,
            DirectoryLister directoryLister,
            ExecutorService executorService,
            VersionEmbedder versionEmbedder,
            TypeManager typeManager)
    {
        this(
                metastoreProvider,
                partitionManager,
                namenodeStats,
                hdfsEnvironment,
                directoryLister,
                versionEmbedder.embedVersion(new BoundedExecutor(executorService, hiveConfig.getMaxSplitIteratorThreads())),
                new CounterStat(),
                hiveConfig.getMaxOutstandingSplits(),
                hiveConfig.getMaxOutstandingSplitsSize(),
                hiveConfig.getMinPartitionBatchSize(),
                hiveConfig.getMaxPartitionBatchSize(),
                hiveConfig.getMaxInitialSplits(),
                hiveConfig.getSplitLoaderConcurrency(),
                hiveConfig.getMaxSplitsPerSecond(),
                hiveConfig.getRecursiveDirWalkerEnabled(),
                typeManager);
    }

    public HiveSplitManager(
            Function<HiveTransactionHandle, SemiTransactionalHiveMetastore> metastoreProvider,
            HivePartitionManager partitionManager,
            NamenodeStats namenodeStats,
            HdfsEnvironment hdfsEnvironment,
            DirectoryLister directoryLister,
            Executor executor,
            CounterStat highMemorySplitSourceCounter,
            int maxOutstandingSplits,
            DataSize maxOutstandingSplitsSize,
            int minPartitionBatchSize,
            int maxPartitionBatchSize,
            int maxInitialSplits,
            int splitLoaderConcurrency,
            @Nullable Integer maxSplitsPerSecond,
            boolean recursiveDfsWalkerEnabled,
            TypeManager typeManager)
    {
        this.metastoreProvider = requireNonNull(metastoreProvider, "metastoreProvider is null");
        this.partitionManager = requireNonNull(partitionManager, "partitionManager is null");
        this.namenodeStats = requireNonNull(namenodeStats, "namenodeStats is null");
        this.hdfsEnvironment = requireNonNull(hdfsEnvironment, "hdfsEnvironment is null");
        this.directoryLister = requireNonNull(directoryLister, "directoryLister is null");
        // Wrap so RejectedExecutionException during shutdown is reported as SERVER_SHUTTING_DOWN.
        this.executor = new ErrorCodedExecutor(executor);
        this.highMemorySplitSourceCounter = requireNonNull(highMemorySplitSourceCounter, "highMemorySplitSourceCounter is null");
        checkArgument(maxOutstandingSplits >= 1, "maxOutstandingSplits must be at least 1");
        this.maxOutstandingSplits = maxOutstandingSplits;
        this.maxOutstandingSplitsSize = maxOutstandingSplitsSize;
        this.minPartitionBatchSize = minPartitionBatchSize;
        this.maxPartitionBatchSize = maxPartitionBatchSize;
        this.maxInitialSplits = maxInitialSplits;
        this.splitLoaderConcurrency = splitLoaderConcurrency;
        // null means "unthrottled"
        this.maxSplitsPerSecond = firstNonNull(maxSplitsPerSecond, Integer.MAX_VALUE);
        this.recursiveDfsWalkerEnabled = recursiveDfsWalkerEnabled;
        this.typeManager = requireNonNull(typeManager, "typeManager is null");
    }

    /**
     * Resolves the table, loads and orders its partitions, then starts a
     * {@link BackgroundHiveSplitLoader} feeding a bounded {@link HiveSplitSource}.
     *
     * @throws TableNotFoundException if the table no longer exists
     * @throws HiveNotReadableException if the table is flagged {@code object_not_readable}
     */
    @Override
    public ConnectorSplitSource getSplits(
            ConnectorTransactionHandle transaction,
            ConnectorSession session,
            ConnectorTableHandle tableHandle,
            SplitSchedulingStrategy splitSchedulingStrategy,
            DynamicFilter dynamicFilter)
    {
        HiveTableHandle hiveTable = (HiveTableHandle) tableHandle;
        SchemaTableName tableName = hiveTable.getSchemaTableName();

        // get table metadata
        SemiTransactionalHiveMetastore metastore = metastoreProvider.apply((HiveTransactionHandle) transaction);
        Table table = metastore.getTable(new HiveIdentity(session), tableName.getSchemaName(), tableName.getTableName())
                .orElseThrow(() -> new TableNotFoundException(tableName));

        // verify table is not marked as non-readable
        String tableNotReadable = table.getParameters().get(OBJECT_NOT_READABLE);
        if (!isNullOrEmpty(tableNotReadable)) {
            throw new HiveNotReadableException(tableName, Optional.empty(), tableNotReadable);
        }

        // get partitions
        List<HivePartition> partitions = partitionManager.getOrLoadPartitions(metastore, new HiveIdentity(session), hiveTable);

        // short circuit if we don't have any partitions
        if (partitions.isEmpty()) {
            return new FixedSplitSource(ImmutableList.of());
        }

        // get buckets from first partition (arbitrary)
        Optional<HiveBucketFilter> bucketFilter = hiveTable.getBucketFilter();

        // validate bucket bucketed execution
        Optional<HiveBucketHandle> bucketHandle = hiveTable.getBucketHandle();
        if ((splitSchedulingStrategy == GROUPED_SCHEDULING) && bucketHandle.isEmpty()) {
            throw new TrinoException(GENERIC_INTERNAL_ERROR, "SchedulingPolicy is bucketed, but BucketHandle is not present");
        }

        // sort partitions
        partitions = Ordering.natural().onResultOf(HivePartition::getPartitionId).reverse().sortedCopy(partitions);

        Iterable<HivePartitionMetadata> hivePartitions = getPartitionMetadata(session, metastore, table, tableName, partitions, bucketHandle.map(HiveBucketHandle::toTableBucketProperty));

        // Only one thread per partition is usable when a table is not transactional
        int concurrency = isTransactionalTable(table.getParameters()) ? splitLoaderConcurrency : min(splitLoaderConcurrency, partitions.size());
        HiveSplitLoader hiveSplitLoader = new BackgroundHiveSplitLoader(
                table,
                hiveTable.getTransaction(),
                hivePartitions,
                hiveTable.getCompactEffectivePredicate(),
                dynamicFilter,
                getDynamicFilteringProbeBlockingTimeout(session),
                typeManager,
                createBucketSplitInfo(bucketHandle, bucketFilter),
                session,
                hdfsEnvironment,
                namenodeStats,
                directoryLister,
                executor,
                concurrency,
                recursiveDfsWalkerEnabled,
                !hiveTable.getPartitionColumns().isEmpty() && isIgnoreAbsentPartitions(session),
                isOptimizeSymlinkListing(session),
                metastore.getValidWriteIds(session, hiveTable)
                        .map(validTxnWriteIdList -> validTxnWriteIdList.getTableValidWriteIdList(table.getDatabaseName() + "." + table.getTableName())));

        HiveSplitSource splitSource;
        switch (splitSchedulingStrategy) {
            case UNGROUPED_SCHEDULING:
                splitSource = HiveSplitSource.allAtOnce(
                        session,
                        table.getDatabaseName(),
                        table.getTableName(),
                        maxInitialSplits,
                        maxOutstandingSplits,
                        maxOutstandingSplitsSize,
                        maxSplitsPerSecond,
                        hiveSplitLoader,
                        executor,
                        highMemorySplitSourceCounter);
                break;
            case GROUPED_SCHEDULING:
                splitSource = HiveSplitSource.bucketed(
                        session,
                        table.getDatabaseName(),
                        table.getTableName(),
                        maxInitialSplits,
                        maxOutstandingSplits,
                        maxOutstandingSplitsSize,
                        maxSplitsPerSecond,
                        hiveSplitLoader,
                        executor,
                        highMemorySplitSourceCounter);
                break;
            default:
                throw new IllegalArgumentException("Unknown splitSchedulingStrategy: " + splitSchedulingStrategy);
        }
        hiveSplitLoader.start(splitSource);

        return splitSource;
    }

    // JMX counter: split sources that hit the high-memory threshold.
    @Managed
    @Nested
    public CounterStat getHighMemorySplitSource()
    {
        return highMemorySplitSourceCounter;
    }

    // Resolves partition metadata in exponentially growing metastore batches.
    // NOTE: the body of this method continues beyond this chunk.
    private Iterable<HivePartitionMetadata> getPartitionMetadata(ConnectorSession session, SemiTransactionalHiveMetastore metastore, Table table, SchemaTableName tableName, List<HivePartition> hivePartitions, Optional<HiveBucketProperty> bucketProperty)
    {
        if
(hivePartitions.isEmpty()) { return ImmutableList.of(); } if (hivePartitions.size() == 1) { HivePartition firstPartition = getOnlyElement(hivePartitions); if (firstPartition.getPartitionId().equals(UNPARTITIONED_ID)) { return ImmutableList.of(new HivePartitionMetadata(firstPartition, Optional.empty(), TableToPartitionMapping.empty())); } } Optional<HiveStorageFormat> storageFormat = getHiveStorageFormat(table.getStorage().getStorageFormat()); Iterable<List<HivePartition>> partitionNameBatches = partitionExponentially(hivePartitions, minPartitionBatchSize, maxPartitionBatchSize); Iterable<List<HivePartitionMetadata>> partitionBatches = transform(partitionNameBatches, partitionBatch -> { Map<String, Optional<Partition>> batch = metastore.getPartitionsByNames( new HiveIdentity(session), tableName.getSchemaName(), tableName.getTableName(), Lists.transform(partitionBatch, HivePartition::getPartitionId)); ImmutableMap.Builder<String, Partition> partitionBuilder = ImmutableMap.builder(); for (Map.Entry<String, Optional<Partition>> entry : batch.entrySet()) { if (entry.getValue().isEmpty()) { throw new TrinoException(HIVE_PARTITION_DROPPED_DURING_QUERY, "Partition no longer exists: " + entry.getKey()); } partitionBuilder.put(entry.getKey(), entry.getValue().get()); } Map<String, Partition> partitions = partitionBuilder.build(); if (partitionBatch.size() != partitions.size()) { throw new TrinoException(GENERIC_INTERNAL_ERROR, format("Expected %s partitions but found %s", partitionBatch.size(), partitions.size())); } ImmutableList.Builder<HivePartitionMetadata> results = ImmutableList.builder(); for (HivePartition hivePartition : partitionBatch) { Partition partition = partitions.get(hivePartition.getPartitionId()); if (partition == null) { throw new TrinoException(GENERIC_INTERNAL_ERROR, "Partition not loaded: " + hivePartition); } String partName = makePartitionName(table, partition); // verify partition is online verifyOnline(tableName, Optional.of(partName), 
getProtectMode(partition), partition.getParameters()); // verify partition is not marked as non-readable String partitionNotReadable = partition.getParameters().get(OBJECT_NOT_READABLE); if (!isNullOrEmpty(partitionNotReadable)) { throw new HiveNotReadableException(tableName, Optional.of(partName), partitionNotReadable); } // Verify that the partition schema matches the table schema. // Either adding or dropping columns from the end of the table // without modifying existing partitions is allowed, but every // column that exists in both the table and partition must have // the same type. List<Column> tableColumns = table.getDataColumns(); List<Column> partitionColumns = partition.getColumns(); if ((tableColumns == null) || (partitionColumns == null)) { throw new TrinoException(HIVE_INVALID_METADATA, format("Table '%s' or partition '%s' has null columns", tableName, partName)); } TableToPartitionMapping tableToPartitionMapping = getTableToPartitionMapping(session, storageFormat, tableName, partName, tableColumns, partitionColumns); if (bucketProperty.isPresent()) { Optional<HiveBucketProperty> partitionBucketProperty = partition.getStorage().getBucketProperty(); if (partitionBucketProperty.isEmpty()) { throw new TrinoException(HIVE_PARTITION_SCHEMA_MISMATCH, format( "Hive table (%s) is bucketed but partition (%s) is not bucketed", hivePartition.getTableName(), hivePartition.getPartitionId())); } int tableBucketCount = bucketProperty.get().getBucketCount(); int partitionBucketCount = partitionBucketProperty.get().getBucketCount(); List<String> tableBucketColumns = bucketProperty.get().getBucketedBy(); List<String> partitionBucketColumns = partitionBucketProperty.get().getBucketedBy(); if (!tableBucketColumns.equals(partitionBucketColumns) || !isBucketCountCompatible(tableBucketCount, partitionBucketCount)) { throw new TrinoException(HIVE_PARTITION_SCHEMA_MISMATCH, format( "Hive table (%s) bucketing (columns=%s, buckets=%s) is not compatible with partition (%s) 
bucketing (columns=%s, buckets=%s)", hivePartition.getTableName(), tableBucketColumns, tableBucketCount, hivePartition.getPartitionId(), partitionBucketColumns, partitionBucketCount)); } } results.add(new HivePartitionMetadata(hivePartition, Optional.of(partition), tableToPartitionMapping)); } return results.build(); }); return concat(partitionBatches); } private TableToPartitionMapping getTableToPartitionMapping(ConnectorSession session, Optional<HiveStorageFormat> storageFormat, SchemaTableName tableName, String partName, List<Column> tableColumns, List<Column> partitionColumns) { if (storageFormat.isPresent() && isPartitionUsesColumnNames(session, storageFormat.get())) { return getTableToPartitionMappingByColumnNames(tableName, partName, tableColumns, partitionColumns); } ImmutableMap.Builder<Integer, HiveTypeName> columnCoercions = ImmutableMap.builder(); for (int i = 0; i < min(partitionColumns.size(), tableColumns.size()); i++) { HiveType tableType = tableColumns.get(i).getType(); HiveType partitionType = partitionColumns.get(i).getType(); if (!tableType.equals(partitionType)) { if (!canCoerce(typeManager, partitionType, tableType)) { throw tablePartitionColumnMismatchException(tableName, partName, tableColumns.get(i).getName(), tableType, partitionColumns.get(i).getName(), partitionType); } columnCoercions.put(i, partitionType.getHiveTypeName()); } } return mapColumnsByIndex(columnCoercions.build()); } private static boolean isPartitionUsesColumnNames(ConnectorSession session, HiveStorageFormat storageFormat) { switch (storageFormat) { case AVRO: return true; case JSON: return true; case ORC: return isUseOrcColumnNames(session); case PARQUET: return isUseParquetColumnNames(session); default: return false; } } private TableToPartitionMapping getTableToPartitionMappingByColumnNames(SchemaTableName tableName, String partName, List<Column> tableColumns, List<Column> partitionColumns) { ImmutableMap.Builder<String, Integer> partitionColumnIndexesBuilder = 
ImmutableMap.builder(); for (int i = 0; i < partitionColumns.size(); i++) { partitionColumnIndexesBuilder.put(partitionColumns.get(i).getName().toLowerCase(ENGLISH), i); } Map<String, Integer> partitionColumnsByIndex = partitionColumnIndexesBuilder.build(); ImmutableMap.Builder<Integer, HiveTypeName> columnCoercions = ImmutableMap.builder(); ImmutableMap.Builder<Integer, Integer> tableToPartitionColumns = ImmutableMap.builder(); for (int tableColumnIndex = 0; tableColumnIndex < tableColumns.size(); tableColumnIndex++) { Column tableColumn = tableColumns.get(tableColumnIndex); HiveType tableType = tableColumn.getType(); Integer partitionColumnIndex = partitionColumnsByIndex.get(tableColumn.getName().toLowerCase(ENGLISH)); if (partitionColumnIndex == null) { continue; } tableToPartitionColumns.put(tableColumnIndex, partitionColumnIndex); Column partitionColumn = partitionColumns.get(partitionColumnIndex); HiveType partitionType = partitionColumn.getType(); if (!tableType.equals(partitionType)) { if (!canCoerce(typeManager, partitionType, tableType)) { throw tablePartitionColumnMismatchException(tableName, partName, tableColumn.getName(), tableType, partitionColumn.getName(), partitionType); } columnCoercions.put(partitionColumnIndex, partitionType.getHiveTypeName()); } } return new TableToPartitionMapping(Optional.of(tableToPartitionColumns.build()), columnCoercions.build()); } private TrinoException tablePartitionColumnMismatchException(SchemaTableName tableName, String partName, String tableColumnName, HiveType tableType, String partitionColumnName, HiveType partitionType) { return new TrinoException(HIVE_PARTITION_SCHEMA_MISMATCH, format("" + "There is a mismatch between the table and partition schemas. " + "The types are incompatible and cannot be coerced. 
" + "The column '%s' in table '%s' is declared as type '%s', " + "but partition '%s' declared column '%s' as type '%s'.", tableColumnName, tableName, tableType, partName, partitionColumnName, partitionType)); } static boolean isBucketCountCompatible(int tableBucketCount, int partitionBucketCount) { checkArgument(tableBucketCount > 0 && partitionBucketCount > 0); int larger = Math.max(tableBucketCount, partitionBucketCount); int smaller = min(tableBucketCount, partitionBucketCount); if (larger % smaller != 0) { // must be evenly divisible return false; } // ratio must be power of two return Integer.bitCount(larger / smaller) == 1; } /** * Partition the given list in exponentially (power of 2) increasing batch sizes starting at 1 up to maxBatchSize */ private static <T> Iterable<List<T>> partitionExponentially(List<T> values, int minBatchSize, int maxBatchSize) { return () -> new AbstractIterator<>() { private int currentSize = minBatchSize; private final Iterator<T> iterator = values.iterator(); @Override protected List<T> computeNext() { if (!iterator.hasNext()) { return endOfData(); } int count = 0; ImmutableList.Builder<T> builder = ImmutableList.builder(); while (iterator.hasNext() && count < currentSize) { builder.add(iterator.next()); ++count; } currentSize = min(maxBatchSize, currentSize * 2); return builder.build(); } }; } private static class ErrorCodedExecutor implements Executor { private final Executor delegate; private ErrorCodedExecutor(Executor delegate) { this.delegate = requireNonNull(delegate, "delegate is null"); } @Override public void execute(Runnable command) { try { delegate.execute(command); } catch (RejectedExecutionException e) { throw new TrinoException(SERVER_SHUTTING_DOWN, "Server is shutting down", e); } } } }
package util;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.logging.Logger;

/**
 * Singleton class representing the configuration file for ChefMateServer.
 * Configuration is persisted as a {@link Properties} file ({@code chefmate.conf})
 * inside the server environment directory under the user's home directory.
 *
 * @author Tobias Freundorfer
 */
public class Config
{
	/**
	 * Logger for this class.
	 * FIX: was bound to {@code EnvironmentInitializer.class}, so log records from
	 * Config appeared under the wrong logger name.
	 */
	private static final Logger logger = Logger.getLogger(Config.class.getName());

	/**
	 * The singleton instance.
	 */
	private static Config instance;

	/**
	 * The name of the configuration file.
	 */
	public static final String CONFIG_FILENAME = "chefmate.conf";

	/**
	 * Section of property keys.
	 */
	public static final String PROPKEY_CHEF_REPO_NAME = "chef_repo_name";
	public static final String PROPKEY_CHEF_REPO_URL = "chef_repo_url";
	public static final String PROPKEY_CHEF_REPO_BRANCH = "chef_repo_branch";
	public static final String PROPKEY_AWS_ACCESS_KEY = "aws_access_key";
	public static final String PROPKEY_AWS_SECRET_ACCESS_KEY = "aws_secret_access_key";
	public static final String PROPKEY_AWS_SSH_KEY_NAME = "aws_ssh_key_name";
	public static final String PROPKEY_CHEFMATEINFO_FROM_CHEF_KEYWORD_PUBLICDNS = "chefmate_from_chef_keyword_publicdns";
	public static final String PROPKEY_CHEFMATEINFO_FROM_CHEF_KEYWORD_INSTANCEID = "chefmate_from_chef_keyword_instanceid";

	/**
	 * The home directory of the user executing this.
	 */
	private String homeDir = "";

	/**
	 * The directory that should be used.
	 */
	private String serverEnvDir = "";

	/**
	 * The Chef repository URL.
	 */
	private String chefRepoURL = "";

	/**
	 * The branch that should be used.
	 */
	private String chefRepoBranch = "";

	/**
	 * The name of the Chef.io repository.
	 */
	private String chefRepoName = "";

	/**
	 * The path to the ChefMateServer AWS Provisioning init script.
	 */
	private String chefProvisioningInitScriptPath = "";

	/**
	 * The access key for AWS.
	 */
	private String awsAccessKey = "";

	/**
	 * The secret access key for AWS. Has to be set manually in the config file
	 * for security reasons!!
	 */
	private String awsSecretAccessKey = "";

	/**
	 * The name of the ssh key used.
	 */
	private String awsSSHKeyName = "";

	/**
	 * The keyword for public DNS information received from Chef.
	 */
	private String chefMateInfo_Keyword_PublicDNS = "";

	/**
	 * The keyword for instance id information received from Chef.
	 */
	private String chefMateInfo_Keyword_InstanceID = "";

	/**
	 * Private constructor.
	 *
	 * @param writeDefault
	 *            Whether the default config file should be written or not.
	 * @param reload
	 *            Whether the config file should be reloaded or not.
	 */
	private Config(boolean writeDefault, boolean reload)
	{
		this.homeDir = System.getProperty("user.home") + "/";
		this.serverEnvDir = this.homeDir + "chefmateserver/";
		if (writeDefault)
		{
			this.writeDefaultConfigFile();
		}
		if (reload)
		{
			this.readConfig();
		}
	}

	/**
	 * Returns the singleton instance. Note that the flags only take effect on
	 * the first call, when the instance is actually created.
	 *
	 * @param writeDefault
	 *            Whether the default config file should be written or not.
	 * @param reload
	 *            Whether the config file should be reloaded or not.
	 * @return The singleton instance.
	 */
	public static synchronized Config getInstance(boolean writeDefault, boolean reload)
	{
		if (Config.instance == null)
		{
			Config.instance = new Config(writeDefault, reload);
		}
		return Config.instance;
	}

	/**
	 * Writes the default configuration properties to the config file.
	 */
	public void writeDefaultConfigFile()
	{
		this.createEnvDir();
		Properties properties = new Properties();
		logger.info("### Writing default config file.");
		properties.setProperty(PROPKEY_CHEF_REPO_NAME, "LabCourse-group4-SS2016-CHEFrepo");
		properties.setProperty(PROPKEY_CHEF_REPO_URL, "https://github.com/tfreundo/LabCourse-group4-SS2016-CHEFrepo.git");
		properties.setProperty(PROPKEY_CHEF_REPO_BRANCH, "development");
		properties.setProperty(PROPKEY_AWS_ACCESS_KEY, "TODO_SET_THIS_MANUALLY_DUE_TO_SECURITY_REASONS");
		properties.setProperty(PROPKEY_AWS_SECRET_ACCESS_KEY, "TODO_SET_THIS_MANUALLY_DUE_TO_SECURITY_REASONS");
		properties.setProperty(PROPKEY_AWS_SSH_KEY_NAME, "chefmateserver_key");
		properties.setProperty(PROPKEY_CHEFMATEINFO_FROM_CHEF_KEYWORD_PUBLICDNS, "CHEFMATEINFO::PublicDNS=");
		properties.setProperty(PROPKEY_CHEFMATEINFO_FROM_CHEF_KEYWORD_INSTANCEID, "CHEFMATEINFO::InstanceID=");
		// FIX: try-with-resources — the stream previously leaked when store() threw.
		try (BufferedOutputStream stream = new BufferedOutputStream(
				new FileOutputStream(this.serverEnvDir + CONFIG_FILENAME)))
		{
			properties.store(stream, "---Environment Initializer Config file---");
		}
		catch (IOException e)
		{
			e.printStackTrace();
		}
	}

	/**
	 * Reads the properties from the config file and caches them in this
	 * instance's fields. I/O errors are reported but do not abort startup.
	 */
	private void readConfig()
	{
		Properties properties = new Properties();
		logger.info("### Reading config file.");
		// FIX: try-with-resources — the stream previously leaked when load() threw.
		try (BufferedInputStream stream = new BufferedInputStream(
				new FileInputStream(this.getServerEnvDir() + CONFIG_FILENAME)))
		{
			properties.load(stream);
			this.chefRepoName = properties.getProperty(PROPKEY_CHEF_REPO_NAME);
			this.chefRepoURL = properties.getProperty(PROPKEY_CHEF_REPO_URL);
			this.chefRepoBranch = properties.getProperty(PROPKEY_CHEF_REPO_BRANCH);
			this.awsAccessKey = properties.getProperty(PROPKEY_AWS_ACCESS_KEY);
			this.awsSecretAccessKey = properties.getProperty(PROPKEY_AWS_SECRET_ACCESS_KEY);
			this.awsSSHKeyName = properties.getProperty(PROPKEY_AWS_SSH_KEY_NAME);
			this.chefMateInfo_Keyword_InstanceID = properties
					.getProperty(PROPKEY_CHEFMATEINFO_FROM_CHEF_KEYWORD_INSTANCEID);
			this.chefMateInfo_Keyword_PublicDNS = properties
					.getProperty(PROPKEY_CHEFMATEINFO_FROM_CHEF_KEYWORD_PUBLICDNS);
			this.chefProvisioningInitScriptPath = this.serverEnvDir + this.chefRepoName
					+ "/initScripts/chefMateServerChefProvisioningSetup.sh";
		}
		catch (IOException e)
		{
			e.printStackTrace();
		}
	}

	/**
	 * Creates the server environment directory.
	 */
	private void createEnvDir()
	{
		logger.info("### Creating environment in directory " + this.serverEnvDir + ".");
		// commands
		List<String> commands = new ArrayList<>();
		commands.add("mkdir");
		commands.add(this.serverEnvDir);
		// FIX: removed a dead ProcessBuilder that was configured but never started;
		// ShellExecutor performs the actual execution of the command list.
		ShellExecutor.execute(this.getHomeDir(), commands);
	}

	/**
	 * Section for getters.
	 */
	public String getHomeDir()
	{
		return homeDir;
	}

	public String getServerEnvDir()
	{
		return serverEnvDir;
	}

	public String getChefRepoURL()
	{
		return chefRepoURL;
	}

	public String getChefRepoName()
	{
		return chefRepoName;
	}

	public String getChefRepoBranch()
	{
		return chefRepoBranch;
	}

	public String getChefRepoPath()
	{
		return this.serverEnvDir + this.chefRepoName;
	}

	public String getChefCookbooksPath()
	{
		return this.getChefRepoPath() + "/cookbooks";
	}

	public String getChefAttributesPath(String cookbookname)
	{
		return this.getChefCookbooksPath() + "/" + cookbookname + "/attributes";
	}

	public String getChefAttributesDefaultFilename(String cookbookname)
	{
		return this.getChefAttributesPath(cookbookname) + "/default.rb";
	}

	public String getChefSSHKeyPath()
	{
		return this.getServerEnvDir() + ".ssh/";
	}

	public String getChefProvisioningInitScriptPath()
	{
		return chefProvisioningInitScriptPath;
	}

	public String getAwsSecretAccessKey()
	{
		return awsSecretAccessKey;
	}

	public String getDefaultSSHKeyPath()
	{
		return this.homeDir + ".ssh/";
	}

	public String getAwsSSHKeyName()
	{
		return awsSSHKeyName;
	}

	public String getAwsAccessKey()
	{
		return awsAccessKey;
	}

	public String getAwsCredentialsFile()
	{
		return this.homeDir + ".aws/credentials";
	}

	public String getChefMateInfo_Keyword_PublicDNS()
	{
		return chefMateInfo_Keyword_PublicDNS;
	}

	public String getChefMateInfo_Keyword_InstanceID()
	{
		return chefMateInfo_Keyword_InstanceID;
	}
}
// NOTE(review): this region is stored with collapsed formatting (many statements per
// physical line); reviewer comments below summarize each collapsed line, code unchanged.
// Next two lines (a // comment is split across their boundary, so no comment may be
// inserted between them): package/imports and class header (extends
// PluggableFlowableTestCase); setUp() resets the engine clock and registers a database
// EventLogger listener, tearDown() unregisters it. testDatabaseEvents(): deploys the
// tenant-scoped test process (as a Flowable 5 definition), starts an instance with
// testVar=helloWorld, filters the event log down to this process definition, and expects
// exactly 15 entries; entry 0 must be VARIABLE_CREATED with definition/instance ids,
// value string and tenant id present, and entry 1 PROCESSINSTANCE_START begins.
package org.activiti.engine.test.api.event; import java.io.IOException; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import org.activiti.engine.impl.event.logger.EventLogger; import org.activiti.engine.impl.event.logger.handler.Fields; import org.activiti.engine.impl.test.PluggableFlowableTestCase; import org.activiti.engine.impl.util.CollectionUtil; import org.flowable.engine.common.api.delegate.event.FlowableEngineEventType; import org.flowable.engine.common.impl.identity.Authentication; import org.flowable.engine.event.EventLogEntry; import org.flowable.engine.repository.DeploymentProperties; import org.flowable.engine.runtime.ProcessInstance; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; /** * @author Joram Barrez */ public class DatabaseEventLoggerTest extends PluggableFlowableTestCase { protected EventLogger databaseEventLogger; protected ObjectMapper objectMapper = new ObjectMapper(); @Override protected void setUp() throws Exception { super.setUp(); // Database event logger setup processEngineConfiguration.resetClock(); databaseEventLogger = new EventLogger(processEngineConfiguration.getClock(), processEngineConfiguration.getObjectMapper()); processEngineConfiguration.getEventDispatcher().addEventListener(databaseEventLogger); } @Override protected void tearDown() throws Exception { // Database event logger teardown processEngineConfiguration.getEventDispatcher().removeEventListener(databaseEventLogger); super.tearDown(); } public void testDatabaseEvents() throws IOException { Authentication.setAuthenticatedUserId(null); String testTenant = "testTenant"; String deploymentId = repositoryService.createDeployment() .addClasspathResource("org/activiti/engine/test/api/event/DatabaseEventLoggerProcess.bpmn20.xml") .tenantId(testTenant) .deploymentProperty(DeploymentProperties.DEPLOY_AS_FLOWABLE5_PROCESS_DEFINITION, Boolean.TRUE) .deploy().getId(); // 
Run process to gather data ProcessInstance processInstance = runtimeService.startProcessInstanceByKeyAndTenantId("DatabaseEventLoggerProcess", CollectionUtil.singletonMap("testVar", "helloWorld"), testTenant); // Verify event log entries List<EventLogEntry> eventLogEntries = managementService.getEventLogEntries(null, null); String processDefinitionId = processInstance.getProcessDefinitionId(); Iterator<EventLogEntry> iterator = eventLogEntries.iterator(); while (iterator.hasNext()) { EventLogEntry entry = iterator.next(); if (entry.getProcessDefinitionId() != null && !entry.getProcessDefinitionId().equals(processDefinitionId)) { iterator.remove(); } } assertEquals(15, eventLogEntries.size()); long lastLogNr = -1; for (int i = 0; i < eventLogEntries.size(); i++) { EventLogEntry entry = eventLogEntries.get(i); if (i == 0) { assertNotNull(entry.getType()); assertEquals(entry.getType(), FlowableEngineEventType.VARIABLE_CREATED.name()); assertNotNull(entry.getProcessDefinitionId()); assertNotNull(entry.getProcessInstanceId()); assertNotNull(entry.getTimeStamp()); assertNull(entry.getTaskId()); Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() { }); assertNotNull(data.get(Fields.PROCESS_DEFINITION_ID)); assertNotNull(data.get(Fields.PROCESS_INSTANCE_ID)); assertNotNull(data.get(Fields.VALUE_STRING)); assertEquals(testTenant, data.get(Fields.TENANT_ID)); } // process instance start if (i == 1) { assertNotNull(entry.getType()); assertEquals("PROCESSINSTANCE_START", entry.getType()); assertNotNull(entry.getProcessDefinitionId()); assertNotNull(entry.getProcessInstanceId()); assertNotNull(entry.getTimeStamp()); assertNull(entry.getExecutionId()); assertNull(entry.getTaskId()); Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() { }); assertNotNull(data.get(Fields.ID)); assertNotNull(data.get(Fields.PROCESS_DEFINITION_ID)); 
// Line below: PROCESSINSTANCE_START data checks (tenant id, a single start variable
// testVar=helloWorld, and no name/business key); ACTIVITY_STARTED assertions for entries
// 2/5/8/12 (activity id/type, behavior class, tenant); entry 3 is the start event's
// ACTIVITY_COMPLETED ("startEvent1").
assertNotNull(data.get(Fields.TENANT_ID)); assertEquals(testTenant, data.get(Fields.TENANT_ID)); Map<String, Object> variableMap = (Map<String, Object>) data.get(Fields.VARIABLES); assertEquals(1, variableMap.size()); assertEquals("helloWorld", variableMap.get("testVar")); assertFalse(data.containsKey(Fields.NAME)); assertFalse(data.containsKey(Fields.BUSINESS_KEY)); } // Activity started if (i == 2 || i == 5 || i == 8 || i == 12) { assertNotNull(entry.getType()); assertEquals(entry.getType(), FlowableEngineEventType.ACTIVITY_STARTED.name()); assertNotNull(entry.getProcessDefinitionId()); assertNotNull(entry.getProcessInstanceId()); assertNotNull(entry.getTimeStamp()); assertNotNull(entry.getExecutionId()); assertNull(entry.getTaskId()); Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() { }); assertNotNull(data.get(Fields.ACTIVITY_ID)); assertNotNull(data.get(Fields.PROCESS_DEFINITION_ID)); assertNotNull(data.get(Fields.PROCESS_INSTANCE_ID)); assertNotNull(data.get(Fields.EXECUTION_ID)); assertNotNull(data.get(Fields.ACTIVITY_TYPE)); assertNotNull(data.get(Fields.BEHAVIOR_CLASS)); assertEquals(testTenant, data.get(Fields.TENANT_ID)); } // Leaving start if (i == 3) { assertNotNull(entry.getType()); assertEquals(entry.getType(), FlowableEngineEventType.ACTIVITY_COMPLETED.name()); assertNotNull(entry.getProcessDefinitionId()); assertNotNull(entry.getProcessInstanceId()); assertNotNull(entry.getTimeStamp()); assertNotNull(entry.getExecutionId()); assertNull(entry.getTaskId()); Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() { }); assertNotNull(data.get(Fields.ACTIVITY_ID)); assertEquals("startEvent1", data.get(Fields.ACTIVITY_ID)); assertNotNull(data.get(Fields.PROCESS_DEFINITION_ID)); assertNotNull(data.get(Fields.PROCESS_INSTANCE_ID)); assertNotNull(data.get(Fields.EXECUTION_ID)); assertNotNull(data.get(Fields.ACTIVITY_TYPE)); 
// Line below: tail of the start-event completion assertions; SEQUENCEFLOW_TAKEN checks
// for entries 4/7/11 (source/target activity id, name, type, behavior class); entry 6 is
// the parallel gateway's ACTIVITY_COMPLETED; entries 10/14 begin the task checks.
assertNotNull(data.get(Fields.BEHAVIOR_CLASS)); assertEquals(testTenant, data.get(Fields.TENANT_ID)); } // Sequence flow taken if (i == 4 || i == 7 || i == 11) { assertNotNull(entry.getType()); assertEquals(entry.getType(), FlowableEngineEventType.SEQUENCEFLOW_TAKEN.name()); assertNotNull(entry.getProcessDefinitionId()); assertNotNull(entry.getProcessInstanceId()); assertNotNull(entry.getTimeStamp()); assertNotNull(entry.getExecutionId()); assertNull(entry.getTaskId()); Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() { }); assertNotNull(data.get(Fields.ID)); assertNotNull(data.get(Fields.SOURCE_ACTIVITY_ID)); assertNotNull(data.get(Fields.SOURCE_ACTIVITY_NAME)); assertNotNull(data.get(Fields.SOURCE_ACTIVITY_TYPE)); assertNotNull(data.get(Fields.SOURCE_ACTIVITY_BEHAVIOR_CLASS)); assertNotNull(data.get(Fields.TARGET_ACTIVITY_ID)); assertNotNull(data.get(Fields.TARGET_ACTIVITY_NAME)); assertNotNull(data.get(Fields.TARGET_ACTIVITY_TYPE)); assertNotNull(data.get(Fields.TARGET_ACTIVITY_BEHAVIOR_CLASS)); assertEquals(testTenant, data.get(Fields.TENANT_ID)); } // Leaving parallel gateway if (i == 6) { assertNotNull(entry.getType()); assertEquals(entry.getType(), FlowableEngineEventType.ACTIVITY_COMPLETED.name()); assertNotNull(entry.getProcessDefinitionId()); assertNotNull(entry.getProcessInstanceId()); assertNotNull(entry.getTimeStamp()); assertNotNull(entry.getExecutionId()); assertNull(entry.getTaskId()); Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() { }); assertNotNull(data.get(Fields.ACTIVITY_ID)); assertNotNull(data.get(Fields.PROCESS_DEFINITION_ID)); assertNotNull(data.get(Fields.PROCESS_INSTANCE_ID)); assertNotNull(data.get(Fields.EXECUTION_ID)); assertNotNull(data.get(Fields.ACTIVITY_TYPE)); assertNotNull(data.get(Fields.BEHAVIOR_CLASS)); assertEquals(testTenant, data.get(Fields.TENANT_ID)); } // Tasks if (i == 10 || i == 14) { 
// Line below: TASK_CREATED assertions (entries 10/14) — id/name/assignee/create
// time/priority present, optional fields (description, category, owner, due date, form
// key, user id) absent — and the matching TASK_ASSIGNED assertions for entries 9/13.
assertNotNull(entry.getType()); assertEquals(entry.getType(), FlowableEngineEventType.TASK_CREATED.name()); assertNotNull(entry.getTimeStamp()); assertNotNull(entry.getProcessDefinitionId()); assertNotNull(entry.getProcessInstanceId()); assertNotNull(entry.getExecutionId()); assertNotNull(entry.getTaskId()); Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() { }); assertNotNull(data.get(Fields.ID)); assertNotNull(data.get(Fields.NAME)); assertNotNull(data.get(Fields.ASSIGNEE)); assertNotNull(data.get(Fields.CREATE_TIME)); assertNotNull(data.get(Fields.PRIORITY)); assertNotNull(data.get(Fields.PROCESS_DEFINITION_ID)); assertNotNull(data.get(Fields.EXECUTION_ID)); assertNotNull(data.get(Fields.TENANT_ID)); assertFalse(data.containsKey(Fields.DESCRIPTION)); assertFalse(data.containsKey(Fields.CATEGORY)); assertFalse(data.containsKey(Fields.OWNER)); assertFalse(data.containsKey(Fields.DUE_DATE)); assertFalse(data.containsKey(Fields.FORM_KEY)); assertFalse(data.containsKey(Fields.USER_ID)); assertEquals(testTenant, data.get(Fields.TENANT_ID)); } if (i == 9 || i == 13) { assertNotNull(entry.getType()); assertEquals(entry.getType(), FlowableEngineEventType.TASK_ASSIGNED.name()); assertNotNull(entry.getTimeStamp()); assertNotNull(entry.getProcessDefinitionId()); assertNotNull(entry.getProcessInstanceId()); assertNotNull(entry.getExecutionId()); assertNotNull(entry.getTaskId()); Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() { }); assertNotNull(data.get(Fields.ID)); assertNotNull(data.get(Fields.NAME)); assertNotNull(data.get(Fields.ASSIGNEE)); assertNotNull(data.get(Fields.CREATE_TIME)); assertNotNull(data.get(Fields.PRIORITY)); assertNotNull(data.get(Fields.PROCESS_DEFINITION_ID)); assertNotNull(data.get(Fields.EXECUTION_ID)); assertNotNull(data.get(Fields.TENANT_ID)); assertFalse(data.containsKey(Fields.DESCRIPTION)); 
// Line below: remaining TASK_ASSIGNED absent-field checks; tracks the last log number;
// completes both open tasks (authenticated as each assignee, with variable test=test);
// re-queries the next 15 log entries; TASK_COMPLETED assertions for entries 1/6 including
// the completion variable map.
assertFalse(data.containsKey(Fields.CATEGORY)); assertFalse(data.containsKey(Fields.OWNER)); assertFalse(data.containsKey(Fields.DUE_DATE)); assertFalse(data.containsKey(Fields.FORM_KEY)); assertFalse(data.containsKey(Fields.USER_ID)); assertEquals(testTenant, data.get(Fields.TENANT_ID)); } lastLogNr = entry.getLogNumber(); } // Completing two tasks for (org.flowable.task.api.Task task : taskService.createTaskQuery().list()) { Authentication.setAuthenticatedUserId(task.getAssignee()); Map<String, Object> varMap = new HashMap<String, Object>(); varMap.put("test", "test"); taskService.complete(task.getId(), varMap); Authentication.setAuthenticatedUserId(null); } // Verify events eventLogEntries = managementService.getEventLogEntries(lastLogNr, 100L); assertEquals(15, eventLogEntries.size()); for (int i = 0; i < eventLogEntries.size(); i++) { EventLogEntry entry = eventLogEntries.get(i); // org.flowable.task.service.Task completion if (i == 1 || i == 6) { assertNotNull(entry.getType()); assertEquals(FlowableEngineEventType.TASK_COMPLETED.name(), entry.getType()); assertNotNull(entry.getProcessDefinitionId()); assertNotNull(entry.getProcessInstanceId()); assertNotNull(entry.getExecutionId()); assertNotNull(entry.getTaskId()); Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() { }); assertNotNull(data.get(Fields.ID)); assertNotNull(data.get(Fields.NAME)); assertNotNull(data.get(Fields.ASSIGNEE)); assertNotNull(data.get(Fields.CREATE_TIME)); assertNotNull(data.get(Fields.PRIORITY)); assertNotNull(data.get(Fields.PROCESS_DEFINITION_ID)); assertNotNull(data.get(Fields.EXECUTION_ID)); assertNotNull(data.get(Fields.TENANT_ID)); assertNotNull(data.get(Fields.USER_ID)); Map<String, Object> variableMap = (Map<String, Object>) data.get(Fields.VARIABLES); assertEquals(1, variableMap.size()); assertEquals("test", variableMap.get("test")); assertFalse(data.containsKey(Fields.DESCRIPTION)); 
// Line below: completed-task absent-field checks; ACTIVITY_COMPLETED assertions for
// entries 2/7/10/13 with expected activity types (userTask, userTask, parallelGateway,
// endEvent); SEQUENCEFLOW_TAKEN assertions for entries 3/8/11 begin.
assertFalse(data.containsKey(Fields.CATEGORY)); assertFalse(data.containsKey(Fields.OWNER)); assertFalse(data.containsKey(Fields.DUE_DATE)); assertFalse(data.containsKey(Fields.FORM_KEY)); assertEquals(testTenant, data.get(Fields.TENANT_ID)); } // Activity Completed if (i == 2 || i == 7 || i == 10 || i == 13) { assertNotNull(entry.getType()); assertEquals(FlowableEngineEventType.ACTIVITY_COMPLETED.name(), entry.getType()); assertNotNull(entry.getProcessDefinitionId()); assertNotNull(entry.getProcessInstanceId()); assertNotNull(entry.getTimeStamp()); assertNotNull(entry.getExecutionId()); assertNull(entry.getTaskId()); Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() { }); assertNotNull(data.get(Fields.ACTIVITY_ID)); assertNotNull(data.get(Fields.PROCESS_DEFINITION_ID)); assertNotNull(data.get(Fields.PROCESS_INSTANCE_ID)); assertNotNull(data.get(Fields.EXECUTION_ID)); assertNotNull(data.get(Fields.ACTIVITY_TYPE)); assertNotNull(data.get(Fields.BEHAVIOR_CLASS)); assertEquals(testTenant, data.get(Fields.TENANT_ID)); if (i == 2) { assertEquals("userTask", data.get(Fields.ACTIVITY_TYPE)); } else if (i == 7) { assertEquals("userTask", data.get(Fields.ACTIVITY_TYPE)); } else if (i == 10) { assertEquals("parallelGateway", data.get(Fields.ACTIVITY_TYPE)); } else if (i == 13) { assertEquals("endEvent", data.get(Fields.ACTIVITY_TYPE)); } } // Sequence flow taken if (i == 3 || i == 8 || i == 11) { assertNotNull(entry.getType()); assertEquals(entry.getType(), FlowableEngineEventType.SEQUENCEFLOW_TAKEN.name()); assertNotNull(entry.getProcessDefinitionId()); assertNotNull(entry.getProcessInstanceId()); assertNotNull(entry.getTimeStamp()); assertNotNull(entry.getExecutionId()); assertNull(entry.getTaskId()); Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() { }); assertNotNull(data.get(Fields.ID)); assertNotNull(data.get(Fields.SOURCE_ACTIVITY_ID)); 
// Line below: sequence-flow source/target checks; entry 14 must be PROCESSINSTANCE_END
// with no execution/task id; afterwards all event-log entries and the deployment are
// cleaned up. testDatabaseEventsNoTenant() then deploys the same process without a
// tenant and starts an instance.
assertNotNull(data.get(Fields.SOURCE_ACTIVITY_TYPE)); assertNotNull(data.get(Fields.SOURCE_ACTIVITY_BEHAVIOR_CLASS)); assertNotNull(data.get(Fields.TARGET_ACTIVITY_ID)); assertNotNull(data.get(Fields.TARGET_ACTIVITY_TYPE)); assertNotNull(data.get(Fields.TARGET_ACTIVITY_BEHAVIOR_CLASS)); assertEquals(testTenant, data.get(Fields.TENANT_ID)); } if (i == 14) { assertNotNull(entry.getType()); assertEquals("PROCESSINSTANCE_END", entry.getType()); assertNotNull(entry.getProcessDefinitionId()); assertNotNull(entry.getProcessInstanceId()); assertNotNull(entry.getTimeStamp()); assertNull(entry.getExecutionId()); assertNull(entry.getTaskId()); Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() { }); assertNotNull(data.get(Fields.ID)); assertNotNull(data.get(Fields.PROCESS_DEFINITION_ID)); assertNotNull(data.get(Fields.TENANT_ID)); assertFalse(data.containsKey(Fields.NAME)); assertFalse(data.containsKey(Fields.BUSINESS_KEY)); assertEquals(testTenant, data.get(Fields.TENANT_ID)); } } // Cleanup for (EventLogEntry eventLogEntry : managementService.getEventLogEntries(null, null)) { managementService.deleteEventLogEntry(eventLogEntry.getLogNumber()); } repositoryService.deleteDeployment(deploymentId, true); } public void testDatabaseEventsNoTenant() throws IOException { String deploymentId = repositoryService.createDeployment() .addClasspathResource("org/activiti/engine/test/api/event/DatabaseEventLoggerProcess.bpmn20.xml") .deploymentProperty(DeploymentProperties.DEPLOY_AS_FLOWABLE5_PROCESS_DEFINITION, Boolean.TRUE) .deploy().getId(); // Run process to gather data ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("DatabaseEventLoggerProcess", CollectionUtil.singletonMap("testVar", "helloWorld")); // Verify event log entries List<EventLogEntry> eventLogEntries = managementService.getEventLogEntries(null, null); String processDefinitionId = processInstance.getProcessDefinitionId(); 
// Line below: no-tenant variant — filters entries to the started definition, expects 15,
// and asserts Fields.TENANT_ID is null for each event type. NOTE(review): the method
// (and the class) continues beyond this chunk; the line is truncated mid-expression.
Iterator<EventLogEntry> iterator = eventLogEntries.iterator(); while (iterator.hasNext()) { EventLogEntry entry = iterator.next(); if (entry.getProcessDefinitionId() != null && !entry.getProcessDefinitionId().equals(processDefinitionId)) { iterator.remove(); } } assertEquals(15, eventLogEntries.size()); for (int i = 0; i < eventLogEntries.size(); i++) { EventLogEntry entry = eventLogEntries.get(i); if (i == 0) { assertEquals(entry.getType(), FlowableEngineEventType.VARIABLE_CREATED.name()); Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() { }); assertNull(data.get(Fields.TENANT_ID)); } // process instance start if (i == 1) { assertEquals("PROCESSINSTANCE_START", entry.getType()); Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() { }); assertNull(data.get(Fields.TENANT_ID)); } // Activity started if (i == 2 || i == 5 || i == 8 || i == 12) { assertEquals(entry.getType(), FlowableEngineEventType.ACTIVITY_STARTED.name()); Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() { }); assertNull(data.get(Fields.TENANT_ID)); } // Leaving start if (i == 3) { assertEquals(entry.getType(), FlowableEngineEventType.ACTIVITY_COMPLETED.name()); Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() { }); assertNull(data.get(Fields.TENANT_ID)); } // Sequence flow taken if (i == 4 || i == 7 || i == 11) { assertEquals(entry.getType(), FlowableEngineEventType.SEQUENCEFLOW_TAKEN.name()); Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() { }); assertNull(data.get(Fields.TENANT_ID)); } // Leaving parallel gateway if (i == 6) { assertEquals(entry.getType(), FlowableEngineEventType.ACTIVITY_COMPLETED.name()); Map<String, Object> data = objectMapper.readValue(entry.getData(), new 
TypeReference<HashMap<String, Object>>() { }); assertNull(data.get(Fields.TENANT_ID)); } // Tasks if (i == 10 || i == 14) { assertEquals(entry.getType(), FlowableEngineEventType.TASK_CREATED.name()); Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() { }); assertNull(data.get(Fields.TENANT_ID)); } if (i == 9 || i == 13) { assertEquals(entry.getType(), FlowableEngineEventType.TASK_ASSIGNED.name()); Map<String, Object> data = objectMapper.readValue(entry.getData(), new TypeReference<HashMap<String, Object>>() { }); assertNull(data.get(Fields.TENANT_ID)); } } repositoryService.deleteDeployment(deploymentId, true); // Cleanup for (EventLogEntry eventLogEntry : managementService.getEventLogEntries(null, null)) { managementService.deleteEventLogEntry(eventLogEntry.getLogNumber()); } } }
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.fielddata;

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.FieldComparatorSource;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.util.BitDocIdSet;
import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.index.IndexComponent;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.MultiValueMode;

import java.io.IOException;

/**
 * Thread-safe utility class that allows to get per-segment values via the
 * {@link #load(LeafReaderContext)} method.
 */
public interface IndexFieldData<FD extends AtomicFieldData> extends IndexComponent {

    /** Helpers for field-data settings that are shared across implementations. */
    public static class CommonSettings {

        /** Settings key for a non-binding hint about the preferred in-memory representation. */
        public static final String SETTING_MEMORY_STORAGE_HINT = "memory_storage_hint";

        /** In-memory layouts a field-data implementation may choose from. */
        public enum MemoryStorageFormat {
            ORDINALS, PACKED, PAGED;

            /**
             * Case-insensitive lookup of a format by name.
             *
             * @return the matching format, or {@code null} when {@code string} is
             *         {@code null} or matches no constant (callers treat null as "no hint")
             */
            public static MemoryStorageFormat fromString(String string) {
                for (MemoryStorageFormat e : MemoryStorageFormat.values()) {
                    if (e.name().equalsIgnoreCase(string)) {
                        return e;
                    }
                }
                return null;
            }
        }

        /**
         * Gets a memory storage hint that should be honored if possible but is not mandatory
         */
        public static MemoryStorageFormat getMemoryStorageHint(FieldDataType fieldDataType) {
            // backwards compatibility: the legacy "ordinals" setting takes precedence
            // over the newer memory_storage_hint key when present
            String s = fieldDataType.getSettings().get("ordinals");
            if (s != null) {
                return "always".equals(s) ? MemoryStorageFormat.ORDINALS : null;
            }
            return MemoryStorageFormat.fromString(fieldDataType.getSettings().get(SETTING_MEMORY_STORAGE_HINT));
        }
    }

    /**
     * The field name.
     */
    String getFieldName();

    /**
     * The field data type.
     */
    FieldDataType getFieldDataType();

    /**
     * Loads the atomic field data for the reader, possibly cached.
     */
    FD load(LeafReaderContext context);

    /**
     * Loads directly the atomic field data for the reader, ignoring any caching involved.
     */
    FD loadDirect(LeafReaderContext context) throws Exception;

    /**
     * Comparator used for sorting.
     */
    XFieldComparatorSource comparatorSource(@Nullable Object missingValue, MultiValueMode sortMode, Nested nested);

    /**
     * Clears any resources associated with this field data.
     */
    void clear();

    // we need this extended source when we have custom comparators to reuse our field data
    // in this case, we need to reduce the type that will be used when search results are reduced
    // on another node (we don't have the custom source there...)
    abstract class XFieldComparatorSource extends FieldComparatorSource {

        /**
         * Simple wrapper class around a filter that matches parent documents
         * and a filter that matches child documents. For every root document R,
         * R will be in the parent filter and its children documents will be the
         * documents that are contained in the inner set between the previous
         * parent + 1, or 0 if there is no previous parent, and R (excluded).
         */
        public static class Nested {

            private final BitSetProducer rootFilter;
            private final Weight innerFilter;

            public Nested(BitSetProducer rootFilter, Weight innerFilter) {
                this.rootFilter = rootFilter;
                this.innerFilter = innerFilter;
            }

            /**
             * Get a {@link BitDocIdSet} that matches the root documents.
             */
            public BitSet rootDocs(LeafReaderContext ctx) throws IOException {
                return rootFilter.getBitSet(ctx);
            }

            /**
             * Get a {@link DocIdSet} that matches the inner documents.
             */
            public DocIdSetIterator innerDocs(LeafReaderContext ctx) throws IOException {
                // A null scorer means no inner documents match on this segment.
                Scorer s = innerFilter.scorer(ctx);
                return s == null ? null : s.iterator();
            }
        }

        /** Whether missing values should be sorted first. */
        protected final boolean sortMissingFirst(Object missingValue) {
            return "_first".equals(missingValue);
        }

        /** Whether missing values should be sorted last, this is the default. */
        protected final boolean sortMissingLast(Object missingValue) {
            return missingValue == null || "_last".equals(missingValue);
        }

        /** Return the missing object value according to the reduced type of the comparator. */
        protected final Object missingObject(Object missingValue, boolean reversed) {
            if (sortMissingFirst(missingValue) || sortMissingLast(missingValue)) {
                // "_first"/"_last": substitute the extreme value of the reduced type so the
                // sort naturally pushes missing docs to the requested end; XOR with
                // `reversed` because a descending sort flips which extreme ends up first.
                final boolean min = sortMissingFirst(missingValue) ^ reversed;
                switch (reducedType()) {
                case INT:
                    return min ? Integer.MIN_VALUE : Integer.MAX_VALUE;
                case LONG:
                    return min ? Long.MIN_VALUE : Long.MAX_VALUE;
                case FLOAT:
                    return min ? Float.NEGATIVE_INFINITY : Float.POSITIVE_INFINITY;
                case DOUBLE:
                    return min ? Double.NEGATIVE_INFINITY : Double.POSITIVE_INFINITY;
                case STRING:
                case STRING_VAL:
                    // Strings have no numeric extreme; null is handled via missingValue(boolean).
                    return null;
                default:
                    throw new UnsupportedOperationException("Unsupported reduced type: " + reducedType());
                }
            } else {
                // An explicit missing value was supplied: coerce it to the reduced type,
                // accepting either a Number or a parseable string representation.
                switch (reducedType()) {
                case INT:
                    if (missingValue instanceof Number) {
                        return ((Number) missingValue).intValue();
                    } else {
                        return Integer.parseInt(missingValue.toString());
                    }
                case LONG:
                    if (missingValue instanceof Number) {
                        return ((Number) missingValue).longValue();
                    } else {
                        return Long.parseLong(missingValue.toString());
                    }
                case FLOAT:
                    if (missingValue instanceof Number) {
                        return ((Number) missingValue).floatValue();
                    } else {
                        return Float.parseFloat(missingValue.toString());
                    }
                case DOUBLE:
                    if (missingValue instanceof Number) {
                        return ((Number) missingValue).doubleValue();
                    } else {
                        return Double.parseDouble(missingValue.toString());
                    }
                case STRING:
                case STRING_VAL:
                    if (missingValue instanceof BytesRef) {
                        return (BytesRef) missingValue;
                    } else if (missingValue instanceof byte[]) {
                        return new BytesRef((byte[]) missingValue);
                    } else {
                        return new BytesRef(missingValue.toString());
                    }
                default:
                    throw new UnsupportedOperationException("Unsupported reduced type: " + reducedType());
                }
            }
        }

        /** The {@link SortField.Type} used when sort results are reduced on the coordinating node. */
        public abstract SortField.Type reducedType();

        /**
         * Return a missing value that is understandable by {@link SortField#setMissingValue(Object)}.
         * Most implementations return null because they already replace the value at the fielddata level.
         * However this can't work in case of strings since there is no such thing as a string which
         * compares greater than any other string, so in that case we need to return
         * {@link SortField#STRING_FIRST} or {@link SortField#STRING_LAST} so that the coordinating node
         * knows how to deal with null values.
         */
        public Object missingValue(boolean reversed) {
            return null;
        }
    }

    /** Factory for {@link IndexFieldData} instances, one per field type. */
    interface Builder {

        IndexFieldData<?> build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
                                CircuitBreakerService breakerService, MapperService mapperService);
    }

    /** Field data that can additionally be loaded index-wide (across all segments). */
    interface Global<FD extends AtomicFieldData> extends IndexFieldData<FD> {

        IndexFieldData<FD> loadGlobal(DirectoryReader indexReader);

        IndexFieldData<FD> localGlobalDirect(DirectoryReader indexReader) throws Exception;
    }
}
package org.knowm.xchange.dto.trade;

import java.math.BigDecimal;
import java.util.Date;
import java.util.Set;
import org.knowm.xchange.currency.CurrencyPair;
import org.knowm.xchange.dto.Order;

/**
 * DTO representing a stop order
 *
 * <p>A stop order lets you set a minimum or maximum price before your trade will be treated by the
 * exchange as a {@link MarketOrder} unless a limit price is also set. There is no guarantee that
 * your conditions will be met on the exchange, so your order may not be executed.
 */
public class StopOrder extends Order implements Comparable<StopOrder> {

  /** The stop price */
  protected final BigDecimal stopPrice;

  /**
   * The limit price this should be null if the stop order should be treated as a market order once
   * the stop price is hit
   */
  protected BigDecimal limitPrice = null;

  /**
   * @param type Either BID (buying) or ASK (selling)
   * @param originalAmount The amount to trade
   * @param currencyPair The identifier (e.g. BTC/USD)
   * @param id An id (usually provided by the exchange)
   * @param timestamp a Date object representing the order's timestamp according to the exchange's
   *     server, null if not provided
   * @param stopPrice In a BID this is the highest acceptable price, in an ASK this is the lowest
   *     acceptable price
   */
  public StopOrder(
      OrderType type,
      BigDecimal originalAmount,
      CurrencyPair currencyPair,
      String id,
      Date timestamp,
      BigDecimal stopPrice) {

    super(type, originalAmount, currencyPair, id, timestamp);
    this.stopPrice = stopPrice;
  }

  /**
   * @param type Either BID (buying) or ASK (selling)
   * @param originalAmount The amount to trade
   * @param cumulativeAmount The cumulative amount
   * @param currencyPair The identifier (e.g. BTC/USD)
   * @param id An id (usually provided by the exchange)
   * @param timestamp a Date object representing the order's timestamp according to the exchange's
   *     server, null if not provided
   * @param stopPrice In a BID this is the highest acceptable price, in an ASK this is the lowest
   *     acceptable price
   */
  public StopOrder(
      OrderType type,
      BigDecimal originalAmount,
      BigDecimal cumulativeAmount,
      CurrencyPair currencyPair,
      String id,
      Date timestamp,
      BigDecimal stopPrice) {

    super(
        type,
        originalAmount,
        currencyPair,
        id,
        timestamp,
        BigDecimal.ZERO,
        cumulativeAmount,
        BigDecimal.ZERO,
        OrderStatus.PENDING_NEW);
    this.stopPrice = stopPrice;
  }

  /**
   * @param type Either BID (buying) or ASK (selling)
   * @param originalAmount The amount to trade
   * @param currencyPair The identifier (e.g. BTC/USD)
   * @param id An id (usually provided by the exchange)
   * @param timestamp a Date object representing the order's timestamp according to the exchange's
   *     server, null if not provided
   * @param stopPrice In a BID this is the highest acceptable price, in an ASK this is the lowest
   *     acceptable price
   * @param averagePrice the weighted average price of any fills belonging to the order
   * @param cumulativeAmount the amount that has been filled
   * @param status the status of the order at the exchange or broker
   */
  public StopOrder(
      OrderType type,
      BigDecimal originalAmount,
      CurrencyPair currencyPair,
      String id,
      Date timestamp,
      BigDecimal stopPrice,
      BigDecimal averagePrice,
      BigDecimal cumulativeAmount,
      OrderStatus status) {

    super(
        type,
        originalAmount,
        currencyPair,
        id,
        timestamp,
        averagePrice,
        cumulativeAmount,
        BigDecimal.ZERO,
        status);
    this.stopPrice = stopPrice;
  }

  /**
   * @param type Either BID (buying) or ASK (selling)
   * @param originalAmount The amount to trade
   * @param currencyPair The identifier (e.g. BTC/USD)
   * @param id An id (usually provided by the exchange)
   * @param timestamp a Date object representing the order's timestamp according to the exchange's
   *     server, null if not provided
   * @param stopPrice In a BID this is the highest acceptable price, in an ASK this is the lowest
   *     acceptable price
   * @param limitPrice The limit price the order should be placed at once the stopPrice has been hit
   *     null for market
   * @param averagePrice the weighted average price of any fills belonging to the order
   * @param cumulativeAmount the amount that has been filled
   * @param status the status of the order at the exchange or broker
   */
  public StopOrder(
      OrderType type,
      BigDecimal originalAmount,
      CurrencyPair currencyPair,
      String id,
      Date timestamp,
      BigDecimal stopPrice,
      BigDecimal limitPrice,
      BigDecimal averagePrice,
      BigDecimal cumulativeAmount,
      OrderStatus status) {

    super(
        type,
        originalAmount,
        currencyPair,
        id,
        timestamp,
        averagePrice,
        cumulativeAmount,
        BigDecimal.ZERO,
        status);
    this.stopPrice = stopPrice;
    this.limitPrice = limitPrice;
  }

  /** @return The stop price */
  public BigDecimal getStopPrice() {

    return stopPrice;
  }

  /** @return The limit price */
  public BigDecimal getLimitPrice() {

    return limitPrice;
  }

  @Override
  public String toString() {

    return "StopOrder [stopPrice=" + stopPrice + ", " + super.toString() + "]";
  }

  /**
   * Orders on the same side are compared by stop price (bids descending, asks ascending); a bid
   * always sorts before an ask.
   *
   * <p>NOTE(review): throws NPE if either stop price is null — confirm callers never compare
   * orders without a stop price.
   */
  @Override
  public int compareTo(StopOrder stopOrder) {

    final int ret;

    if (this.getType() == stopOrder.getType()) {
      // Same side
      ret =
          this.getStopPrice().compareTo(stopOrder.getStopPrice())
              * (getType() == OrderType.BID ? -1 : 1);
    } else {
      // Keep bid side be less than ask side
      ret = this.getType() == OrderType.BID ? -1 : 1;
    }

    return ret;
  }

  @Override
  public int hashCode() {

    int hash = super.hashCode();
    // FIX: equals() compares stopPrice via compareTo(), which treats 1.0 and 1.00 as equal,
    // but BigDecimal.hashCode() is scale-sensitive and would give those two values different
    // hashes, breaking the equals/hashCode contract. Normalize the scale before hashing.
    hash = 59 * hash + (this.stopPrice != null ? this.stopPrice.stripTrailingZeros().hashCode() : 0);
    return hash;
  }

  @Override
  public boolean equals(Object obj) {

    if (this == obj) {
      // Fast path for reference equality.
      return true;
    }
    if (obj == null) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    final StopOrder other = (StopOrder) obj;
    // Compare stop prices by value (compareTo) rather than equals so that differing scales
    // (1.0 vs 1.00) still match. FIX: guard against other.stopPrice being null, which would
    // previously have thrown a NullPointerException from compareTo(null).
    if (this.stopPrice == null
        ? (other.stopPrice != null)
        : (other.stopPrice == null || this.stopPrice.compareTo(other.stopPrice) != 0)) {
      return false;
    }
    return super.equals(obj);
  }

  /** Builder for {@link StopOrder}; mirrors {@link Order.Builder} and adds stop/limit prices. */
  public static class Builder extends Order.Builder {

    protected BigDecimal stopPrice;
    protected BigDecimal limitPrice;

    public Builder(OrderType orderType, CurrencyPair currencyPair) {

      super(orderType, currencyPair);
    }

    /**
     * Seeds a builder from an existing order; stop and limit prices are copied only when the
     * source is itself a {@link StopOrder}.
     */
    public static Builder from(Order order) {

      Builder builder =
          (Builder)
              new Builder(order.getType(), order.getCurrencyPair())
                  .originalAmount(order.getOriginalAmount())
                  .timestamp(order.getTimestamp())
                  .id(order.getId())
                  .flags(order.getOrderFlags())
                  .orderStatus(order.getStatus())
                  .averagePrice(order.getAveragePrice());
      if (order instanceof StopOrder) {
        StopOrder stopOrder = (StopOrder) order;
        builder.stopPrice(stopOrder.getStopPrice());
        builder.limitPrice(stopOrder.getLimitPrice());
      }
      return builder;
    }

    @Override
    public Builder orderType(OrderType orderType) {

      return (Builder) super.orderType(orderType);
    }

    @Override
    public Builder originalAmount(BigDecimal originalAmount) {

      return (Builder) super.originalAmount(originalAmount);
    }

    @Override
    public Builder cumulativeAmount(BigDecimal originalAmount) {

      return (Builder) super.cumulativeAmount(originalAmount);
    }

    public Builder remainingAmount(BigDecimal remainingAmount) {

      return (Builder) super.remainingAmount(remainingAmount);
    }

    @Override
    public Builder currencyPair(CurrencyPair currencyPair) {

      return (Builder) super.currencyPair(currencyPair);
    }

    @Override
    public Builder id(String id) {

      return (Builder) super.id(id);
    }

    @Override
    public Builder timestamp(Date timestamp) {

      return (Builder) super.timestamp(timestamp);
    }

    @Override
    public Builder orderStatus(OrderStatus status) {

      return (Builder) super.orderStatus(status);
    }

    @Override
    public Builder averagePrice(BigDecimal averagePrice) {

      return (Builder) super.averagePrice(averagePrice);
    }

    @Override
    public Builder flag(IOrderFlags flag) {

      return (Builder) super.flag(flag);
    }

    @Override
    public Builder flags(Set<IOrderFlags> flags) {

      return (Builder) super.flags(flags);
    }

    public Builder stopPrice(BigDecimal stopPrice) {

      this.stopPrice = stopPrice;
      return this;
    }

    public Builder limitPrice(BigDecimal limitPrice) {

      this.limitPrice = limitPrice;
      return this;
    }

    public StopOrder build() {

      StopOrder order;
      if (remainingAmount != null) {
        // When only the remaining amount is known, derive the cumulative fill from it.
        order =
            new StopOrder(
                orderType,
                originalAmount,
                currencyPair,
                id,
                timestamp,
                stopPrice,
                limitPrice,
                averagePrice,
                originalAmount.subtract(remainingAmount),
                status);
      } else {
        order =
            new StopOrder(
                orderType,
                originalAmount,
                currencyPair,
                id,
                timestamp,
                stopPrice,
                limitPrice,
                averagePrice,
                cumulativeAmount,
                status);
      }
      order.setOrderFlags(flags);
      return order;
    }
  }
}
package com.di.nomothesia.controller;

import com.di.nomothesia.model.EndpointResultSet;
import com.di.nomothesia.model.Fragment;
import com.di.nomothesia.model.LegalDocument;
import com.di.nomothesia.model.Modification;
import com.di.nomothesia.service.LegislationServiceImpl;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import javax.servlet.http.HttpServletResponse;
import javax.xml.bind.JAXBException;
import javax.xml.transform.TransformerException;
//import org.slf4j.Logger;
//import org.slf4j.LoggerFactory;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.servlet.ModelAndView;

/**
 * Spring MVC controller serving Greek legislation (ELI URIs): HTML views of legal documents
 * (original, consolidated, point-in-time), XML/RDF/JSON/PDF exports, and search.
 */
@Controller
public class LegislationController {

    //private static final Logger logger = LoggerFactory.getLogger(LegislationController.class);

    /**
     * Streams the Government Gazette PDF for the given year and issue id.
     *
     * NOTE(review): getResourceAsStream() resolves classpath resource names, not "file:" URLs —
     * this call presumably returns null and IOUtils.copy would then throw a NullPointerException;
     * confirm whether a FileInputStream was intended. Also note the Content-Type is set AFTER the
     * body is copied, which may be too late to take effect.
     */
    @RequestMapping (value = "/gazette/a/{year:\\d+}/{id:\\d+}", method = RequestMethod.GET)
    public void presentGovernmentGazettePDF(@PathVariable String year, @PathVariable String id, Model model, Locale locale,
            HttpServletResponse response) throws IOException {
        InputStream fis = null;
        fis = getClass().getResourceAsStream("file:/storage/nomothesia/resources/pdf/" + year + "/GG" + year + "_" + id + ".pdf");
        org.apache.commons.io.IOUtils.copy(fis, response.getOutputStream());
        response.setContentType("application/pdf");
        response.flushBuffer();
    }

    /**
     * Renders the originally enacted (non-consolidated) version of a legal document.
     * The view is chosen by document structure: parts > chapters > flat.
     */
    @RequestMapping (value = "/eli/{type}/{year:\\d+}/{id:\\d+}/enacted", method = RequestMethod.GET)
    public String presentOriginalLegalDocument(@PathVariable String type, @PathVariable String year, @PathVariable String id,
            Model model, Locale locale) {
        LegislationServiceImpl lds = new LegislationServiceImpl();
        LegalDocument legaldoc = lds.getById(type, year, id, 1);
        model.addAttribute("legaldoc", legaldoc);
        List<Modification> legalmods = lds.getAllModificationsById(type, year, id, 1, null);
        model.addAttribute("legalmods", legalmods);
        model.addAttribute("id", "custom-bootstrap-menu");
        model.addAttribute("locale", locale);
        // A missing publication date is the marker for "document not found".
        if (legaldoc.getPublicationDate() == null) {
            return "error";
        }
        if (!legaldoc.getParts().isEmpty()) {
            return "basiclegislation3";
        } else if (legaldoc.getChapters().isEmpty()) {
            return "basiclegislation";
        } else {
            return "basiclegislation2";
        }
    }

    /**
     * Renders the consolidated (all modifications applied) version of a legal document.
     */
    @RequestMapping (value = "/eli/{type}/{year:\\d+}/{id}", method = RequestMethod.GET)
    public String presentUpdatedLegalDocument(@PathVariable String type, @PathVariable String year, @PathVariable String id,
            Model model, Locale locale) {
        LegislationServiceImpl lds = new LegislationServiceImpl();
        LegalDocument legaldoc = lds.getById(type, year, id, 1);
        List<Modification> legalmods = lds.getAllModificationsById(type, year, id, 1, null);
        List<Fragment> frags = lds.getUpdatedById(legaldoc, legalmods);
        model.addAttribute("legalmods", legalmods);
        model.addAttribute("fragschanced", frags);
        model.addAttribute("legaldoc", legaldoc);
        model.addAttribute("id", "custom-bootstrap-menu");
        model.addAttribute("locale", locale);
        if (legaldoc.getPublicationDate() == null) {
            return "error";
        }
        if (!legaldoc.getParts().isEmpty()) {
            return "basiclegislation3";
        } else if (legaldoc.getChapters().isEmpty()) {
            return "basiclegislation";
        } else {
            return "basiclegislation2";
        }
    }

    /**
     * Renders the consolidated document scrolled/highlighted at a two-level fragment
     * (e.g. article/3/paragraph/2); the fragment path is passed to the view via the "id" attribute.
     */
    @RequestMapping (value = "/eli/{type}/{year:\\d+}/{id:\\d+}/{type1}/{id1}/{type2}/{id2}", method = RequestMethod.GET)
    public String presentLegalFragment(@PathVariable String type, @PathVariable String year, @PathVariable String id,
            @PathVariable String type1, @PathVariable String id1, @PathVariable String type2, @PathVariable String id2,
            Model model, Locale locale) {
        LegislationServiceImpl lds = new LegislationServiceImpl();
        LegalDocument legaldoc = lds.getById(type, year, id, 1);
        List<Modification> legalmods = lds.getAllModificationsById(type, year, id, 1, null);
        List<Fragment> frags = lds.getUpdatedById(legaldoc, legalmods);
        model.addAttribute("legalmods", legalmods);
        model.addAttribute("fragschanced", frags);
        model.addAttribute("legaldoc", legaldoc);
        model.addAttribute("id", type1 + "-" + id1 + "-" + type2 + "-" + id2);
        model.addAttribute("locale", locale);
        if (legaldoc.getPublicationDate() == null) {
            return "error";
        }
        if (!legaldoc.getParts().isEmpty()) {
            return "basiclegislation3";
        } else if (legaldoc.getChapters().isEmpty()) {
            return "basiclegislation";
        } else {
            return "basiclegislation2";
        }
    }

    /**
     * Renders the consolidated document at a single-level fragment (e.g. article/3).
     */
    @RequestMapping (value = "/eli/{type}/{year:\\d+}/{id:\\d+}/{type1}/{id1}", method = RequestMethod.GET)
    public String presentLegalFragmentless(@PathVariable String type, @PathVariable String year, @PathVariable String id,
            @PathVariable String type1, @PathVariable String id1, Model model, Locale locale) {
        LegislationServiceImpl lds = new LegislationServiceImpl();
        LegalDocument legaldoc = lds.getById(type, year, id, 1);
        List<Modification> legalmods = lds.getAllModificationsById(type, year, id, 1, null);
        List<Fragment> frags = lds.getUpdatedById(legaldoc, legalmods);
        model.addAttribute("legalmods", legalmods);
        model.addAttribute("fragschanced", frags);
        model.addAttribute("legaldoc", legaldoc);
        model.addAttribute("id", type1 + "-" + id1);
        model.addAttribute("locale", locale);
        if (legaldoc.getPublicationDate() == null) {
            return "error";
        }
        if (!legaldoc.getParts().isEmpty()) {
            return "basiclegislation3";
        } else if (legaldoc.getChapters().isEmpty()) {
            return "basiclegislation";
        } else {
            return "basiclegislation2";
        }
    }

    /**
     * Renders the document as it stood on a given date (only modifications up to that date applied).
     *
     * NOTE(review): when the document post-dates the requested date, legaldoc is set to null but is
     * then dereferenced by getUpdatedById() and legaldoc.getPublicationDate() below — this path
     * looks like a guaranteed NullPointerException; confirm intended behavior.
     */
    @RequestMapping (value = "/eli/{type}/{year:\\d+}/{id:\\d+}/{yyyy:\\d+}-{mm:\\d+}-{dd:\\d+}", method = RequestMethod.GET)
    public String presentModificationByDate(@PathVariable String type, @PathVariable String year, @PathVariable String id,
            @PathVariable String yyyy, @PathVariable String mm, @PathVariable String dd, Model model, Locale locale) {
        String date = "";
        date += yyyy + "-" + mm + "-" + dd;
        LegislationServiceImpl lds = new LegislationServiceImpl();
        LegalDocument legaldoc = lds.getById(type, year, id, 1);
        // String comparison works here only because dates are zero-padded ISO yyyy-mm-dd.
        if (legaldoc.getPublicationDate().compareTo(date) > 0) {
            legaldoc = null;
        }
        List<Modification> legalmods = lds.getAllModificationsById(type, year, id, 1, date);
        List<Fragment> frags = lds.getUpdatedById(legaldoc, legalmods);
        model.addAttribute("legalmods", legalmods);
        model.addAttribute("fragschanced", frags);
        model.addAttribute("legaldoc", legaldoc);
        model.addAttribute("id", "custom-bootstrap-menu");
        model.addAttribute("locale", locale);
        if (legaldoc.getPublicationDate() == null) {
            return "error";
        }
        if (!legaldoc.getParts().isEmpty()) {
            return "basiclegislation3";
        } else if (legaldoc.getChapters().isEmpty()) {
            return "basiclegislation";
        } else {
            return "basiclegislation2";
        }
    }

    /** Exports the point-in-time consolidated document as XML. */
    @RequestMapping (value = "/eli/{type}/{year:\\d+}/{id:\\d+}/{yyyy:\\d+}-{mm:\\d+}-{dd:\\d+}/data.xml",
            method = RequestMethod.GET, produces = {"application/xml"})
    public ResponseEntity<String> exportDateToXML(@PathVariable String type, @PathVariable String year, @PathVariable String id,
            @PathVariable String yyyy, @PathVariable String mm, @PathVariable String dd, Locale locale) throws TransformerException {
        LegislationServiceImpl lds = new LegislationServiceImpl();
        String date = "";
        date += yyyy + "-" + mm + "-" + dd;
        String xml = lds.getUpdatedXMLByIdDate(type, year, id, 2, date);
        // NOTE(review): 201 CREATED for a read-only export is unusual — presumably 200 OK was meant.
        return new ResponseEntity<>(xml, new HttpHeaders(), HttpStatus.CREATED);
    }

    /**
     * Exports the document as RDF.
     * NOTE(review): the date path variables are accepted but not used — the full RDF is returned
     * regardless of the requested date; confirm whether date filtering was intended.
     */
    @RequestMapping (value = "/eli/{type}/{year:\\d+}/{id:\\d+}/{yyyy:\\d+}-{mm:\\d+}-{dd:\\d+}/data.rdf",
            method = RequestMethod.GET, produces = {"application/xml"})
    public ResponseEntity<String> exportDateToRDF(@PathVariable String type, @PathVariable String year, @PathVariable String id,
            @PathVariable String yyyy, @PathVariable String mm, @PathVariable String dd, Locale locale) throws JAXBException {
        LegislationServiceImpl lds = new LegislationServiceImpl();
        String rdfResult = lds.getRDFById(type, year, id);
        return new ResponseEntity<>(rdfResult, new HttpHeaders(), HttpStatus.CREATED);
    }

    /**
     * Exports the point-in-time consolidated document as JSON.
     * NOTE(review): same null-then-dereference pattern as presentModificationByDate — when the
     * document post-dates the requested date this throws NPE at legaldoc.setPlace(null).
     */
    @RequestMapping (value = "/eli/{type}/{year:\\d+}/{id:\\d+}/{yyyy:\\d+}-{mm:\\d+}-{dd:\\d+}/data.json",
            method = RequestMethod.GET)
    public @ResponseBody LegalDocument exportDateToJSON(@PathVariable String type, @PathVariable String year,
            @PathVariable String id, @PathVariable String yyyy, @PathVariable String mm, @PathVariable String dd, Locale locale) {
        LegislationServiceImpl lds = new LegislationServiceImpl();
        String date = "";
        date += yyyy + "-" + mm + "-" + dd;
        LegalDocument legaldoc = lds.getById(type, year, id, 2);
        if (legaldoc.getPublicationDate().compareTo(date) > 0) {
            legaldoc = null;
        }
        List<Modification> legalmods = lds.getAllModificationsById(type, year, id, 2, date);
        lds.getUpdatedById(legaldoc, legalmods);
        // Place is cleared to keep it out of the JSON serialization.
        legaldoc.setPlace(null);
        return legaldoc;
    }

    /** Exports the point-in-time consolidated document as PDF, choosing the view by structure. */
    @RequestMapping (value = "/eli/{type}/{year:\\d+}/{id:\\d+}/{yyyy:\\d+}-{mm:\\d+}-{dd:\\d+}/data.pdf",
            method = RequestMethod.GET, produces = {"application/xml"})
    public ModelAndView exportDateToPDF(@PathVariable String type, @PathVariable String year, @PathVariable String id,
            @PathVariable String yyyy, @PathVariable String mm, @PathVariable String dd, Locale locale) {
        LegislationServiceImpl lds = new LegislationServiceImpl();
        String date = "";
        date += yyyy + "-" + mm + "-" + dd;
        LegalDocument legaldoc = lds.getById(type, year, id, 2);
        if (legaldoc.getPublicationDate().compareTo(date) > 0) {
            legaldoc = null;
        }
        List<Modification> legalmods = lds.getAllModificationsById(type, year, id, 2, date);
        lds.getUpdatedById(legaldoc, legalmods);
        if (!legaldoc.getParts().isEmpty()) {
            return new ModelAndView("pdfView3", "legaldocument", legaldoc);
        }
        if (!legaldoc.getChapters().isEmpty()) {
            return new ModelAndView("pdfView2", "legaldocument", legaldoc);
        } else {
            return new ModelAndView("pdfView", "legaldocument", legaldoc);
        }
    }

    /** Exports the originally enacted document as XML. */
    @RequestMapping (value = "/eli/{type}/{year:\\d+}/{id:\\d+}/enacted/data.xml", method = RequestMethod.GET,
            produces = {"application/xml"})
    public ResponseEntity<String> exportToXML(@PathVariable String type, @PathVariable String year, @PathVariable String id,
            Locale locale) throws TransformerException {
        LegislationServiceImpl lds = new LegislationServiceImpl();
        String xml = lds.getXMLById(type, year, id, 2);
        return new ResponseEntity<>(xml, new HttpHeaders(), HttpStatus.CREATED);
    }

    /** Exports the originally enacted document as RDF. */
    @RequestMapping (value = "/eli/{type}/{year:\\d+}/{id:\\d+}/enacted/data.rdf", method = RequestMethod.GET,
            produces = {"application/xml"})
    public ResponseEntity<String> exportToRDF(@PathVariable String type, @PathVariable String year, @PathVariable String id,
            Locale locale) throws JAXBException {
        LegislationServiceImpl lds = new LegislationServiceImpl();
        String rdfResult = lds.getRDFById(type, year, id);
        return new ResponseEntity<>(rdfResult, new HttpHeaders(), HttpStatus.CREATED);
    }

    /** Exports the originally enacted document as JSON. */
    @RequestMapping (value = "/eli/{type}/{year:\\d+}/{id:\\d+}/enacted/data.json", method = RequestMethod.GET)
    public @ResponseBody LegalDocument exportToJSON(@PathVariable String type, @PathVariable String year,
            @PathVariable String id, Locale locale) {
        LegislationServiceImpl lds = new LegislationServiceImpl();
        LegalDocument legal = lds.getById(type, year, id, 2);
        legal.setPlace(null);
        return legal;
    }

    /** Exports the originally enacted document as PDF, choosing the view by structure. */
    @RequestMapping (value = "/eli/{type}/{year:\\d+}/{id:\\d+}/enacted/data.pdf", method = RequestMethod.GET)
    public ModelAndView exportToPDF(@PathVariable String type, @PathVariable String year, @PathVariable String id,
            Locale locale) {
        LegislationServiceImpl lds = new LegislationServiceImpl();
        LegalDocument legaldoc = lds.getById(type, year, id, 2);
        if (!legaldoc.getParts().isEmpty()) {
            return new ModelAndView("pdfView3", "legaldocument", legaldoc);
        }
        if (!legaldoc.getChapters().isEmpty()) {
            return new ModelAndView("pdfView2", "legaldocument", legaldoc);
        } else {
            return new ModelAndView("pdfView", "legaldocument", legaldoc);
        }
    }

    /** Exports the current consolidated document as XML. */
    @RequestMapping (value = "/eli/{type}/{year:\\d+}/{id:\\d+}/data.xml", method = RequestMethod.GET,
            produces = {"application/xml"})
    public ResponseEntity<String> exportUpdatedToXML(@PathVariable String type, @PathVariable String year,
            @PathVariable String id, Locale locale) throws TransformerException {
        LegislationServiceImpl lds = new LegislationServiceImpl();
        String xml = lds.getUpdatedXMLById(type, year, id, 2);
        return new ResponseEntity<>(xml, new HttpHeaders(), HttpStatus.CREATED);
    }

    /** Exports the current consolidated document as RDF. */
    @RequestMapping (value = "/eli/{type}/{year:\\d+}/{id:\\d+}/data.rdf", method = RequestMethod.GET,
            produces = {"application/xml"})
    public ResponseEntity<String> exportUpdatedToRDF(@PathVariable String type, @PathVariable String year,
            @PathVariable String id, Locale locale) throws JAXBException {
        LegislationServiceImpl lds = new LegislationServiceImpl();
        String rdfResult = lds.getRDFById(type, year, id);
        return new ResponseEntity<>(rdfResult, new HttpHeaders(), HttpStatus.CREATED);
    }

    /** Exports the current consolidated document as JSON. */
    @RequestMapping (value = "/eli/{type}/{year:\\d+}/{id:\\d+}/data.json", method = RequestMethod.GET)
    public @ResponseBody LegalDocument exportUpdatedToJSON(@PathVariable String type, @PathVariable String year,
            @PathVariable String id, Locale locale) {
        LegislationServiceImpl lds = new LegislationServiceImpl();
        LegalDocument legaldoc = lds.getById(type, year, id, 2);
        List<Modification> legalmods = lds.getAllModificationsById(type, year, id, 2, null);
        lds.getUpdatedById(legaldoc, legalmods);
        legaldoc.setPlace(null);
        return legaldoc;
    }

    /** Exports the current consolidated document as PDF, choosing the view by structure. */
    @RequestMapping (value = "/eli/{type}/{year:\\d+}/{id:\\d+}/data.pdf", method = RequestMethod.GET)
    public ModelAndView exportUpdatedToPDF(@PathVariable String type, @PathVariable String year, @PathVariable String id,
            Locale locale) {
        LegislationServiceImpl lds = new LegislationServiceImpl();
        LegalDocument legaldoc = lds.getById(type, year, id, 2);
        List<Modification> legalmods = lds.getAllModificationsById(type, year, id, 2, null);
        lds.getUpdatedById(legaldoc, legalmods);
        if (!legaldoc.getParts().isEmpty()) {
            return new ModelAndView("pdfView3", "legaldocument", legaldoc);
        }
        if (!legaldoc.getChapters().isEmpty()) {
            return new ModelAndView("pdfView2", "legaldocument", legaldoc);
        } else {
            return new ModelAndView("pdfView", "legaldocument", legaldoc);
        }
    }

    /**
     * Free-text / faceted legislation search; query parameters arrive as a flat map
     * (keywords, date, ...). Continues beyond this chunk.
     */
    @RequestMapping (value = "/search", method = RequestMethod.GET)
    public String search(@RequestParam Map<String, String> params, Model model, Locale locale) {
        if (params != null) {
            LegislationServiceImpl lds = new LegislationServiceImpl();
            List<LegalDocument> LDs = lds.searchLegislation(params);
            List<String> tags = lds.getTags();
            model.addAttribute("legalDocuments", LDs);
            model.addAttribute("tags", tags);
            model.addAttribute("locale", locale);
            if ((params.get("keywords") != null) && !params.get("keywords").equals("")) {
                model.addAttribute("keywords", params.get("keywords"));
            }
            if ((params.get("date") != null) && !params.get("date").equals("")) {
                model.addAttribute("date",
params.get("date")); } if ((params.get("datefrom") != null) && !params.get("datefrom").equals("")) { model.addAttribute("datefrom", params.get("datefrom")); } if ((params.get("dateto") != null) && !params.get("dateto").equals("")) { model.addAttribute("dateto", params.get("dateto")); } if ((params.get("year") != null) && !params.get("year").equals("")) { model.addAttribute("year", params.get("year")); } if ((params.get("id") != null) && !params.get("id").equals("")) { model.addAttribute("id", params.get("id")); } if ((params.get("fek_year") != null) && !params.get("fek_year").equals("")) { model.addAttribute("fek_year", params.get("fek_year")); } if ((params.get("fek_id") != null) && !params.get("fek_id").equals("")) { model.addAttribute("fek_id", params.get("fek_id")); } if ((params.get("type") != null) && !params.get("type").equals("")) { model.addAttribute("type", params.get("type")); } } return "search"; } @RequestMapping (value = "/endpoint", method = RequestMethod.GET) public String endpoint(@RequestParam Map<String, String> params, Model model, Locale locale) { if (params.get("query") != null) { LegislationServiceImpl lds = new LegislationServiceImpl(); EndpointResultSet eprs = lds.sparqlQuery(params.get("query"), params.get("format")); model.addAttribute("endpointResults", eprs); model.addAttribute("format", params.get("format")); } model.addAttribute("locale", locale); return "endpoint"; } @RequestMapping (value = "/endpoint/query/{id}", method = RequestMethod.GET) public String endpoint(@PathVariable String id, Model model, Locale locale) { if (id != null) { LegislationServiceImpl lds = new LegislationServiceImpl(); EndpointResultSet eprs = lds.sparqlQuery(id, "HTML"); model.addAttribute("endpointResults", eprs); model.addAttribute("locale", locale); //model.addAttribute("format", params.get("format")); } return "endpoint"; } @ExceptionHandler (Exception.class) public String handleAllException(Exception ex) { //ModelAndView model = new 
ModelAndView("error/exception_error"); //model.addAttribute("locale",locale); return "error"; } }
/*
 * Copyright 2019 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.cloud.compute.v1.stub;

import static com.google.cloud.compute.v1.ZoneOperationClient.ListZoneOperationsPagedResponse;

import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.httpjson.GaxHttpJsonProperties;
import com.google.api.gax.httpjson.HttpJsonTransportChannel;
import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.PagedListDescriptor;
import com.google.api.gax.rpc.PagedListResponseFactory;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.compute.v1.DeleteZoneOperationHttpRequest;
import com.google.cloud.compute.v1.GetZoneOperationHttpRequest;
import com.google.cloud.compute.v1.ListZoneOperationsHttpRequest;
import com.google.cloud.compute.v1.Operation;
import com.google.cloud.compute.v1.OperationList;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import java.io.IOException;
import java.util.List;
import javax.annotation.Generated;
import org.threeten.bp.Duration;

// AUTO-GENERATED DOCUMENTATION AND CLASS
/**
 * Settings class to configure an instance of {@link ZoneOperationStub}.
 *
 * <p>The default instance has everything set to sensible defaults:
 *
 * <ul>
 *   <li>The default service address (https://www.googleapis.com/compute/v1/projects/) and default
 *       port (443) are used.
 *   <li>Credentials are acquired automatically through Application Default Credentials.
 *   <li>Retries are configured for idempotent methods but not for non-idempotent methods.
 * </ul>
 *
 * <p>The builder of this class is recursive, so contained classes are themselves builders. When
 * build() is called, the tree of builders is called to create the complete settings object. For
 * example, to set the total timeout of deleteZoneOperation to 30 seconds:
 *
 * <pre>
 * <code>
 * ZoneOperationStubSettings.Builder zoneOperationSettingsBuilder =
 *     ZoneOperationStubSettings.newBuilder();
 * zoneOperationSettingsBuilder.deleteZoneOperationSettings().getRetrySettings().toBuilder()
 *     .setTotalTimeout(Duration.ofSeconds(30));
 * ZoneOperationStubSettings zoneOperationSettings = zoneOperationSettingsBuilder.build();
 * </code>
 * </pre>
 *
 * <p>NOTE: this file is auto-generated (gapic-generator); edit the generator config rather than
 * this file directly.
 */
@Generated("by gapic-generator")
@BetaApi
public class ZoneOperationStubSettings extends StubSettings<ZoneOperationStubSettings> {
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder()
          .add("https://www.googleapis.com/auth/cloud-platform")
          .add("https://www.googleapis.com/auth/compute")
          .add("https://www.googleapis.com/auth/compute.readonly")
          .add("https://www.googleapis.com/auth/devstorage.full_control")
          .add("https://www.googleapis.com/auth/devstorage.read_only")
          .add("https://www.googleapis.com/auth/devstorage.read_write")
          .build();

  // Per-RPC call settings (timeouts, retries); built once from the Builder below.
  private final UnaryCallSettings<DeleteZoneOperationHttpRequest, Void> deleteZoneOperationSettings;
  private final UnaryCallSettings<GetZoneOperationHttpRequest, Operation> getZoneOperationSettings;
  private final PagedCallSettings<
          ListZoneOperationsHttpRequest, OperationList, ListZoneOperationsPagedResponse>
      listZoneOperationsSettings;

  /** Returns the object with the settings used for calls to deleteZoneOperation. */
  public UnaryCallSettings<DeleteZoneOperationHttpRequest, Void> deleteZoneOperationSettings() {
    return deleteZoneOperationSettings;
  }

  /** Returns the object with the settings used for calls to getZoneOperation. */
  public UnaryCallSettings<GetZoneOperationHttpRequest, Operation> getZoneOperationSettings() {
    return getZoneOperationSettings;
  }

  /** Returns the object with the settings used for calls to listZoneOperations. */
  public PagedCallSettings<
          ListZoneOperationsHttpRequest, OperationList, ListZoneOperationsPagedResponse>
      listZoneOperationsSettings() {
    return listZoneOperationsSettings;
  }

  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  public ZoneOperationStub createStub() throws IOException {
    // Only the HTTP/JSON transport is supported by this generated stub.
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(HttpJsonTransportChannel.getHttpJsonTransportName())) {
      return HttpJsonZoneOperationStub.create(this);
    } else {
      throw new UnsupportedOperationException(
          "Transport not supported: " + getTransportChannelProvider().getTransportName());
    }
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }

  /** Returns the default service endpoint. */
  public static String getDefaultEndpoint() {
    return "https://www.googleapis.com/compute/v1/projects/";
  }

  /** Returns the default service port. */
  public static int getDefaultServicePort() {
    return 443;
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder().setScopesToApply(DEFAULT_SERVICE_SCOPES);
  }

  /** Returns a builder for the default ChannelProvider for this service. */
  public static InstantiatingHttpJsonChannelProvider.Builder
      defaultHttpJsonTransportProviderBuilder() {
    return InstantiatingHttpJsonChannelProvider.newBuilder();
  }

  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultHttpJsonTransportProviderBuilder().build();
  }

  @BetaApi("The surface for customizing headers is not stable yet and may change in the future.")
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken(
            "gapic", GaxProperties.getLibraryVersion(ZoneOperationStubSettings.class))
        .setTransportToken(
            GaxHttpJsonProperties.getHttpJsonTokenName(),
            GaxHttpJsonProperties.getHttpJsonVersion());
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  protected ZoneOperationStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);

    deleteZoneOperationSettings = settingsBuilder.deleteZoneOperationSettings().build();
    getZoneOperationSettings = settingsBuilder.getZoneOperationSettings().build();
    listZoneOperationsSettings = settingsBuilder.listZoneOperationsSettings().build();
  }

  // Describes how listZoneOperations pages are tokenized, sized and unpacked so the paged
  // response machinery can iterate over Operations transparently.
  private static final PagedListDescriptor<ListZoneOperationsHttpRequest, OperationList, Operation>
      LIST_ZONE_OPERATIONS_PAGE_STR_DESC =
          new PagedListDescriptor<ListZoneOperationsHttpRequest, OperationList, Operation>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListZoneOperationsHttpRequest injectToken(
                ListZoneOperationsHttpRequest payload, String token) {
              return ListZoneOperationsHttpRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListZoneOperationsHttpRequest injectPageSize(
                ListZoneOperationsHttpRequest payload, int pageSize) {
              return ListZoneOperationsHttpRequest.newBuilder(payload)
                  .setMaxResults(pageSize)
                  .build();
            }

            @Override
            public Integer extractPageSize(ListZoneOperationsHttpRequest payload) {
              return payload.getMaxResults();
            }

            @Override
            public String extractNextToken(OperationList payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<Operation> extractResources(OperationList payload) {
              // Guard against a null items list from the server; empty list keeps iteration safe.
              return payload.getItemsList() != null
                  ? payload.getItemsList()
                  : ImmutableList.<Operation>of();
            }
          };

  private static final PagedListResponseFactory<
          ListZoneOperationsHttpRequest, OperationList, ListZoneOperationsPagedResponse>
      LIST_ZONE_OPERATIONS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListZoneOperationsHttpRequest, OperationList, ListZoneOperationsPagedResponse>() {
            @Override
            public ApiFuture<ListZoneOperationsPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListZoneOperationsHttpRequest, OperationList> callable,
                ListZoneOperationsHttpRequest request,
                ApiCallContext context,
                ApiFuture<OperationList> futureResponse) {
              PageContext<ListZoneOperationsHttpRequest, OperationList, Operation> pageContext =
                  PageContext.create(
                      callable, LIST_ZONE_OPERATIONS_PAGE_STR_DESC, request, context);
              return ListZoneOperationsPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  /** Builder for ZoneOperationStubSettings. */
  public static class Builder extends StubSettings.Builder<ZoneOperationStubSettings, Builder> {
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;

    private final UnaryCallSettings.Builder<DeleteZoneOperationHttpRequest, Void>
        deleteZoneOperationSettings;
    private final UnaryCallSettings.Builder<GetZoneOperationHttpRequest, Operation>
        getZoneOperationSettings;
    private final PagedCallSettings.Builder<
            ListZoneOperationsHttpRequest, OperationList, ListZoneOperationsPagedResponse>
        listZoneOperationsSettings;

    // Maps a retry profile name to the status codes considered retryable under it:
    // "idempotent" retries DEADLINE_EXCEEDED/UNAVAILABLE, "non_idempotent" retries nothing.
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      definitions.put(
          "idempotent",
          ImmutableSet.copyOf(
              Lists.<StatusCode.Code>newArrayList(
                  StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE)));
      definitions.put("non_idempotent", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }

    // Named retry parameter bundles; only "default" exists (100ms initial backoff, x1.3
    // multiplier capped at 60s, 20s per-RPC timeout, 600s total timeout).
    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelay(Duration.ofMillis(100L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelay(Duration.ofMillis(60000L))
              .setInitialRpcTimeout(Duration.ofMillis(20000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeout(Duration.ofMillis(20000L))
              .setTotalTimeout(Duration.ofMillis(600000L))
              .build();
      definitions.put("default", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }

    protected Builder() {
      this((ClientContext) null);
    }

    protected Builder(ClientContext clientContext) {
      super(clientContext);

      deleteZoneOperationSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();

      getZoneOperationSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();

      listZoneOperationsSettings = PagedCallSettings.newBuilder(LIST_ZONE_OPERATIONS_PAGE_STR_FACT);

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              deleteZoneOperationSettings, getZoneOperationSettings, listZoneOperationsSettings);

      initDefaults(this);
    }

    private static Builder createDefault() {
      Builder builder = new Builder((ClientContext) null);
      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setEndpoint(getDefaultEndpoint());
      return initDefaults(builder);
    }

    private static Builder initDefaults(Builder builder) {
      // All three RPCs are treated as idempotent and share the "default" retry parameters.
      builder
          .deleteZoneOperationSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default"));

      builder
          .getZoneOperationSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default"));

      builder
          .listZoneOperationsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default"));

      return builder;
    }

    protected Builder(ZoneOperationStubSettings settings) {
      super(settings);

      deleteZoneOperationSettings = settings.deleteZoneOperationSettings.toBuilder();
      getZoneOperationSettings = settings.getZoneOperationSettings.toBuilder();
      listZoneOperationsSettings = settings.listZoneOperationsSettings.toBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              deleteZoneOperationSettings, getZoneOperationSettings, listZoneOperationsSettings);
    }

    // NEXT_MAJOR_VER: remove 'throws Exception'
    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) throws Exception {
      super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
      return this;
    }

    public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
      return unaryMethodSettingsBuilders;
    }

    /** Returns the builder for the settings used for calls to deleteZoneOperation. */
    public UnaryCallSettings.Builder<DeleteZoneOperationHttpRequest, Void>
        deleteZoneOperationSettings() {
      return deleteZoneOperationSettings;
    }

    /** Returns the builder for the settings used for calls to getZoneOperation. */
    public UnaryCallSettings.Builder<GetZoneOperationHttpRequest, Operation>
        getZoneOperationSettings() {
      return getZoneOperationSettings;
    }

    /** Returns the builder for the settings used for calls to listZoneOperations. */
    public PagedCallSettings.Builder<
            ListZoneOperationsHttpRequest, OperationList, ListZoneOperationsPagedResponse>
        listZoneOperationsSettings() {
      return listZoneOperationsSettings;
    }

    @Override
    public ZoneOperationStubSettings build() throws IOException {
      return new ZoneOperationStubSettings(this);
    }
  }
}
/*
 * Copyright (c) 2013 David Boissier
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.codinjutsu.tools.mongo.view;

import com.intellij.ui.TreeTableSpeedSearch;
import com.intellij.ui.treeStructure.treetable.ListTreeTableModelOnColumns;
import com.intellij.ui.treeStructure.treetable.TreeTable;
import com.intellij.ui.treeStructure.treetable.TreeTableModel;
import com.intellij.ui.treeStructure.treetable.TreeTableTree;
import com.intellij.util.containers.Convertor;
import com.intellij.util.ui.ColumnInfo;
import com.intellij.util.ui.UIUtil;
import com.intellij.util.ui.tree.TreeUtil;
import com.mongodb.DBObject;
import org.bson.types.ObjectId;
import org.codinjutsu.tools.mongo.view.model.JsonTreeNode;
import org.codinjutsu.tools.mongo.view.nodedescriptor.MongoNodeDescriptor;
import org.codinjutsu.tools.mongo.view.renderer.MongoKeyCellRenderer;
import org.codinjutsu.tools.mongo.view.renderer.MongoValueCellRenderer;
import org.codinjutsu.tools.mongo.view.table.MongoDatePickerCellEditor;
import org.codinjutsu.tools.mongo.view.table.MongoValueCellEditor;
import org.jetbrains.annotations.Nullable;

import javax.swing.table.TableCellEditor;
import javax.swing.table.TableCellRenderer;
import javax.swing.tree.TreeNode;
import javax.swing.tree.TreePath;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.Date;

/**
 * Two-column tree-table ("Key" / "Value") that displays a Mongo document as a tree of
 * {@link JsonTreeNode}s. The column set passed to the constructor decides whether values
 * are read-only ({@link #COLUMNS_FOR_READING}) or editable ({@link #COLUMNS_FOR_WRITING}).
 */
public class JsonTreeTableView extends TreeTable {

    // "Key" column: never editable; returning TreeTableModel.class from getColumnClass()
    // marks this as the tree column so the tree renderer is used for it.
    private static final ColumnInfo KEY = new ColumnInfo("Key") {

        public Object valueOf(Object obj) {
            JsonTreeNode node = (JsonTreeNode) obj;
            return node.getDescriptor();
        }

        @Override
        public Class getColumnClass() {
            return TreeTableModel.class;
        }

        @Override
        public boolean isCellEditable(Object o) {
            return false;
        }
    };

    private static final ColumnInfo READONLY_VALUE = new ReadOnlyValueColumnInfo();
    private static final ColumnInfo WRITABLE_VALUE = new WritableColumnInfo();

    // Pre-built column sets for the two usage modes of this view.
    public static final ColumnInfo[] COLUMNS_FOR_READING = new ColumnInfo[]{KEY, READONLY_VALUE};
    public static final ColumnInfo[] COLUMNS_FOR_WRITING = new ColumnInfo[]{KEY, WRITABLE_VALUE};

    // Columns active for this instance; consulted per-cell for renderer/editor lookup.
    private final ColumnInfo[] columns;

    /**
     * Builds the tree-table over the given root node with the given column set
     * (typically {@link #COLUMNS_FOR_READING} or {@link #COLUMNS_FOR_WRITING}).
     */
    public JsonTreeTableView(TreeNode rootNode, ColumnInfo[] columnInfos) {
        super(new ListTreeTableModelOnColumns(rootNode, columnInfos));
        this.columns = columnInfos;

        final TreeTableTree tree = getTree();

        tree.setShowsRootHandles(true);
        tree.setRootVisible(false);
        UIUtil.setLineStyleAngled(tree);

        setTreeCellRenderer(new MongoKeyCellRenderer());

        // Expand two levels so the top of the document is visible by default.
        TreeUtil.expand(tree, 2);

        // Speed-search matches against the formatted key of each node.
        new TreeTableSpeedSearch(this, new Convertor<TreePath, String>() {
            @Override
            public String convert(final TreePath path) {
                final JsonTreeNode node = (JsonTreeNode) path.getLastPathComponent();
                MongoNodeDescriptor descriptor = node.getDescriptor();
                return descriptor.getFormattedKey();
            }
        });
    }

    /**
     * Delegates renderer choice to the column's per-node renderer, falling back to the
     * table default when the row has no tree path or the column supplies none.
     */
    @Override
    public TableCellRenderer getCellRenderer(int row, int column) {
        TreePath treePath = getTree().getPathForRow(row);
        if (treePath == null) return super.getCellRenderer(row, column);

        JsonTreeNode node = (JsonTreeNode) treePath.getLastPathComponent();

        TableCellRenderer renderer = this.columns[column].getRenderer(node);
        return renderer == null ? super.getCellRenderer(row, column) : renderer;
    }

    /**
     * Delegates editor choice to the column's per-node editor, falling back to the
     * table default when the row has no tree path or the column supplies none.
     */
    @Override
    public TableCellEditor getCellEditor(int row, int column) {
        TreePath treePath = getTree().getPathForRow(row);
        if (treePath == null) return super.getCellEditor(row, column);

        JsonTreeNode node = (JsonTreeNode) treePath.getLastPathComponent();
        TableCellEditor editor = columns[column].getEditor(node);
        return editor == null ? super.getCellEditor(row, column) : editor;
    }

    // "Value" column for read-only mode: renders the descriptor, never editable.
    private static class ReadOnlyValueColumnInfo extends ColumnInfo<JsonTreeNode, MongoNodeDescriptor> {
        private final TableCellRenderer myRenderer = new MongoValueCellRenderer();

        public ReadOnlyValueColumnInfo() {
            super("Value");
        }

        public MongoNodeDescriptor valueOf(JsonTreeNode treeNode) {
            return treeNode.getDescriptor();
        }

        @Override
        public TableCellRenderer getRenderer(JsonTreeNode o) {
            return myRenderer;
        }

        @Override
        public boolean isCellEditable(JsonTreeNode o) {
            return false;
        }
    }

    // "Value" column for edit mode: scalar values are editable (dates get a date picker);
    // composite values (DBObject) and ObjectIds stay read-only.
    private static class WritableColumnInfo extends ColumnInfo<JsonTreeNode, Object> {

        private final TableCellRenderer myRenderer = new MongoValueCellRenderer();
        private final TableCellEditor defaultEditor = new MongoValueCellEditor();

        public WritableColumnInfo() {
            super("Value");
        }

        @Override
        public TableCellRenderer getRenderer(JsonTreeNode o) {
            return myRenderer;
        }

        @Override
        public boolean isCellEditable(JsonTreeNode treeNode) {
            Object value = treeNode.getDescriptor().getValue();
            if (value instanceof DBObject) {
                // Sub-documents are edited through their children, not as a single cell.
                return false;
            }

            if (value instanceof ObjectId) {
                // Document ids must not be changed from the UI.
                return false;
            }

            return true;
        }

        @Nullable
        @Override
        public TableCellEditor getEditor(final JsonTreeNode treeNode) {
            Object value = treeNode.getDescriptor().getValue();
            if (value instanceof Date) {
                return buildDateCellEditor(treeNode);
            }
            return defaultEditor;
        }

        private static MongoDatePickerCellEditor buildDateCellEditor(final JsonTreeNode treeNode) {
            final MongoDatePickerCellEditor dateEditor = new MongoDatePickerCellEditor();

            // Note from dev: Quite ugly because when clicking on the button to open popup calendar, stopCellEdition is invoked.
            // From that point, impossible to set the selected data in the node description
            dateEditor.addActionListener(new ActionListener() {
                @Override
                public void actionPerformed(ActionEvent actionEvent) {
                    treeNode.getDescriptor().setValue(dateEditor.getCellEditorValue());
                }
            });
            return dateEditor;
        }

        public Object valueOf(JsonTreeNode treeNode) {
            return treeNode.getDescriptor().getValue();
        }

        @Override
        public void setValue(JsonTreeNode treeNode, Object value) {
            treeNode.getDescriptor().setValue(value);
        }
    }
}
package org.apollo.game.model.entity;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apollo.game.model.skill.SkillListener;

import com.google.common.base.Preconditions;
import com.google.common.primitives.Ints;

/**
 * Represents the set of the player's skills.
 *
 * @author Graham
 */
public final class SkillSet {

	/**
	 * The maximum allowed experience.
	 */
	public static final double MAXIMUM_EXP = 200_000_000;

	/**
	 * The minimum amounts of experience required for the levels.
	 */
	private static final int[] EXPERIENCE_FOR_LEVEL = new int[100];

	/**
	 * The number of skills.
	 */
	private static final int SKILL_COUNT = 21;

	static {
		// Standard experience curve: each level contributes
		// floor(level + 300 * 2^(level / 7)) points; the experience required
		// for a level is a quarter of the running total, truncated.
		int points = 0, output = 0;
		for (int level = 1; level <= 99; level++) {
			EXPERIENCE_FOR_LEVEL[level] = output;
			points += (int) Math.floor(level + 300 * Math.pow(2, level / 7.0));
			output = points / 4; // integer division truncates, equivalent to floor for non-negative values
		}
	}

	/**
	 * Gets the minimum experience required for the specified level.
	 *
	 * @param level The level.
	 * @return The minimum experience.
	 * @throws IllegalArgumentException If the level is not between 1 and 99, inclusive.
	 */
	public static int getExperienceForLevel(int level) {
		Preconditions.checkArgument(level >= 1 && level <= 99, "Level must be between 1 and 99, inclusive.");
		return EXPERIENCE_FOR_LEVEL[level];
	}

	/**
	 * Gets the minimum level to get the specified experience.
	 *
	 * @param experience The experience.
	 * @return The minimum level.
	 * @throws IllegalArgumentException If the experience is negative or exceeds {@link #MAXIMUM_EXP}.
	 */
	public static int getLevelForExperience(double experience) {
		Preconditions.checkArgument(experience >= 0 && experience <= MAXIMUM_EXP,
				"Experience must be between 0 and " + MAXIMUM_EXP + ", inclusive.");

		for (int level = 1; level <= 98; level++) {
			if (experience < EXPERIENCE_FOR_LEVEL[level + 1]) {
				return level;
			}
		}

		return 99;
	}

	/**
	 * The combat level of this skill set.
	 */
	private int combat = 3;

	/**
	 * Whether or not events are being fired.
	 */
	private boolean firingEvents = true;

	/**
	 * The list of skill listeners.
	 */
	private final List<SkillListener> listeners = new ArrayList<>();

	/**
	 * The skills.
	 */
	private final Skill[] skills = new Skill[SKILL_COUNT];

	/**
	 * Creates the skill set.
	 */
	public SkillSet() {
		init();
	}

	/**
	 * Adds experience to the specified skill, capping the total at
	 * {@link #MAXIMUM_EXP} and raising the current level by the same amount
	 * the maximum level increased.
	 *
	 * @param id The skill id.
	 * @param experience The amount of experience.
	 */
	public void addExperience(int id, double experience) {
		checkBounds(id);
		Skill old = skills[id];

		double newExperience = Math.min(old.getExperience() + experience, MAXIMUM_EXP);

		int current = old.getCurrentLevel();
		int maximum = getLevelForExperience(newExperience);

		int delta = maximum - old.getMaximumLevel();
		current += delta;

		setSkill(id, new Skill(newExperience, current, maximum));

		if (delta > 0) {
			notifyLevelledUp(id); // here so it notifies using the updated skill
		}
	}

	/**
	 * Adds a {@link SkillListener} to this set.
	 *
	 * @param listener The listener.
	 */
	public void addListener(SkillListener listener) {
		listeners.add(listener);
	}

	/**
	 * Calculates the combat level for this skill set.
	 */
	public void calculateCombatLevel() {
		int attack = skills[Skill.ATTACK].getMaximumLevel();
		int defence = skills[Skill.DEFENCE].getMaximumLevel();
		int strength = skills[Skill.STRENGTH].getMaximumLevel();
		int hitpoints = skills[Skill.HITPOINTS].getMaximumLevel();
		int prayer = skills[Skill.PRAYER].getMaximumLevel();
		int ranged = skills[Skill.RANGED].getMaximumLevel();
		int magic = skills[Skill.MAGIC].getMaximumLevel();

		double base = Ints.max(strength + attack, magic * 2, ranged * 2);

		// prayer / 2 deliberately uses integer division, i.e. floor(prayer / 2).
		double combatLevel = (base * 1.3 + defence + hitpoints + prayer / 2) / 4;
		combat = (int) combatLevel;
	}

	/**
	 * Forces this skill set to refresh.
	 */
	public void forceRefresh() {
		notifySkillsUpdated();
	}

	/**
	 * Gets the combat level of this skill set.
	 *
	 * @return The combat level.
	 */
	public int getCombatLevel() {
		return combat;
	}

	/**
	 * Gets the current level of the specified skill.
	 *
	 * @param skill The skill.
	 * @return The current level.
	 */
	public int getCurrentLevel(int skill) {
		return getSkill(skill).getCurrentLevel();
	}

	/**
	 * Gets the experience of the specified skill.
	 *
	 * @param skill The skill.
	 * @return The experience.
	 */
	public double getExperience(int skill) {
		return getSkill(skill).getExperience();
	}

	/**
	 * Gets the maximum level of the specified skill.
	 *
	 * @param skill The skill.
	 * @return The maximum level.
	 */
	public int getMaximumLevel(int skill) {
		return getSkill(skill).getMaximumLevel();
	}

	/**
	 * Gets a skill by its id.
	 *
	 * @param id The id.
	 * @return The skill.
	 */
	public Skill getSkill(int id) {
		checkBounds(id);
		return skills[id];
	}

	/**
	 * Gets the total level for this skill set.
	 *
	 * @return The total level.
	 */
	public int getTotalLevel() {
		return Arrays.stream(skills).mapToInt(Skill::getMaximumLevel).sum();
	}

	/**
	 * Normalizes the skills in this set, moving each current level one step
	 * towards its maximum. Prayer is skipped (it only restores via altars).
	 */
	public void normalize() {
		for (int id = 0; id < skills.length; id++) {
			int current = skills[id].getCurrentLevel(), max = skills[id].getMaximumLevel();
			if (current == max || id == Skill.PRAYER) {
				continue;
			}

			current += current < max ? 1 : -1;
			setSkill(id, new Skill(skills[id].getExperience(), current, max));
		}
	}

	/**
	 * Removes all the {@link SkillListener}s.
	 */
	public void removeAllListeners() {
		listeners.clear();
	}

	/**
	 * Removes a {@link SkillListener}.
	 *
	 * @param listener The listener to remove.
	 */
	public void removeListener(SkillListener listener) {
		listeners.remove(listener);
	}

	/**
	 * Sets the current level of the specified skill.
	 *
	 * @param skill The skill.
	 * @param level The level.
	 */
	public void setCurrentLevel(int skill, int level) {
		Skill old = getSkill(skill);
		setSkill(skill, Skill.updateCurrentLevel(level, old));
	}

	/**
	 * Sets the experience level of the specified skill.
	 *
	 * @param skill The skill.
	 * @param experience The experience.
	 */
	public void setExperience(int skill, double experience) {
		Skill old = getSkill(skill);
		setSkill(skill, Skill.updateExperience(experience, old));
	}

	/**
	 * Sets the maximum level of the specified skill.
	 *
	 * @param skill The skill.
	 * @param level The level.
	 */
	public void setMaximumLevel(int skill, int level) {
		Skill old = getSkill(skill);
		setSkill(skill, Skill.updateMaximumLevel(level, old));
	}

	/**
	 * Sets a {@link Skill}.
	 *
	 * @param id The id.
	 * @param skill The skill.
	 */
	public void setSkill(int id, Skill skill) {
		checkBounds(id);
		skills[id] = skill;
		notifySkillUpdated(id);
	}

	/**
	 * Gets the number of {@link Skill}s in this set.
	 *
	 * @return The number of skills.
	 */
	public int size() {
		return skills.length;
	}

	/**
	 * Starts the firing of events.
	 */
	public void startFiringEvents() {
		firingEvents = true;
	}

	/**
	 * Stops events from being fired.
	 */
	public void stopFiringEvents() {
		firingEvents = false;
	}

	/**
	 * Checks the bounds of the id.
	 *
	 * @param id The id.
	 * @throws IndexOutOfBoundsException If the id is out of bounds.
	 */
	private void checkBounds(int id) {
		Preconditions.checkElementIndex(id, skills.length, "Skill id is out of bounds.");
	}

	/**
	 * Initialises the skill set: hitpoints starts at level 10 (1154 xp),
	 * every other skill at level 1 with no experience.
	 */
	private void init() {
		Arrays.setAll(skills, id -> id == Skill.HITPOINTS ? new Skill(1154, 10, 10) : new Skill(0, 1, 1));
	}

	/**
	 * Notifies listeners that a skill has been levelled up.
	 *
	 * @param id The skill's id.
	 */
	private void notifyLevelledUp(int id) {
		checkBounds(id);
		if (firingEvents) {
			listeners.forEach(listener -> listener.levelledUp(this, id, skills[id]));
		}
	}

	/**
	 * Notifies listeners that the skills in this listener have been updated.
	 */
	private void notifySkillsUpdated() {
		if (firingEvents) {
			listeners.forEach(listener -> listener.skillsUpdated(this));
		}
	}

	/**
	 * Notifies listeners that a skill has been updated.
	 *
	 * @param id The skill's id.
	 */
	private void notifySkillUpdated(int id) {
		checkBounds(id);
		if (firingEvents) {
			listeners.forEach(listener -> listener.skillUpdated(this, id, skills[id]));
		}
	}

}
/*
 * Copyright 2014 Artem Chikin
 * Copyright 2014 Artem Herasymchuk
 * Copyright 2014 Tom Krywitsky
 * Copyright 2014 Henry Pabst
 * Copyright 2014 Bradley Simons
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package ca.ualberta.cmput301w14t08.geochan.activities;

import java.util.List;

import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentManager.OnBackStackChangedListener;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import ca.ualberta.cmput301w14t08.geochan.R;
import ca.ualberta.cmput301w14t08.geochan.fragments.CustomLocationFragment;
import ca.ualberta.cmput301w14t08.geochan.fragments.EditFragment;
import ca.ualberta.cmput301w14t08.geochan.fragments.FavouritesFragment;
import ca.ualberta.cmput301w14t08.geochan.fragments.MapViewFragment;
import ca.ualberta.cmput301w14t08.geochan.fragments.PostFragment;
import ca.ualberta.cmput301w14t08.geochan.fragments.ThreadListFragment;
import ca.ualberta.cmput301w14t08.geochan.helpers.ConnectivityHelper;
import ca.ualberta.cmput301w14t08.geochan.helpers.Toaster;
import ca.ualberta.cmput301w14t08.geochan.managers.CacheManager;
import ca.ualberta.cmput301w14t08.geochan.managers.PreferencesManager;
import ca.ualberta.cmput301w14t08.geochan.managers.ThreadManager;
import ca.ualberta.cmput301w14t08.geochan.models.GeoLocationLog;

/**
 * Inflates the default fragment and handles some of the crucial controller methods.
 * Initializes most of our singleton classes so attempting to fetch an instance of
 * one does not return null.
 *
 * @author Artem Chikin
 * @author Henry Pabst
 * @author Artem Herasymchuk
 */
public class MainActivity extends FragmentActivity implements OnBackStackChangedListener {

    /**
     * Sets up the initial state of the activity. Initializes singleton classes
     * and a ThreadListFragment to view the app's thread list.
     * @param savedInstanceState the saved instance state bundle
     */
    // TODO Fix logic in fragments
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        if (savedInstanceState != null) {
            return;
        }
        // DO NOT DELETE THE LINES BELOW OR THIS APP WILL CRASH, THESE SINGLETON
        // CLASSES HAVE TO BE INITIALIZED BEFORE ANYTHING ELSE.
        ConnectivityHelper.generateInstance(this);
        Toaster.generateInstance(this);
        PreferencesManager.generateInstance(this);
        CacheManager.generateInstance(this);
        GeoLocationLog.generateInstance(this);
        ThreadManager.generateInstance(this);

        ThreadListFragment fragment = new ThreadListFragment();
        getSupportFragmentManager().beginTransaction().add(R.id.fragment_container, fragment, "threadListFrag")
                .commit();
        getSupportFragmentManager().addOnBackStackChangedListener(this);
    }

    /**
     * Inflates this activity's action bar options.
     * @param menu the menu
     * @return the result
     */
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        MenuInflater inflater = getMenuInflater();
        inflater.inflate(R.menu.main, menu);
        return super.onCreateOptionsMenu(menu);
    }

    /**
     * Handles the selection of specific action bar items according
     * to which item was selected.
     * @param item the item selected
     * @return the result
     */
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
        case R.id.action_settings:
            Intent intent = new Intent(this.getBaseContext(), PreferencesActivity.class);
            startActivity(intent);
            return true;
        case R.id.action_favourites:
            getSupportFragmentManager().beginTransaction()
                    .replace(R.id.fragment_container, new FavouritesFragment(), "favouritesFrag")
                    .addToBackStack(null).commit();
            // This next line is necessary for JUnit to see fragments
            getSupportFragmentManager().executePendingTransactions();
            return true;
        case R.id.action_add_thread:
            PostFragment frag = new PostFragment();
            Bundle bundle = new Bundle();
            bundle.putLong("id", -1);
            frag.setArguments(bundle);
            getSupportFragmentManager().beginTransaction()
                    .replace(R.id.fragment_container, frag, "postFrag").addToBackStack(null)
                    .commit();
            // This next line is necessary for JUnit to see fragments
            getSupportFragmentManager().executePendingTransactions();
            return true;
        case android.R.id.home:
            if (!returnBackStackImmediate(getSupportFragmentManager())) {
                getSupportFragmentManager().popBackStack();
            }
            return true;
        default:
            return super.onOptionsItemSelected(item);
        }
    }

    /**
     * Re-registers the OnBackStackChangedListener as it does not survive
     * the destruction of the Activity.
     */
    @Override
    public void onResume() {
        super.onResume();
        getSupportFragmentManager().addOnBackStackChangedListener(this);
        checkActionBar();
    }

    /**
     * Checks the back stack for fragments and enables/disables the back button
     * in the action bar accordingly
     */
    @Override
    public void onBackStackChanged() {
        checkActionBar();
    }

    /**
     * Overrides the back button to work with nested fragments
     */
    @Override
    public void onBackPressed() {
        if (!returnBackStackImmediate(getSupportFragmentManager())) {
            super.onBackPressed();
        }
    }

    /**
     * Checks the back stack for fragments and enables/disables the back button
     * in the action bar accordingly
     */
    private void checkActionBar() {
        int count = getSupportFragmentManager().getBackStackEntryCount();
        getActionBar().setDisplayHomeAsUpEnabled(count > 0);
    }

    /**
     * Propagates a back button press down to our nested fragment manager
     * and its fragments.
     * @param fm the Fragment Manager
     * @return true if the back stack was returned from successfully,
     *         false if not
     */
    private boolean returnBackStackImmediate(FragmentManager fm) {
        // HACK: propagate back button press to child fragments.
        // This might not work properly when you have multiple fragments adding
        // multiple children to the back stack.
        // (in our case, only one child fragments adds fragments to the back stack,
        // so we're fine with this)
        //
        // This code was taken from the web site:
        // http://android.joao.jp/2013/09/back-stack-with-nested-fragments-back.html
        // Accessed on March 21, 2014
        List<Fragment> fragments = fm.getFragments();
        if (fragments != null && fragments.size() > 0) {
            for (Fragment fragment : fragments) {
                if (fragment != null && fragment.getChildFragmentManager() != null) {
                    if (fragment.getChildFragmentManager().getBackStackEntryCount() > 0) {
                        if (fragment.getChildFragmentManager().popBackStackImmediate()) {
                            return true;
                        } else {
                            return returnBackStackImmediate(fragment.getChildFragmentManager());
                        }
                    }
                }
            }
        }
        return false;
    }

    /**
     * Finds a fragment by tag, looking first in this activity's fragment
     * manager and, if absent there, in the child fragment manager of the
     * currently attached FavouritesFragment (which hosts nested fragments
     * when the user is browsing favourites).
     *
     * @param tag the fragment tag to look up
     * @param type the expected fragment class, used for a checked cast
     * @return the fragment, or null if neither manager has one for the tag
     */
    private <F extends Fragment> F findFragmentWithFavouritesFallback(String tag, Class<F> type) {
        Fragment fragment = getSupportFragmentManager().findFragmentByTag(tag);
        if (fragment == null) {
            FavouritesFragment favFrag = (FavouritesFragment) getSupportFragmentManager()
                    .findFragmentByTag("favouritesFrag");
            fragment = favFrag.getChildFragmentManager().findFragmentByTag(tag);
        }
        return type.cast(fragment);
    }

    /**
     * Calls the respective post new thread method in the fragment.
     *
     * @param view
     *            View passed to the activity to check which button was pressed
     */
    public void post(View view) {
        findFragmentWithFavouritesFallback("postFrag", PostFragment.class).post(view);
    }

    /**
     * Calls the respective attach image method in the running PostFragment.
     * @param view View passed to the activity to check which button was pressed.
     */
    public void attachImage(View view) {
        findFragmentWithFavouritesFallback("postFrag", PostFragment.class).attachImage(view);
    }

    /**
     * Method called when the Edit Image button is pressed in EditFragment. Finds the
     * appropriate fragment and calls editImage on it.
     * @param view The View passed to the activity to check which button was pressed.
     */
    public void editImage(View view) {
        findFragmentWithFavouritesFallback("editFrag", EditFragment.class).editImage(view);
    }

    /**
     * Method called when the Post Edit button is pressed in EditFragment.
     * Finds the appropriate fragment and calls makeEdit on it.
     * @param view View passed to the activity to check which button was pressed.
     */
    public void makeEdit(View view) {
        findFragmentWithFavouritesFallback("editFrag", EditFragment.class).makeEdit(view);
    }

    /**
     * Calls the respective change location method in the fragment.
     *
     * @param view
     *            View passed to the activity to check which button was pressed
     */
    public void changeLocation(View view) {
        Bundle args = new Bundle();
        if (view.getId() == R.id.location_button) {
            args.putInt("postType", CustomLocationFragment.POST);
        } else if (view.getId() == R.id.edit_location_button) {
            args.putInt("postType", CustomLocationFragment.EDIT);
        }
        CustomLocationFragment frag = new CustomLocationFragment();
        frag.setArguments(args);
        FavouritesFragment favFrag = (FavouritesFragment) getSupportFragmentManager()
                .findFragmentByTag("favouritesFrag");
        if (favFrag != null) {
            // This bit here solves the issue of a crash when changing location
            // in a reply to a comment in a favourited thread.
            FragmentManager childMan = favFrag.getChildFragmentManager();
            childMan.beginTransaction()
                    .replace(R.id.container, frag, "customLocFrag").addToBackStack(null)
                    .commit();
        } else {
            getSupportFragmentManager().beginTransaction()
                    .replace(R.id.fragment_container, frag, "customLocFrag").addToBackStack(null)
                    .commit();
            getSupportFragmentManager().executePendingTransactions();
        }
    }

    /**
     * Calls the respective submit location method in the fragment.
     *
     * @param view
     *            View passed to the activity to check which button was pressed
     */
    public void submitLocation(View view) {
        findFragmentWithFavouritesFallback("customLocFrag", CustomLocationFragment.class)
                .submitNewLocation(view);
    }

    /**
     * Calls the respective submit location method in the fragment.
     *
     * @param view
     *            View passed to the activity to check which button was pressed
     */
    public void submitCurrentLocation(View view) {
        findFragmentWithFavouritesFallback("customLocFrag", CustomLocationFragment.class)
                .submitCurrentLocation(view);
    }

    /**
     * Called when the get_directions_button is clicked in MapViewFragment. Finds the
     * fragment where the button was clicked and calls getDirections on it.
     * @param view View passed to the activity to determine which button was pressed.
     */
    public void getDirections(View view) {
        findFragmentWithFavouritesFallback("mapFrag", MapViewFragment.class).getDirections();
    }
}
/**
 * Copyright 2011-2019 Asakusa Framework Team.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.asakusafw.runtime.core;

import java.io.IOException;
import java.text.MessageFormat;

import com.asakusafw.runtime.core.api.ApiStub;
import com.asakusafw.runtime.core.api.ReportApi;
import com.asakusafw.runtime.core.legacy.LegacyReport;
import com.asakusafw.runtime.core.legacy.RuntimeResource;

/**
 * Report API entry class.
 * The Report API enables to notify some messages in operator methods, to the runtime reporting system
 * (e.g. logger, standard output, or etc.).
 * Generally, the Report API does not have any effect on the batch execution, for example, the batch execution will
 * continue even if {@link Report#error(String)} is invoked.
 * Clients should put <code>&#64;Sticky</code> annotation for operator methods using this API, otherwise the Asakusa
 * DSL compiler optimization may remove the target operator.
<pre><code>
&#64;Sticky
&#64;Update
public void updateWithReport(Hoge hoge) {
    if (hoge.getValue() &lt; 0) {
        Report.error("invalid value");
    } else {
        hoge.setValue(0);
    }
}
</code></pre>
 * @since 0.1.0
 * @version 0.9.0
 */
public final class Report {

    /**
     * The Hadoop property name of the custom implementation class name of {@link Report.Delegate}.
     * To use a default implementation, clients should set {@code com.asakusafw.runtime.core.Report$Default} to it.
     */
    public static final String K_DELEGATE_CLASS = "com.asakusafw.runtime.core.Report.Delegate"; //$NON-NLS-1$

    // All public entry points delegate to the currently-installed ReportApi;
    // LegacyReport.API is the default backing implementation.
    private static final ApiStub<ReportApi> STUB = new ApiStub<>(LegacyReport.API);

    // Utility class: instantiation is not allowed.
    private Report() {
        return;
    }

    /**
     * Reports an <em>informative</em> message.
     * Clients should put <code>&#64;Sticky</code> annotation to the operator method that using this.
     * @param message the message
     * @throws Report.FailedException if error was occurred while reporting the message
     * @see Report
     */
    public static void info(String message) {
        STUB.get().info(message);
    }

    /**
     * Reports an <em>informative</em> message.
     * Clients should put <code>&#64;Sticky</code> annotation to the operator method that using this.
     * @param message the message
     * @param throwable the optional exception object (nullable)
     * @throws Report.FailedException if error was occurred while reporting the message
     * @see Report
     * @since 0.5.1
     */
    public static void info(String message, Throwable throwable) {
        STUB.get().info(message, throwable);
    }

    /**
     * Reports a <em>warning</em> message.
     * Clients should put <code>&#64;Sticky</code> annotation to the operator method that using this.
     * @param message the message
     * @throws Report.FailedException if error was occurred while reporting the message
     * @see Report
     */
    public static void warn(String message) {
        STUB.get().warn(message);
    }

    /**
     * Reports a <em>warning</em> message.
     * Clients should put <code>&#64;Sticky</code> annotation to the operator method that using this.
     * @param message the message
     * @param throwable the optional exception object (nullable)
     * @throws Report.FailedException if error was occurred while reporting the message
     * @see Report
     * @since 0.5.1
     */
    public static void warn(String message, Throwable throwable) {
        STUB.get().warn(message, throwable);
    }

    /**
     * Reports an <em>error</em> message.
     * Clients should put <code>&#64;Sticky</code> annotation to the operator method that using this.
     * Please be careful that this method will <em>NOT</em> shutdown the running batch.
     * To shutdown the batch, throw an exception ({@link RuntimeException}) in operator methods.
     * @param message the message
     * @throws Report.FailedException if error was occurred while reporting the message
     * @see Report
     */
    public static void error(String message) {
        STUB.get().error(message);
    }

    /**
     * Reports an <em>error</em> message.
     * Clients should put <code>&#64;Sticky</code> annotation to the operator method that using this.
     * Please be careful that this method will <em>NOT</em> shutdown the running batch.
     * To shutdown the batch, throw an exception ({@link RuntimeException}) in operator methods.
     * @param message the message
     * @param throwable the optional exception object (nullable)
     * @throws Report.FailedException if error was occurred while reporting the message
     * @see Report
     * @since 0.5.1
     */
    public static void error(String message, Throwable throwable) {
        STUB.get().error(message, throwable);
    }

    /**
     * Returns the API stub.
     * Application developer must not use this directly.
     * @return the API stub
     * @since 0.9.0
     */
    public static ApiStub<ReportApi> getStub() {
        return STUB;
    }

    /**
     * {@link FailedException} is thrown when an exception was occurred while processing messages in {@link Report}.
     */
    public static class FailedException extends RuntimeException {

        private static final long serialVersionUID = 1L;

        /**
         * Creates a new instance.
         */
        public FailedException() {
            super();
        }

        /**
         * Creates a new instance.
         * @param message the exception message (nullable)
         * @param cause the original cause (nullable)
         */
        public FailedException(String message, Throwable cause) {
            super(message, cause);
        }

        /**
         * Creates a new instance.
         * @param message the exception message (nullable)
         */
        public FailedException(String message) {
            super(message);
        }

        /**
         * Creates a new instance.
         * @param cause the original cause (nullable)
         */
        public FailedException(Throwable cause) {
            super(cause);
        }
    }

    /**
     * An abstract super class of delegation objects for {@link Report}.
     * Application developers can inherit this class, and set the fully qualified name to the property
     * {@link Report#K_DELEGATE_CLASS} to use the custom implementation for the Report API.
     * @since 0.1.0
     * @version 0.7.4
     */
    public abstract static class Delegate implements RuntimeResource {

        /**
         * Notifies a report.
         * @param level report level
         * @param message report message
         * @throws IOException if failed to notify this report by I/O error
         */
        public abstract void report(Level level, String message) throws IOException;

        /**
         * Notifies a report.
         * The default implementation discards the throwable and forwards to
         * {@link #report(Level, String)}; subclasses may override to include it.
         * @param level report level
         * @param message report message
         * @param throwable optional exception info (nullable)
         * @throws IOException if failed to notify this report by I/O error
         * @since 0.5.1
         */
        public void report(Level level, String message, Throwable throwable) throws IOException {
            report(level, message);
        }
    }

    /**
     * Represents levels of reporting.
     */
    public enum Level {

        /**
         * Informative level.
         */
        INFO,

        /**
         * Warning level.
         */
        WARN,

        /**
         * Erroneous level.
         */
        ERROR,
    }

    /**
     * A basic implementation of {@link Delegate}.
     * INFO goes to standard output; WARN and ERROR go to standard error.
     * @since 0.1.0
     * @version 0.5.1
     */
    public static class Default extends Delegate {

        @Override
        public void report(Level level, String message) {
            switch (level) {
            case INFO:
                System.out.println(message);
                break;
            case WARN:
                System.err.println(message);
                // prints a synthetic stack trace so the report site can be located
                new Exception("Warning").printStackTrace();
                break;
            case ERROR:
                System.err.println(message);
                // prints a synthetic stack trace so the report site can be located
                new Exception("Error").printStackTrace();
                break;
            default:
                throw new AssertionError(MessageFormat.format(
                        "[{0}] {1}", //$NON-NLS-1$
                        level, message));
            }
        }

        @Override
        public void report(Level level, String message, Throwable throwable) {
            switch (level) {
            case INFO:
                System.out.println(message);
                if (throwable != null) {
                    throwable.printStackTrace(System.out);
                }
                break;
            case WARN:
            case ERROR:
                System.err.println(message);
                if (throwable != null) {
                    throwable.printStackTrace(System.err);
                }
                break;
            default:
                throw new AssertionError(MessageFormat.format(
                        "[{0}] {1}", //$NON-NLS-1$
                        level, message));
            }
        }
    }
}
package com.climate.mirage.app;

import android.content.Context;
import android.graphics.drawable.Drawable;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v4.view.PagerAdapter;
import android.support.v4.view.ViewPager;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;

import com.climate.mirage.Mirage;
import com.climate.mirage.cache.disk.CompositeDiskCache;
import com.climate.mirage.cache.disk.DiskCache;
import com.climate.mirage.cache.disk.DiskCacheStrategy;
import com.climate.mirage.cache.disk.DiskLruCacheWrapper;
import com.climate.mirage.exceptions.MirageIOException;
import com.climate.mirage.requests.MirageRequest;
import com.climate.mirage.targets.ImageViewTarget;

import java.io.File;
import java.util.ArrayList;

/**
 * Sample activity demonstrating an "offline sync" pattern with Mirage.
 *
 * Two Mirage instances are used:
 * - {@link #syncMirage} writes originals into a dedicated "sync" disk cache
 *   (see {@link #createOfflineCache()});
 * - {@link #appMirage} reads through a composite cache whose second layer is the
 *   same "sync" cache opened read-only (see {@link #createWrappedCaches()}), so
 *   previously synced images resolve without re-downloading.
 *
 * On create, all sample images are downloaded synchronously on a background
 * AsyncTask, then displayed in a ViewPager; each page reports which cache
 * source the image came from.
 */
public class SampleOfflineSyncActivity extends AppCompatActivity {

    private static final String TAG = SampleOfflineSyncActivity.class.getSimpleName();

    // sample image URLs to sync; populated in onCreate
    private ArrayList<String> items;
    // status line shown above the pager
    private TextView textView;
    // Mirage instance used only for the background "sync" download pass
    private Mirage syncMirage;
    // Mirage instance the UI loads through (composite cache, sync layer read-only)
    private Mirage appMirage;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.viewpager);
        textView = (TextView)findViewById(R.id.textView);
        Button button1 = (Button)findViewById(R.id.button1);
        button1.setText("Reset");
        button1.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                reset();
            }
        });

        // clear the process-wide default memory cache so this sample always
        // demonstrates disk-cache hits rather than memory hits
        if (Mirage.get(this).getDefaultMemoryCache() != null) {
            Mirage.get(this).getDefaultMemoryCache().clear();
        }

        syncMirage = new Mirage(this);
        syncMirage.setDefaultMemoryCache(null);
        syncMirage.setDefaultDiskCache(createOfflineCache());
        syncMirage.setDefaultExecutor(AsyncTask.THREAD_POOL_EXECUTOR);

        // let's not fool with the default mirage settings for this example
        appMirage = new Mirage(this);
        appMirage.setDefaultMemoryCache(null);
        appMirage.setDefaultDiskCache(createWrappedCaches());
        appMirage.setDefaultExecutor(AsyncTask.THREAD_POOL_EXECUTOR);

        items = new ArrayList<>();
        items.add("https://i.imgur.com/lCL6kEF.jpg");
        items.add("https://i.imgur.com/HDrJjF0.jpg");
        items.add("https://i.imgur.com/hY9kBxr.jpg");
        items.add("https://i.imgur.com/3ndso90.jpg");
        items.add("https://i.imgur.com/AGDbbKl.jpg");
        items.add("https://i.imgur.com/7IAT3YE.jpg");
        items.add("https://i.imgur.com/FQgMesN.jpg");
        items.add("https://i.imgur.com/J8eM6C0.jpg");
        items.add("https://i.imgur.com/drnRnjv.jpg");

        reset();
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        syncMirage.dispose();
        appMirage.dispose();
    }

    /**
     * Clears both caches, then re-downloads every sample image synchronously on a
     * background thread into the "sync" cache, reporting progress into
     * {@link #textView}. When all downloads finish, the pager is (re)initialized.
     */
    private void reset() {
        syncMirage.clearCache();
        appMirage.clearCache();

        new AsyncTask<Void, File, Void>() {
            // number of files reported so far (only touched from callbacks
            // on the main thread via publishProgress)
            private int count = 0;

            @Override
            protected Void doInBackground(Void... params) {
                for (int i=0; i<items.size(); i++) {
                    try {
                        // downloadOnlySync blocks; SOURCE strategy keeps the
                        // original bytes in the sync disk cache
                        File file = syncMirage
                                .load(items.get(i))
                                .diskCacheStrategy(DiskCacheStrategy.SOURCE)
                                .downloadOnlySync();
                        publishProgress(file);
                    } catch (MirageIOException e) {
                        // best-effort sync: a failed URL is logged and skipped
                        Log.w(TAG, "IO Exception", e);
                    }
                }
                return null;
            }

            @Override
            protected void onPreExecute() {
                super.onPreExecute();
                textView.setText("Starting download");
            }

            @Override
            protected void onProgressUpdate(File... values) {
                super.onProgressUpdate(values);
                textView.setText("file " + (count++) + " at \n\n" + values[0].getAbsolutePath());
            }

            @Override
            protected void onPostExecute(Void aVoid) {
                super.onPostExecute(aVoid);
                textView.setText("Downloading Complete");
                initPager();
            }
        }.execute();
    }

    // attaches a fresh adapter; called once all images have been synced
    private void initPager() {
        ViewPager pager = (ViewPager)findViewById(R.id.pager);
        pager.setAdapter(new MyAdapter());
    }

    /**
     * Builds the cache used by {@link #appMirage}: a normal ("blank") cache layered
     * with the read-only "sync" cache, so UI loads are served from synced bytes.
     */
    private DiskCache createWrappedCaches() {
        // the "blank" cache will never get written to since we've already have it on our
        // "sync" cache as a source
        DiskCache cacheDisk = new DiskLruCacheWrapper(
                new DiskLruCacheWrapper.SharedDiskLruCacheFactory(new File(getCacheDir(), "blank"),
                        100 * 1024 * 1024));
        DiskLruCacheWrapper syncDisk = new DiskLruCacheWrapper(
                new DiskLruCacheWrapper.SharedDiskLruCacheFactory(new File(getCacheDir(), "sync"),
                        100 * 1024 * 1024));
        // read-only: the UI must never evict or overwrite synced entries
        syncDisk.setReadOnly(true);
        DiskCache compositeDiskCache = new CompositeDiskCache(cacheDisk, syncDisk);
        return compositeDiskCache;
    }

    /**
     * Builds the writable "sync" cache used by {@link #syncMirage} during the
     * background download pass. Same directory/size as the read-only wrapper above.
     */
    private DiskCache createOfflineCache() {
        DiskLruCacheWrapper syncDisk = new DiskLruCacheWrapper(
                new DiskLruCacheWrapper.SharedDiskLruCacheFactory(new File(getCacheDir(), "sync"),
                        100 * 1024 * 1024));
        return syncDisk;
    }

    /**
     * Pager adapter that loads each URL through {@link #appMirage} into a
     * full-size ImageView page.
     */
    private class MyAdapter extends PagerAdapter {

        private Context context;
        // width/height computed for a 4:3 page; NOTE(review): height appears
        // unused below — confirm before relying on it
        private int width, height;

        private MyAdapter() {
            this.context = SampleOfflineSyncActivity.this;
            width = context.getResources().getDisplayMetrics().widthPixels;
            height = (int)(width * .75f);
        }

        @Override
        public int getCount() {
            return items.size();
        }

        @Override
        public boolean isViewFromObject(View view, Object object) {
            // instantiateItem returns the ImageView itself as the key object
            return view == object;
        }

        @Override
        public void destroyItem(ViewGroup container, int position, Object object) {
            // cancel any in-flight load for this page before detaching it
            appMirage.cancelRequest((ImageView)object);
            container.removeView((View) object);
        }

        @Override
        public Object instantiateItem(ViewGroup container, int position) {
            ImageView iv = new ImageView(context);
            ViewGroup.LayoutParams lp = new ViewGroup.LayoutParams(
                    ViewGroup.LayoutParams.MATCH_PARENT,
                    ViewGroup.LayoutParams.MATCH_PARENT);
            container.addView(iv, lp);
            MirageRequest request = appMirage
                    .load(items.get(position))
                    .skipWritingMemoryCache(true);
            request.into(new MyImageTarget(request, iv, position))
                    .fit()
                    .fade()
                    .placeHolder(R.drawable.mirage_ic_launcher)
                    .error(R.drawable.ic_error)
                    .go();
            return iv;
        }
    }

    /**
     * ImageView target that additionally reports which source (memory/disk/network)
     * the image was resolved from into the status TextView.
     */
    private class MyImageTarget extends ImageViewTarget {

        private int position;

        private MyImageTarget(MirageRequest request, ImageView imageView, int position) {
            super(request, imageView);
            this.position = position;
        }

        @Override
        protected void onResult(ImageView view, Drawable drawable, Mirage.Source source,
                MirageRequest request) {
            super.onResult(view, drawable, source, request);
            textView.setText("Image " + position + " from " + source.toString());
        }
    }
}
/*
Copyright [2013-2014] eBay Software Foundation

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package models.data.providers.actors;

import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.TimeUnit;
import java.util.UUID;

import scala.concurrent.duration.Duration;
import scala.concurrent.duration.FiniteDuration;

import models.agent.batch.commands.message.BatchResponseFromManager;
import models.agent.batch.commands.message.InitialRequestToManager;
import models.agent.batch.commands.message.RequestToBatchSenderAsstManager;
import models.agent.batch.commands.message.ResponseCountToBatchSenderAsstManager;
import models.asynchttp.RequestProtocol;
import models.asynchttp.actors.OperationWorker;
import models.asynchttp.request.AgentRequest;
import models.asynchttp.request.GenericAgentRequest;
import models.asynchttp.response.AgentResponse;
import models.asynchttp.response.GenericAgentResponse;
import models.data.AgentCommandMetadata;
import models.data.AggregateData;
import models.data.NodeData;
import models.data.NodeGroupDataMap;
import models.data.NodeReqResponse;
import models.data.providers.AgentDataAggregator;
import models.data.providers.AgentDataProvider;
import models.data.providers.AgentDataProviderHelper;
import models.utils.AgentUtils;
import models.utils.DateUtils;
import models.utils.VarUtils;
import akka.actor.Actor;
import akka.actor.ActorRef;
import akka.actor.Cancellable;
import akka.actor.OneForOneStrategy;
import akka.actor.PoisonPill;
import akka.actor.Props;
import akka.actor.SupervisorStrategy;
import akka.actor.SupervisorStrategy.Directive;
import akka.actor.UntypedActor;
import akka.actor.UntypedActorFactory;
import akka.japi.Function;

/**
 * Akka manager actor that fans out per-node aggregation work to one
 * {@link AggregationWorker} per valid node, collects the extracted responses
 * back into the shared {@link AggregateData} maps, and replies to the director
 * once (responses + not-fired requests) equals the request count, or when the
 * scheduled timeout fires.
 *
 * 20130917: add the logic to replace
 * REPLACE-VAR_APIVARREPLACE_SUPERMANSPECIAL_TARGET_NODE_VAR
 *
 * @author ypei
 *
 */
public class AggregationManager extends UntypedActor {

    // worker responses received so far
    protected int responseCount = 0;
    // total nodes in nodeDataMapValid for this job
    protected int requestCount = 0;
    // nodes whose stored response was missing, so no worker was spawned
    protected int requestNotFireCount = 0;
    protected long startTime = System.currentTimeMillis();
    // NOTE(review): endTime is never assigned past its initializer in this class
    protected long endTime = -1L;
    // the actor that sent the initial RequestToAggregationManager; gets the final reply
    protected ActorRef director = null;
    protected List<ActorRef> workers = new ArrayList<ActorRef>();
    // extraction pattern for this aggregation run (may be a SUPERMAN special token)
    public String patternStr = null;
    // shared result holder; workers' extractions are merged into its maps
    public AggregateData aggregateData = null;
    protected String directorJobId = null;

    protected enum InternalMessageType {
        OPERATION_TIMEOUT
    }

    // default is -1: which will not to limit the response length
    // NOTE(review): these two fields are not read anywhere in this class
    protected int responseExtractIndexStart = -1;
    protected int responseExtractIndexEnd = -1;
    protected Cancellable timeoutMessageCancellable = null;
    // NOTE(review): dataStore is never assigned or read in this class
    protected Map<String, NodeGroupDataMap> dataStore = null;
    protected String agentCommandType = null;
    protected Map<String, NodeData> nodeDataMapValid = null;

    @Override
    public void onReceive(Object message) {
        // Start all workers
        if (message instanceof RequestToAggregationManager) {
            director = getSender();
            // clear responseMap
            RequestToAggregationManager request = (RequestToAggregationManager) message;
            patternStr = request.getPatternStr();
            directorJobId = request.getDirectorJobId();
            aggregateData = request.getAggregateData();
            nodeDataMapValid = aggregateData.getNodeDataMapValid();
            agentCommandType = aggregateData.getAgentCommandType();
            AgentDataProvider adp = AgentDataProvider.getInstance();
            final String errorMsgPatternStr = adp.aggregationMetadatas
                    .get(VarUtils.AGGREGATION_PATTERN_EXTRACT_EXCEPTION_SUMMARY_FROM_ERROR_MSG);
            if (nodeDataMapValid == null || nodeDataMapValid.entrySet() == null) {
                models.utils.LogUtils.printLogError
                    (" ERROR: ? Data source is missing. Return. nodeDataMapValid == null || nodeDataMapValid.entrySet() == null in genResponseToMetadataMap()"
                            + DateUtils.getNowDateTimeStrSdsm());
                return;
            }
            // assumption: key value pairs number equals to
            requestCount = nodeDataMapValid.size();
            models.utils.LogUtils.printLogNormal
                ("!Obtain request! aggregation request with count: " + requestCount);
            if (requestCount <= 0) {
                // nothing to aggregate: reply immediately
                getSender().tell(
                        new ResponseFromAggregationManager(new Date() + ""),
                        getSelf());
                return;
            }
            for (Entry<String, NodeData> entry : nodeDataMapValid.entrySet()) {
                // 20130730 TODO: Potential bug: verified NPE. this can be
                // empty. When no data is coming back.
                final String fqdn = entry.getKey();
                if (entry.getValue() == null
                        || entry.getValue().getDataMap() == null
                        || entry.getValue().getDataMap().get(agentCommandType) == null
                        || entry.getValue().getDataMap().get(agentCommandType)
                                .getResponseContent() == null) {
                    if(VarUtils.IN_DETAIL_DEBUG){
                        models.utils.LogUtils.printLogNormal
                            ("ERROR~Understandable: Will happen when Response is null and request fails to send out. NodeData in nodeDataMapValid is NULL in function genResponseToMetadataMap at "
                                    + DateUtils.getNowDateTimeStrSdsm());
                    }
                    // node has no stored response: record N/A + error and skip worker creation
                    boolean isError = true;
                    aggregateData.getFqdnResponseExtractMap().put(fqdn, VarUtils.NA);
                    aggregateData.getFqdnIsErrorExtractMap().put(fqdn, isError);
                    aggregateData.getFqdnErrorMsgExtractMap().put(fqdn,
                            VarUtils.SUPERMAN_NOT_FIRE_REQUEST);
                    ++requestNotFireCount;
                    continue;
                }
                final NodeData nodeData = entry.getValue();
                // one short-lived worker per node; each is told to process immediately
                final ActorRef worker = getContext().system().actorOf(
                        new Props(new UntypedActorFactory() {
                            private static final long serialVersionUID = 1L;
                            final RequestToAggregationWorker request = new RequestToAggregationWorker(
                                    nodeData, agentCommandType,
                                    errorMsgPatternStr, patternStr);

                            public Actor create() {
                                return new AggregationWorker(request, fqdn);
                            }
                        }));
                workers.add(worker);
                worker.tell(AggregationWorker.MessageType.PROCESS_REQUEST,
                        getSelf());
            }// end for loop
            //TODO
            if(workers.isEmpty()){
                // every node lacked data: reply now and stop
                models.utils.LogUtils.printLogNormal("NO RESPONSES coming back in this case. Just return");
                ResponseFromAggregationManager responseFromAggregationManager = new ResponseFromAggregationManager(
                        new Date() + "");
                if (director != null) {
                    director.tell(responseFromAggregationManager, getSelf());
                } else {
                    models.utils.LogUtils.printLogError
                        ("ERROR: Initiator is NULL for AggregationManager ");
                }
                getContext().stop(getSelf());
                // NOTE(review): no return here — the timeout below is still
                // scheduled against the now-stopped actor (goes to dead letters).
                // Confirm whether a `return;` was intended.
            }
            // 2013 1013 add cancel.
            final FiniteDuration timeOutDuration = Duration.create(
                    VarUtils.TIMEOUT_ASK_AGGREGATION_MANAGER_SCONDS,
                    TimeUnit.SECONDS);
            // safety net: if workers hang, OPERATION_TIMEOUT triggers cleanup
            timeoutMessageCancellable = getContext()
                    .system()
                    .scheduler()
                    .scheduleOnce(timeOutDuration, getSelf(),
                            InternalMessageType.OPERATION_TIMEOUT,
                            getContext().system().dispatcher());
            if (VarUtils.IN_DETAIL_DEBUG) {
                models.utils.LogUtils.printLogError
                    ("Scheduled TIMEOUT_ASK_AGGREGATION_MANAGER_SCONDS OPERATION_TIMEOUT after SEC: "
                            + VarUtils.TIMEOUT_ASK_AGGREGATION_MANAGER_SCONDS
                            + " at " + DateUtils.getNowDateTimeStrSdsm());
            }
        } else if (message instanceof ResponseToAggregationManagerFromWorker) {
            try {
                ResponseToAggregationManagerFromWorker responseFromWorker = null;
                responseFromWorker = (ResponseToAggregationManagerFromWorker) message;
                // 20130422 to trim the message if needed; careful, when there
                // are exception: will make the
                // bug fixed: 20130423 check gap.getResponseContent() length
                // ==0; then -1 will make it out of bound!
                this.responseCount += 1;
                if (responseFromWorker.getRequest() == null) {
                    models.utils.LogUtils.printLogError("responseFromWorker request is null!!"
                            + DateUtils.getNowDateTimeStrSdsm());
                }
                String fqdn = responseFromWorker.getFqdn();
                String extractErrorMsg = responseFromWorker
                        .getExtractErrorMsg();
                String extractedResponse = responseFromWorker
                        .getExtractedResponse();
                // shadows the field of the same name; scoped to this message only
                String agentCommandType = responseFromWorker.getRequest()
                        .getAgentCommandType();
                boolean isError = responseFromWorker.isError();
                // 20131026: with this: for status code / response time only
                if (patternStr!=null&& patternStr
                        .equalsIgnoreCase(VarUtils.AGGREGATION_SUPERMAN_SPECIAL_STATUS_CODE)) {
                    String statusCode = responseFromWorker.getRequest()
                            .getNodeData().getDataMap().get(agentCommandType)
                            .getResponseContent().getStatusCode();
                    extractedResponse = statusCode;
                } else if (patternStr!=null&& patternStr
                        .equalsIgnoreCase(VarUtils.AGGREGATION_SUPERMAN_SPECIAL_RESPONSE_TIME)) {
                    String responseTime = responseFromWorker.getRequest()
                            .getNodeData().getDataMap().get(agentCommandType)
                            .getResponseContent().getResponseTime();
                    extractedResponse = responseTime;
                }
                if (VarUtils.IN_DETAIL_DEBUG) {
                    models.utils.LogUtils.printLogNormal(" stringMatcherByPattern output: "
                            + extractedResponse);
                    models.utils.LogUtils.printLogNormal(" extractErrorMsg output: "
                            + extractErrorMsg);
                }
                // put into the init hashmap
                aggregateData.getFqdnResponseExtractMap().put(fqdn,
                        extractedResponse);
                aggregateData.getFqdnIsErrorExtractMap().put(fqdn, isError);
                aggregateData.getFqdnErrorMsgExtractMap().put(fqdn,
                        extractErrorMsg);
                // NOTE(review): `x % 1 == 0` is always true — presumably a
                // leftover progress-throttle (e.g. % 100); confirm intent.
                if (this.responseCount % 1 == 0) {
                    if (VarUtils.IN_DETAIL_DEBUG) {
                        long responseReceiveTime = System.currentTimeMillis();
                        // %.5g%n
                        double progressPercent = (double) (responseCount)
                                / (double) (requestCount) * 100.0;
                        String responseReceiveTimeStr = DateUtils
                                .getDateTimeStr(new Date(responseReceiveTime));
                        String secondElapsedStr = new Double(
                                (responseReceiveTime - startTime) / 1000.0)
                                .toString();
                        // percent is escaped using percent sign
                        models.utils.LogUtils.printLogNormal(String
                                .format("\n[%d]__RESPONSE_RECV_IN_MGR %d (+%d) / %d (%.5g%%) after %s SEC for %s at %s , directorJobId : %s\n",
                                        responseCount, responseCount,
                                        requestCount - responseCount,
                                        requestCount, progressPercent,
                                        secondElapsedStr, fqdn,
                                        responseReceiveTimeStr, directorJobId)
                                );
                    }
                }
                // completion condition: every node either replied or was never fired
                if (this.responseCount + this.requestNotFireCount == this.requestCount ) {
                    models.utils.LogUtils.printLogNormal
                        ("SUCCESSFUL GOT ON ALL RESPONSES: Received all the expected messages. Count matches: "
                                + this.requestCount + " at time: "
                                + DateUtils.getNowDateTimeStrSdsm());
                    ResponseFromAggregationManager responseFromAggregationManager = new ResponseFromAggregationManager(
                            new Date() + "");
                    if (director != null) {
                        director.tell(responseFromAggregationManager, getSelf());
                    } else {
                        models.utils.LogUtils.printLogError
                            ("ERROR: Initiator is NULL for AggregationManager ");
                    }
                    // Send message to the future with the result
                    long currTime = System.currentTimeMillis();
                    models.utils.LogUtils.printLogNormal
                        ("\nTime taken to get all responses back in Aggregation Manager: "
                                + (currTime - startTime) / 1000.0 + " secs");
                    // MUST SHUT DOWN: This is a double check. Acutally agent
                    // operation worker has already shutdown.
                    for (ActorRef worker : workers) {
                        getContext().stop(worker);
                    }
                    if (timeoutMessageCancellable != null) {
                        timeoutMessageCancellable.cancel();
                    }
                    // Suicide
                    // getSelf().tell(PoisonPill.getInstance(), null);
                    getContext().stop(getSelf());
                }// end when all requests have resonponse
            } catch (Throwable t) {
                t.printStackTrace();
            }
        } else if (message instanceof InternalMessageType) {
            switch ((InternalMessageType) message) {
            case OPERATION_TIMEOUT:
                cancelRequestAndCancelWorkers();
                break;
            }
        } else {
            models.utils.LogUtils.printLogError("Unhandled: " + message);
            unhandled(message);
        }
    }// end func

    /**
     * Potential bug: this assumes when cancel; all initial request to each node
     * has been sent out by asst manager to each op worker.
     *
     * For those op worker who has receive this cancel first PRIOR to the asst
     * manager's request message; the reply back could
     *
     * This way prevent memory leak by sending cancel to OP worker in order to
     * stop http worker; rather than directly stopping OP worker without
     * stopping http worker If not; this did not really stop the ASST manager..
     *
     * will rely on the global ASK manager timeout for if .
     */
    private void cancelRequestAndCancelWorkers() {
        for (ActorRef worker : workers) {
            if (worker == null) {
                models.utils.LogUtils.printLogError("worker is gone. null ptr: ");
            } else if (!worker.isTerminated()) {
                // just stop should be fine. since no next layer
                getContext().stop(worker);
            }
        }
        models.utils.LogUtils.printLogError
            ("--DEBUG--aggregationManager sending cancelPendingRequest at time: "
                    + DateUtils.getNowDateTimeStr());
    }

    public int getResponseCount() {
        return responseCount;
    }

    public void setResponseCount(int responseCount) {
        this.responseCount = responseCount;
    }

    public int getRequestCount() {
        return requestCount;
    }

    public void setRequestCount(int requestCount) {
        this.requestCount = requestCount;
    }

    public long getStartTime() {
        return startTime;
    }

    public void setStartTime(long startTime) {
        this.startTime = startTime;
    }

    public ActorRef getDirector() {
        return director;
    }

    public void setDirector(ActorRef director) {
        this.director = director;
    }

    public List<ActorRef> getWorkers() {
        return workers;
    }

    public void setWorkers(List<ActorRef> workers) {
        this.workers = workers;
    }

    public String getAgentCommandType() {
        return agentCommandType;
    }

    public void setAgentCommandType(String agentCommandType) {
        this.agentCommandType = agentCommandType;
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.ppd; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.Stack; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.FilterOperator; import org.apache.hadoop.hive.ql.exec.JoinOperator; import org.apache.hadoop.hive.ql.exec.LateralViewJoinOperator; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.OperatorFactory; import org.apache.hadoop.hive.ql.exec.PTFOperator; import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator; import org.apache.hadoop.hive.ql.exec.RowSchema; import org.apache.hadoop.hive.ql.exec.SelectOperator; import org.apache.hadoop.hive.ql.exec.TableScanOperator; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.lib.Node; import org.apache.hadoop.hive.ql.lib.NodeProcessor; import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler; import org.apache.hadoop.hive.ql.metadata.HiveStoragePredicateHandler; import 
org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.parse.WindowingSpec.Direction; import org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowType; import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeDescUtils; import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; import org.apache.hadoop.hive.ql.plan.FilterDesc; import org.apache.hadoop.hive.ql.plan.JoinCondDesc; import org.apache.hadoop.hive.ql.plan.JoinDesc; import org.apache.hadoop.hive.ql.plan.OperatorDesc; import org.apache.hadoop.hive.ql.plan.PTFDesc; import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc; import org.apache.hadoop.hive.ql.plan.TableScanDesc; import org.apache.hadoop.hive.ql.plan.ptf.BoundaryDef; import org.apache.hadoop.hive.ql.plan.ptf.WindowFrameDef; import org.apache.hadoop.hive.ql.plan.ptf.WindowFunctionDef; import org.apache.hadoop.hive.ql.plan.ptf.WindowTableFunctionDef; import org.apache.hadoop.hive.ql.ppd.ExprWalkerInfo.ExprInfo; import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFDenseRank.GenericUDAFDenseRankEvaluator; import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFLead.GenericUDAFLeadEvaluator; import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFRank.GenericUDAFRankEvaluator; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPLessThan; import org.apache.hadoop.hive.serde2.Deserializer; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.mapred.JobConf; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.Lists; /** * Operator factory for predicate pushdown processing of operator graph Each * operator determines the pushdown predicates by walking the expression tree. 
* Each operator merges its own pushdown predicates with those of its children
 * Finally the TableScan operator gathers all the predicates and inserts a
 * filter operator after itself. TODO: Further optimizations 1) Multi-insert
 * case 2) Create a filter operator for those predicates that couldn't be pushed
 * to the previous operators in the data flow 3) Merge multiple sequential
 * filter predicates into so that plans are more readable 4) Remove predicates
 * from filter operators that have been pushed. Currently these pushed
 * predicates are evaluated twice.
 */
public final class OpProcFactory {

  protected static final Logger LOG = LoggerFactory.getLogger(OpProcFactory.class
    .getName());

  // Returns the pruned-predicate info of the single child, or null when the
  // operator has no child or multiple children (multi-insert: nothing is pushed
  // beyond this point — each child's candidates get materialized as filters here).
  private static ExprWalkerInfo getChildWalkerInfo(Operator<?> current, OpWalkerInfo owi) {
    if (current.getNumChild() == 0) {
      return null;
    }
    if (current.getNumChild() > 1) {
      // ppd for multi-insert query is not yet implemented
      // we assume that nothing can is pushed beyond this operator
      List<Operator<? extends OperatorDesc>> children =
          Lists.newArrayList(current.getChildOperators());
      for (Operator<?> child : children) {
        ExprWalkerInfo childInfo = owi.getPrunedPreds(child);
        createFilter(child, childInfo, owi);
      }
      return null;
    }
    return owi.getPrunedPreds(current.getChildOperators().get(0));
  }

  // Recursively removes candidate FilterOperators (filters whose predicates were
  // fully pushed) from the subtree rooted at `operator`.
  private static void removeCandidates(Operator<?> operator, OpWalkerInfo owi) {
    if (operator instanceof FilterOperator) {
      if (owi.getCandidateFilterOps().contains(operator)) {
        removeOperator(operator);
      }
      owi.getCandidateFilterOps().remove(operator);
    }
    if (operator.getChildOperators() != null) {
      // copy: removeOperator mutates child lists while we iterate
      List<Operator<? extends OperatorDesc>> children =
          Lists.newArrayList(operator.getChildOperators());
      for (Operator<?> child : children) {
        removeCandidates(child, owi);
      }
    }
  }

  // Removes every remaining candidate filter; called once the walk reaches the TS.
  private static void removeAllCandidates(OpWalkerInfo owi) {
    for (FilterOperator operator : owi.getCandidateFilterOps()) {
      removeOperator(operator);
    }
    owi.getCandidateFilterOps().clear();
  }

  // Splices `operator` out of the DAG, linking its parents directly to its children.
  private static void removeOperator(Operator<? extends OperatorDesc> operator) {
    List<Operator<? extends OperatorDesc>> children = operator.getChildOperators();
    List<Operator<? extends OperatorDesc>> parents = operator.getParentOperators();
    for (Operator<? extends OperatorDesc> parent : parents) {
      parent.getChildOperators().addAll(children);
      parent.removeChild(operator);
    }
    for (Operator<? extends OperatorDesc> child : children) {
      child.getParentOperators().addAll(parents);
      child.removeParent(operator);
    }
  }

  /**
   * Processor for Script Operator Prevents any predicates being pushed.
   */
  public static class ScriptPPD extends DefaultPPD implements NodeProcessor {

    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {
      LOG.debug("Processing for " + nd.getName() + "("
          + ((Operator) nd).getIdentifier() + ")");
      // script operator is a black-box to hive so no optimization here
      // assuming that nothing can be pushed above the script op
      // same with LIMIT op
      // create a filter with all children predicates
      OpWalkerInfo owi = (OpWalkerInfo) procCtx;
      ExprWalkerInfo childInfo = getChildWalkerInfo((Operator<?>) nd, owi);
      if (childInfo != null && HiveConf.getBoolVar(owi.getParseContext().getConf(),
          HiveConf.ConfVars.HIVEPPDREMOVEDUPLICATEFILTERS)) {
        ExprWalkerInfo unpushedPreds = mergeChildrenPred(nd, owi, null, false);
        return createFilter((Operator)nd, unpushedPreds, owi);
      }
      return null;
    }

  }

  public static class PTFPPD extends ScriptPPD {
    /*
     * For WindowingTableFunction if:
     * a. there is a Rank/DenseRank function: if there are unpushedPred of the form
     * rnkValue < Constant; then use the smallest Constant val as the 'rankLimit'
     * on the WindowingTablFn.
     * b. If there are no Wdw Fns with an End Boundary past the current row, the
     * condition can be pushed down as a limit pushdown(mapGroupBy=true)
     *
     * (non-Javadoc)
     * @see org.apache.hadoop.hive.ql.ppd.OpProcFactory.ScriptPPD#process(org.apache.hadoop.hive.ql.lib.Node, java.util.Stack, org.apache.hadoop.hive.ql.lib.NodeProcessorCtx, java.lang.Object[])
     */
    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {
      LOG.info("Processing for " + nd.getName() + "("
          + ((Operator) nd).getIdentifier() + ")");
      OpWalkerInfo owi = (OpWalkerInfo) procCtx;
      PTFOperator ptfOp = (PTFOperator) nd;
      pushRankLimit(ptfOp, owi);
      return super.process(nd, stack, procCtx, nodeOutputs);
    }

    // Scans the child's candidate predicates for `rankFn <|<= constant` and, when
    // found, records the smallest limit on the windowing function definition
    // (and possibly on the upstream ReduceSink as a map-side top-N).
    private void pushRankLimit(PTFOperator ptfOp, OpWalkerInfo owi) throws SemanticException {
      PTFDesc conf = ptfOp.getConf();

      if ( !conf.forWindowing() ) {
        return;
      }

      // top-N optimization is gated on the limit-pushdown memory threshold being sane
      float threshold = owi.getParseContext().getConf().getFloatVar(HiveConf.ConfVars.HIVELIMITPUSHDOWNMEMORYUSAGE);
      if (threshold <= 0 || threshold >= 1) {
        return;
      }

      WindowTableFunctionDef wTFn = (WindowTableFunctionDef) conf.getFuncDef();

      List<Integer> rFnIdxs = rankingFunctions(wTFn);

      if ( rFnIdxs.size() == 0 ) {
        return;
      }

      ExprWalkerInfo childInfo = getChildWalkerInfo(ptfOp, owi);

      if (childInfo == null) {
        return;
      }

      List<ExprNodeDesc> preds = new ArrayList<ExprNodeDesc>();
      Iterator<List<ExprNodeDesc>> iterator = childInfo.getFinalCandidates().values().iterator();
      while (iterator.hasNext()) {
        for (ExprNodeDesc pred : iterator.next()) {
          preds = ExprNodeDescUtils.split(pred, preds);
        }
      }

      int rLimit = -1;
      int fnIdx = -1;
      for(ExprNodeDesc pred : preds) {
        int[] pLimit = getLimit(wTFn, rFnIdxs, pred);
        if ( pLimit != null ) {
          // keep the smallest limit seen across all candidate predicates
          if ( rLimit == -1 || rLimit >= pLimit[0] ) {
            rLimit = pLimit[0];
            fnIdx = pLimit[1];
          }
        }
      }

      if ( rLimit != -1 ) {
        wTFn.setRankLimit(rLimit);
        wTFn.setRankLimitFunction(fnIdx);
        if ( canPushLimitToReduceSink(wTFn)) {
          pushRankLimitToRedSink(ptfOp, owi.getParseContext().getConf(), rLimit);
        }
      }
    }

    // Indexes (into getWindowFunctions()) of the RANK/DENSE_RANK functions.
    private List<Integer> rankingFunctions(WindowTableFunctionDef wTFn) {
      List<Integer> rFns = new ArrayList<Integer>();
      for(int i=0; i < wTFn.getWindowFunctions().size(); i++ ) {
        WindowFunctionDef wFnDef = wTFn.getWindowFunctions().get(i);
        if ( (wFnDef.getWFnEval() instanceof GenericUDAFRankEvaluator) ||
            (wFnDef.getWFnEval() instanceof GenericUDAFDenseRankEvaluator ) ) {
          rFns.add(i);
        }
      }
      return rFns;
    }

    /*
     * For a predicate check if it is a candidate for pushing down as limit optimization.
     * The expression must be of the form rankFn <|<= constant.
     */
    private int[] getLimit(WindowTableFunctionDef wTFn, List<Integer> rFnIdxs, ExprNodeDesc expr) {

      if ( !(expr instanceof ExprNodeGenericFuncDesc) ) {
        return null;
      }

      ExprNodeGenericFuncDesc fExpr = (ExprNodeGenericFuncDesc) expr;

      if ( !(fExpr.getGenericUDF() instanceof GenericUDFOPLessThan) &&
          !(fExpr.getGenericUDF() instanceof GenericUDFOPEqualOrLessThan) ) {
        return null;
      }

      if ( !(fExpr.getChildren().get(0) instanceof ExprNodeColumnDesc) ) {
        return null;
      }

      if ( !(fExpr.getChildren().get(1) instanceof ExprNodeConstantDesc) ) {
        return null;
      }

      ExprNodeConstantDesc constantExpr = (ExprNodeConstantDesc) fExpr.getChildren().get(1) ;

      if ( constantExpr.getTypeInfo() != TypeInfoFactory.intTypeInfo ) {
        return null;
      }

      int limit = (Integer) constantExpr.getValue();
      if ( fExpr.getGenericUDF() instanceof GenericUDFOPEqualOrLessThan ) {
        // `rank <= N` keeps N rows, equivalent to limit N+1 in the < form
        limit = limit + 1;
      }
      String colName = ((ExprNodeColumnDesc)fExpr.getChildren().get(0)).getColumn();

      // NOTE(review): this indexes getWindowFunctions() by the loop counter `i`
      // rather than rFnIdxs.get(i) — correct only when the ranking functions are
      // the first entries of the window-function list; confirm intent.
      for(int i=0; i < rFnIdxs.size(); i++ ) {
        String fAlias = wTFn.getWindowFunctions().get(i).getAlias();
        if ( fAlias.equals(colName)) {
          return new int[] {limit,i};
        }
      }

      return null;
    }

    /*
     * Limit can be pushed down to Map-side if all Window Functions need access
     * to rows before the current row. This is true for:
     * 1. Rank, DenseRank and Lead Fns. (the window doesn't matter for lead fn).
     * 2. If the Window for the function is Row based and the End Boundary doesn't
     * reference rows past the Current Row.
     */
    private boolean canPushLimitToReduceSink(WindowTableFunctionDef wTFn) {

      for(WindowFunctionDef wFnDef : wTFn.getWindowFunctions() ) {
        if ( (wFnDef.getWFnEval() instanceof GenericUDAFRankEvaluator) ||
            (wFnDef.getWFnEval() instanceof GenericUDAFDenseRankEvaluator ) ||
            (wFnDef.getWFnEval() instanceof GenericUDAFLeadEvaluator ) ) {
          continue;
        }
        WindowFrameDef wdwFrame = wFnDef.getWindowFrame();
        BoundaryDef end = wdwFrame.getEnd();
        if (wdwFrame.getWindowType() == WindowType.RANGE) {
          return false;
        }
        if ( end.getDirection() == Direction.FOLLOWING ) {
          return false;
        }
      }
      return true;
    }

    // Installs the rank limit as a top-N on the grandparent ReduceSink (if any),
    // enabling map-side group-by limiting.
    private void pushRankLimitToRedSink(PTFOperator ptfOp, HiveConf conf, int rLimit) throws SemanticException {

      Operator<? extends OperatorDesc> parent = ptfOp.getParentOperators().get(0);
      Operator<? extends OperatorDesc> gP = parent == null ? null : parent.getParentOperators().get(0);

      if ( gP == null || !(gP instanceof ReduceSinkOperator )) {
        return;
      }

      float threshold = conf.getFloatVar(HiveConf.ConfVars.HIVELIMITPUSHDOWNMEMORYUSAGE);

      ReduceSinkOperator rSink = (ReduceSinkOperator) gP;
      ReduceSinkDesc rDesc = rSink.getConf();
      rDesc.setTopN(rLimit);
      rDesc.setTopNMemoryUsage(threshold);
      rDesc.setMapGroupBy(true);
      rDesc.setPTFReduceSink(true);
    }
  }

  public static class UDTFPPD extends DefaultPPD implements NodeProcessor {
    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {
      super.process(nd, stack, procCtx, nodeOutputs);
      OpWalkerInfo owi = (OpWalkerInfo) procCtx;
      ExprWalkerInfo prunedPred = owi.getPrunedPreds((Operator<? extends OperatorDesc>) nd);
      if (prunedPred == null || !prunedPred.hasAnyCandidates()) {
        return null;
      }
      // materialize candidates as a filter below the UDTF, then drop them so
      // they are not pushed any further up
      Map<String, List<ExprNodeDesc>> candidates = prunedPred.getFinalCandidates();
      createFilter((Operator)nd, prunedPred, owi);
      candidates.clear();
      return null;
    }

  }

  public static class LateralViewForwardPPD extends DefaultPPD implements NodeProcessor {

    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {
      LOG.info("Processing for " + nd.getName() + "("
          + ((Operator) nd).getIdentifier() + ")");
      OpWalkerInfo owi = (OpWalkerInfo) procCtx;

      // The lateral view forward operator has 2 children, a SELECT(*) and
      // a SELECT(cols) (for the UDTF operator) The child at index 0 is the
      // SELECT(*) because that's the way that the DAG was constructed. We
      // only want to get the predicates from the SELECT(*).
      ExprWalkerInfo childPreds = owi
          .getPrunedPreds((Operator<? extends OperatorDesc>) nd.getChildren()
          .get(0));

      owi.putPrunedPreds((Operator<? extends OperatorDesc>) nd, childPreds);
      return null;
    }

  }

  /**
   * Combines predicates of its child into a single expression and adds a filter
   * op as new child.
   */
  public static class TableScanPPD extends DefaultPPD implements NodeProcessor {

    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {
      LOG.info("Processing for " + nd.getName() + "("
          + ((Operator) nd).getIdentifier() + ")");
      OpWalkerInfo owi = (OpWalkerInfo) procCtx;
      TableScanOperator tsOp = (TableScanOperator) nd;
      mergeWithChildrenPred(tsOp, owi, null, null);
      if (HiveConf.getBoolVar(owi.getParseContext().getConf(),
          HiveConf.ConfVars.HIVEPPDREMOVEDUPLICATEFILTERS)) {
        // remove all the candidate filter operators
        // when we get to the TS
        removeAllCandidates(owi);
      }
      ExprWalkerInfo pushDownPreds = owi.getPrunedPreds(tsOp);
      // nonFinalCandidates predicates should be empty
      assert pushDownPreds == null || !pushDownPreds.hasNonFinalCandidates();
      return createFilter(tsOp, pushDownPreds, owi);
    }

  }

  /**
   * Determines the push down predicates in its where expression and then
   * combines it with the push down predicates that are passed from its children.
   */
  public static class FilterPPD extends DefaultPPD implements NodeProcessor {

    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {
      return process(nd, stack, procCtx, false, nodeOutputs);
    }

    Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        boolean onlySyntheticJoinPredicate, Object... nodeOutputs) throws SemanticException {
      LOG.info("Processing for " + nd.getName() + "("
          + ((Operator) nd).getIdentifier() + ")");
      OpWalkerInfo owi = (OpWalkerInfo) procCtx;
      Operator<? extends OperatorDesc> op =
          (Operator<? extends OperatorDesc>) nd;
      // if this filter is generated one, predicates need not to be extracted
      ExprWalkerInfo ewi = owi.getPrunedPreds(op);
      // Don't push a sampling predicate since createFilter() always creates filter
      // with isSamplePred = false. Also, the filterop with sampling pred is always
      // a child of TableScan, so there is no need to push this predicate.
if (ewi == null && !((FilterOperator)op).getConf().getIsSamplingPred() && (!onlySyntheticJoinPredicate || ((FilterOperator)op).getConf().isSyntheticJoinPredicate())) { // get pushdown predicates for this operator's predicate ExprNodeDesc predicate = (((FilterOperator) nd).getConf()).getPredicate(); ewi = ExprWalkerProcFactory.extractPushdownPreds(owi, op, predicate); if (!ewi.isDeterministic()) { /* predicate is not deterministic */ if (op.getChildren() != null && op.getChildren().size() == 1) { createFilter(op, owi .getPrunedPreds((Operator<? extends OperatorDesc>) (op .getChildren().get(0))), owi); } return null; } logExpr(nd, ewi); owi.putPrunedPreds((Operator<? extends OperatorDesc>) nd, ewi); if (HiveConf.getBoolVar(owi.getParseContext().getConf(), HiveConf.ConfVars.HIVEPPDREMOVEDUPLICATEFILTERS)) { // add this filter for deletion, if it does not have non-final candidates owi.addCandidateFilterOp((FilterOperator)op); Map<String, List<ExprNodeDesc>> residual = ewi.getResidualPredicates(true); createFilter(op, residual, owi); } } // merge it with children predicates boolean hasUnpushedPredicates = mergeWithChildrenPred(nd, owi, ewi, null); if (HiveConf.getBoolVar(owi.getParseContext().getConf(), HiveConf.ConfVars.HIVEPPDREMOVEDUPLICATEFILTERS)) { if (hasUnpushedPredicates) { ExprWalkerInfo unpushedPreds = mergeChildrenPred(nd, owi, null, false); return createFilter((Operator)nd, unpushedPreds, owi); } } return null; } } public static class SimpleFilterPPD extends FilterPPD implements NodeProcessor { @Override public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... 
nodeOutputs) throws SemanticException { FilterOperator filterOp = (FilterOperator) nd; // We try to push the full Filter predicate iff: // - the Filter is on top of a TableScan, or // - the Filter is on top of a PTF (between PTF and Filter, there might be Select operators) // Otherwise, we push only the synthetic join predicates // Note : pushing Filter on top of PTF is necessary so the LimitPushdownOptimizer for Rank // functions gets enabled boolean parentTableScan = filterOp.getParentOperators().get(0) instanceof TableScanOperator; boolean ancestorPTF = false; if (!parentTableScan) { Operator<?> parent = filterOp; while (true) { assert parent.getParentOperators().size() == 1; parent = parent.getParentOperators().get(0); if (parent instanceof SelectOperator) { continue; } else if (parent instanceof PTFOperator) { ancestorPTF = true; break; } else { break; } } } return process(nd, stack, procCtx, !parentTableScan && !ancestorPTF, nodeOutputs); } } /** * Determines predicates for which alias can be pushed to it's parents. See * the comments for getQualifiedAliases function. */ public static class JoinerPPD extends DefaultPPD implements NodeProcessor { @Override public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { LOG.info("Processing for " + nd.getName() + "(" + ((Operator) nd).getIdentifier() + ")"); OpWalkerInfo owi = (OpWalkerInfo) procCtx; Set<String> aliases = getAliases(nd); // we pass null for aliases here because mergeWithChildrenPred filters // aliases in the children node context and we need to filter them in // the current JoinOperator's context mergeWithChildrenPred(nd, owi, null, null); ExprWalkerInfo prunePreds = owi.getPrunedPreds((Operator<? 
extends OperatorDesc>) nd); if (prunePreds != null) { Set<String> toRemove = new HashSet<String>(); // we don't push down any expressions that refer to aliases that can;t // be pushed down per getQualifiedAliases for (Entry<String, List<ExprNodeDesc>> entry : prunePreds.getFinalCandidates().entrySet()) { String key = entry.getKey(); List<ExprNodeDesc> value = entry.getValue(); if (key == null && ExprNodeDescUtils.isAllConstants(value)) { continue; // propagate constants } if (!aliases.contains(key)) { toRemove.add(key); } } for (String alias : toRemove) { for (ExprNodeDesc expr : prunePreds.getFinalCandidates().get(alias)) { // add expr to the list of predicates rejected from further pushing // so that we know to add it in createFilter() ExprInfo exprInfo; if (alias != null) { exprInfo = prunePreds.addOrGetExprInfo(expr); exprInfo.alias = alias; } else { exprInfo = prunePreds.getExprInfo(expr); } prunePreds.addNonFinalCandidate(exprInfo != null ? exprInfo.alias : null, expr); } prunePreds.getFinalCandidates().remove(alias); } return handlePredicates(nd, prunePreds, owi); } return null; } protected Set<String> getAliases(Node nd) throws SemanticException { return ((Operator)nd).getSchema().getTableNames(); } protected Object handlePredicates(Node nd, ExprWalkerInfo prunePreds, OpWalkerInfo owi) throws SemanticException { if (HiveConf.getBoolVar(owi.getParseContext().getConf(), HiveConf.ConfVars.HIVEPPDREMOVEDUPLICATEFILTERS)) { return createFilter((Operator)nd, prunePreds.getResidualPredicates(true), owi); } return null; } } public static class JoinPPD extends JoinerPPD { @Override protected Set<String> getAliases(Node nd) { return getQualifiedAliases((JoinOperator) nd, ((JoinOperator)nd).getSchema()); } /** * Figures out the aliases for whom it is safe to push predicates based on * ANSI SQL semantics. 
The join conditions are left associative so "a * RIGHT OUTER JOIN b LEFT OUTER JOIN c INNER JOIN d" is interpreted as * "((a RIGHT OUTER JOIN b) LEFT OUTER JOIN c) INNER JOIN d". For inner * joins, both the left and right join subexpressions are considered for * pushing down aliases, for the right outer join, the right subexpression * is considered and the left ignored and for the left outer join, the * left subexpression is considered and the left ignored. Here, aliases b * and d are eligible to be pushed up. * * TODO: further optimization opportunity for the case a.c1 = b.c1 and b.c2 * = c.c2 a and b are first joined and then the result with c. But the * second join op currently treats a and b as separate aliases and thus * disallowing predicate expr containing both tables a and b (such as a.c3 * + a.c4 > 20). Such predicates also can be pushed just above the second * join and below the first join * * @param op * Join Operator * @param rr * Row resolver * @return set of qualified aliases */ private Set<String> getQualifiedAliases(JoinOperator op, RowSchema rs) { Set<String> aliases = new HashSet<String>(); JoinCondDesc[] conds = op.getConf().getConds(); Map<Integer, Set<String>> posToAliasMap = op.getPosToAliasMap(); int i; for (i=conds.length-1; i>=0; i--){ if (conds[i].getType() == JoinDesc.INNER_JOIN) { aliases.addAll(posToAliasMap.get(i+1)); } else if (conds[i].getType() == JoinDesc.FULL_OUTER_JOIN) { break; } else if (conds[i].getType() == JoinDesc.RIGHT_OUTER_JOIN) { aliases.addAll(posToAliasMap.get(i+1)); break; } else if (conds[i].getType() == JoinDesc.LEFT_OUTER_JOIN) { continue; } } if(i == -1){ aliases.addAll(posToAliasMap.get(0)); } Set<String> aliases2 = rs.getTableNames(); aliases.retainAll(aliases2); return aliases; } } public static class ReduceSinkPPD extends DefaultPPD implements NodeProcessor { public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... 
nodeOutputs) throws SemanticException { super.process(nd, stack, procCtx, nodeOutputs); Operator<?> operator = (Operator<?>) nd; OpWalkerInfo owi = (OpWalkerInfo) procCtx; if (operator.getNumChild() == 1 && operator.getChildOperators().get(0) instanceof JoinOperator) { if (HiveConf.getBoolVar(owi.getParseContext().getConf(), HiveConf.ConfVars.HIVEPPDRECOGNIZETRANSITIVITY)) { JoinOperator child = (JoinOperator) operator.getChildOperators().get(0); int targetPos = child.getParentOperators().indexOf(operator); applyFilterTransitivity(child, targetPos, owi); } } return null; } /** * Adds additional pushdown predicates for a join operator by replicating * filters transitively over all the equijoin conditions. * * If we have a predicate "t.col=1" and the equijoin conditions * "t.col=s.col" and "t.col=u.col", we add the filters "s.col=1" and * "u.col=1". Note that this does not depend on the types of joins (ie. * inner, left/right/full outer) between the tables s, t and u because if * a predicate, eg. "t.col=1" is present in getFinalCandidates() at this * point, we have already verified that it can be pushed down, so any rows * emitted must satisfy s.col=t.col=u.col=1 and replicating the filters * like this is ok. */ private void applyFilterTransitivity(JoinOperator join, int targetPos, OpWalkerInfo owi) throws SemanticException { ExprWalkerInfo joinPreds = owi.getPrunedPreds(join); if (joinPreds == null || !joinPreds.hasAnyCandidates()) { return; } Map<String, List<ExprNodeDesc>> oldFilters = joinPreds.getFinalCandidates(); Map<String, List<ExprNodeDesc>> newFilters = new HashMap<String, List<ExprNodeDesc>>(); List<Operator<? 
extends OperatorDesc>> parentOperators = join.getParentOperators(); ReduceSinkOperator target = (ReduceSinkOperator) parentOperators.get(targetPos); List<ExprNodeDesc> targetKeys = target.getConf().getKeyCols(); ExprWalkerInfo rsPreds = owi.getPrunedPreds(target); for (int sourcePos = 0; sourcePos < parentOperators.size(); sourcePos++) { ReduceSinkOperator source = (ReduceSinkOperator) parentOperators.get(sourcePos); List<ExprNodeDesc> sourceKeys = source.getConf().getKeyCols(); Set<String> sourceAliases = new HashSet<String>(Arrays.asList(source.getInputAliases())); for (Map.Entry<String, List<ExprNodeDesc>> entry : oldFilters.entrySet()) { if (entry.getKey() == null && ExprNodeDescUtils.isAllConstants(entry.getValue())) { // propagate constants for (String targetAlias : target.getInputAliases()) { rsPreds.addPushDowns(targetAlias, entry.getValue()); } continue; } if (!sourceAliases.contains(entry.getKey())) { continue; } for (ExprNodeDesc predicate : entry.getValue()) { ExprNodeDesc backtrack = ExprNodeDescUtils.backtrack(predicate, join, source); if (backtrack == null) { continue; } ExprNodeDesc replaced = ExprNodeDescUtils.replace(backtrack, sourceKeys, targetKeys); if (replaced == null) { continue; } for (String targetAlias : target.getInputAliases()) { rsPreds.addFinalCandidate(targetAlias, replaced); } } } } } } /** * Default processor which just merges its children. */ public static class DefaultPPD implements NodeProcessor { @Override public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... 
nodeOutputs) throws SemanticException { LOG.info("Processing for " + nd.getName() + "(" + ((Operator) nd).getIdentifier() + ")"); OpWalkerInfo owi = (OpWalkerInfo) procCtx; Set<String> includes = getQualifiedAliases((Operator<?>) nd, owi); boolean hasUnpushedPredicates = mergeWithChildrenPred(nd, owi, null, includes); if (hasUnpushedPredicates && HiveConf.getBoolVar(owi.getParseContext().getConf(), HiveConf.ConfVars.HIVEPPDREMOVEDUPLICATEFILTERS)) { if (includes != null || nd instanceof ReduceSinkOperator) { owi.getCandidateFilterOps().clear(); } else { ExprWalkerInfo pruned = owi.getPrunedPreds((Operator<? extends OperatorDesc>) nd); Map<String, List<ExprNodeDesc>> residual = pruned.getResidualPredicates(true); if (residual != null && !residual.isEmpty()) { createFilter((Operator) nd, residual, owi); pruned.getNonFinalCandidates().clear(); } } } return null; } // RS for join, SEL(*) for lateral view // SEL for union does not count (should be copied to both sides) private Set<String> getQualifiedAliases(Operator<?> operator, OpWalkerInfo owi) { if (operator.getNumChild() != 1) { return null; } Operator<?> child = operator.getChildOperators().get(0); if (!(child instanceof JoinOperator || child instanceof LateralViewJoinOperator)) { return null; } if (operator instanceof ReduceSinkOperator && ((ReduceSinkOperator)operator).getInputAliases() != null) { String[] aliases = ((ReduceSinkOperator)operator).getInputAliases(); return new HashSet<String>(Arrays.asList(aliases)); } Set<String> includes = operator.getSchema().getTableNames(); if (includes.size() == 1 && includes.contains("")) { // Reduce sink of group by operator return null; } return includes; } /** * @param nd * @param ewi */ protected void logExpr(Node nd, ExprWalkerInfo ewi) { if (!LOG.isDebugEnabled()) return; for (Entry<String, List<ExprNodeDesc>> e : ewi.getFinalCandidates().entrySet()) { StringBuilder sb = new StringBuilder("Pushdown predicates of ").append(nd.getName()) .append(" for alias 
").append(e.getKey()).append(": "); boolean isFirst = true; for (ExprNodeDesc n : e.getValue()) { if (!isFirst) { sb.append("; "); } isFirst = false; sb.append(n.getExprString()); } LOG.debug(sb.toString()); } } /** * Take current operators pushdown predicates and merges them with * children's pushdown predicates. * * @param nd * current operator * @param owi * operator context during this walk * @param ewi * pushdown predicates (part of expression walker info) * @param aliases * aliases that this operator can pushdown. null means that all * aliases can be pushed down * @throws SemanticException */ protected boolean mergeWithChildrenPred(Node nd, OpWalkerInfo owi, ExprWalkerInfo ewi, Set<String> aliases) throws SemanticException { Operator<? extends OperatorDesc> op = (Operator<? extends OperatorDesc>) nd; ExprWalkerInfo childPreds = getChildWalkerInfo(op, owi); if (childPreds == null) { return false; } if (ewi == null) { ewi = new ExprWalkerInfo(); } boolean hasUnpushedPredicates = false; for (Entry<String, List<ExprNodeDesc>> e : childPreds .getFinalCandidates().entrySet()) { if (aliases == null || e.getKey() == null || aliases.contains(e.getKey())) { // e.getKey() (alias) can be null in case of constant expressions. see // input8.q ExprWalkerInfo extractPushdownPreds = ExprWalkerProcFactory .extractPushdownPreds(owi, op, e.getValue()); if (!extractPushdownPreds.getNonFinalCandidates().isEmpty()) { hasUnpushedPredicates = true; } ewi.merge(extractPushdownPreds); logExpr(nd, extractPushdownPreds); } } owi.putPrunedPreds((Operator<? extends OperatorDesc>) nd, ewi); return hasUnpushedPredicates; } protected ExprWalkerInfo mergeChildrenPred(Node nd, OpWalkerInfo owi, Set<String> excludedAliases, boolean ignoreAliases) throws SemanticException { if (nd.getChildren() == null) { return null; } Operator<? extends OperatorDesc> op = (Operator<? extends OperatorDesc>)nd; ExprWalkerInfo ewi = new ExprWalkerInfo(); for (Operator<? 
extends OperatorDesc> child : op.getChildOperators()) { ExprWalkerInfo childPreds = owi.getPrunedPreds(child); if (childPreds == null) { continue; } for (Entry<String, List<ExprNodeDesc>> e : childPreds .getFinalCandidates().entrySet()) { if (ignoreAliases || excludedAliases == null || !excludedAliases.contains(e.getKey()) || e.getKey() == null) { ewi.addPushDowns(e.getKey(), e.getValue()); logExpr(nd, ewi); } } } return ewi; } } protected static Object createFilter(Operator op, ExprWalkerInfo pushDownPreds, OpWalkerInfo owi) { if (pushDownPreds != null && pushDownPreds.hasAnyCandidates()) { return createFilter(op, pushDownPreds.getFinalCandidates(), owi); } return null; } protected static Object createFilter(Operator op, Map<String, List<ExprNodeDesc>> predicates, OpWalkerInfo owi) { RowSchema inputRS = op.getSchema(); // combine all predicates into a single expression List<ExprNodeDesc> preds = new ArrayList<ExprNodeDesc>(); Iterator<List<ExprNodeDesc>> iterator = predicates.values().iterator(); while (iterator.hasNext()) { for (ExprNodeDesc pred : iterator.next()) { preds = ExprNodeDescUtils.split(pred, preds); } } if (preds.isEmpty()) { return null; } ExprNodeDesc condn = ExprNodeDescUtils.mergePredicates(preds); if (op instanceof TableScanOperator && condn instanceof ExprNodeGenericFuncDesc) { boolean pushFilterToStorage; HiveConf hiveConf = owi.getParseContext().getConf(); pushFilterToStorage = hiveConf.getBoolVar(HiveConf.ConfVars.HIVEOPTPPD_STORAGE); if (pushFilterToStorage) { condn = pushFilterToStorageHandler( (TableScanOperator) op, (ExprNodeGenericFuncDesc)condn, owi, hiveConf); if (condn == null) { // we pushed the whole thing down return null; } } } // add new filter op List<Operator<? 
extends OperatorDesc>> originalChilren = op .getChildOperators(); op.setChildOperators(null); Operator<FilterDesc> output = OperatorFactory.getAndMakeChild( new FilterDesc(condn, false), new RowSchema(inputRS.getSignature()), op); output.setChildOperators(originalChilren); for (Operator<? extends OperatorDesc> ch : originalChilren) { List<Operator<? extends OperatorDesc>> parentOperators = ch .getParentOperators(); int pos = parentOperators.indexOf(op); assert pos != -1; parentOperators.remove(pos); parentOperators.add(pos, output); // add the new op as the old } if (HiveConf.getBoolVar(owi.getParseContext().getConf(), HiveConf.ConfVars.HIVEPPDREMOVEDUPLICATEFILTERS)) { // remove the candidate filter ops removeCandidates(op, owi); } // push down current ppd context to newly added filter ExprWalkerInfo walkerInfo = owi.getPrunedPreds(op); if (walkerInfo != null) { walkerInfo.getNonFinalCandidates().clear(); owi.putPrunedPreds(output, walkerInfo); } return output; } /** * Attempts to push a predicate down into a storage handler. For * native tables, this is a no-op. 
* * @param tableScanOp table scan against which predicate applies * * @param originalPredicate predicate to be pushed down * * @param owi object walk info * * @param hiveConf Hive configuration * * @return portion of predicate which needs to be evaluated * by Hive as a post-filter, or null if it was possible * to push down the entire predicate */ private static ExprNodeGenericFuncDesc pushFilterToStorageHandler( TableScanOperator tableScanOp, ExprNodeGenericFuncDesc originalPredicate, OpWalkerInfo owi, HiveConf hiveConf) { TableScanDesc tableScanDesc = tableScanOp.getConf(); Table tbl = tableScanDesc.getTableMetadata(); if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEOPTINDEXFILTER)) { // attach the original predicate to the table scan operator for index // optimizations that require the pushed predicate before pcr & later // optimizations are applied tableScanDesc.setFilterExpr(originalPredicate); } if (!tbl.isNonNative()) { return originalPredicate; } HiveStorageHandler storageHandler = tbl.getStorageHandler(); if (!(storageHandler instanceof HiveStoragePredicateHandler)) { // The storage handler does not provide predicate decomposition // support, so we'll implement the entire filter in Hive. However, // we still provide the full predicate to the storage handler in // case it wants to do any of its own prefiltering. 
tableScanDesc.setFilterExpr(originalPredicate); return originalPredicate; } HiveStoragePredicateHandler predicateHandler = (HiveStoragePredicateHandler) storageHandler; JobConf jobConf = new JobConf(owi.getParseContext().getConf()); Utilities.setColumnNameList(jobConf, tableScanOp); Utilities.setColumnTypeList(jobConf, tableScanOp); Utilities.copyTableJobPropertiesToConf( Utilities.getTableDesc(tbl), jobConf); Deserializer deserializer = tbl.getDeserializer(); HiveStoragePredicateHandler.DecomposedPredicate decomposed = predicateHandler.decomposePredicate( jobConf, deserializer, originalPredicate); if (decomposed == null) { // not able to push anything down if (LOG.isDebugEnabled()) { LOG.debug("No pushdown possible for predicate: " + originalPredicate.getExprString()); } return originalPredicate; } if (LOG.isDebugEnabled()) { LOG.debug("Original predicate: " + originalPredicate.getExprString()); if (decomposed.pushedPredicate != null) { LOG.debug( "Pushed predicate: " + decomposed.pushedPredicate.getExprString()); } if (decomposed.residualPredicate != null) { LOG.debug( "Residual predicate: " + decomposed.residualPredicate.getExprString()); } } tableScanDesc.setFilterExpr(decomposed.pushedPredicate); tableScanDesc.setFilterObject(decomposed.pushedPredicateObject); return decomposed.residualPredicate; } public static NodeProcessor getFilterProc() { return new FilterPPD(); } public static NodeProcessor getFilterSyntheticJoinPredicateProc() { return new SimpleFilterPPD(); } public static NodeProcessor getJoinProc() { return new JoinPPD(); } public static NodeProcessor getTSProc() { return new TableScanPPD(); } public static NodeProcessor getDefaultProc() { return new DefaultPPD(); } public static NodeProcessor getPTFProc() { return new PTFPPD(); } public static NodeProcessor getSCRProc() { return new ScriptPPD(); } public static NodeProcessor getLIMProc() { return new ScriptPPD(); } public static NodeProcessor getLVFProc() { return new LateralViewForwardPPD(); } 
public static NodeProcessor getUDTFProc() { return new UDTFPPD(); } public static NodeProcessor getLVJProc() { return new JoinerPPD(); } public static NodeProcessor getRSProc() { return new ReduceSinkPPD(); } private OpProcFactory() { // prevent instantiation } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.lucene.analysis.compound.hyphenation;

import java.io.File;
import java.io.Serializable;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.HashMap;

import org.xml.sax.InputSource;

/**
 * This tree structure stores the hyphenation patterns in an efficient way for
 * fast lookup. It provides the method to hyphenate a word.
 *
 * This class has been taken from the Apache FOP project
 * (http://xmlgraphics.apache.org/fop/). They have been slightly modified.
 */
public class HyphenationTree extends TernaryTree implements PatternConsumer,
    Serializable {

  private static final long serialVersionUID = -7842107987915665573L;

  /**
   * value space: stores the interletter values
   */
  protected ByteVector vspace;

  /**
   * This map stores hyphenation exceptions
   */
  protected HashMap<String,ArrayList<Object>> stoplist;

  /**
   * This map stores the character classes
   */
  protected TernaryTree classmap;

  /**
   * Temporary map to store interletter values on pattern loading.
   */
  private transient TernaryTree ivalues;

  public HyphenationTree() {
    stoplist = new HashMap<String,ArrayList<Object>>(23); // usually a small table
    classmap = new TernaryTree();
    vspace = new ByteVector();
    vspace.alloc(1); // this reserves index 0, which we don't use
  }

  /**
   * Packs the values by storing them in 4 bits, two values into a byte Values
   * range is from 0 to 9. We use zero as terminator, so we'll add 1 to the
   * value.
   *
   * @param values a string of digits from '0' to '9' representing the
   *        interletter values.
   * @return the index into the vspace array where the packed values are stored.
   */
  protected int packValues(String values) {
    int i, n = values.length();
    int m = (n & 1) == 1 ? (n >> 1) + 2 : (n >> 1) + 1;
    int offset = vspace.alloc(m);
    byte[] va = vspace.getArray();
    for (i = 0; i < n; i++) {
      int j = i >> 1;
      byte v = (byte) ((values.charAt(i) - '0' + 1) & 0x0f);
      if ((i & 1) == 1) {
        va[j + offset] = (byte) (va[j + offset] | v);
      } else {
        va[j + offset] = (byte) (v << 4); // big endian
      }
    }
    va[m - 1 + offset] = 0; // terminator
    return offset;
  }

  /**
   * Unpacks the 4-bit-encoded interletter values stored at index k back into a
   * string of digit characters (inverse of {@link #packValues(String)}).
   */
  protected String unpackValues(int k) {
    StringBuilder buf = new StringBuilder();
    byte v = vspace.get(k++);
    while (v != 0) {
      char c = (char) ((v >>> 4) - 1 + '0');
      buf.append(c);
      c = (char) (v & 0x0f);
      if (c == 0) {
        break;
      }
      c = (char) (c - 1 + '0');
      buf.append(c);
      v = vspace.get(k++);
    }
    return buf.toString();
  }

  /**
   * Read hyphenation patterns from an XML file.
   *
   * @param f the filename
   * @throws HyphenationException In case the parsing fails
   */
  public void loadPatterns(File f) throws HyphenationException {
    try {
      // File.toURL() is deprecated and does not escape illegal URL characters
      // (e.g. spaces); going through toURI() produces a well-formed URL.
      InputSource src = new InputSource(f.toURI().toURL().toExternalForm());
      loadPatterns(src);
    } catch (MalformedURLException e) {
      throw new HyphenationException("Error converting the File '" + f
          + "' to a URL: " + e.getMessage());
    }
  }

  /**
   * Read hyphenation patterns from an XML file.
   *
   * @param source the InputSource for the file
   * @throws HyphenationException In case the parsing fails
   */
  public void loadPatterns(InputSource source) throws HyphenationException {
    PatternParser pp = new PatternParser(this);
    ivalues = new TernaryTree();

    pp.parse(source);

    // patterns/values should be now in the tree
    // let's optimize a bit
    trimToSize();
    vspace.trimToSize();
    classmap.trimToSize();

    // get rid of the auxiliary map
    ivalues = null;
  }

  /** Looks up a pattern and returns its interletter values as a digit string. */
  public String findPattern(String pat) {
    int k = super.find(pat);
    if (k >= 0) {
      return unpackValues(k);
    }
    return "";
  }

  /**
   * String compare, returns 0 if equal or t is a substring of s
   */
  protected int hstrcmp(char[] s, int si, char[] t, int ti) {
    for (; s[si] == t[ti]; si++, ti++) {
      if (s[si] == 0) {
        return 0;
      }
    }
    if (t[ti] == 0) {
      return 0;
    }
    return s[si] - t[ti];
  }

  /**
   * Decodes the packed interletter values at index k into a byte array
   * (like {@link #unpackValues(int)} but without converting to '0'..'9').
   */
  protected byte[] getValues(int k) {
    StringBuilder buf = new StringBuilder();
    byte v = vspace.get(k++);
    while (v != 0) {
      char c = (char) ((v >>> 4) - 1);
      buf.append(c);
      c = (char) (v & 0x0f);
      if (c == 0) {
        break;
      }
      c = (char) (c - 1);
      buf.append(c);
      v = vspace.get(k++);
    }
    byte[] res = new byte[buf.length()];
    for (int i = 0; i < res.length; i++) {
      res[i] = (byte) buf.charAt(i);
    }
    return res;
  }

  /**
   * <p>
   * Search for all possible partial matches of word starting at index an update
   * interletter values. In other words, it does something like:
   * </p>
   * <code>
   * for(i=0; i&lt;patterns.length; i++) {
   * if ( word.substring(index).startsWith(patterns[i]) )
   * update_interletter_values(patterns[i]);
   * }
   * </code>
   * <p>
   * But it is done in an efficient way since the patterns are stored in a
   * ternary tree. In fact, this is the whole purpose of having the tree: doing
   * this search without having to test every single pattern. The number of
   * patterns for languages such as English range from 4000 to 10000. Thus,
   * doing thousands of string comparisons for each word to hyphenate would be
   * really slow without the tree. The tradeoff is memory, but using a ternary
   * tree instead of a trie, almost halves the memory used by Lout or TeX.
   * It's also faster than using a hash table
   * </p>
   *
   * @param word null terminated word to match
   * @param index start index from word
   * @param il interletter values array to update
   */
  protected void searchPatterns(char[] word, int index, byte[] il) {
    byte[] values;
    int i = index;
    char p, q;
    char sp = word[i];
    p = root;

    while (p > 0 && p < sc.length) {
      if (sc[p] == 0xFFFF) {
        if (hstrcmp(word, i, kv.getArray(), lo[p]) == 0) {
          values = getValues(eq[p]); // data pointer is in eq[]
          int j = index;
          for (int k = 0; k < values.length; k++) {
            if (j < il.length && values[k] > il[j]) {
              il[j] = values[k];
            }
            j++;
          }
        }
        return;
      }
      int d = sp - sc[p];
      if (d == 0) {
        if (sp == 0) {
          break;
        }
        sp = word[++i];
        p = eq[p];
        q = p;

        // look for a pattern ending at this position by searching for
        // the null char ( splitchar == 0 )
        while (q > 0 && q < sc.length) {
          if (sc[q] == 0xFFFF) { // stop at compressed branch
            break;
          }
          if (sc[q] == 0) {
            values = getValues(eq[q]);
            int j = index;
            for (int k = 0; k < values.length; k++) {
              if (j < il.length && values[k] > il[j]) {
                il[j] = values[k];
              }
              j++;
            }
            break;
          } else {
            q = lo[q];

            /**
             * actually the code should be: q = sc[q] < 0 ? hi[q] : lo[q]; but
             * java chars are unsigned
             */
          }
        }
      } else {
        p = d < 0 ? lo[p] : hi[p];
      }
    }
  }

  /**
   * Hyphenate word and return a Hyphenation object.
   *
   * @param word the word to be hyphenated
   * @param remainCharCount Minimum number of characters allowed before the
   *        hyphenation point.
   * @param pushCharCount Minimum number of characters allowed after the
   *        hyphenation point.
   * @return a {@link Hyphenation Hyphenation} object representing the
   *         hyphenated word or null if word is not hyphenated.
   */
  public Hyphenation hyphenate(String word, int remainCharCount,
      int pushCharCount) {
    char[] w = word.toCharArray();
    return hyphenate(w, 0, w.length, remainCharCount, pushCharCount);
  }

  /**
   * w = "****nnllllllnnn*****", where n is a non-letter, l is a letter, all n
   * may be absent, the first n is at offset, the first l is at offset +
   * iIgnoreAtBeginning; word = ".llllll.'\0'***", where all l in w are copied
   * into word. In the first part of the routine len = w.length, in the second
   * part of the routine len = word.length. Three indices are used: index(w),
   * the index in w, index(word), the index in word, letterindex(word), the
   * index in the letter part of word. The following relations exist: index(w) =
   * offset + i - 1 index(word) = i - iIgnoreAtBeginning letterindex(word) =
   * index(word) - 1 (see first loop). It follows that: index(w) - index(word) =
   * offset - 1 + iIgnoreAtBeginning index(w) = letterindex(word) + offset +
   * iIgnoreAtBeginning
   */

  /**
   * Hyphenate word and return an array of hyphenation points.
   *
   * @param w char array that contains the word
   * @param offset Offset to first character in word
   * @param len Length of word
   * @param remainCharCount Minimum number of characters allowed before the
   *        hyphenation point.
   * @param pushCharCount Minimum number of characters allowed after the
   *        hyphenation point.
   * @return a {@link Hyphenation Hyphenation} object representing the
   *         hyphenated word or null if word is not hyphenated.
   */
  public Hyphenation hyphenate(char[] w, int offset, int len,
      int remainCharCount, int pushCharCount) {
    int i;
    char[] word = new char[len + 3];

    // normalize word
    char[] c = new char[2];
    int iIgnoreAtBeginning = 0;
    int iLength = len;
    boolean bEndOfLetters = false;
    for (i = 1; i <= len; i++) {
      c[0] = w[offset + i - 1];
      int nc = classmap.find(c, 0);
      if (nc < 0) { // found a non-letter character ...
        if (i == (1 + iIgnoreAtBeginning)) {
          // ... before any letter character
          iIgnoreAtBeginning++;
        } else {
          // ... after a letter character
          bEndOfLetters = true;
        }
        iLength--;
      } else {
        if (!bEndOfLetters) {
          word[i - iIgnoreAtBeginning] = (char) nc;
        } else {
          return null;
        }
      }
    }
    len = iLength;
    if (len < (remainCharCount + pushCharCount)) {
      // word is too short to be hyphenated
      return null;
    }
    int[] result = new int[len + 1];
    int k = 0;

    // check exception list first
    String sw = new String(word, 1, len);
    if (stoplist.containsKey(sw)) {
      // assume only simple hyphens (Hyphen.pre="-", Hyphen.post = Hyphen.no =
      // null)
      ArrayList<Object> hw = stoplist.get(sw);
      int j = 0;
      for (i = 0; i < hw.size(); i++) {
        Object o = hw.get(i);
        // j = index(sw) = letterindex(word)?
        // result[k] = corresponding index(w)
        if (o instanceof String) {
          j += ((String) o).length();
          if (j >= remainCharCount && j < (len - pushCharCount)) {
            result[k++] = j + iIgnoreAtBeginning;
          }
        }
      }
    } else {
      // use algorithm to get hyphenation points
      word[0] = '.'; // word start marker
      word[len + 1] = '.'; // word end marker
      word[len + 2] = 0; // null terminated
      byte[] il = new byte[len + 3]; // initialized to zero
      for (i = 0; i < len + 1; i++) {
        searchPatterns(word, i, il);
      }

      // hyphenation points are located where interletter value is odd
      // i is letterindex(word),
      // i + 1 is index(word),
      // result[k] = corresponding index(w)
      for (i = 0; i < len; i++) {
        if (((il[i + 1] & 1) == 1) && i >= remainCharCount
            && i <= (len - pushCharCount)) {
          result[k++] = i + iIgnoreAtBeginning;
        }
      }
    }

    if (k > 0) {
      // trim result array
      int[] res = new int[k + 2];
      System.arraycopy(result, 0, res, 1, k);
      // We add the synthetical hyphenation points
      // at the beginning and end of the word
      res[0] = 0;
      res[k + 1] = len;
      return new Hyphenation(res);
    } else {
      return null;
    }
  }

  /**
   * Add a character class to the tree. It is used by
   * {@link PatternParser PatternParser} as callback to add character classes.
   * Character classes define the valid word characters for hyphenation. If a
   * word contains a character not defined in any of the classes, it is not
   * hyphenated. It also defines a way to normalize the characters in order to
   * compare them with the stored patterns. Usually pattern files use only lower
   * case characters, in this case a class for letter 'a', for example, should
   * be defined as "aA", the first character being the normalization char.
   */
  public void addClass(String chargroup) {
    if (chargroup.length() > 0) {
      char equivChar = chargroup.charAt(0);
      char[] key = new char[2];
      key[1] = 0;
      for (int i = 0; i < chargroup.length(); i++) {
        key[0] = chargroup.charAt(i);
        classmap.insert(key, 0, equivChar);
      }
    }
  }

  /**
   * Add an exception to the tree. It is used by
   * {@link PatternParser PatternParser} class as callback to store the
   * hyphenation exceptions.
   *
   * @param word normalized word
   * @param hyphenatedword a vector of alternating strings and
   *        {@link Hyphen hyphen} objects.
   */
  public void addException(String word, ArrayList<Object> hyphenatedword) {
    stoplist.put(word, hyphenatedword);
  }

  /**
   * Add a pattern to the tree. Mainly, to be used by
   * {@link PatternParser PatternParser} class as callback to add a pattern to
   * the tree.
   *
   * @param pattern the hyphenation pattern
   * @param ivalue interletter weight values indicating the desirability and
   *        priority of hyphenating at a given point within the pattern. It
   *        should contain only digit characters. (i.e. '0' to '9').
   */
  public void addPattern(String pattern, String ivalue) {
    int k = ivalues.find(ivalue);
    if (k <= 0) {
      k = packValues(ivalue);
      ivalues.insert(ivalue, (char) k);
    }
    insert(pattern, (char) k);
  }

  @Override
  public void printStats() {
    System.out.println("Value space size = "
        + Integer.toString(vspace.length()));
    super.printStats();
  }
}
/**
 * <copyright>
 * </copyright>
 *
 * $Id$
 */
package org.oasis.xAL.impl;

import java.util.Collection;

import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.EObjectImpl;
import org.eclipse.emf.ecore.util.BasicFeatureMap;
import org.eclipse.emf.ecore.util.EObjectContainmentEList;
import org.eclipse.emf.ecore.util.FeatureMap;
import org.eclipse.emf.ecore.util.InternalEList;
import org.oasis.xAL.AddressLineType;
import org.oasis.xAL.PostTownType;
import org.oasis.xAL.PostalCodeNumberExtensionType;
import org.oasis.xAL.PostalCodeNumberType;
import org.oasis.xAL.PostalCodeType;
import org.oasis.xAL.XALPackage;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Postal Code Type</b></em>'.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * <ul>
 *   <li>{@link org.oasis.xAL.impl.PostalCodeTypeImpl#getAddressLine <em>Address Line</em>}</li>
 *   <li>{@link org.oasis.xAL.impl.PostalCodeTypeImpl#getPostalCodeNumber <em>Postal Code Number</em>}</li>
 *   <li>{@link org.oasis.xAL.impl.PostalCodeTypeImpl#getPostalCodeNumberExtension <em>Postal Code Number Extension</em>}</li>
 *   <li>{@link org.oasis.xAL.impl.PostalCodeTypeImpl#getPostTown <em>Post Town</em>}</li>
 *   <li>{@link org.oasis.xAL.impl.PostalCodeTypeImpl#getAny <em>Any</em>}</li>
 *   <li>{@link org.oasis.xAL.impl.PostalCodeTypeImpl#getType <em>Type</em>}</li>
 *   <li>{@link org.oasis.xAL.impl.PostalCodeTypeImpl#getAnyAttribute <em>Any Attribute</em>}</li>
 * </ul>
 * </p>
 *
 * @generated
 */
// NOTE(review): every member below is EMF-generated (@generated). Do not
// hand-edit behavior here; change the Ecore model and regenerate instead.
public class PostalCodeTypeImpl extends EObjectImpl implements PostalCodeType {
  /**
   * The cached value of the '{@link #getAddressLine() <em>Address Line</em>}' containment reference list.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getAddressLine()
   * @generated
   * @ordered
   */
  protected EList<AddressLineType> addressLine;

  /**
   * The cached value of the '{@link #getPostalCodeNumber() <em>Postal Code Number</em>}' containment reference list.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getPostalCodeNumber()
   * @generated
   * @ordered
   */
  protected EList<PostalCodeNumberType> postalCodeNumber;

  /**
   * The cached value of the '{@link #getPostalCodeNumberExtension() <em>Postal Code Number Extension</em>}' containment reference list.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getPostalCodeNumberExtension()
   * @generated
   * @ordered
   */
  protected EList<PostalCodeNumberExtensionType> postalCodeNumberExtension;

  /**
   * The cached value of the '{@link #getPostTown() <em>Post Town</em>}' containment reference.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getPostTown()
   * @generated
   * @ordered
   */
  protected PostTownType postTown;

  /**
   * The cached value of the '{@link #getAny() <em>Any</em>}' attribute list.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getAny()
   * @generated
   * @ordered
   */
  protected FeatureMap any;

  /**
   * The default value of the '{@link #getType() <em>Type</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getType()
   * @generated
   * @ordered
   */
  protected static final Object TYPE_EDEFAULT = null;

  /**
   * The cached value of the '{@link #getType() <em>Type</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getType()
   * @generated
   * @ordered
   */
  protected Object type = TYPE_EDEFAULT;

  /**
   * The cached value of the '{@link #getAnyAttribute() <em>Any Attribute</em>}' attribute list.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getAnyAttribute()
   * @generated
   * @ordered
   */
  protected FeatureMap anyAttribute;

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected PostalCodeTypeImpl() {
    super();
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  protected EClass eStaticClass() {
    return XALPackage.eINSTANCE.getPostalCodeType();
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public EList<AddressLineType> getAddressLine() {
    // Lazily created containment list, as generated by EMF.
    if (addressLine == null) {
      addressLine = new EObjectContainmentEList<AddressLineType>(AddressLineType.class, this, XALPackage.POSTAL_CODE_TYPE__ADDRESS_LINE);
    }
    return addressLine;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public EList<PostalCodeNumberType> getPostalCodeNumber() {
    if (postalCodeNumber == null) {
      postalCodeNumber = new EObjectContainmentEList<PostalCodeNumberType>(PostalCodeNumberType.class, this, XALPackage.POSTAL_CODE_TYPE__POSTAL_CODE_NUMBER);
    }
    return postalCodeNumber;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public EList<PostalCodeNumberExtensionType> getPostalCodeNumberExtension() {
    if (postalCodeNumberExtension == null) {
      postalCodeNumberExtension = new EObjectContainmentEList<PostalCodeNumberExtensionType>(PostalCodeNumberExtensionType.class, this, XALPackage.POSTAL_CODE_TYPE__POSTAL_CODE_NUMBER_EXTENSION);
    }
    return postalCodeNumberExtension;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public PostTownType getPostTown() {
    return postTown;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public NotificationChain basicSetPostTown(PostTownType newPostTown, NotificationChain msgs) {
    PostTownType oldPostTown = postTown;
    postTown = newPostTown;
    if (eNotificationRequired()) {
      ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, XALPackage.POSTAL_CODE_TYPE__POST_TOWN, oldPostTown, newPostTown);
      if (msgs == null) msgs = notification; else msgs.add(notification);
    }
    return msgs;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public void setPostTown(PostTownType newPostTown) {
    // Standard EMF single containment setter: detach the old child, attach
    // the new one, then dispatch the accumulated notifications.
    if (newPostTown != postTown) {
      NotificationChain msgs = null;
      if (postTown != null)
        msgs = ((InternalEObject)postTown).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - XALPackage.POSTAL_CODE_TYPE__POST_TOWN, null, msgs);
      if (newPostTown != null)
        msgs = ((InternalEObject)newPostTown).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - XALPackage.POSTAL_CODE_TYPE__POST_TOWN, null, msgs);
      msgs = basicSetPostTown(newPostTown, msgs);
      if (msgs != null) msgs.dispatch();
    }
    else if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, XALPackage.POSTAL_CODE_TYPE__POST_TOWN, newPostTown, newPostTown));
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public FeatureMap getAny() {
    if (any == null) {
      any = new BasicFeatureMap(this, XALPackage.POSTAL_CODE_TYPE__ANY);
    }
    return any;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public Object getType() {
    return type;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public void setType(Object newType) {
    Object oldType = type;
    type = newType;
    if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, XALPackage.POSTAL_CODE_TYPE__TYPE, oldType, type));
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public FeatureMap getAnyAttribute() {
    if (anyAttribute == null) {
      anyAttribute = new BasicFeatureMap(this, XALPackage.POSTAL_CODE_TYPE__ANY_ATTRIBUTE);
    }
    return anyAttribute;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
    switch (featureID) {
      case XALPackage.POSTAL_CODE_TYPE__ADDRESS_LINE:
        return ((InternalEList<?>)getAddressLine()).basicRemove(otherEnd, msgs);
      case XALPackage.POSTAL_CODE_TYPE__POSTAL_CODE_NUMBER:
        return ((InternalEList<?>)getPostalCodeNumber()).basicRemove(otherEnd, msgs);
      case XALPackage.POSTAL_CODE_TYPE__POSTAL_CODE_NUMBER_EXTENSION:
        return ((InternalEList<?>)getPostalCodeNumberExtension()).basicRemove(otherEnd, msgs);
      case XALPackage.POSTAL_CODE_TYPE__POST_TOWN:
        return basicSetPostTown(null, msgs);
      case XALPackage.POSTAL_CODE_TYPE__ANY:
        return ((InternalEList<?>)getAny()).basicRemove(otherEnd, msgs);
      case XALPackage.POSTAL_CODE_TYPE__ANY_ATTRIBUTE:
        return ((InternalEList<?>)getAnyAttribute()).basicRemove(otherEnd, msgs);
    }
    return super.eInverseRemove(otherEnd, featureID, msgs);
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public Object eGet(int featureID, boolean resolve, boolean coreType) {
    switch (featureID) {
      case XALPackage.POSTAL_CODE_TYPE__ADDRESS_LINE:
        return getAddressLine();
      case XALPackage.POSTAL_CODE_TYPE__POSTAL_CODE_NUMBER:
        return getPostalCodeNumber();
      case XALPackage.POSTAL_CODE_TYPE__POSTAL_CODE_NUMBER_EXTENSION:
        return getPostalCodeNumberExtension();
      case XALPackage.POSTAL_CODE_TYPE__POST_TOWN:
        return getPostTown();
      case XALPackage.POSTAL_CODE_TYPE__ANY:
        if (coreType) return getAny();
        return ((FeatureMap.Internal)getAny()).getWrapper();
      case XALPackage.POSTAL_CODE_TYPE__TYPE:
        return getType();
      case XALPackage.POSTAL_CODE_TYPE__ANY_ATTRIBUTE:
        if (coreType) return getAnyAttribute();
        return ((FeatureMap.Internal)getAnyAttribute()).getWrapper();
    }
    return super.eGet(featureID, resolve, coreType);
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @SuppressWarnings("unchecked")
  @Override
  public void eSet(int featureID, Object newValue) {
    switch (featureID) {
      case XALPackage.POSTAL_CODE_TYPE__ADDRESS_LINE:
        getAddressLine().clear();
        getAddressLine().addAll((Collection<? extends AddressLineType>)newValue);
        return;
      case XALPackage.POSTAL_CODE_TYPE__POSTAL_CODE_NUMBER:
        getPostalCodeNumber().clear();
        getPostalCodeNumber().addAll((Collection<? extends PostalCodeNumberType>)newValue);
        return;
      case XALPackage.POSTAL_CODE_TYPE__POSTAL_CODE_NUMBER_EXTENSION:
        getPostalCodeNumberExtension().clear();
        getPostalCodeNumberExtension().addAll((Collection<? extends PostalCodeNumberExtensionType>)newValue);
        return;
      case XALPackage.POSTAL_CODE_TYPE__POST_TOWN:
        setPostTown((PostTownType)newValue);
        return;
      case XALPackage.POSTAL_CODE_TYPE__ANY:
        ((FeatureMap.Internal)getAny()).set(newValue);
        return;
      case XALPackage.POSTAL_CODE_TYPE__TYPE:
        setType(newValue);
        return;
      case XALPackage.POSTAL_CODE_TYPE__ANY_ATTRIBUTE:
        ((FeatureMap.Internal)getAnyAttribute()).set(newValue);
        return;
    }
    super.eSet(featureID, newValue);
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void eUnset(int featureID) {
    switch (featureID) {
      case XALPackage.POSTAL_CODE_TYPE__ADDRESS_LINE:
        getAddressLine().clear();
        return;
      case XALPackage.POSTAL_CODE_TYPE__POSTAL_CODE_NUMBER:
        getPostalCodeNumber().clear();
        return;
      case XALPackage.POSTAL_CODE_TYPE__POSTAL_CODE_NUMBER_EXTENSION:
        getPostalCodeNumberExtension().clear();
        return;
      case XALPackage.POSTAL_CODE_TYPE__POST_TOWN:
        setPostTown((PostTownType)null);
        return;
      case XALPackage.POSTAL_CODE_TYPE__ANY:
        getAny().clear();
        return;
      case XALPackage.POSTAL_CODE_TYPE__TYPE:
        setType(TYPE_EDEFAULT);
        return;
      case XALPackage.POSTAL_CODE_TYPE__ANY_ATTRIBUTE:
        getAnyAttribute().clear();
        return;
    }
    super.eUnset(featureID);
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public boolean eIsSet(int featureID) {
    switch (featureID) {
      case XALPackage.POSTAL_CODE_TYPE__ADDRESS_LINE:
        return addressLine != null && !addressLine.isEmpty();
      case XALPackage.POSTAL_CODE_TYPE__POSTAL_CODE_NUMBER:
        return postalCodeNumber != null && !postalCodeNumber.isEmpty();
      case XALPackage.POSTAL_CODE_TYPE__POSTAL_CODE_NUMBER_EXTENSION:
        return postalCodeNumberExtension != null && !postalCodeNumberExtension.isEmpty();
      case XALPackage.POSTAL_CODE_TYPE__POST_TOWN:
        return postTown != null;
      case XALPackage.POSTAL_CODE_TYPE__ANY:
        return any != null && !any.isEmpty();
      case XALPackage.POSTAL_CODE_TYPE__TYPE:
        return TYPE_EDEFAULT == null ? type != null : !TYPE_EDEFAULT.equals(type);
      case XALPackage.POSTAL_CODE_TYPE__ANY_ATTRIBUTE:
        return anyAttribute != null && !anyAttribute.isEmpty();
    }
    return super.eIsSet(featureID);
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public String toString() {
    if (eIsProxy()) return super.toString();

    StringBuffer result = new StringBuffer(super.toString());
    result.append(" (any: ");
    result.append(any);
    result.append(", type: ");
    result.append(type);
    result.append(", anyAttribute: ");
    result.append(anyAttribute);
    result.append(')');
    return result.toString();
  }

} //PostalCodeTypeImpl
/* * Copyright 2002-2018 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.util.xml; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import javax.xml.namespace.NamespaceContext; import javax.xml.namespace.QName; import javax.xml.stream.XMLEventFactory; import javax.xml.stream.XMLEventWriter; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamWriter; import javax.xml.stream.events.EndElement; import javax.xml.stream.events.Namespace; import javax.xml.stream.events.StartElement; /** * Implementation of the {@link javax.xml.stream.XMLStreamWriter} interface * that wraps an {@link XMLEventWriter}. 
* * @author Arjen Poutsma * @since 3.0.5 * @see StaxUtils#createEventStreamWriter(javax.xml.stream.XMLEventWriter, javax.xml.stream.XMLEventFactory) */ class XMLEventStreamWriter implements XMLStreamWriter { private static final String DEFAULT_ENCODING = "UTF-8"; private final XMLEventWriter eventWriter; private final XMLEventFactory eventFactory; private final List<EndElement> endElements = new ArrayList<>(); private boolean emptyElement = false; public XMLEventStreamWriter(XMLEventWriter eventWriter, XMLEventFactory eventFactory) { this.eventWriter = eventWriter; this.eventFactory = eventFactory; } @Override public void setNamespaceContext(NamespaceContext context) throws XMLStreamException { this.eventWriter.setNamespaceContext(context); } @Override public NamespaceContext getNamespaceContext() { return this.eventWriter.getNamespaceContext(); } @Override public void setPrefix(String prefix, String uri) throws XMLStreamException { this.eventWriter.setPrefix(prefix, uri); } @Override public String getPrefix(String uri) throws XMLStreamException { return this.eventWriter.getPrefix(uri); } @Override public void setDefaultNamespace(String uri) throws XMLStreamException { this.eventWriter.setDefaultNamespace(uri); } @Override public Object getProperty(String name) throws IllegalArgumentException { throw new IllegalArgumentException(); } @Override public void writeStartDocument() throws XMLStreamException { closeEmptyElementIfNecessary(); this.eventWriter.add(this.eventFactory.createStartDocument()); } @Override public void writeStartDocument(String version) throws XMLStreamException { closeEmptyElementIfNecessary(); this.eventWriter.add(this.eventFactory.createStartDocument(DEFAULT_ENCODING, version)); } @Override public void writeStartDocument(String encoding, String version) throws XMLStreamException { closeEmptyElementIfNecessary(); this.eventWriter.add(this.eventFactory.createStartDocument(encoding, version)); } @Override public void writeStartElement(String 
localName) throws XMLStreamException { closeEmptyElementIfNecessary(); doWriteStartElement(this.eventFactory.createStartElement(new QName(localName), null, null)); } @Override public void writeStartElement(String namespaceURI, String localName) throws XMLStreamException { closeEmptyElementIfNecessary(); doWriteStartElement(this.eventFactory.createStartElement(new QName(namespaceURI, localName), null, null)); } @Override public void writeStartElement(String prefix, String localName, String namespaceURI) throws XMLStreamException { closeEmptyElementIfNecessary(); doWriteStartElement(this.eventFactory.createStartElement(new QName(namespaceURI, localName, prefix), null, null)); } private void doWriteStartElement(StartElement startElement) throws XMLStreamException { this.eventWriter.add(startElement); this.endElements.add(this.eventFactory.createEndElement(startElement.getName(), startElement.getNamespaces())); } @Override public void writeEmptyElement(String localName) throws XMLStreamException { closeEmptyElementIfNecessary(); writeStartElement(localName); this.emptyElement = true; } @Override public void writeEmptyElement(String namespaceURI, String localName) throws XMLStreamException { closeEmptyElementIfNecessary(); writeStartElement(namespaceURI, localName); this.emptyElement = true; } @Override public void writeEmptyElement(String prefix, String localName, String namespaceURI) throws XMLStreamException { closeEmptyElementIfNecessary(); writeStartElement(prefix, localName, namespaceURI); this.emptyElement = true; } private void closeEmptyElementIfNecessary() throws XMLStreamException { if (this.emptyElement) { this.emptyElement = false; writeEndElement(); } } @Override public void writeEndElement() throws XMLStreamException { closeEmptyElementIfNecessary(); int last = this.endElements.size() - 1; EndElement lastEndElement = this.endElements.remove(last); this.eventWriter.add(lastEndElement); } @Override public void writeAttribute(String localName, String value) 
throws XMLStreamException { this.eventWriter.add(this.eventFactory.createAttribute(localName, value)); } @Override public void writeAttribute(String namespaceURI, String localName, String value) throws XMLStreamException { this.eventWriter.add(this.eventFactory.createAttribute(new QName(namespaceURI, localName), value)); } @Override public void writeAttribute(String prefix, String namespaceURI, String localName, String value) throws XMLStreamException { this.eventWriter.add(this.eventFactory.createAttribute(prefix, namespaceURI, localName, value)); } @Override public void writeNamespace(String prefix, String namespaceURI) throws XMLStreamException { doWriteNamespace(this.eventFactory.createNamespace(prefix, namespaceURI)); } @Override public void writeDefaultNamespace(String namespaceURI) throws XMLStreamException { doWriteNamespace(this.eventFactory.createNamespace(namespaceURI)); } @SuppressWarnings("rawtypes") private void doWriteNamespace(Namespace namespace) throws XMLStreamException { int last = this.endElements.size() - 1; EndElement oldEndElement = this.endElements.get(last); Iterator oldNamespaces = oldEndElement.getNamespaces(); List<Namespace> newNamespaces = new ArrayList<>(); while (oldNamespaces.hasNext()) { Namespace oldNamespace = (Namespace) oldNamespaces.next(); newNamespaces.add(oldNamespace); } newNamespaces.add(namespace); EndElement newEndElement = this.eventFactory.createEndElement(oldEndElement.getName(), newNamespaces.iterator()); this.eventWriter.add(namespace); this.endElements.set(last, newEndElement); } @Override public void writeCharacters(String text) throws XMLStreamException { closeEmptyElementIfNecessary(); this.eventWriter.add(this.eventFactory.createCharacters(text)); } @Override public void writeCharacters(char[] text, int start, int len) throws XMLStreamException { closeEmptyElementIfNecessary(); this.eventWriter.add(this.eventFactory.createCharacters(new String(text, start, len))); } @Override public void writeCData(String 
data) throws XMLStreamException { closeEmptyElementIfNecessary(); this.eventWriter.add(this.eventFactory.createCData(data)); } @Override public void writeComment(String data) throws XMLStreamException { closeEmptyElementIfNecessary(); this.eventWriter.add(this.eventFactory.createComment(data)); } @Override public void writeProcessingInstruction(String target) throws XMLStreamException { closeEmptyElementIfNecessary(); this.eventWriter.add(this.eventFactory.createProcessingInstruction(target, "")); } @Override public void writeProcessingInstruction(String target, String data) throws XMLStreamException { closeEmptyElementIfNecessary(); this.eventWriter.add(this.eventFactory.createProcessingInstruction(target, data)); } @Override public void writeDTD(String dtd) throws XMLStreamException { closeEmptyElementIfNecessary(); this.eventWriter.add(this.eventFactory.createDTD(dtd)); } @Override public void writeEntityRef(String name) throws XMLStreamException { closeEmptyElementIfNecessary(); this.eventWriter.add(this.eventFactory.createEntityReference(name, null)); } @Override public void writeEndDocument() throws XMLStreamException { closeEmptyElementIfNecessary(); this.eventWriter.add(this.eventFactory.createEndDocument()); } @Override public void flush() throws XMLStreamException { this.eventWriter.flush(); } @Override public void close() throws XMLStreamException { closeEmptyElementIfNecessary(); this.eventWriter.close(); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.cql3.statements;

import java.util.*;
import java.util.stream.Collectors;

import com.google.common.collect.Iterables;

import org.apache.cassandra.auth.Permission;
import org.apache.cassandra.config.*;
import org.apache.cassandra.cql3.*;
import org.apache.cassandra.db.ColumnFamilyStore;
import org.apache.cassandra.db.Keyspace;
import org.apache.cassandra.db.marshal.AbstractType;
import org.apache.cassandra.db.marshal.CollectionType;
import org.apache.cassandra.db.marshal.CounterColumnType;
import org.apache.cassandra.db.marshal.ReversedType;
import org.apache.cassandra.db.view.View;
import org.apache.cassandra.exceptions.*;
import org.apache.cassandra.schema.IndexMetadata;
import org.apache.cassandra.schema.Indexes;
import org.apache.cassandra.schema.TableParams;
import org.apache.cassandra.service.ClientState;
import org.apache.cassandra.service.MigrationManager;
import org.apache.cassandra.transport.Event;
import org.apache.cassandra.utils.*;

/**
 * {@code ALTER TABLE} statement: adds, alters or drops columns, changes table
 * options, or renames columns (see {@link Type}).
 */
public class AlterTableStatement extends SchemaAlteringStatement
{
    public enum Type
    {
        ADD, ALTER, DROP, OPTS, RENAME
    }

    public final Type oType;
    private final TableAttributes attrs;
    private final Map<ColumnDefinition.Raw, ColumnDefinition.Raw> renames;
    private final List<AlterTableStatementColumn> colNameList;
    // Timestamp recorded for dropped columns; defaults to "now" in
    // microseconds when the caller passes null (see constructor).
    private final Long deleteTimestamp;

    public AlterTableStatement(CFName name,
                               Type type,
                               List<AlterTableStatementColumn> colDataList,
                               TableAttributes attrs,
                               Map<ColumnDefinition.Raw, ColumnDefinition.Raw> renames,
                               Long deleteTimestamp)
    {
        super(name);
        this.oType = type;
        this.colNameList = colDataList;
        this.attrs = attrs;
        this.renames = renames;
        this.deleteTimestamp = deleteTimestamp == null ? FBUtilities.timestampMicros() : deleteTimestamp;
    }

    public void checkAccess(ClientState state) throws UnauthorizedException, InvalidRequestException
    {
        // ALTER permission on the target table is required.
        state.hasColumnFamilyAccess(keyspace(), columnFamily(), Permission.ALTER);
    }

    public void validate(ClientState state)
    {
        // validated in announceMigration()
    }

    public Event.SchemaChange announceMigration(boolean isLocalOnly) throws RequestValidationException
    {
        CFMetaData meta = Validation.validateColumnFamily(keyspace(), columnFamily());
        if (meta.isView())
            throw new InvalidRequestException("Cannot use ALTER TABLE on Materialized View");

        // Work on a copy; the original metadata stays untouched until the
        // migration is announced.
        CFMetaData cfm = meta.copy();

        ColumnIdentifier columnName = null;
        ColumnDefinition def = null;
        CQL3Type.Raw dataType = null;
        boolean isStatic = false;
        CQL3Type validator = null;

        List<ViewDefinition> viewUpdates = null;
        Iterable<ViewDefinition> views = View.findAll(keyspace(), columnFamily());

        switch (oType)
        {
            case ADD:
                if (cfm.isDense())
                    throw new InvalidRequestException("Cannot add new column to a COMPACT STORAGE table");

                for (AlterTableStatementColumn colData : colNameList)
                {
                    columnName = colData.getColumnName().getIdentifier(cfm);
                    def = cfm.getColumnDefinition(columnName);
                    dataType = colData.getColumnType();
                    assert dataType != null;
                    isStatic = colData.getStaticType();
                    validator = dataType.prepare(keyspace());

                    if (isStatic)
                    {
                        if (!cfm.isCompound())
                            throw new InvalidRequestException("Static columns are not allowed in COMPACT STORAGE tables");
                        if (cfm.clusteringColumns().isEmpty())
                            throw new
InvalidRequestException("Static columns are only useful (and thus allowed) if the table has at least one clustering column"); } if (def != null) { switch (def.kind) { case PARTITION_KEY: case CLUSTERING: throw new InvalidRequestException(String.format("Invalid column name %s because it conflicts with a PRIMARY KEY part", columnName)); default: throw new InvalidRequestException(String.format("Invalid column name %s because it conflicts with an existing column", columnName)); } } // Cannot re-add a dropped counter column. See #7831. if (meta.isCounter() && meta.getDroppedColumns().containsKey(columnName.bytes)) throw new InvalidRequestException(String.format("Cannot re-add previously dropped counter column %s", columnName)); AbstractType<?> type = validator.getType(); if (type.isCollection() && type.isMultiCell()) { if (!cfm.isCompound()) throw new InvalidRequestException("Cannot use non-frozen collections in COMPACT STORAGE tables"); if (cfm.isSuper()) throw new InvalidRequestException("Cannot use non-frozen collections with super column families"); // If there used to be a non-frozen collection column with the same name (that has been dropped), // we could still have some data using the old type, and so we can't allow adding a collection // with the same name unless the types are compatible (see #6276). CFMetaData.DroppedColumn dropped = cfm.getDroppedColumns().get(columnName.bytes); if (dropped != null && dropped.type instanceof CollectionType && dropped.type.isMultiCell() && !type.isCompatibleWith(dropped.type)) { String message = String.format("Cannot add a collection with the name %s because a collection with the same name" + " and a different type (%s) has already been used in the past", columnName, dropped.type.asCQL3Type()); throw new InvalidRequestException(message); } } cfm.addColumnDefinition(isStatic ? 
ColumnDefinition.staticDef(cfm, columnName.bytes, type) : ColumnDefinition.regularDef(cfm, columnName.bytes, type)); // Adding a column to a table which has an include all view requires the column to be added to the view // as well if (!isStatic) { for (ViewDefinition view : views) { if (view.includeAllColumns) { ViewDefinition viewCopy = view.copy(); viewCopy.metadata.addColumnDefinition(ColumnDefinition.regularDef(viewCopy.metadata, columnName.bytes, type)); if (viewUpdates == null) viewUpdates = new ArrayList<>(); viewUpdates.add(viewCopy); } } } } break; case ALTER: columnName = colNameList.get(0).getColumnName().getIdentifier(cfm); def = cfm.getColumnDefinition(columnName); dataType = colNameList.get(0).getColumnType(); assert dataType != null; validator = dataType.prepare(keyspace()); if (def == null) throw new InvalidRequestException(String.format("Column %s was not found in table %s", columnName, columnFamily())); AbstractType<?> validatorType = def.isReversedType() && !validator.getType().isReversed() ? ReversedType.getInstance(validator.getType()) : validator.getType(); validateAlter(cfm, def, validatorType); // In any case, we update the column definition cfm.addOrReplaceColumnDefinition(def.withNewType(validatorType)); // We also have to validate the view types here. If we have a view which includes a column as part of // the clustering key, we need to make sure that it is indeed compatible. for (ViewDefinition view : views) { if (!view.includes(columnName)) continue; ViewDefinition viewCopy = view.copy(); ColumnDefinition viewDef = view.metadata.getColumnDefinition(columnName); AbstractType viewType = viewDef.isReversedType() && !validator.getType().isReversed() ? 
ReversedType.getInstance(validator.getType()) : validator.getType(); validateAlter(view.metadata, viewDef, viewType); viewCopy.metadata.addOrReplaceColumnDefinition(viewDef.withNewType(viewType)); if (viewUpdates == null) viewUpdates = new ArrayList<>(); viewUpdates.add(viewCopy); } break; case DROP: if (!cfm.isCQLTable()) throw new InvalidRequestException("Cannot drop columns from a non-CQL3 table"); for (AlterTableStatementColumn colData : colNameList) { columnName = colData.getColumnName().getIdentifier(cfm); def = cfm.getColumnDefinition(columnName); if (def == null) throw new InvalidRequestException(String.format("Column %s was not found in table %s", columnName, columnFamily())); switch (def.kind) { case PARTITION_KEY: case CLUSTERING: throw new InvalidRequestException(String.format("Cannot drop PRIMARY KEY part %s", columnName)); case REGULAR: case STATIC: ColumnDefinition toDelete = null; for (ColumnDefinition columnDef : cfm.partitionColumns()) { if (columnDef.name.equals(columnName)) { toDelete = columnDef; break; } } assert toDelete != null; cfm.removeColumnDefinition(toDelete); cfm.recordColumnDrop(toDelete, deleteTimestamp); break; } // If the dropped column is required by any secondary indexes // we reject the operation, as the indexes must be dropped first Indexes allIndexes = cfm.getIndexes(); if (!allIndexes.isEmpty()) { ColumnFamilyStore store = Keyspace.openAndGetStore(cfm); Set<IndexMetadata> dependentIndexes = store.indexManager.getDependentIndexes(def); if (!dependentIndexes.isEmpty()) throw new InvalidRequestException(String.format("Cannot drop column %s because it has " + "dependent secondary indexes (%s)", def, dependentIndexes.stream() .map(i -> i.name) .collect(Collectors.joining(",")))); } // If a column is dropped which is included in a view, we don't allow the drop to take place. 
boolean rejectAlter = false; StringBuilder builder = new StringBuilder(); for (ViewDefinition view : views) { if (!view.includes(columnName)) continue; if (rejectAlter) builder.append(','); rejectAlter = true; builder.append(view.viewName); } if (rejectAlter) throw new InvalidRequestException(String.format("Cannot drop column %s, depended on by materialized views (%s.{%s})", columnName.toString(), keyspace(), builder.toString())); } break; case OPTS: if (attrs == null) throw new InvalidRequestException("ALTER TABLE WITH invoked, but no parameters found"); attrs.validate(); TableParams params = attrs.asAlteredTableParams(cfm.params); if (!Iterables.isEmpty(views) && params.gcGraceSeconds == 0) { throw new InvalidRequestException("Cannot alter gc_grace_seconds of the base table of a " + "materialized view to 0, since this value is used to TTL " + "undelivered updates. Setting gc_grace_seconds too low might " + "cause undelivered updates to expire " + "before being replayed."); } if (meta.isCounter() && params.defaultTimeToLive > 0) throw new InvalidRequestException("Cannot set default_time_to_live on a table with counters"); cfm.params(params); break; case RENAME: for (Map.Entry<ColumnDefinition.Raw, ColumnDefinition.Raw> entry : renames.entrySet()) { ColumnIdentifier from = entry.getKey().getIdentifier(cfm); ColumnIdentifier to = entry.getValue().getIdentifier(cfm); cfm.renameColumn(from, to); // If the view includes a renamed column, it must be renamed in the view table and the definition. 
for (ViewDefinition view : views) { if (!view.includes(from)) continue; ViewDefinition viewCopy = view.copy(); ColumnIdentifier viewFrom = entry.getKey().getIdentifier(viewCopy.metadata); ColumnIdentifier viewTo = entry.getValue().getIdentifier(viewCopy.metadata); viewCopy.renameColumn(viewFrom, viewTo); if (viewUpdates == null) viewUpdates = new ArrayList<>(); viewUpdates.add(viewCopy); } } break; } MigrationManager.announceColumnFamilyUpdate(cfm, isLocalOnly); if (viewUpdates != null) { for (ViewDefinition viewUpdate : viewUpdates) MigrationManager.announceViewUpdate(viewUpdate, isLocalOnly); } return new Event.SchemaChange(Event.SchemaChange.Change.UPDATED, Event.SchemaChange.Target.TABLE, keyspace(), columnFamily()); } private static void validateAlter(CFMetaData cfm, ColumnDefinition def, AbstractType<?> validatorType) { switch (def.kind) { case PARTITION_KEY: if (validatorType instanceof CounterColumnType) throw new InvalidRequestException(String.format("counter type is not supported for PRIMARY KEY part %s", def.name)); AbstractType<?> currentType = cfm.getKeyValidatorAsClusteringComparator().subtype(def.position()); if (!validatorType.isValueCompatibleWith(currentType)) throw new ConfigurationException(String.format("Cannot change %s from type %s to type %s: types are incompatible.", def.name, currentType.asCQL3Type(), validatorType.asCQL3Type())); break; case CLUSTERING: if (!cfm.isCQLTable()) throw new InvalidRequestException(String.format("Cannot alter clustering column %s in a non-CQL3 table", def.name)); AbstractType<?> oldType = cfm.comparator.subtype(def.position()); // Note that CFMetaData.validateCompatibility already validate the change we're about to do. However, the error message it // sends is a bit cryptic for a CQL3 user, so validating here for a sake of returning a better error message // Do note that we need isCompatibleWith here, not just isValueCompatibleWith. 
if (!validatorType.isCompatibleWith(oldType)) { throw new ConfigurationException(String.format("Cannot change %s from type %s to type %s: types are not order-compatible.", def.name, oldType.asCQL3Type(), validatorType.asCQL3Type())); } break; case REGULAR: case STATIC: // As above, we want a clear error message, but in this case it happens that CFMetaData.validateCompatibility *does not* // validate this for historical reasons so it's doubtly important. Note that we only care about value compatibility // though since we won't compare values (except when there is an index, but that is validated by ColumnDefinition already). // TODO: we could clear out where validation is done and do it only once. if (!validatorType.isValueCompatibleWith(def.type)) throw new ConfigurationException(String.format("Cannot change %s from type %s to type %s: types are incompatible.", def.name, def.type.asCQL3Type(), validatorType.asCQL3Type())); break; } } @Override public String toString() { return String.format("AlterTableStatement(name=%s, type=%s)", cfName, oType); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.aws2.cw;

import java.time.Instant;

import org.apache.camel.RuntimeCamelException;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriParams;
import org.apache.camel.spi.UriPath;
import software.amazon.awssdk.core.Protocol;
import software.amazon.awssdk.services.cloudwatch.CloudWatchClient;

/**
 * Endpoint configuration for the AWS2 CloudWatch (CW) component. Holds the
 * metric identity (namespace/name/value/unit/timestamp), the client or
 * credentials used to reach CloudWatch, and optional proxy settings.
 * Instances are copied per-endpoint via {@link #copy()}.
 */
@UriParams
public class Cw2Configuration implements Cloneable {

    @UriPath
    @Metadata(required = true)
    private String namespace;
    @UriParam
    @Metadata(autowired = true)
    private CloudWatchClient amazonCwClient;
    @UriParam(label = "security", secret = true)
    private String accessKey;
    @UriParam(label = "security", secret = true)
    private String secretKey;
    @UriParam
    private String name;
    @UriParam
    private Double value;
    @UriParam
    private String unit;
    @UriParam
    private Instant timestamp;
    @UriParam(enums = "HTTP,HTTPS", defaultValue = "HTTPS")
    private Protocol proxyProtocol = Protocol.HTTPS;
    @UriParam
    private String proxyHost;
    @UriParam
    private Integer proxyPort;
    @UriParam
    private String region;
    @UriParam(defaultValue = "false")
    private boolean trustAllCertificates;

    public String getAccessKey() {
        return accessKey;
    }

    /**
     * Amazon AWS Access Key
     */
    public void setAccessKey(String accessKey) {
        this.accessKey = accessKey;
    }

    public String getSecretKey() {
        return secretKey;
    }

    /**
     * Amazon AWS Secret Key
     */
    public void setSecretKey(String secretKey) {
        this.secretKey = secretKey;
    }

    public String getName() {
        return name;
    }

    /**
     * The metric name
     */
    public void setName(String name) {
        this.name = name;
    }

    public Double getValue() {
        return value;
    }

    /**
     * The metric value
     */
    public void setValue(Double value) {
        this.value = value;
    }

    public String getUnit() {
        return unit;
    }

    /**
     * The metric unit
     */
    public void setUnit(String unit) {
        this.unit = unit;
    }

    public String getNamespace() {
        return namespace;
    }

    /**
     * The metric namespace
     */
    public void setNamespace(String namespace) {
        this.namespace = namespace;
    }

    /**
     * The metric timestamp
     */
    public void setTimestamp(Instant timestamp) {
        this.timestamp = timestamp;
    }

    public Instant getTimestamp() {
        return timestamp;
    }

    public CloudWatchClient getAmazonCwClient() {
        return amazonCwClient;
    }

    /**
     * To use the AmazonCloudWatch as the client
     */
    public void setAmazonCwClient(CloudWatchClient amazonCwClient) {
        this.amazonCwClient = amazonCwClient;
    }

    public Protocol getProxyProtocol() {
        return proxyProtocol;
    }

    /**
     * To define a proxy protocol when instantiating the CW client
     */
    public void setProxyProtocol(Protocol proxyProtocol) {
        this.proxyProtocol = proxyProtocol;
    }

    public String getProxyHost() {
        return proxyHost;
    }

    /**
     * To define a proxy host when instantiating the CW client
     */
    public void setProxyHost(String proxyHost) {
        this.proxyHost = proxyHost;
    }

    public Integer getProxyPort() {
        return proxyPort;
    }

    /**
     * To define a proxy port when instantiating the CW client
     */
    public void setProxyPort(Integer proxyPort) {
        this.proxyPort = proxyPort;
    }

    public String getRegion() {
        return region;
    }

    /**
     * The region in which the CloudWatch client needs to work. When using this
     * parameter, the configuration will expect the lowercase name of the
     * region (for example ap-east-1). You'll need to use the name
     * Region.EU_WEST_1.id()
     */
    public void setRegion(String region) {
        this.region = region;
    }

    public boolean isTrustAllCertificates() {
        return trustAllCertificates;
    }

    /**
     * If we want to trust all certificates in case of overriding the endpoint
     */
    public void setTrustAllCertificates(boolean trustAllCertificates) {
        this.trustAllCertificates = trustAllCertificates;
    }

    // *************************************************
    //
    // *************************************************

    public Cw2Configuration copy() {
        try {
            // Shallow clone is sufficient: all fields are immutable or
            // intentionally shared (the CloudWatchClient).
            return (Cw2Configuration) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new RuntimeCamelException(e);
        }
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs.server.namenode;

import static org.junit.Assert.*;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.regex.Pattern;

import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.web.HftpFileSystem;
import org.apache.hadoop.hdfs.web.WebHdfsTestUtil;
import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.test.PathUtils;
import org.apache.log4j.Appender;
import org.apache.log4j.AsyncAppender;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;
import org.apache.log4j.RollingFileAppender;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

/**
 * A JUnit test that audit logs are generated for NameNode operations.
 * Runs twice via {@link Parameterized}: once with the synchronous audit
 * logger and once with the async (AsyncAppender-backed) audit logger.
 */
@RunWith(Parameterized.class)
public class TestAuditLogs {
  /** File the log4j RollingFileAppender writes audit events to. */
  static final String auditLogFile =
      PathUtils.getTestDirName(TestAuditLogs.class) + "/TestAuditLogs-audit.log";

  /** Whether this parameterized run exercises the async audit logger. */
  boolean useAsyncLog;

  @Parameters
  public static Collection<Object[]> data() {
    Collection<Object[]> params = new ArrayList<Object[]>();
    // Boolean.FALSE/TRUE instead of the deprecated Boolean(boolean) ctor.
    params.add(new Object[]{Boolean.FALSE});
    params.add(new Object[]{Boolean.TRUE});
    return params;
  }

  public TestAuditLogs(boolean useAsyncLog) {
    this.useAsyncLog = useAsyncLog;
  }

  // Pattern for:
  // allowed=(true|false) ugi=name ip=/address cmd={cmd} src={path} dst=null perm=null
  static final Pattern auditPattern = Pattern.compile(
      "allowed=.*?\\s" +
      "ugi=.*?\\s" +
      "ip=/\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\s" +
      "cmd=.*?\\ssrc=.*?\\sdst=null\\s" +
      "perm=.*?");
  static final Pattern successPattern = Pattern.compile(
      ".*allowed=true.*");
  static final String username = "bob";
  static final String[] groups = { "group1" };
  static final String fileName = "/srcdat";

  DFSTestUtil util;
  MiniDFSCluster cluster;
  FileSystem fs;
  String[] fnames;
  Configuration conf;
  UserGroupInformation userGroupInfo;

  /**
   * Brings up a 4-datanode mini cluster with test files and verifies the
   * audit logger was wired with the expected (a)sync appender.
   */
  @Before
  public void setupCluster() throws Exception {
    // must configure prior to instantiating the namesystem because it
    // will reconfigure the logger if async is enabled
    configureAuditLogs();
    conf = new HdfsConfiguration();
    final long precision = 1L;
    conf.setLong(DFSConfigKeys.DFS_NAMENODE_ACCESSTIME_PRECISION_KEY, precision);
    conf.setLong(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, 10000L);
    conf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);
    conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_AUDIT_LOG_ASYNC_KEY, useAsyncLog);
    util = new DFSTestUtil.Builder().setName("TestAuditAllowed").
        setNumFiles(20).build();
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(4).build();
    fs = cluster.getFileSystem();
    util.createFiles(fs, fileName);

    // make sure the appender is what it's supposed to be
    Logger logger = ((Log4JLogger) FSNamesystem.auditLog).getLogger();
    @SuppressWarnings("unchecked")
    List<Appender> appenders = Collections.list(logger.getAllAppenders());
    assertEquals(1, appenders.size());
    assertEquals(useAsyncLog, appenders.get(0) instanceof AsyncAppender);

    fnames = util.getFileNames(fileName);
    util.waitReplication(fs, fileName, (short)3);
    userGroupInfo = UserGroupInformation.createUserForTesting(username, groups);
  }

  /**
   * Tears down the cluster. Null-guarded so a failure in
   * {@link #setupCluster()} does not get masked by an NPE here.
   */
  @After
  public void teardownCluster() throws Exception {
    if (util != null && fs != null) {
      util.cleanup(fs, "/srcdat");
    }
    if (fs != null) {
      fs.close();
    }
    if (cluster != null) {
      cluster.shutdown();
    }
  }

  /** test that allowed operation puts proper entry in audit log */
  @Test
  public void testAuditAllowed() throws Exception {
    final Path file = new Path(fnames[0]);
    FileSystem userfs = DFSTestUtil.getFileSystemAs(userGroupInfo, conf);

    setupAuditLogs();
    InputStream istream = userfs.open(file);
    int val = istream.read();
    istream.close();
    verifyAuditLogs(true);
    assertTrue("failed to read from file", val >= 0);
  }

  /** test that allowed stat puts proper entry in audit log */
  @Test
  public void testAuditAllowedStat() throws Exception {
    final Path file = new Path(fnames[0]);
    FileSystem userfs = DFSTestUtil.getFileSystemAs(userGroupInfo, conf);

    setupAuditLogs();
    FileStatus st = userfs.getFileStatus(file);
    verifyAuditLogs(true);
    assertTrue("failed to stat file", st != null && st.isFile());
  }

  /** test that denied operation puts proper entry in audit log */
  @Test
  public void testAuditDenied() throws Exception {
    final Path file = new Path(fnames[0]);
    FileSystem userfs = DFSTestUtil.getFileSystemAs(userGroupInfo, conf);

    fs.setPermission(file, new FsPermission((short)0600));
    fs.setOwner(file, "root", null);

    setupAuditLogs();

    try {
      userfs.open(file);
      fail("open must not succeed");
    } catch(AccessControlException e) {
      System.out.println("got access denied, as expected.");
    }
    verifyAuditLogs(false);
  }

  /** test that access via webhdfs puts proper entry in audit log */
  @Test
  public void testAuditWebHdfs() throws Exception {
    final Path file = new Path(fnames[0]);

    fs.setPermission(file, new FsPermission((short)0644));
    fs.setOwner(file, "root", null);

    setupAuditLogs();

    WebHdfsFileSystem webfs =
        WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsFileSystem.SCHEME);
    InputStream istream = webfs.open(file);
    int val = istream.read();
    istream.close();

    // webhdfs does getFileInfo + open + getBlockLocations => 3 audit entries
    verifyAuditLogsRepeat(true, 3);
    assertTrue("failed to read from file", val >= 0);
  }

  /** test that stat via webhdfs puts proper entry in audit log */
  @Test
  public void testAuditWebHdfsStat() throws Exception {
    final Path file = new Path(fnames[0]);

    fs.setPermission(file, new FsPermission((short)0644));
    fs.setOwner(file, "root", null);

    setupAuditLogs();

    WebHdfsFileSystem webfs =
        WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsFileSystem.SCHEME);
    FileStatus st = webfs.getFileStatus(file);

    verifyAuditLogs(true);
    assertTrue("failed to stat file", st != null && st.isFile());
  }

  /** test that access via Hftp puts proper entry in audit log */
  @Test
  public void testAuditHftp() throws Exception {
    final Path file = new Path(fnames[0]);

    final String hftpUri =
        "hftp://" + conf.get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY);

    HftpFileSystem hftpFs = null;

    setupAuditLogs();
    try {
      hftpFs = (HftpFileSystem) new Path(hftpUri).getFileSystem(conf);
      InputStream istream = hftpFs.open(file);
      @SuppressWarnings("unused")
      int val = istream.read();
      istream.close();

      verifyAuditLogs(true);
    } finally {
      if (hftpFs != null) hftpFs.close();
    }
  }

  /** test that denied access via webhdfs puts proper entry in audit log */
  @Test
  public void testAuditWebHdfsDenied() throws Exception {
    final Path file = new Path(fnames[0]);

    fs.setPermission(file, new FsPermission((short)0600));
    fs.setOwner(file, "root", null);

    setupAuditLogs();

    try {
      WebHdfsFileSystem webfs =
          WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsFileSystem.SCHEME);
      InputStream istream = webfs.open(file);
      int val = istream.read();
      fail("open+read must not succeed, got " + val);
    } catch(AccessControlException E) {
      System.out.println("got access denied, as expected.");
    }
    // getFileInfo succeeds, open fails => 2 audit entries
    verifyAuditLogsRepeat(false, 2);
  }

  /** Sets up log4j logger for auditlogs */
  private void setupAuditLogs() throws IOException {
    Logger logger = ((Log4JLogger) FSNamesystem.auditLog).getLogger();
    // enable logging now that the test is ready to run
    logger.setLevel(Level.INFO);
  }

  /**
   * Resets the audit log file and attaches a fresh RollingFileAppender,
   * leaving the logger OFF until {@link #setupAuditLogs()} enables it.
   */
  private void configureAuditLogs() throws IOException {
    // Shutdown the LogManager to release all logger open file handles.
    // Unfortunately, Apache commons logging library does not provide
    // means to release underlying loggers. For additional info look up
    // commons library FAQ.
    LogManager.shutdown();

    File file = new File(auditLogFile);
    if (file.exists()) {
      assertTrue(file.delete());
    }
    Logger logger = ((Log4JLogger) FSNamesystem.auditLog).getLogger();
    // disable logging while the cluster startup preps files
    logger.setLevel(Level.OFF);
    PatternLayout layout = new PatternLayout("%m%n");
    RollingFileAppender appender = new RollingFileAppender(layout, auditLogFile);
    logger.addAppender(appender);
  }

  // Ensure audit log has only one entry
  private void verifyAuditLogs(boolean expectSuccess) throws IOException {
    verifyAuditLogsRepeat(expectSuccess, 1);
  }

  // Ensure audit log has exactly N entries
  private void verifyAuditLogsRepeat(boolean expectSuccess, int ndupe)
      throws IOException {
    // Turn off the logs
    Logger logger = ((Log4JLogger) FSNamesystem.auditLog).getLogger();
    logger.setLevel(Level.OFF);

    BufferedReader reader = new BufferedReader(new FileReader(auditLogFile));
    String line = null;
    boolean ret = true;

    try {
      for (int i = 0; i < ndupe; i++) {
        line = reader.readLine();
        assertNotNull(line);
        assertTrue("Expected audit event not found in audit log",
            auditPattern.matcher(line).matches());
        ret &= successPattern.matcher(line).matches();
      }
      assertNull("Unexpected event in audit log", reader.readLine());
      assertTrue("Expected success=" + expectSuccess, ret == expectSuccess);
    } finally {
      reader.close();
    }
  }
}
/*
 * #%L
 * MockSocketServer.java - mongodb-async-driver - Allanbank Consulting, Inc.
 * %%
 * Copyright (C) 2011 - 2014 Allanbank Consulting, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package com.allanbank.mongodb.client;

import java.io.Closeable;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.nio.ByteBuffer;
import java.nio.channels.ServerSocketChannel;
import java.nio.channels.SocketChannel;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;

import com.allanbank.mongodb.bson.io.EndianUtils;
import com.allanbank.mongodb.client.message.Header;
import com.allanbank.mongodb.util.IOUtils;
import com.allanbank.mongodb.util.ServerNameUtils;

/**
 * Provides a simple single threaded socket server to act as a MongoDB server in
 * tests. The server collects all messages it receives and can be loaded with
 * replies to the requests it receives.
 * <p>
 * Thread-safety: the server's own monitor ({@code this}) guards
 * {@code myClientConnected} and {@code myRequests}; all waitFor* methods use
 * wait/notifyAll on that monitor. Only one client is serviced at a time.
 *
 * @copyright 2011, Allanbank Consulting, Inc., All Rights Reserved
 */
public class MockSocketServer implements Runnable, Closeable {

    /** An empty Array of bytes. */
    public static final byte[] EMPTY_BYTES = new byte[0];

    /** Set to true when a client is connected. Guarded by {@code this}. */
    private boolean myClientConnected = false;

    /** The current active connection. */
    private SocketChannel myConnection;

    /** The replies to send when a message is received. */
    private final List<byte[]> myReplies = new ArrayList<byte[]>();

    /** The requests received. Guarded by {@code this}. */
    private final List<byte[]> myRequests = new ArrayList<byte[]>();

    /** Set to false to stop the server. */
    private volatile boolean myRunning;

    /** The server socket we are listening on. */
    private final ServerSocketChannel myServerSocket;

    /**
     * Creates a new MockMongoDBServer bound to an ephemeral port on loopback.
     *
     * @throws IOException
     *             On a failure creating the server socket.
     */
    public MockSocketServer() throws IOException {
        myServerSocket = ServerSocketChannel.open();
        myServerSocket.socket().bind(
                new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 0));
        // Non-blocking so run() can poll accept() and notice shutdown.
        myServerSocket.configureBlocking(false);

        myRunning = false;
    }

    /**
     * Clears the requests received and replies to send.
     */
    public void clear() {
        myReplies.clear();
        myRequests.clear();
    }

    /**
     * Closes the server socket and flags the server loop to stop.
     *
     * @throws IOException
     *             On a failure closing the server socket.
     */
    @Override
    public void close() throws IOException {
        myRunning = false;
        myServerSocket.close();
    }

    /**
     * Disconnects any active client.
     *
     * @return True if a client is connected, false otherwise.
     */
    public boolean disconnectClient() {
        final SocketChannel channel = myConnection;
        IOUtils.close(channel);
        if (channel != null) {
            // Also close the underlying socket to be sure.
            close(channel.socket());
        }
        return (channel != null);
    }

    /**
     * Returns the address for the server.
     *
     * @return The address for the server.
     */
    public InetSocketAddress getInetSocketAddress() {
        return new InetSocketAddress(myServerSocket.socket().getInetAddress(),
                myServerSocket.socket().getLocalPort());
    }

    /**
     * Returns the replies that will be returned after each message is received.
     *
     * @return the replies to return.
     */
    public List<byte[]> getReplies() {
        return Collections.unmodifiableList(myReplies);
    }

    /**
     * Returns the requests that have been received.
     *
     * @return the requests received.
     */
    public List<byte[]> getRequests() {
        return Collections.unmodifiableList(myRequests);
    }

    /**
     * Returns the normalized "host:port" name for the server.
     *
     * @return The address for the server.
     */
    public String getServerName() {
        return ServerNameUtils.normalize(getInetSocketAddress());
    }

    /**
     * Returns if the server is running.
     *
     * @return the running
     */
    public boolean isRunning() {
        return myRunning;
    }

    /**
     * Runs the server loop waiting for connections and servicing a single
     * client until it exits.
     */
    @Override
    public void run() {
        myRunning = true;
        try {
            while (myRunning) {
                myConnection = myServerSocket.accept();
                if (myConnection != null) {
                    try {
                        handleClient();
                    }
                    finally {
                        // Wake anyone blocked in waitForDisconnect().
                        synchronized (this) {
                            myClientConnected = false;
                            notifyAll();
                        }
                        myConnection.close();
                        myConnection = null;
                    }
                }
                else {
                    // Non-blocking accept returned nothing; back off briefly.
                    sleep();
                }
            }
        }
        catch (final IOException error) {
            // Exit.
        }
    }

    /**
     * Sets the replies to return after each message is received.
     *
     * @param replies
     *            the replies to send
     */
    public void setReplies(final List<byte[]> replies) {
        myReplies.clear();
        if (replies != null) {
            myReplies.addAll(replies);
        }
    }

    /**
     * Controls if the server is running.
     *
     * @param running
     *            the running to set
     */
    public void setRunning(final boolean running) {
        myRunning = running;
    }

    /**
     * Starts the mock server on a new daemon-style worker thread.
     */
    public void start() {
        final Thread t = new Thread(this, "MockSocketServer");

        t.start();
    }

    /**
     * Waits for a client to connect.
     *
     * @param timeout
     *            Time to wait for the connect.
     * @param units
     *            The units for the time to wait for the connect.
     * @return True if a client is connect, false on timeout.
     */
    public boolean waitForClient(final int timeout, final TimeUnit units) {
        return waitForClient(units.toMillis(timeout));
    }

    /**
     * Waits for a client to connect.
     *
     * @param timeout
     *            Time to wait (in milliseconds) for the disconnect.
     * @return True if a client is connected, false on timeout.
     */
    public boolean waitForClient(final long timeout) {
        long now = System.currentTimeMillis();
        final long deadline = now + timeout;

        boolean result = false;
        synchronized (this) {
            while (!myClientConnected && (now < deadline)) {
                try {
                    // notifyAll wakes the server thread parked in sleep().
                    notifyAll();
                    wait(deadline - now);
                }
                catch (final InterruptedException e) {
                    // Ignored. Handled by while.
                }
                now = System.currentTimeMillis();
            }
            result = myClientConnected;
        }
        return result;
    }

    /**
     * Waits for a client to disconnect.
     *
     * @param timeout
     *            Time to wait for the disconnect.
     * @param units
     *            The units for the time to wait for the disconnect.
     * @return True if a client is disconnected, false on timeout.
     */
    public boolean waitForDisconnect(final int timeout, final TimeUnit units) {
        return waitForDisconnect(units.toMillis(timeout));
    }

    /**
     * Waits for a client to disconnect.
     *
     * @param timeout
     *            Time to wait (in milliseconds) for the disconnect.
     * @return True if a client is disconnected, false on timeout.
     */
    public boolean waitForDisconnect(final long timeout) {
        long now = System.currentTimeMillis();
        final long deadline = now + timeout;

        boolean result;
        synchronized (this) {
            while (myClientConnected && (now < deadline)) {
                try {
                    // notifyAll wakes the server thread parked in sleep().
                    notifyAll();
                    wait(deadline - now);
                }
                catch (final InterruptedException e) {
                    // Ignored. Handled by while.
                }
                now = System.currentTimeMillis();
            }
            result = !myClientConnected;
        }
        return result;
    }

    /**
     * Waits for a client to request.
     *
     * @param count
     *            The number of requests to wait for.
     * @param timeout
     *            Time to wait for the request.
     * @param units
     *            The units for the time to wait for the request.
     * @return True if a client is request, false on timeout.
     */
    public boolean waitForRequest(final int count, final int timeout,
            final TimeUnit units) {
        return waitForRequest(count, units.toMillis(timeout));
    }

    /**
     * Waits for a client request.
     *
     * @param count
     *            The number of request to wait for.
     * @param timeout
     *            Time to wait (in milliseconds) for the disconnect.
     * @return True if a client is connected, false on timeout.
     */
    public boolean waitForRequest(final int count, final long timeout) {
        long now = System.currentTimeMillis();
        final long deadline = now + timeout;

        synchronized (this) {
            while ((myRequests.size() < count) && (now < deadline)) {
                try {
                    // Wake up the receive thread.
                    notifyAll();
                    wait(deadline - now);
                }
                catch (final InterruptedException e) {
                    // Ignored. Handled by while.
                }
                now = System.currentTimeMillis();
            }
        }
        return (myRequests.size() >= count);
    }

    /**
     * Closes the {@link Socket} and logs any error. Sockets do not implement
     * {@link Closeable} in Java 6
     *
     * @param socket
     *            The connection to close. Sockets do not implement
     *            {@link Closeable} in Java 6
     */
    protected void close(final Socket socket) {
        if (socket != null) {
            try {
                socket.close();
            }
            catch (final IOException ignored) {
                // Ignored
            }
        }
    }

    /**
     * Handles a single client connection: reads framed messages (a fixed-size
     * header whose first 4 bytes are the little-endian total length, then the
     * body), records each complete message, and writes the next queued reply.
     *
     * @throws IOException
     *             On a connection error.
     */
    protected void handleClient() throws IOException {
        // Use non-blocking mode so we can pickup when to stop running.
        myConnection.configureBlocking(false);

        ByteBuffer header = ByteBuffer.allocate(Header.SIZE);
        ByteBuffer body = null;
        int read = 0;
        while (myRunning) {
            read = 0;
            if (myConnection.isConnectionPending()) {
                myConnection.finishConnect();
            }
            if (myConnection.isConnected()) {
                synchronized (this) {
                    myClientConnected = true;
                    notifyAll();
                }
                if (header.hasRemaining()) {
                    read = myConnection.read(header);
                }
                else {
                    if (body == null) {
                        // First 4 bytes are the message length.
                        final ByteBuffer dup = header.duplicate();
                        dup.flip();
                        final int length = EndianUtils.swap(dup.asIntBuffer()
                                .get(0));

                        body = ByteBuffer.allocate(length - Header.SIZE);
                    }

                    if (body.hasRemaining()) {
                        read = myConnection.read(body);
                    }
                    else {
                        // Finished a message.
                        header.flip();
                        body.flip();

                        // Make sure backed by an array.
                        final ByteBuffer completeMessage = ByteBuffer
                                .wrap(new byte[header.capacity()
                                        + body.capacity()]);
                        completeMessage.put(header);
                        completeMessage.put(body);

                        synchronized (this) {
                            myRequests.add(completeMessage.array());
                            notifyAll();
                        }

                        // Setup for the next message.
                        header = ByteBuffer.allocate(Header.SIZE);
                        body = null;

                        if (!myReplies.isEmpty()) {
                            final byte[] reply = myReplies.remove(0);
                            final ByteBuffer buffer = ByteBuffer.wrap(reply);
                            while (buffer.hasRemaining()) {
                                myConnection.write(buffer);
                            }
                        }
                    }
                }
            }
            else {
                // Disconnected.
                return;
            }

            if (read < 0) {
                return;
            }
            else if (read == 0) {
                sleep();
            }
        }
    }

    /**
     * Parks the server thread briefly when there is no work. Waits at most
     * ~100ms on the server's monitor (the loop exits after one wait because
     * {@code now} is set to {@code deadline}); a notifyAll from any waitFor*
     * method wakes it early.
     */
    protected void sleep() {
        long now = System.currentTimeMillis();
        final long deadline = now + 5000;
        try {
            synchronized (this) {
                while (now < deadline) {
                    wait(100);

                    now = deadline;
                }
            }
        }
        catch (final InterruptedException e) {
            // Ignore.
        }
    }
}
package com.dpizarro.uipicker.library.blur; import android.annotation.SuppressLint; import android.content.Context; import android.graphics.Bitmap; import android.os.Build; import android.renderscript.Allocation; import android.renderscript.Element; import android.renderscript.RenderScript; import android.renderscript.ScriptIntrinsicBlur; import android.util.Log; class Blur { /** * Process the image using renderscript if possible with default radius. * * @param context renderscript requires an android context * @param sentBitmap the bitmap to blur * @return the Bitmap blurred. */ public static Bitmap apply(Context context, Bitmap sentBitmap) { return apply(context, sentBitmap, PickerUIBlur.DEFAULT_BLUR_RADIUS, PickerUIBlur.DEFAULT_USE_BLUR_RENDERSCRIPT); } /** * Process the image using renderscript if possible * * @param context renderscript requires an android context * @param sentBitmap the bitmap to blur * @param radius the radius to apply in the blur task * @param useRenderScript if want to use renderScript algorithm * @return the Bitmap blurred. */ @SuppressLint("NewApi") public static Bitmap apply(Context context, Bitmap sentBitmap, int radius, boolean useRenderScript) { Bitmap bitmap = Bitmap.createScaledBitmap(sentBitmap, sentBitmap.getWidth() / 2, sentBitmap.getHeight() / 2, false); /** * If you want to use renderScript algorithm, first check if build version is < JellyBean to use library compat. 
*/ if (useRenderScript) { if (Build.VERSION.SDK_INT > Build.VERSION_CODES.JELLY_BEAN) { final RenderScript rs = RenderScript.create(context); //use this constructor for best performance, because it uses USAGE_SHARED mode which reuses memory final Allocation input = Allocation.createFromBitmap(rs, bitmap); final Allocation output = Allocation.createTyped(rs, input.getType()); final ScriptIntrinsicBlur script = ScriptIntrinsicBlur.create(rs, Element.U8_4(rs)); script.setRadius(radius); script.setInput(input); script.forEach(output); output.copyTo(bitmap); return bitmap; } else { try { final android.support.v8.renderscript.RenderScript rs = android.support.v8.renderscript.RenderScript.create(context); //use this constructor for best performance, because it uses USAGE_SHARED mode which reuses memory final android.support.v8.renderscript.Allocation input = android.support.v8.renderscript.Allocation .createFromBitmap(rs, bitmap); final android.support.v8.renderscript.Allocation output = android.support.v8.renderscript.Allocation .createTyped(rs, input.getType()); final android.support.v8.renderscript.ScriptIntrinsicBlur script = android.support.v8.renderscript.ScriptIntrinsicBlur .create(rs, android.support.v8.renderscript.Element.U8_4(rs)); script.setRadius(radius); script.setInput(input); script.forEach(output); output.copyTo(bitmap); } catch (Exception e) { bitmap = fastblur(sentBitmap, radius); } return bitmap; } } else { return fastblur(sentBitmap, radius); } } /** * Stack Blur v1.0 from * http://www.quasimondo.com/StackBlurForCanvas/StackBlurDemo.html * * Java Author: Mario Klingemann <mario at quasimondo.com> * http://incubator.quasimondo.com * created Feburary 29, 2004 * Android port : Yahel Bouaziz <yahel at kayenko.com> * http://www.kayenko.com * ported april 5th, 2012 * * This is a compromise between Gaussian Blur and Box blur * It creates much better looking blurs than Box Blur, but is * 7x faster than my Gaussian Blur implementation. 
* * I called it Stack Blur because this describes best how this * filter works internally: it creates a kind of moving stack * of colors whilst scanning through the image. Thereby it * just has to add one new block of color to the right side * of the stack and remove the leftmost color. The remaining * colors on the topmost layer of the stack are either added on * or reduced by one, depending on if they are on the right or * on the left side of the stack. * * If you are using this algorithm in your code please add * the following line: * * Stack Blur Algorithm by Mario Klingemann <mario@quasimondo.com> */ private static Bitmap fastblur(Bitmap bitmap, int radius) { if (radius < 1) { return (null); } int w = bitmap.getWidth(); int h = bitmap.getHeight(); int[] pix = new int[w * h]; Log.e("pix", w + " " + h + " " + pix.length); bitmap.getPixels(pix, 0, w, 0, 0, w, h); int wm = w - 1; int hm = h - 1; int wh = w * h; int div = radius + radius + 1; int r[] = new int[wh]; int g[] = new int[wh]; int b[] = new int[wh]; int rsum, gsum, bsum, x, y, i, p, yp, yi, yw; int vmin[] = new int[Math.max(w, h)]; int divsum = (div + 1) >> 1; divsum *= divsum; int dv[] = new int[256 * divsum]; for (i = 0; i < 256 * divsum; i++) { dv[i] = (i / divsum); } yw = yi = 0; int[][] stack = new int[div][3]; int stackpointer; int stackstart; int[] sir; int rbs; int r1 = radius + 1; int routsum, goutsum, boutsum; int rinsum, ginsum, binsum; for (y = 0; y < h; y++) { rinsum = ginsum = binsum = routsum = goutsum = boutsum = rsum = gsum = bsum = 0; for (i = -radius; i <= radius; i++) { p = pix[yi + Math.min(wm, Math.max(i, 0))]; sir = stack[i + radius]; sir[0] = (p & 0xff0000) >> 16; sir[1] = (p & 0x00ff00) >> 8; sir[2] = (p & 0x0000ff); rbs = r1 - Math.abs(i); rsum += sir[0] * rbs; gsum += sir[1] * rbs; bsum += sir[2] * rbs; if (i > 0) { rinsum += sir[0]; ginsum += sir[1]; binsum += sir[2]; } else { routsum += sir[0]; goutsum += sir[1]; boutsum += sir[2]; } } stackpointer = radius; for (x = 0; x < 
w; x++) { r[yi] = dv[rsum]; g[yi] = dv[gsum]; b[yi] = dv[bsum]; rsum -= routsum; gsum -= goutsum; bsum -= boutsum; stackstart = stackpointer - radius + div; sir = stack[stackstart % div]; routsum -= sir[0]; goutsum -= sir[1]; boutsum -= sir[2]; if (y == 0) { vmin[x] = Math.min(x + radius + 1, wm); } p = pix[yw + vmin[x]]; sir[0] = (p & 0xff0000) >> 16; sir[1] = (p & 0x00ff00) >> 8; sir[2] = (p & 0x0000ff); rinsum += sir[0]; ginsum += sir[1]; binsum += sir[2]; rsum += rinsum; gsum += ginsum; bsum += binsum; stackpointer = (stackpointer + 1) % div; sir = stack[(stackpointer) % div]; routsum += sir[0]; goutsum += sir[1]; boutsum += sir[2]; rinsum -= sir[0]; ginsum -= sir[1]; binsum -= sir[2]; yi++; } yw += w; } for (x = 0; x < w; x++) { rinsum = ginsum = binsum = routsum = goutsum = boutsum = rsum = gsum = bsum = 0; yp = -radius * w; for (i = -radius; i <= radius; i++) { yi = Math.max(0, yp) + x; sir = stack[i + radius]; sir[0] = r[yi]; sir[1] = g[yi]; sir[2] = b[yi]; rbs = r1 - Math.abs(i); rsum += r[yi] * rbs; gsum += g[yi] * rbs; bsum += b[yi] * rbs; if (i > 0) { rinsum += sir[0]; ginsum += sir[1]; binsum += sir[2]; } else { routsum += sir[0]; goutsum += sir[1]; boutsum += sir[2]; } if (i < hm) { yp += w; } } yi = x; stackpointer = radius; for (y = 0; y < h; y++) { // Preserve alpha channel: ( 0xff000000 & pix[yi] ) pix[yi] = (0xff000000 & pix[yi]) | (dv[rsum] << 16) | (dv[gsum] << 8) | dv[bsum]; rsum -= routsum; gsum -= goutsum; bsum -= boutsum; stackstart = stackpointer - radius + div; sir = stack[stackstart % div]; routsum -= sir[0]; goutsum -= sir[1]; boutsum -= sir[2]; if (x == 0) { vmin[y] = Math.min(y + r1, hm) * w; } p = x + vmin[y]; sir[0] = r[p]; sir[1] = g[p]; sir[2] = b[p]; rinsum += sir[0]; ginsum += sir[1]; binsum += sir[2]; rsum += rinsum; gsum += ginsum; bsum += binsum; stackpointer = (stackpointer + 1) % div; sir = stack[stackpointer]; routsum += sir[0]; goutsum += sir[1]; boutsum += sir[2]; rinsum -= sir[0]; ginsum -= sir[1]; binsum -= sir[2]; yi 
+= w; } } Log.e("pix", w + " " + h + " " + pix.length); bitmap.setPixels(pix, 0, w, 0, 0, w, h); return (bitmap); } }
/*******************************************************************************
 * Copyright 2011 See AUTHORS file.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
 * License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS"
 * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language
 * governing permissions and limitations under the License.
 ******************************************************************************/

package com.badlogic.gdx.math;

import java.io.Serializable;

/** An axis-aligned 2D rectangle, described by its bottom-left corner (x, y) plus its extents in x (width) and y (height).
 * @author badlogicgames@gmail.com */
public class Rectangle implements Serializable {
	/** Static temporary rectangle. Use with care! Use only when sure other code will not also use this. */
	static public final Rectangle tmp = new Rectangle();

	/** Static temporary rectangle. Use with care! Use only when sure other code will not also use this. */
	static public final Rectangle tmp2 = new Rectangle();

	private static final long serialVersionUID = 5733252015138115702L;
	public float x, y;
	public float width, height;

	/** Constructs a new rectangle with all values set to zero */
	public Rectangle () {
	}

	/** Constructs a new rectangle with the given corner point in the bottom left and dimensions.
	 * @param x The corner point x-coordinate
	 * @param y The corner point y-coordinate
	 * @param width The width
	 * @param height The height */
	public Rectangle (float x, float y, float width, float height) {
		this.x = x;
		this.y = y;
		this.width = width;
		this.height = height;
	}

	/** Constructs a rectangle as a copy of the given rectangle.
	 * @param rect The rectangle to copy */
	public Rectangle (Rectangle rect) {
		this.x = rect.x;
		this.y = rect.y;
		this.width = rect.width;
		this.height = rect.height;
	}

	/** Replaces all components of this rectangle.
	 * @param x bottom-left x coordinate
	 * @param y bottom-left y coordinate
	 * @param width width
	 * @param height height
	 * @return this rectangle for chaining */
	public Rectangle set (float x, float y, float width, float height) {
		this.x = x;
		this.y = y;
		this.width = width;
		this.height = height;
		return this;
	}

	/** @return the x-coordinate of the bottom left corner */
	public float getX () {
		return this.x;
	}

	/** Sets the x-coordinate of the bottom left corner.
	 * @param x The x-coordinate
	 * @return this rectangle for chaining */
	public Rectangle setX (float x) {
		this.x = x;
		return this;
	}

	/** @return the y-coordinate of the bottom left corner */
	public float getY () {
		return this.y;
	}

	/** Sets the y-coordinate of the bottom left corner.
	 * @param y The y-coordinate
	 * @return this rectangle for chaining */
	public Rectangle setY (float y) {
		this.y = y;
		return this;
	}

	/** @return the width */
	public float getWidth () {
		return this.width;
	}

	/** Sets the width of this rectangle.
	 * @param width The width
	 * @return this rectangle for chaining */
	public Rectangle setWidth (float width) {
		this.width = width;
		return this;
	}

	/** @return the height */
	public float getHeight () {
		return this.height;
	}

	/** Sets the height of this rectangle.
	 * @param height The height
	 * @return this rectangle for chaining */
	public Rectangle setHeight (float height) {
		this.height = height;
		return this;
	}

	/** Stores the bottom-left corner of this rectangle into the given vector.
	 * @param position The Vector2 to fill */
	public Vector2 getPosition (Vector2 position) {
		return position.set(x, y);
	}

	/** Sets the x and y-coordinates of the bottom left corner from a vector.
	 * @param position The position vector
	 * @return this rectangle for chaining */
	public Rectangle setPosition (Vector2 position) {
		this.x = position.x;
		this.y = position.y;
		return this;
	}

	/** Sets the x and y-coordinates of the bottom left corner.
	 * @param x The x-coordinate
	 * @param y The y-coordinate
	 * @return this rectangle for chaining */
	public Rectangle setPosition (float x, float y) {
		this.x = x;
		this.y = y;
		return this;
	}

	/** Sets the width and height of this rectangle.
	 * @param width The width
	 * @param height The height
	 * @return this rectangle for chaining */
	public Rectangle setSize (float width, float height) {
		this.width = width;
		this.height = height;
		return this;
	}

	/** Makes this rectangle square with the given side length.
	 * @param sizeXY The size used for both width and height
	 * @return this rectangle for chaining */
	public Rectangle setSize (float sizeXY) {
		this.width = sizeXY;
		this.height = sizeXY;
		return this;
	}

	/** Stores the width and height of this rectangle into the given vector.
	 * @param size The Vector2 to fill */
	public Vector2 getSize (Vector2 size) {
		return size.set(width, height);
	}

	/** Point containment test; the rectangle's edges count as inside.
	 * @param x point x coordinate
	 * @param y point y coordinate
	 * @return whether the point is contained in the rectangle */
	public boolean contains (float x, float y) {
		return x >= this.x && x <= this.x + this.width && y >= this.y && y <= this.y + this.height;
	}

	/** Point containment test; the rectangle's edges count as inside.
	 * @param point The coordinates vector
	 * @return whether the point is contained in the rectangle */
	public boolean contains (Vector2 point) {
		return contains(point.x, point.y);
	}

	/** Rectangle containment test. Note: bounds are strict here, so a rectangle
	 * whose edge touches this rectangle's edge is NOT considered contained.
	 * @param rectangle the other {@link Rectangle}.
	 * @return whether the other rectangle is contained in this rectangle. */
	public boolean contains (Rectangle rectangle) {
		float xmin = rectangle.x;
		float xmax = xmin + rectangle.width;
		float ymin = rectangle.y;
		float ymax = ymin + rectangle.height;

		boolean insideX = (xmin > x && xmin < x + width) && (xmax > x && xmax < x + width);
		boolean insideY = (ymin > y && ymin < y + height) && (ymax > y && ymax < y + height);
		return insideX && insideY;
	}

	/** @param r the other {@link Rectangle}
	 * @return whether this rectangle overlaps the other rectangle. */
	public boolean overlaps (Rectangle r) {
		return x < r.x + r.width && x + width > r.x && y < r.y + r.height && y + height > r.y;
	}

	/** Copies the values of the given rectangle into this rectangle.
	 * @param rect the other rectangle
	 * @return this rectangle for chaining */
	public Rectangle set (Rectangle rect) {
		this.x = rect.x;
		this.y = rect.y;
		this.width = rect.width;
		this.height = rect.height;
		return this;
	}

	/** Enlarges this rectangle to the union of itself and the other rectangle.
	 * The rectangle should not have negative width or negative height.
	 * @param rect the other rectangle
	 * @return this rectangle for chaining */
	public Rectangle merge (Rectangle rect) {
		float left = Math.min(x, rect.x);
		float bottom = Math.min(y, rect.y);
		float right = Math.max(x + width, rect.x + rect.width);
		float top = Math.max(y + height, rect.y + rect.height);

		x = left;
		y = bottom;
		width = right - left;
		height = top - bottom;
		return this;
	}

	/** Enlarges this rectangle so it contains the given point.
	 * The rectangle should not have negative width or negative height.
	 * @param x the x coordinate of the point
	 * @param y the y coordinate of the point
	 * @return this rectangle for chaining */
	public Rectangle merge (float x, float y) {
		float left = Math.min(this.x, x);
		float bottom = Math.min(this.y, y);
		float right = Math.max(this.x + width, x);
		float top = Math.max(this.y + height, y);

		this.x = left;
		this.y = bottom;
		this.width = right - left;
		this.height = top - bottom;
		return this;
	}

	/** Enlarges this rectangle so it contains the given point.
	 * The rectangle should not have negative width or negative height.
	 * @param vec the vector describing the point
	 * @return this rectangle for chaining */
	public Rectangle merge (Vector2 vec) {
		return merge(vec.x, vec.y);
	}

	/** Enlarges this rectangle so it contains every given point.
	 * The rectangle should not have negative width or negative height.
	 * @param vecs the vectors describing the points
	 * @return this rectangle for chaining */
	public Rectangle merge (Vector2[] vecs) {
		float left = x;
		float right = x + width;
		float bottom = y;
		float top = y + height;
		for (Vector2 v : vecs) {
			left = Math.min(left, v.x);
			right = Math.max(right, v.x);
			bottom = Math.min(bottom, v.y);
			top = Math.max(top, v.y);
		}
		x = left;
		width = right - left;
		y = bottom;
		height = top - bottom;
		return this;
	}

	/** Calculates the aspect ratio ( width / height ) of this rectangle.
	 * @return the aspect ratio of this rectangle. Returns Float.NaN if height is 0 to avoid ArithmeticException */
	public float getAspectRatio () {
		if (height == 0) return Float.NaN;
		return width / height;
	}

	/** Calculates the center of the rectangle. Results are stored in the given Vector2.
	 * @param vector the Vector2 to use
	 * @return the given vector with results stored inside */
	public Vector2 getCenter (Vector2 vector) {
		vector.x = x + width / 2;
		vector.y = y + height / 2;
		return vector;
	}

	/** Moves this rectangle so that its center point is located at a given position.
	 * @param x the position's x
	 * @param y the position's y
	 * @return this for chaining */
	public Rectangle setCenter (float x, float y) {
		setPosition(x - width / 2, y - height / 2);
		return this;
	}

	/** Moves this rectangle so that its center point is located at a given position.
	 * @param position the position
	 * @return this for chaining */
	public Rectangle setCenter (Vector2 position) {
		setPosition(position.x - width / 2, position.y - height / 2);
		return this;
	}

	/** Fits this rectangle around another rectangle while maintaining aspect ratio. This scales and centers the rectangle to the
	 * other rectangle (e.g. Having a camera translate and scale to show a given area)
	 * @param rect the other rectangle to fit this rectangle around
	 * @return this rectangle for chaining */
	public Rectangle fitOutside (Rectangle rect) {
		float aspect = getAspectRatio();

		if (aspect > rect.getAspectRatio()) {
			// Wider than tall
			setSize(rect.height * aspect, rect.height);
		} else {
			// Taller than wide
			setSize(rect.width, rect.width / aspect);
		}

		setPosition((rect.x + rect.width / 2) - width / 2, (rect.y + rect.height / 2) - height / 2);
		return this;
	}

	/** Fits this rectangle into another rectangle while maintaining aspect ratio. This scales and centers the rectangle to the
	 * other rectangle (e.g. Scaling a texture within a arbitrary cell without squeezing)
	 * @param rect the other rectangle to fit this rectangle inside
	 * @return this rectangle for chaining */
	public Rectangle fitInside (Rectangle rect) {
		float aspect = getAspectRatio();

		if (aspect < rect.getAspectRatio()) {
			// Taller than wide
			setSize(rect.height * aspect, rect.height);
		} else {
			// Wider than tall
			setSize(rect.width, rect.width / aspect);
		}

		setPosition((rect.x + rect.width / 2) - width / 2, (rect.y + rect.height / 2) - height / 2);
		return this;
	}

	public String toString () {
		return x + "," + y + "," + width + "," + height;
	}

	public int hashCode () {
		final int prime = 31;
		// Same accumulation order as always: height, width, x, y.
		int result = prime * (prime * (prime * (prime + Float.floatToIntBits(height)) + Float.floatToIntBits(width))
			+ Float.floatToIntBits(x)) + Float.floatToIntBits(y);
		return result;
	}

	public boolean equals (Object obj) {
		if (this == obj) return true;
		if (obj == null || getClass() != obj.getClass()) return false;
		Rectangle other = (Rectangle)obj;
		return Float.floatToIntBits(height) == Float.floatToIntBits(other.height)
			&& Float.floatToIntBits(width) == Float.floatToIntBits(other.width)
			&& Float.floatToIntBits(x) == Float.floatToIntBits(other.x)
			&& Float.floatToIntBits(y) == Float.floatToIntBits(other.y);
	}
}
/*
 * Copyright (C) 2008 The Guava Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.common.collect.testing.google;

import static com.google.common.base.Preconditions.checkArgument;

import com.google.common.annotations.GwtIncompatible;
import com.google.common.collect.Multiset;
import com.google.common.collect.Multiset.Entry;
import com.google.common.collect.Multisets;
import com.google.common.collect.testing.AbstractCollectionTestSuiteBuilder;
import com.google.common.collect.testing.AbstractTester;
import com.google.common.collect.testing.FeatureSpecificTestSuiteBuilder;
import com.google.common.collect.testing.Helpers;
import com.google.common.collect.testing.OneSizeTestContainerGenerator;
import com.google.common.collect.testing.SampleElements;
import com.google.common.collect.testing.SetTestSuiteBuilder;
import com.google.common.collect.testing.TestSetGenerator;
import com.google.common.collect.testing.features.CollectionFeature;
import com.google.common.collect.testing.features.Feature;
import com.google.common.collect.testing.testers.CollectionSerializationEqualTester;
import com.google.common.testing.SerializableTester;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import junit.framework.TestSuite;

/**
 * Creates, based on your criteria, a JUnit test suite that exhaustively tests a {@code Multiset}
 * implementation.
 *
 * @author Jared Levy
 * @author Louis Wasserman
 */
@GwtIncompatible
public class MultisetTestSuiteBuilder<E>
    extends AbstractCollectionTestSuiteBuilder<MultisetTestSuiteBuilder<E>, E> {
  // Entry point: wraps the given generator in a builder instance.
  public static <E> MultisetTestSuiteBuilder<E> using(TestMultisetGenerator<E> generator) {
    return new MultisetTestSuiteBuilder<E>().usingGenerator(generator);
  }

  // Marker feature used to stop infinite recursion when deriving the
  // entrySet() suite (which would otherwise derive its own entrySet suite).
  public enum NoRecurse implements Feature<Void> {
    NO_ENTRY_SET;

    @Override
    public Set<Feature<? super Void>> getImpliedFeatures() {
      return Collections.emptySet();
    }
  }

  // All tester classes run against the Multiset under construction, in
  // addition to the generic collection testers inherited from the superclass.
  @Override
  protected List<Class<? extends AbstractTester>> getTesters() {
    List<Class<? extends AbstractTester>> testers = Helpers.copyToList(super.getTesters());
    testers.add(CollectionSerializationEqualTester.class);
    testers.add(MultisetAddTester.class);
    testers.add(MultisetContainsTester.class);
    testers.add(MultisetCountTester.class);
    testers.add(MultisetElementSetTester.class);
    testers.add(MultisetEqualsTester.class);
    testers.add(MultisetReadsTester.class);
    testers.add(MultisetSetCountConditionallyTester.class);
    testers.add(MultisetSetCountUnconditionallyTester.class);
    testers.add(MultisetRemoveTester.class);
    testers.add(MultisetEntrySetTester.class);
    testers.add(MultisetIteratorTester.class);
    testers.add(MultisetSerializationTester.class);
    return testers;
  }

  // Derives the feature set of multiset.entrySet() from the multiset's own
  // features: the entry-set view does not support add / null values, rejects
  // duplicates at creation, and is serializable only when views are.
  private static Set<Feature<?>> computeEntrySetFeatures(Set<Feature<?>> features) {
    Set<Feature<?>> derivedFeatures = new HashSet<>(features);
    derivedFeatures.remove(CollectionFeature.GENERAL_PURPOSE);
    derivedFeatures.remove(CollectionFeature.SUPPORTS_ADD);
    derivedFeatures.remove(CollectionFeature.ALLOWS_NULL_VALUES);
    derivedFeatures.add(CollectionFeature.REJECTS_DUPLICATES_AT_CREATION);
    // SERIALIZABLE on the view requires SERIALIZABLE_INCLUDING_VIEWS on the
    // parent; if that is absent, drop plain SERIALIZABLE as well.
    if (!derivedFeatures.remove(CollectionFeature.SERIALIZABLE_INCLUDING_VIEWS)) {
      derivedFeatures.remove(CollectionFeature.SERIALIZABLE);
    }
    return derivedFeatures;
  }

  // Same idea as computeEntrySetFeatures, for multiset.elementSet().
  static Set<Feature<?>> computeElementSetFeatures(Set<Feature<?>> features) {
    Set<Feature<?>> derivedFeatures = new HashSet<>(features);
    derivedFeatures.remove(CollectionFeature.GENERAL_PURPOSE);
    derivedFeatures.remove(CollectionFeature.SUPPORTS_ADD);
    if (!derivedFeatures.remove(CollectionFeature.SERIALIZABLE_INCLUDING_VIEWS)) {
      derivedFeatures.remove(CollectionFeature.SERIALIZABLE);
    }
    return derivedFeatures;
  }

  // A reserialized copy is no longer tested for serializability itself.
  private static Set<Feature<?>> computeReserializedMultisetFeatures(Set<Feature<?>> features) {
    Set<Feature<?>> derivedFeatures = new HashSet<>(features);
    derivedFeatures.remove(CollectionFeature.SERIALIZABLE);
    derivedFeatures.remove(CollectionFeature.SERIALIZABLE_INCLUDING_VIEWS);
    return derivedFeatures;
  }

  // Adds suites for the elementSet() view, the entrySet() view (unless
  // suppressed via NO_ENTRY_SET), and a serialize/deserialize round-trip.
  @Override
  protected List<TestSuite> createDerivedSuites(
      FeatureSpecificTestSuiteBuilder<?, ? extends OneSizeTestContainerGenerator<Collection<E>, E>>
          parentBuilder) {
    List<TestSuite> derivedSuites = new ArrayList<>(super.createDerivedSuites(parentBuilder));

    derivedSuites.add(createElementSetTestSuite(parentBuilder));

    if (!parentBuilder.getFeatures().contains(NoRecurse.NO_ENTRY_SET)) {
      derivedSuites.add(
          SetTestSuiteBuilder.using(new EntrySetGenerator<E>(parentBuilder.getSubjectGenerator()))
              .named(getName() + ".entrySet")
              .withFeatures(computeEntrySetFeatures(parentBuilder.getFeatures()))
              .suppressing(parentBuilder.getSuppressedTests())
              .withSetUp(parentBuilder.getSetUp())
              .withTearDown(parentBuilder.getTearDown())
              .createTestSuite());
    }

    if (parentBuilder.getFeatures().contains(CollectionFeature.SERIALIZABLE)) {
      derivedSuites.add(
          MultisetTestSuiteBuilder.using(
                  new ReserializedMultisetGenerator<E>(parentBuilder.getSubjectGenerator()))
              .named(getName() + " reserialized")
              .withFeatures(computeReserializedMultisetFeatures(parentBuilder.getFeatures()))
              .suppressing(parentBuilder.getSuppressedTests())
              .withSetUp(parentBuilder.getSetUp())
              .withTearDown(parentBuilder.getTearDown())
              .createTestSuite());
    }
    return derivedSuites;
  }

  // Builds the derived suite exercising multiset.elementSet().
  TestSuite createElementSetTestSuite(
      FeatureSpecificTestSuiteBuilder<?, ? extends OneSizeTestContainerGenerator<Collection<E>, E>>
          parentBuilder) {
    return SetTestSuiteBuilder.using(
            new ElementSetGenerator<E>(parentBuilder.getSubjectGenerator()))
        .named(getName() + ".elementSet")
        .withFeatures(computeElementSetFeatures(parentBuilder.getFeatures()))
        .suppressing(parentBuilder.getSuppressedTests())
        .withSetUp(parentBuilder.getSetUp())
        .withTearDown(parentBuilder.getTearDown())
        .createTestSuite();
  }

  // Adapts the multiset generator into a Set generator for elementSet():
  // every element is inserted twice so the element-set collapses duplicates.
  static class ElementSetGenerator<E> implements TestSetGenerator<E> {
    final OneSizeTestContainerGenerator<Collection<E>, E> gen;

    ElementSetGenerator(OneSizeTestContainerGenerator<Collection<E>, E> gen) {
      this.gen = gen;
    }

    @Override
    public SampleElements<E> samples() {
      return gen.samples();
    }

    @Override
    public Set<E> create(Object... elements) {
      Object[] duplicated = new Object[elements.length * 2];
      for (int i = 0; i < elements.length; i++) {
        duplicated[i] = elements[i];
        duplicated[i + elements.length] = elements[i];
      }
      return ((Multiset<E>) gen.create(duplicated)).elementSet();
    }

    @Override
    public E[] createArray(int length) {
      return gen.createArray(length);
    }

    @Override
    public Iterable<E> order(List<E> insertionOrder) {
      // Deduplicate (preserving first occurrence) before delegating ordering.
      return gen.order(new ArrayList<E>(new LinkedHashSet<E>(insertionOrder)));
    }
  }

  // Adapts the multiset generator into a Set generator for entrySet().
  static class EntrySetGenerator<E> implements TestSetGenerator<Multiset.Entry<E>> {
    final OneSizeTestContainerGenerator<Collection<E>, E> gen;

    private EntrySetGenerator(OneSizeTestContainerGenerator<Collection<E>, E> gen) {
      this.gen = gen;
    }

    @Override
    public SampleElements<Multiset.Entry<E>> samples() {
      SampleElements<E> samples = gen.samples();
      return new SampleElements<>(
          Multisets.immutableEntry(samples.e0(), 3),
          Multisets.immutableEntry(samples.e1(), 4),
          Multisets.immutableEntry(samples.e2(), 1),
          Multisets.immutableEntry(samples.e3(), 5),
          Multisets.immutableEntry(samples.e4(), 2));
    }

    @Override
    public Set<Multiset.Entry<E>> create(Object... entries) {
      // Expand each (element, count) entry into 'count' repeated elements.
      List<Object> contents = new ArrayList<>();
      Set<E> elements = new HashSet<>();
      for (Object o : entries) {
        @SuppressWarnings("unchecked")
        Multiset.Entry<E> entry = (Entry<E>) o;
        checkArgument(
            elements.add(entry.getElement()), "Duplicate keys not allowed in EntrySetGenerator");
        for (int i = 0; i < entry.getCount(); i++) {
          contents.add(entry.getElement());
        }
      }
      return ((Multiset<E>) gen.create(contents.toArray())).entrySet();
    }

    @SuppressWarnings("unchecked")
    @Override
    public Multiset.Entry<E>[] createArray(int length) {
      return new Multiset.Entry[length];
    }

    @Override
    public Iterable<Entry<E>> order(List<Entry<E>> insertionOrder) {
      // We mimic the order from gen.
      Map<E, Entry<E>> map = new LinkedHashMap<>();
      for (Entry<E> entry : insertionOrder) {
        map.put(entry.getElement(), entry);
      }

      Set<E> seen = new HashSet<>();
      List<Entry<E>> order = new ArrayList<>();
      for (E e : gen.order(new ArrayList<E>(map.keySet()))) {
        if (seen.add(e)) {
          order.add(map.get(e));
        }
      }
      return order;
    }
  }

  // Generator that serializes and deserializes the created multiset before
  // handing it to the testers, verifying serialization round-trips.
  static class ReserializedMultisetGenerator<E> implements TestMultisetGenerator<E> {
    final OneSizeTestContainerGenerator<Collection<E>, E> gen;

    private ReserializedMultisetGenerator(OneSizeTestContainerGenerator<Collection<E>, E> gen) {
      this.gen = gen;
    }

    @Override
    public SampleElements<E> samples() {
      return gen.samples();
    }

    @Override
    public Multiset<E> create(Object... elements) {
      return (Multiset<E>) SerializableTester.reserialize(gen.create(elements));
    }

    @Override
    public E[] createArray(int length) {
      return gen.createArray(length);
    }

    @Override
    public Iterable<E> order(List<E> insertionOrder) {
      return gen.order(insertionOrder);
    }
  }
}
/**
 * Copyright 2011-2015 John Ericksen
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.androidtransfuse.processor;

import org.androidtransfuse.TransfuseAnalysisException;
import org.androidtransfuse.config.TransfuseAndroidModule;
import org.androidtransfuse.model.Mergeable;
import org.androidtransfuse.model.manifest.*;
import org.androidtransfuse.util.AndroidLiterals;
import org.androidtransfuse.util.Logger;
import org.apache.commons.beanutils.PropertyUtils;

import javax.inject.Inject;
import javax.inject.Named;
import javax.inject.Singleton;
import java.beans.BeanInfo;
import java.beans.IntrospectionException;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.lang.annotation.Annotation;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;

/**
 * Accumulates manifest entries (activities, services, receivers, permissions,
 * features, SDK settings) registered during annotation processing and builds
 * the generated {@link Manifest}, tagging each entry for later merging with
 * the original manifest.
 *
 * @author John Ericksen
 */
@Singleton
public class ManifestManager {

    private final List<Application> applications = new ArrayList<Application>();
    private final String manifestPackage;
    private final List<Activity> activities = new ArrayList<Activity>();
    private final List<Receiver> broadcastReceivers = new ArrayList<Receiver>();
    private final List<Service> services = new ArrayList<Service>();
    private final List<UsesPermission> usesPermissions = new ArrayList<UsesPermission>();
    private final List<UsesFeature> usesFeatures = new ArrayList<UsesFeature>();
    private final List<Permission> permissions = new ArrayList<Permission>();
    private final Logger log;
    private UsesSDK usesSdk;

    @Inject
    public ManifestManager(@Named(TransfuseAndroidModule.ORIGINAL_MANIFEST) Manifest originalManifest, Logger log) {
        this.log = log;
        // The generated manifest inherits the package of the original manifest.
        this.manifestPackage = originalManifest.getApplicationPackage();
    }

    /** Registers an {@code <application>} element for the generated manifest. */
    public void addApplication(Application application) {
        log.debug("Adding to manifest: " + application);
        this.applications.add(application);
    }

    /** Registers a {@code <permission>} element, tagging its merge properties. */
    public void addPermission(Permission permission){
        try {
            log.debug("Adding to manifest: " + permission);
            updateMergeTags(Permission.class, permission);
            permissions.add(permission);
        } catch (MergerException e) {
            // Fix: previously reported "Unable to Merge UsesPermission" —
            // a copy-paste from addUsesPermission — which misdirected users.
            throw new TransfuseAnalysisException("Unable to Merge Permission", e);
        }
    }

    /** Registers a {@code <uses-feature>} element, tagging its merge properties. */
    public void addUsesFeature(UsesFeature usesFeature){
        try {
            log.debug("Adding to manifest: " + usesFeature);
            updateMergeTags(UsesFeature.class, usesFeature);
            usesFeatures.add(usesFeature);
        } catch (MergerException e) {
            throw new TransfuseAnalysisException("Unable to Merge UsesFeature", e);
        }
    }

    /** Registers a {@code <uses-permission>} element, tagging its merge properties. */
    public void addUsesPermission(UsesPermission usesPermission) {
        try {
            log.debug("Adding to manifest: " + usesPermission);
            updateMergeTags(UsesPermission.class, usesPermission);
            usesPermissions.add(usesPermission);
        } catch (MergerException e) {
            throw new TransfuseAnalysisException("Unable to Merge UsesPermission", e);
        }
    }

    /** Sets the (single) {@code <uses-sdk>} element for the generated manifest. */
    public void setUsesSdk(UsesSDK usesSdk) {
        log.debug("Adding to manifest: " + usesSdk);
        this.usesSdk = usesSdk;
    }

    /** Registers an {@code <activity>} and its nested intent-filters / meta-data. */
    public void addActivity(Activity activity) {
        try {
            log.debug("Adding to manifest: " + activity);
            updateMergeTags(Activity.class, activity);
            updateMergeTags(IntentFilter.class, activity.getIntentFilters());
            updateMergeTags(MetaData.class, activity.getMetaData());
            this.activities.add(activity);
        } catch (MergerException e) {
            throw new TransfuseAnalysisException("Unable to Merge Activity", e);
        }
    }

    /** Registers a {@code <receiver>} and its nested intent-filters / meta-data. */
    public void addBroadcastReceiver(Receiver broadcastReceiver) {
        try {
            log.debug("Adding to manifest: " + broadcastReceiver);
            updateMergeTags(Receiver.class, broadcastReceiver);
            updateMergeTags(IntentFilter.class, broadcastReceiver.getIntentFilters());
            updateMergeTags(MetaData.class, broadcastReceiver.getMetaData());
            this.broadcastReceivers.add(broadcastReceiver);
        } catch (MergerException e) {
            throw new TransfuseAnalysisException("Unable to Merge Broadcast Receiver", e);
        }
    }

    // Bulk variant: tags every mergeable in the collection.
    private <T extends Mergeable> void updateMergeTags(Class<T> clazz, List<T> mergeableCollection) throws MergerException {
        for (T mergeable : mergeableCollection) {
            updateMergeTags(clazz, mergeable);
        }
    }

    /** Registers a {@code <service>} and its nested intent-filters / meta-data. */
    public void addService(Service service) {
        try {
            log.debug("Adding to manifest: " + service);
            updateMergeTags(Service.class, service);
            updateMergeTags(IntentFilter.class, service.getIntentFilters());
            updateMergeTags(MetaData.class, service.getMetaData());
            this.services.add(service);
        } catch (MergerException e) {
            throw new TransfuseAnalysisException("Unable to Merge Service", e);
        }
    }

    /**
     * Assembles the generated manifest from everything registered so far.
     *
     * @return the generated {@link Manifest}
     * @throws MergerException if merge-tag introspection fails
     */
    public Manifest getManifest() throws MergerException {
        Manifest manifest = new Manifest();
        manifest.setApplicationPackage(manifestPackage);

        Application localApplication;
        if (applications.isEmpty()){
            // No application was registered: synthesize a default one.
            localApplication = new Application();
            localApplication.setName(AndroidLiterals.APPLICATION.getName());
        }
        else{
            localApplication = applications.get(0);
        }
        //todo: do multiple applications result in an error?
        updateMergeTags(Application.class, localApplication);
        localApplication.getActivities().addAll(activities);
        localApplication.getReceivers().addAll(broadcastReceivers);
        localApplication.getServices().addAll(services);
        manifest.getApplications().add(localApplication);

        manifest.getUsesPermissions().addAll(usesPermissions);
        manifest.getUsesFeatures().addAll(usesFeatures);
        manifest.getPermissions().addAll(permissions);

        if(usesSdk != null){
            manifest.getUsesSDKs().add(usesSdk);
        }

        manifest.updatePackages();

        return manifest;
    }

    // Marks the mergeable as generated and records a merge tag for every bean
    // property whose accessor is annotated with @Merge and currently non-null.
    private <T extends Mergeable> void updateMergeTags(Class<T> clazz, T mergeable) throws MergerException {
        try {
            mergeable.setGenerated(true);

            BeanInfo beanInfo = Introspector.getBeanInfo(clazz);

            for (PropertyDescriptor propertyDescriptor : beanInfo.getPropertyDescriptors()) {
                Method readMethod = propertyDescriptor.getReadMethod();
                Method writeMethod = propertyDescriptor.getWriteMethod();
                // @Merge may sit on either the getter or the setter.
                Merge mergeAnnotation = findAnnotation(Merge.class, writeMethod, readMethod);

                Object property = PropertyUtils.getProperty(mergeable, propertyDescriptor.getName());

                if (mergeAnnotation != null && property != null) {
                    mergeable.addMergeTag(mergeAnnotation.value());
                }
            }
        } catch (IntrospectionException e) {
            throw new MergerException(e);
        } catch (InvocationTargetException e) {
            throw new MergerException(e);
        } catch (NoSuchMethodException e) {
            throw new MergerException(e);
        } catch (IllegalAccessException e) {
            throw new MergerException(e);
        }
    }

    // Returns the first occurrence of the annotation on any of the given
    // methods (null entries tolerated), or null if none carries it.
    private <T extends Annotation> T findAnnotation(Class<T> annotationClass, Method... methods) {
        T annotation = null;
        if (methods != null) {
            for (Method method : methods) {
                if (annotation == null && method != null && method.isAnnotationPresent(annotationClass)) {
                    annotation = method.getAnnotation(annotationClass);
                }
            }
        }
        return annotation;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.gemstone.gemfire.internal.cache;
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp

import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import java.util.concurrent.atomic.AtomicLongFieldUpdater;

import com.gemstone.gemfire.internal.cache.lru.EnableLRU;
import com.gemstone.gemfire.internal.cache.lru.LRUClockNode;
import com.gemstone.gemfire.internal.cache.lru.NewLRUClockHand;
import com.gemstone.gemfire.internal.offheap.OffHeapRegionEntryHelper;
import com.gemstone.gemfire.internal.offheap.annotations.Released;
import com.gemstone.gemfire.internal.offheap.annotations.Retained;
import com.gemstone.gemfire.internal.offheap.annotations.Unretained;
import com.gemstone.gemfire.internal.util.concurrent.CustomEntryConcurrentHashMap.HashEntry;

// macros whose definition changes this class:
// disk: DISK
// lru: LRU
// stats: STATS
// versioned: VERSIONED
// offheap: OFFHEAP
// One of the following key macros must be defined:
// key object: KEY_OBJECT
// key int: KEY_INT
// key long: KEY_LONG
// key uuid: KEY_UUID
// key string1: KEY_STRING1
// key string2: KEY_STRING2

/**
 * Do not modify this class. It was generated.
 * Instead modify LeafRegionEntry.cpp and then run
 * bin/generateRegionEntryClasses.sh from the directory
 * that contains your build.xml.
 *
 * LRU off-heap region entry specialized for short String keys: the key's
 * characters are packed inline into two long fields (bits1/bits2) rather than
 * stored as a separate object. The key length (max 63, per the 6-bit length
 * field) and an encoding flag live in the low bits of bits1.
 */
public class VMThinLRURegionEntryOffHeapStringKey2 extends VMThinLRURegionEntryOffHeap {
  public VMThinLRURegionEntryOffHeapStringKey2 (RegionEntryContext context, String key,
      @Retained Object value
      , boolean byteEncode
      ) {
    super(context, value
        );
    // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
    // caller has already confirmed that key.length <= MAX_INLINE_STRING_KEY
    long tmpBits1 = 0L;
    long tmpBits2 = 0L;
    if (byteEncode) {
      // Byte encoding: each char fits in one byte. Chars 0..6 are packed into
      // bits1 (above the length/encoding bits), the rest into bits2.
      for (int i=key.length()-1; i >= 0; i--) {
        // Note: we know each byte is <= 0x7f so the "& 0xff" is not needed. But I added it in to keep findbugs happy.
        if (i < 7) {
          tmpBits1 |= (byte)key.charAt(i) & 0xff;
          tmpBits1 <<= 8;
        } else {
          tmpBits2 <<= 8;
          tmpBits2 |= (byte)key.charAt(i) & 0xff;
        }
      }
      // bit 6 of bits1 records that the key is byte-encoded (see getEncoding()).
      tmpBits1 |= 1<<6;
    } else {
      // Char encoding: 16 bits per char. Chars 0..2 go into bits1, the rest into bits2.
      for (int i=key.length()-1; i >= 0; i--) {
        if (i < 3) {
          tmpBits1 |= key.charAt(i);
          tmpBits1 <<= 16;
        } else {
          tmpBits2 <<= 16;
          tmpBits2 |= key.charAt(i);
        }
      }
    }
    // Key length occupies the lowest 6 bits of bits1.
    tmpBits1 |= key.length();
    this.bits1 = tmpBits1;
    this.bits2 = tmpBits2;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // common code
  protected int hash;
  private HashEntry<Object, Object> next;
  @SuppressWarnings("unused")
  private volatile long lastModified;
  private static final AtomicLongFieldUpdater<VMThinLRURegionEntryOffHeapStringKey2> lastModifiedUpdater
    = AtomicLongFieldUpdater.newUpdater(VMThinLRURegionEntryOffHeapStringKey2.class, "lastModified");
  /**
   * All access done using ohAddrUpdater so it is used even though the compiler can not tell it is.
   */
  @SuppressWarnings("unused")
  @Retained @Released private volatile long ohAddress;
  /**
   * I needed to add this because I wanted clear to call setValue which normally can only be called while the re is synced.
   * But if I sync in that code it causes a lock ordering deadlock with the disk regions because they also get a rw lock in clear.
   * Some hardware platforms do not support CAS on a long. If gemfire is run on one of those the AtomicLongFieldUpdater does a sync
   * on the re and we will once again be deadlocked.
   * I don't know if we support any of the hardware platforms that do not have a 64bit CAS. If we do then we can expect deadlocks
   * on disk regions.
   */
  private final static AtomicLongFieldUpdater<VMThinLRURegionEntryOffHeapStringKey2> ohAddrUpdater
    = AtomicLongFieldUpdater.newUpdater(VMThinLRURegionEntryOffHeapStringKey2.class, "ohAddress");

  @Override
  public Token getValueAsToken() {
    return OffHeapRegionEntryHelper.getValueAsToken(this);
  }

  @Override
  protected Object getValueField() {
    return OffHeapRegionEntryHelper._getValue(this);
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp

  @Override
  @Unretained
  protected void setValueField(@Unretained Object v) {
    OffHeapRegionEntryHelper.setValue(this, v);
  }

  @Override
  @Retained
  public Object _getValueRetain(RegionEntryContext context, boolean decompress) {
    return OffHeapRegionEntryHelper._getValueRetain(this, decompress, context);
  }

  @Override
  public long getAddress() {
    return ohAddrUpdater.get(this);
  }

  @Override
  public boolean setAddress(long expectedAddr, long newAddr) {
    return ohAddrUpdater.compareAndSet(this, expectedAddr, newAddr);
  }

  @Override
  @Released
  public void release() {
    OffHeapRegionEntryHelper.releaseEntry(this);
  }

  @Override
  public void returnToPool() {
    // Deadcoded for now; never was working
//    if (this instanceof VMThinRegionEntryLongKey) {
//      factory.returnToPool((VMThinRegionEntryLongKey)this);
//    }
  }

  protected long getlastModifiedField() {
    return lastModifiedUpdater.get(this);
  }

  protected boolean compareAndSetLastModifiedField(long expectedValue, long newValue) {
    return lastModifiedUpdater.compareAndSet(this, expectedValue, newValue);
  }

  /**
   * @see HashEntry#getEntryHash()
   */
  public final int getEntryHash() {
    return this.hash;
  }

  protected void setEntryHash(int v) {
    this.hash = v;
  }

  /**
   * @see HashEntry#getNextEntry()
   */
  public final HashEntry<Object, Object> getNextEntry() {
    return this.next;
  }

  /**
   * @see HashEntry#setNextEntry
   */
  public final void setNextEntry(final HashEntry<Object, Object> n) {
    this.next = n;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // lru code

  @Override
  public void setDelayedDiskId(LocalRegion r) {
    // nothing needed for LRUs with no disk
  }

  public final synchronized int updateEntrySize(EnableLRU capacityController) {
    return updateEntrySize(capacityController, _getValue());  // OFHEAP: _getValue ok w/o incing refcount because we are synced and only getting the size
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp

  // Returns the delta between the old and the newly computed entry size.
  public final synchronized int updateEntrySize(EnableLRU capacityController,
                                                Object value) {
    int oldSize = getEntrySize();
    int newSize = capacityController.entrySize( getKeyForSizing(), value);
    setEntrySize(newSize);
    int delta = newSize - oldSize;
    //   if ( debug ) log( "updateEntrySize key=" + getKey()
    //                     + (_getValue() == Token.INVALID ? " invalid" :
    //                        (_getValue() == Token.LOCAL_INVALID ? "local_invalid" :
    //                         (_getValue()==null ? " evicted" : " valid")))
    //                     + " oldSize=" + oldSize
    //                     + " newSize=" + this.size );
    return delta;
  }

  public final boolean testRecentlyUsed() {
    return areAnyBitsSet(RECENTLY_USED);
  }

  @Override
  public final void setRecentlyUsed() {
    setBits(RECENTLY_USED);
  }

  public final void unsetRecentlyUsed() {
    clearBits(~RECENTLY_USED);
  }

  public final boolean testEvicted() {
    return areAnyBitsSet(EVICTED);
  }

  public final void setEvicted() {
    setBits(EVICTED);
  }

  public final void unsetEvicted() {
    clearBits(~EVICTED);
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp

  // Doubly-linked LRU clock list pointers plus the cached entry size.
  private LRUClockNode nextLRU;
  private LRUClockNode prevLRU;
  private int size;

  public final void setNextLRUNode( LRUClockNode next ) {
    this.nextLRU = next;
  }

  public final LRUClockNode nextLRUNode() {
    return this.nextLRU;
  }

  public final void setPrevLRUNode( LRUClockNode prev ) {
    this.prevLRU = prev;
  }

  public final LRUClockNode prevLRUNode() {
    return this.prevLRU;
  }

  public final int getEntrySize() {
    return this.size;
  }

  protected final void setEntrySize(int size) {
    this.size = size;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp

//  @Override
//  public StringBuilder appendFieldsToString(final StringBuilder sb) {
//    StringBuilder result = super.appendFieldsToString(sb);
//    result.append("; prev=").append(this.prevLRU==null?"null":"not null");
//    result.append("; next=").append(this.nextLRU==null?"null":"not null");
//    return result;
//  }

  @Override
  public Object getKeyForSizing() {
    // inline keys always report null for sizing since the size comes from the entry size
    return null;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // key code

  // strlen is encoded in lowest 6 bits (max strlen is 63)
  // character encoding info is in bits 7 and 8
  // The other bits are used to encoded character data.
  private final long bits1;
  // bits2 encodes character data
  private final long bits2;

  // Low 6 bits of bits1 hold the key length (see constructor).
  private int getKeyLength() {
    return (int) (this.bits1 & 0x003fL);
  }

  private int getEncoding() {
    // 0 means encoded as char
    // 1 means encoded as bytes that are all <= 0x7f;
    return (int) (this.bits1 >> 6) & 0x03;
  }

  @Override
  public final Object getKey() {
    // Rebuilds the String by unpacking characters from bits1/bits2,
    // mirroring the packing order used by the constructor.
    int keylen = getKeyLength();
    char[] chars = new char[keylen];
    long tmpBits1 = this.bits1;
    long tmpBits2 = this.bits2;
    if (getEncoding() == 1) {
      for (int i=0; i < keylen; i++) {
        if (i < 7) {
          tmpBits1 >>= 8;
          chars[i] = (char) (tmpBits1 & 0x00ff);
        } else {
          chars[i] = (char) (tmpBits2 & 0x00ff);
          tmpBits2 >>= 8;
        }
      }
    } else {
      for (int i=0; i < keylen; i++) {
        if (i < 3) {
          tmpBits1 >>= 16;
          chars[i] = (char) (tmpBits1 & 0x00FFff);
        } else {
          chars[i] = (char) (tmpBits2 & 0x00FFff);
          tmpBits2 >>= 16;
        }
      }
    }
    return new String(chars);
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp

  @Override
  public boolean isKeyEqual(Object k) {
    // Compares against the inline-packed key without materializing a String:
    // first the cheap length check, then a char-by-char unpack-and-compare.
    if (k instanceof String) {
      String str = (String)k;
      int keylen = getKeyLength();
      if (str.length() == keylen) {
        long tmpBits1 = this.bits1;
        long tmpBits2 = this.bits2;
        if (getEncoding() == 1) {
          for (int i=0; i < keylen; i++) {
            char c;
            if (i < 7) {
              tmpBits1 >>= 8;
              c = (char) (tmpBits1 & 0x00ff);
            } else {
              c = (char) (tmpBits2 & 0x00ff);
              tmpBits2 >>= 8;
            }
            if (str.charAt(i) != c) {
              return false;
            }
          }
        } else {
          for (int i=0; i < keylen; i++) {
            char c;
            if (i < 3) {
              tmpBits1 >>= 16;
              c = (char) (tmpBits1 & 0x00FFff);
            } else {
              c = (char) (tmpBits2 & 0x00FFff);
              tmpBits2 >>= 16;
            }
            if (str.charAt(i) != c) {
              return false;
            }
          }
        }
        return true;
      }
    }
    return false;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.common.xcontent;

import java.nio.charset.StandardCharsets;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.compress.Compressor;
import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.xcontent.ToXContent.Params;

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;

import static org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS;

/**
 * Helpers for parsing, converting, merging and copying XContent
 * (JSON/SMILE/YAML/CBOR) payloads, transparently handling compressed input.
 */
@SuppressWarnings("unchecked")
public class XContentHelper {

    /**
     * Creates a parser over the given bytes, transparently decompressing them
     * if a known {@link Compressor} header is detected.
     */
    public static XContentParser createParser(BytesReference bytes) throws IOException {
        Compressor compressor = CompressorFactory.compressor(bytes);
        if (compressor != null) {
            InputStream compressedInput = compressor.streamInput(bytes.streamInput());
            if (compressedInput.markSupported() == false) {
                // xContentType detection needs mark/reset on the stream
                compressedInput = new BufferedInputStream(compressedInput);
            }
            XContentType contentType = XContentFactory.xContentType(compressedInput);
            return XContentFactory.xContent(contentType).createParser(compressedInput);
        } else {
            return XContentFactory.xContent(bytes).createParser(bytes.streamInput());
        }
    }

    /**
     * Parses the given (possibly compressed) bytes into a map.
     *
     * @param ordered whether to preserve field order in the returned map
     * @return the detected content type together with the parsed map
     * @throws ElasticsearchParseException if the content cannot be parsed
     */
    public static Tuple<XContentType, Map<String, Object>> convertToMap(BytesReference bytes, boolean ordered)
            throws ElasticsearchParseException {
        try {
            XContentType contentType;
            InputStream input;
            Compressor compressor = CompressorFactory.compressor(bytes);
            if (compressor != null) {
                InputStream compressedStreamInput = compressor.streamInput(bytes.streamInput());
                if (compressedStreamInput.markSupported() == false) {
                    compressedStreamInput = new BufferedInputStream(compressedStreamInput);
                }
                contentType = XContentFactory.xContentType(compressedStreamInput);
                input = compressedStreamInput;
            } else {
                contentType = XContentFactory.xContentType(bytes);
                input = bytes.streamInput();
            }
            try (XContentParser parser = XContentFactory.xContent(contentType).createParser(input)) {
                if (ordered) {
                    return Tuple.tuple(contentType, parser.mapOrdered());
                } else {
                    return Tuple.tuple(contentType, parser.map());
                }
            }
        } catch (IOException e) {
            throw new ElasticsearchParseException("Failed to parse content to map", e);
        }
    }

    public static String convertToJson(BytesReference bytes, boolean reformatJson) throws IOException {
        return convertToJson(bytes, reformatJson, false);
    }

    /**
     * Converts the given bytes to a JSON string. If the content is already JSON
     * and {@code reformatJson} is false, the bytes are returned as a UTF-8
     * string without re-serializing.
     */
    public static String convertToJson(BytesReference bytes, boolean reformatJson, boolean prettyPrint) throws IOException {
        if (bytes.hasArray()) {
            return convertToJson(bytes.array(), bytes.arrayOffset(), bytes.length(), reformatJson, prettyPrint);
        }
        XContentType xContentType = XContentFactory.xContentType(bytes);
        if (xContentType == XContentType.JSON && !reformatJson) {
            BytesArray bytesArray = bytes.toBytesArray();
            return new String(bytesArray.array(), bytesArray.arrayOffset(), bytesArray.length(), StandardCharsets.UTF_8);
        }
        // try-with-resources replaces the previous manual try/finally close
        try (XContentParser parser = XContentFactory.xContent(xContentType).createParser(bytes.streamInput())) {
            parser.nextToken();
            XContentBuilder builder = XContentFactory.jsonBuilder();
            if (prettyPrint) {
                builder.prettyPrint();
            }
            builder.copyCurrentStructure(parser);
            return builder.string();
        }
    }

    public static String convertToJson(byte[] data, int offset, int length, boolean reformatJson) throws IOException {
        return convertToJson(data, offset, length, reformatJson, false);
    }

    /**
     * Converts the given byte range to a JSON string; see
     * {@link #convertToJson(BytesReference, boolean, boolean)}.
     */
    public static String convertToJson(byte[] data, int offset, int length, boolean reformatJson, boolean prettyPrint)
            throws IOException {
        XContentType xContentType = XContentFactory.xContentType(data, offset, length);
        if (xContentType == XContentType.JSON && !reformatJson) {
            return new String(data, offset, length, StandardCharsets.UTF_8);
        }
        // try-with-resources replaces the previous manual try/finally close
        try (XContentParser parser = XContentFactory.xContent(xContentType).createParser(data, offset, length)) {
            parser.nextToken();
            XContentBuilder builder = XContentFactory.jsonBuilder();
            if (prettyPrint) {
                builder.prettyPrint();
            }
            builder.copyCurrentStructure(parser);
            return builder.string();
        }
    }

    /**
     * Writes serialized toXContent to pretty-printed JSON string.
     *
     * @param toXContent object to be pretty printed
     * @return pretty-printed JSON serialization
     */
    public static String toString(ToXContent toXContent) {
        return toString(toXContent, EMPTY_PARAMS);
    }

    /**
     * Writes serialized toXContent to pretty-printed JSON string.
     *
     * @param toXContent object to be pretty printed
     * @param params serialization parameters
     * @return pretty-printed JSON serialization
     */
    public static String toString(ToXContent toXContent, Params params) {
        try {
            XContentBuilder builder = XContentFactory.jsonBuilder();
            if (params.paramAsBoolean("pretty", true)) {
                builder.prettyPrint();
            }
            if (params.paramAsBoolean("human", true)) {
                builder.humanReadable(true);
            }
            builder.startObject();
            toXContent.toXContent(builder, params);
            builder.endObject();
            return builder.string();
        } catch (IOException e) {
            // Best effort: serialize the failure itself; the original exception
            // (not e2) is deliberately used as the cause since it is the root failure.
            try {
                XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
                builder.startObject();
                builder.field("error", e.getMessage());
                builder.endObject();
                return builder.string();
            } catch (IOException e2) {
                throw new ElasticsearchException("cannot generate error message for deserialization", e);
            }
        }
    }

    /**
     * Updates the provided changes into the source. If the key exists in the changes, it overrides the one in source
     * unless both are Maps, in which case it recursively updates it.
     *
     * @param source the original map to be updated
     * @param changes the changes to update into updated
     * @param checkUpdatesAreUnequal should this method check if updates to the same key (that are not both maps) are
     *        unequal? This is just a .equals check on the objects, but that can take some time on long strings.
     * @return true if the source map was modified
     */
    public static boolean update(Map<String, Object> source, Map<String, Object> changes, boolean checkUpdatesAreUnequal) {
        boolean modified = false;
        for (Map.Entry<String, Object> changesEntry : changes.entrySet()) {
            if (!source.containsKey(changesEntry.getKey())) {
                // safe to copy, change does not exist in source
                source.put(changesEntry.getKey(), changesEntry.getValue());
                modified = true;
                continue;
            }
            Object old = source.get(changesEntry.getKey());
            if (old instanceof Map && changesEntry.getValue() instanceof Map) {
                // recursive merge maps; once modified, skip the equality check below
                modified |= update((Map<String, Object>) source.get(changesEntry.getKey()),
                        (Map<String, Object>) changesEntry.getValue(), checkUpdatesAreUnequal && !modified);
                continue;
            }
            // update the field
            source.put(changesEntry.getKey(), changesEntry.getValue());
            if (modified) {
                continue;
            }
            if (!checkUpdatesAreUnequal) {
                modified = true;
                continue;
            }
            modified = !Objects.equals(old, changesEntry.getValue());
        }
        return modified;
    }

    /**
     * Merges the defaults provided as the second parameter into the content of the first. Only does recursive merge
     * for inner maps.
     */
    @SuppressWarnings({"unchecked"})
    public static void mergeDefaults(Map<String, Object> content, Map<String, Object> defaults) {
        for (Map.Entry<String, Object> defaultEntry : defaults.entrySet()) {
            if (!content.containsKey(defaultEntry.getKey())) {
                // copy it over, it does not exist in the content
                content.put(defaultEntry.getKey(), defaultEntry.getValue());
            } else {
                // in the content and in the default, only merge compound ones (maps)
                if (content.get(defaultEntry.getKey()) instanceof Map && defaultEntry.getValue() instanceof Map) {
                    mergeDefaults((Map<String, Object>) content.get(defaultEntry.getKey()),
                            (Map<String, Object>) defaultEntry.getValue());
                } else if (content.get(defaultEntry.getKey()) instanceof List && defaultEntry.getValue() instanceof List) {
                    List<Object> defaultList = (List<Object>) defaultEntry.getValue();
                    List<Object> contentList = (List<Object>) content.get(defaultEntry.getKey());
                    List<Object> mergedList = new ArrayList<>();
                    if (allListValuesAreMapsOfOne(defaultList) && allListValuesAreMapsOfOne(contentList)) {
                        // all are in the form of [ {"key1" : {}}, {"key2" : {}} ], merge based on keys
                        Map<String, Map<String, Object>> processed = new LinkedHashMap<>();
                        for (Object o : contentList) {
                            Map<String, Object> map = (Map<String, Object>) o;
                            Map.Entry<String, Object> entry = map.entrySet().iterator().next();
                            processed.put(entry.getKey(), map);
                        }
                        for (Object o : defaultList) {
                            Map<String, Object> map = (Map<String, Object>) o;
                            Map.Entry<String, Object> entry = map.entrySet().iterator().next();
                            if (processed.containsKey(entry.getKey())) {
                                mergeDefaults(processed.get(entry.getKey()), map);
                            } else {
                                // put the default entries after the content ones.
                                processed.put(entry.getKey(), map);
                            }
                        }
                        for (Map<String, Object> map : processed.values()) {
                            mergedList.add(map);
                        }
                    } else {
                        // if both are lists, simply combine them, first the defaults, then the content
                        // just make sure not to add the same value twice
                        mergedList.addAll(defaultList);
                        for (Object o : contentList) {
                            if (!mergedList.contains(o)) {
                                mergedList.add(o);
                            }
                        }
                    }
                    content.put(defaultEntry.getKey(), mergedList);
                }
            }
        }
    }

    /** Returns true if every element of the list is a Map containing exactly one entry. */
    private static boolean allListValuesAreMapsOfOne(List<Object> list) {
        for (Object o : list) {
            if (!(o instanceof Map)) {
                return false;
            }
            if (((Map<?, ?>) o).size() != 1) {
                return false;
            }
        }
        return true;
    }

    /**
     * Copies the structure at the parser's current token (field name, object,
     * array, or scalar) into the generator, recursing into containers.
     */
    public static void copyCurrentStructure(XContentGenerator generator, XContentParser parser) throws IOException {
        XContentParser.Token token = parser.currentToken();

        // Let's handle field-name separately first
        if (token == XContentParser.Token.FIELD_NAME) {
            generator.writeFieldName(parser.currentName());
            token = parser.nextToken();
            // fall-through to copy the associated value
        }

        switch (token) {
            case START_ARRAY:
                generator.writeStartArray();
                while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
                    copyCurrentStructure(generator, parser);
                }
                generator.writeEndArray();
                break;
            case START_OBJECT:
                generator.writeStartObject();
                while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
                    copyCurrentStructure(generator, parser);
                }
                generator.writeEndObject();
                break;
            default: // others are simple:
                copyCurrentEvent(generator, parser);
        }
    }

    /** Copies the parser's current (single) token/event into the generator. */
    public static void copyCurrentEvent(XContentGenerator generator, XContentParser parser) throws IOException {
        switch (parser.currentToken()) {
            case START_OBJECT:
                generator.writeStartObject();
                break;
            case END_OBJECT:
                generator.writeEndObject();
                break;
            case START_ARRAY:
                generator.writeStartArray();
                break;
            case END_ARRAY:
                generator.writeEndArray();
                break;
            case FIELD_NAME:
                generator.writeFieldName(parser.currentName());
                break;
            case VALUE_STRING:
                if (parser.hasTextCharacters()) {
                    generator.writeString(parser.textCharacters(), parser.textOffset(), parser.textLength());
                } else {
                    generator.writeString(parser.text());
                }
                break;
            case VALUE_NUMBER:
                switch (parser.numberType()) {
                    case INT:
                        generator.writeNumber(parser.intValue());
                        break;
                    case LONG:
                        generator.writeNumber(parser.longValue());
                        break;
                    case FLOAT:
                        generator.writeNumber(parser.floatValue());
                        break;
                    case DOUBLE:
                        generator.writeNumber(parser.doubleValue());
                        break;
                }
                break;
            case VALUE_BOOLEAN:
                generator.writeBoolean(parser.booleanValue());
                break;
            case VALUE_NULL:
                generator.writeNull();
                break;
            case VALUE_EMBEDDED_OBJECT:
                generator.writeBinary(parser.binaryValue());
        }
    }

    /**
     * Directly writes the source to the output builder
     */
    public static void writeDirect(BytesReference source, XContentBuilder rawBuilder, ToXContent.Params params) throws IOException {
        Compressor compressor = CompressorFactory.compressor(source);
        if (compressor != null) {
            InputStream compressedStreamInput = compressor.streamInput(source.streamInput());
            if (compressedStreamInput.markSupported() == false) {
                compressedStreamInput = new BufferedInputStream(compressedStreamInput);
            }
            XContentType contentType = XContentFactory.xContentType(compressedStreamInput);
            if (contentType == rawBuilder.contentType()) {
                // same format: stream bytes straight through
                Streams.copy(compressedStreamInput, rawBuilder.stream());
            } else {
                // different format: re-serialize through a parser
                try (XContentParser parser = XContentFactory.xContent(contentType).createParser(compressedStreamInput)) {
                    parser.nextToken();
                    rawBuilder.copyCurrentStructure(parser);
                }
            }
        } else {
            XContentType contentType = XContentFactory.xContentType(source);
            if (contentType == rawBuilder.contentType()) {
                source.writeTo(rawBuilder.stream());
            } else {
                try (XContentParser parser = XContentFactory.xContent(contentType).createParser(source)) {
                    parser.nextToken();
                    rawBuilder.copyCurrentStructure(parser);
                }
            }
        }
    }

    /**
     * Writes a "raw" (bytes) field, handling cases where the bytes are compressed, and tries to optimize writing using
     * {@link XContentBuilder#rawField(String, org.elasticsearch.common.bytes.BytesReference)}.
     */
    public static void writeRawField(String field, BytesReference source, XContentBuilder builder, ToXContent.Params params)
            throws IOException {
        Compressor compressor = CompressorFactory.compressor(source);
        if (compressor != null) {
            InputStream compressedStreamInput = compressor.streamInput(source.streamInput());
            if (compressedStreamInput.markSupported() == false) {
                compressedStreamInput = new BufferedInputStream(compressedStreamInput);
            }
            XContentType contentType = XContentFactory.xContentType(compressedStreamInput);
            if (contentType == builder.contentType()) {
                builder.rawField(field, compressedStreamInput);
            } else {
                try (XContentParser parser = XContentFactory.xContent(contentType).createParser(compressedStreamInput)) {
                    parser.nextToken();
                    builder.field(field);
                    builder.copyCurrentStructure(parser);
                }
            }
        } else {
            XContentType contentType = XContentFactory.xContentType(source);
            if (contentType == builder.contentType()) {
                builder.rawField(field, source);
            } else {
                try (XContentParser parser = XContentFactory.xContent(contentType).createParser(source)) {
                    parser.nextToken();
                    builder.field(field);
                    builder.copyCurrentStructure(parser);
                }
            }
        }
    }
}
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.rules.java; import static com.google.common.collect.Iterables.getOnlyElement; import static com.google.devtools.build.lib.packages.Aspect.INJECTING_RULE_KIND_PARAMETER_KEY; import static com.google.devtools.build.lib.util.Preconditions.checkNotNull; import static java.nio.charset.StandardCharsets.ISO_8859_1; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Joiner; import com.google.common.base.Predicates; import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableCollection; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.devtools.build.lib.actions.Action; import com.google.devtools.build.lib.actions.ActionEnvironment; import com.google.devtools.build.lib.actions.ActionOwner; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.ArtifactOwner; import com.google.devtools.build.lib.actions.EmptyRunfilesSupplier; import com.google.devtools.build.lib.actions.ParameterFile; import com.google.devtools.build.lib.actions.ResourceSet; import com.google.devtools.build.lib.actions.Root; import com.google.devtools.build.lib.actions.extra.ExtraActionInfo; import 
com.google.devtools.build.lib.actions.extra.JavaCompileInfo;
import com.google.devtools.build.lib.analysis.AnalysisEnvironment;
import com.google.devtools.build.lib.analysis.RuleContext;
import com.google.devtools.build.lib.analysis.actions.CommandLine;
import com.google.devtools.build.lib.analysis.actions.CustomCommandLine;
import com.google.devtools.build.lib.analysis.actions.CustomCommandLine.CustomMultiArgv;
import com.google.devtools.build.lib.analysis.actions.ParameterFileWriteAction;
import com.google.devtools.build.lib.analysis.actions.SpawnAction;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration.StrictDepsMode;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.collect.nestedset.Order;
import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable;
import com.google.devtools.build.lib.concurrent.ThreadSafety.ThreadCompatible;
import com.google.devtools.build.lib.rules.java.JavaConfiguration.JavaClasspathMode;
import com.google.devtools.build.lib.skyframe.AspectValue;
import com.google.devtools.build.lib.util.Preconditions;
import com.google.devtools.build.lib.util.StringCanonicalizer;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;

/** Action that represents a Java compilation. */
@ThreadCompatible
@Immutable
public final class JavaCompileAction extends SpawnAction {

  // Name of the post-processor used to instrument classes for coverage (see buildParamFileContents).
  private static final String JACOCO_INSTRUMENTATION_PROCESSOR = "jacoco";

  // Resource estimate used for scheduling this action locally.
  private static final ResourceSet LOCAL_RESOURCES =
      ResourceSet.createWithRamCpuIo(750 /*MB*/, 0.5 /*CPU*/, 0.0 /*IO*/);

  /** Environment variable that sets the UTF-8 charset.
 */
  static final ImmutableMap<String, String> UTF8_ENVIRONMENT =
      ImmutableMap.of("LC_CTYPE", "en_US.UTF-8");

  // TODO(#3320): This is missing the configuration's action environment!
  static final ActionEnvironment UTF8_ACTION_ENVIRONMENT =
      ActionEnvironment.create(UTF8_ENVIRONMENT);

  // Logical JavaBuilder command line; written to the params file (see Builder.build()).
  private final CommandLine javaCompileCommandLine;
  // The command line actually executed (JVM invocation + "@paramsfile").
  private final CommandLine commandLine;

  /**
   * The directory in which generated classfiles are placed.
   * May be erased/created by the JavaBuilder.
   */
  private final PathFragment classDirectory;

  private final Artifact outputJar;

  /**
   * The list of classpath entries to specify to javac.
   */
  private final NestedSet<Artifact> classpathEntries;

  /** The list of bootclasspath entries to specify to javac. */
  private final ImmutableList<Artifact> bootclasspathEntries;

  /** The list of sourcepath entries to specify to javac. */
  private final ImmutableList<Artifact> sourcePathEntries;

  /**
   * The path to the extdir to specify to javac.
   */
  private final ImmutableList<Artifact> extdirInputs;

  /** The list of classpath entries to search for annotation processors. */
  private final NestedSet<Artifact> processorPath;

  /**
   * The list of annotation processor classes to run.
   */
  private final ImmutableList<String> processorNames;

  /** Set of additional Java source files to compile. */
  private final ImmutableList<Artifact> sourceJars;

  /** The set of explicit Java source files to compile. */
  private final ImmutableSet<Artifact> sourceFiles;

  /**
   * The compiler options to pass to javac.
   */
  private final ImmutableList<String> javacOpts;

  /** The subset of classpath jars provided by direct dependencies. */
  private final NestedSet<Artifact> directJars;

  /**
   * The level of strict dependency checks (off, warnings, or errors).
   */
  private final BuildConfiguration.StrictDepsMode strictJavaDeps;

  /** The set of .jdeps artifacts provided by direct dependencies.
 */
  private final NestedSet<Artifact> compileTimeDependencyArtifacts;

  /**
   * Constructs an action to compile a set of Java source files to class files.
   *
   * @param owner the action owner, typically a java_* RuleConfiguredTarget.
   * @param tools the tools used by the action
   * @param inputs the inputs of the action
   * @param outputs the outputs of the action
   * @param javaCompileCommandLine the command line for the java library builder - it's actually
   *     written to the parameter file, but other parts (for example, ide_build_info) need access to
   *     the data
   * @param commandLine the actual invocation command line
   * @param classDirectory the directory in which generated classfiles are placed
   * @param outputJar the jar file the compilation outputs will be written to
   * @param classpathEntries the compile-time classpath entries
   * @param bootclasspathEntries the compile-time bootclasspath entries
   * @param extdirInputs the compile-time extclasspath entries
   * @param processorPath the classpath to search for annotation processors
   * @param processorNames the annotation processors to run
   * @param sourceJars jars of sources to compile
   * @param sourceFiles source files to compile
   * @param javacOpts the javac options for the compilation
   * @param directJars the subset of classpath jars provided by direct dependencies
   * @param executionInfo the execution info
   * @param strictJavaDeps the Strict Java Deps mode
   * @param compileTimeDependencyArtifacts the jdeps files for direct dependencies
   * @param progressMessage the progress message
   */
  private JavaCompileAction(
      ActionOwner owner,
      NestedSet<Artifact> tools,
      NestedSet<Artifact> inputs,
      Collection<Artifact> outputs,
      CommandLine javaCompileCommandLine,
      CommandLine commandLine,
      PathFragment classDirectory,
      Artifact outputJar,
      NestedSet<Artifact> classpathEntries,
      ImmutableList<Artifact> bootclasspathEntries,
      ImmutableList<Artifact> sourcePathEntries,
      ImmutableList<Artifact> extdirInputs,
      NestedSet<Artifact> processorPath,
      List<String> processorNames,
      Collection<Artifact> sourceJars,
      ImmutableSet<Artifact> sourceFiles,
      List<String> javacOpts,
      NestedSet<Artifact> directJars,
      Map<String, String> executionInfo,
      StrictDepsMode strictJavaDeps,
      NestedSet<Artifact> compileTimeDependencyArtifacts,
      String progressMessage) {
    super(
        owner,
        tools,
        inputs,
        outputs,
        LOCAL_RESOURCES,
        commandLine,
        false,
        // TODO(#3320): This is missing the configuration's action environment!
        UTF8_ACTION_ENVIRONMENT,
        ImmutableMap.copyOf(executionInfo),
        progressMessage,
        EmptyRunfilesSupplier.INSTANCE,
        "Javac",
        false /*executeUnconditionally*/,
        null /*extraActionInfoSupplier*/);
    this.javaCompileCommandLine = javaCompileCommandLine;
    this.commandLine = commandLine;
    this.classDirectory = checkNotNull(classDirectory);
    this.outputJar = outputJar;
    this.classpathEntries = classpathEntries;
    // Defensive copies for the list-valued parameters; NestedSets are already immutable.
    this.bootclasspathEntries = ImmutableList.copyOf(bootclasspathEntries);
    this.sourcePathEntries = ImmutableList.copyOf(sourcePathEntries);
    this.extdirInputs = extdirInputs;
    this.processorPath = processorPath;
    this.processorNames = ImmutableList.copyOf(processorNames);
    this.sourceJars = ImmutableList.copyOf(sourceJars);
    this.sourceFiles = sourceFiles;
    this.javacOpts = ImmutableList.copyOf(javacOpts);
    this.directJars = checkNotNull(directJars, "directJars must not be null");
    this.strictJavaDeps = strictJavaDeps;
    this.compileTimeDependencyArtifacts = compileTimeDependencyArtifacts;
  }

  /** Returns the given (passed to constructor) source files. */
  @VisibleForTesting
  ImmutableSet<Artifact> getSourceFiles() {
    return sourceFiles;
  }

  /**
   * Returns the list of paths that represents the classpath.
   */
  @VisibleForTesting
  public Iterable<Artifact> getClasspath() {
    return classpathEntries;
  }

  /** Returns the list of paths that represents the bootclasspath. */
  @VisibleForTesting
  Collection<Artifact> getBootclasspath() {
    return bootclasspathEntries;
  }

  /** Returns the list of paths that represents the sourcepath.
 */
  @VisibleForTesting
  public Collection<Artifact> getSourcePathEntries() {
    return sourcePathEntries;
  }

  /**
   * Returns the path to the extdir.
   */
  @VisibleForTesting
  public Collection<Artifact> getExtdir() {
    return extdirInputs;
  }

  /**
   * Returns the list of paths that represents the source jars.
   */
  @VisibleForTesting
  public Collection<Artifact> getSourceJars() {
    return sourceJars;
  }

  /** Returns the list of paths that represents the processor path. */
  @VisibleForTesting
  public NestedSet<Artifact> getProcessorpath() {
    return processorPath;
  }

  /** Returns the javac options passed to the compilation. */
  @VisibleForTesting
  public List<String> getJavacOpts() {
    return javacOpts;
  }

  /** Returns the classpath jars provided by direct dependencies. */
  @VisibleForTesting
  public NestedSet<Artifact> getDirectJars() {
    return directJars;
  }

  /** Returns the .jdeps artifacts of direct dependencies. */
  @VisibleForTesting
  public NestedSet<Artifact> getCompileTimeDependencyArtifacts() {
    return compileTimeDependencyArtifacts;
  }

  /** Returns the configured Strict Java Deps mode. */
  @VisibleForTesting
  public BuildConfiguration.StrictDepsMode getStrictJavaDepsMode() {
    return strictJavaDeps;
  }

  /** Returns the directory in which generated classfiles are placed. */
  public PathFragment getClassDirectory() {
    return classDirectory;
  }

  /**
   * Returns the list of class names of processors that should
   * be run.
   */
  @VisibleForTesting
  public List<String> getProcessorNames() {
    return processorNames;
  }

  /**
   * Returns the output jar artifact that gets generated by archiving the results of the Java
   * compilation.
   */
  public Artifact getOutputJar() {
    return outputJar;
  }

  @Override
  public Artifact getPrimaryOutput() {
    return getOutputJar();
  }

  /**
   * Constructs a command line that can be used to invoke the
   * JavaBuilder.
   *
   * <p>Do not use this method, except for testing (and for the in-process
   * strategy).
   */
  @VisibleForTesting
  public Iterable<String> buildCommandLine() {
    return javaCompileCommandLine.arguments();
  }

  /** Returns the command and arguments for a java compile action.
 */
  public List<String> getCommand() {
    return ImmutableList.copyOf(commandLine.arguments());
  }

  @Override
  public String toString() {
    StringBuilder result = new StringBuilder();
    result.append("JavaBuilder ");
    Joiner.on(' ').appendTo(result, commandLine.arguments());
    return result.toString();
  }

  @Override
  public ExtraActionInfo.Builder getExtraActionInfo() {
    // Populate the JavaCompileInfo extension from this action's fields for extra_action rules.
    JavaCompileInfo.Builder info = JavaCompileInfo.newBuilder();
    info.addAllSourceFile(Artifact.toExecPaths(getSourceFiles()));
    info.addAllClasspath(Artifact.toExecPaths(getClasspath()));
    info.addAllBootclasspath(Artifact.toExecPaths(getBootclasspath()));
    info.addAllSourcepath(Artifact.toExecPaths(getSourceJars()));
    info.addAllJavacOpt(getJavacOpts());
    info.addAllProcessor(getProcessorNames());
    info.addAllProcessorpath(Artifact.toExecPaths(getProcessorpath()));
    info.setOutputjar(getOutputJar().getExecPathString());
    return super.getExtraActionInfo()
        .setExtension(JavaCompileInfo.javaCompileInfo, info.build());
  }

  /**
   * Builds the list of mappings between jars on the classpath and their originating targets names.
   */
  @VisibleForTesting
  static class JarsToTargetsArgv extends CustomMultiArgv {
    private final Iterable<Artifact> classpath;
    private final NestedSet<Artifact> directJars;

    @VisibleForTesting
    JarsToTargetsArgv(Iterable<Artifact> classpath, NestedSet<Artifact> directJars) {
      this.classpath = classpath;
      this.directJars = directJars;
    }

    @Override
    public Iterable<String> argv() {
      Set<Artifact> directJarSet = directJars.toSet();
      ImmutableList.Builder<String> builder = ImmutableList.builder();
      // Emit a (direct/indirect flag, exec path, owning label) triple per classpath jar.
      for (Artifact jar : classpath) {
        builder.add(directJarSet.contains(jar) ? "--direct_dependency" : "--indirect_dependency");
        builder.add(jar.getExecPathString());
        builder.add(getArtifactOwnerGeneralizedLabel(jar));
      }
      return builder.build();
    }

    private String getArtifactOwnerGeneralizedLabel(Artifact artifact) {
      ArtifactOwner owner = checkNotNull(artifact.getArtifactOwner(), artifact);
      StringBuilder result = new StringBuilder();
      Label label = owner.getLabel();
      result.append(
          label.getPackageIdentifier().getRepository().isDefault()
                  || label.getPackageIdentifier().getRepository().isMain()
              ? label.toString()
              // Escape '@' prefix for .params file.
              : "@" + label);
      if (owner instanceof AspectValue.AspectKey) {
        AspectValue.AspectKey aspectOwner = (AspectValue.AspectKey) owner;
        ImmutableCollection<String> injectingRuleKind =
            aspectOwner.getParameters().getAttribute(INJECTING_RULE_KIND_PARAMETER_KEY);
        if (injectingRuleKind.size() == 1) {
          result.append(' ').append(getOnlyElement(injectingRuleKind));
        }
      }
      return result.toString();
    }
  }

  /** Creates an ArgvFragment containing the common initial command line arguments */
  private static CustomMultiArgv spawnCommandLineBase(
      final PathFragment javaExecutable,
      final Artifact javaBuilderJar,
      final ImmutableList<Artifact> instrumentationJars,
      final ImmutableList<String> javaBuilderJvmFlags,
      final String javaBuilderMainClass,
      final String pathDelimiter) {
    return new CustomMultiArgv() {
      @Override
      public Iterable<String> argv() {
        checkNotNull(javaBuilderJar);
        CustomCommandLine.Builder builder =
            CustomCommandLine.builder().addPath(javaExecutable).add(javaBuilderJvmFlags);
        if (!instrumentationJars.isEmpty()) {
          // Instrumentation jars must precede JavaBuilder on the classpath, so use -cp + main class.
          builder
              .addJoinExecPaths(
                  "-cp",
                  pathDelimiter,
                  Iterables.concat(instrumentationJars, ImmutableList.of(javaBuilderJar)))
              .add(javaBuilderMainClass);
        } else {
          // If there are no instrumentation jars, use simpler '-jar' option to launch JavaBuilder.
          builder.addExecPath("-jar", javaBuilderJar);
        }
        return builder.build().arguments();
      }
    };
  }

  /**
   * Tells {@link Builder} how to create new artifacts.
It exists so that {@link Builder} can be
   * exercised in tests without creating a full {@link RuleContext}.
   */
  public interface ArtifactFactory {

    /**
     * Create an artifact with the specified root-relative path under the specified root.
     */
    Artifact create(PathFragment rootRelativePath, Root root);
  }

  @VisibleForTesting
  static ArtifactFactory createArtifactFactory(final AnalysisEnvironment env) {
    return new ArtifactFactory() {
      @Override
      public Artifact create(PathFragment rootRelativePath, Root root) {
        return env.getDerivedArtifact(rootRelativePath, root);
      }
    };
  }

  /**
   * Builder class to construct Java compile actions.
   */
  public static class Builder {
    private final ActionOwner owner;
    private final AnalysisEnvironment analysisEnvironment;
    private final ArtifactFactory artifactFactory;
    private final BuildConfiguration configuration;
    private final JavaSemantics semantics;

    private PathFragment javaExecutable;
    private List<Artifact> javabaseInputs = ImmutableList.of();
    private Artifact outputJar;
    private Artifact gensrcOutputJar;
    private Artifact manifestProtoOutput;
    private Artifact outputDepsProto;
    private Collection<Artifact> additionalOutputs;
    // Derived from the output jar in build() when not set explicitly.
    private Artifact paramFile;
    private Artifact metadata;
    private ImmutableSet<Artifact> sourceFiles = ImmutableSet.of();
    private final Collection<Artifact> sourceJars = new ArrayList<>();
    private BuildConfiguration.StrictDepsMode strictJavaDeps =
        BuildConfiguration.StrictDepsMode.OFF;
    private NestedSet<Artifact> directJars = NestedSetBuilder.emptySet(Order.NAIVE_LINK_ORDER);
    private NestedSet<Artifact> compileTimeDependencyArtifacts =
        NestedSetBuilder.emptySet(Order.STABLE_ORDER);
    private List<String> javacOpts = new ArrayList<>();
    private ImmutableList<String> javacJvmOpts = ImmutableList.of();
    private ImmutableMap<String, String> executionInfo = ImmutableMap.of();
    private boolean compressJar;
    private NestedSet<Artifact> classpathEntries =
        NestedSetBuilder.emptySet(Order.NAIVE_LINK_ORDER);
    private ImmutableList<Artifact> bootclasspathEntries = ImmutableList.of();
    private ImmutableList<Artifact> sourcePathEntries = ImmutableList.of();
    private ImmutableList<Artifact> extdirInputs = ImmutableList.of();
    private Artifact javaBuilderJar;
    private Artifact langtoolsJar;
    private NestedSet<Artifact> toolsJars = NestedSetBuilder.emptySet(Order.NAIVE_LINK_ORDER);
    private ImmutableList<Artifact> instrumentationJars = ImmutableList.of();
    private PathFragment sourceGenDirectory;
    private PathFragment tempDirectory;
    private PathFragment classDirectory;
    private NestedSet<Artifact> processorPath = NestedSetBuilder.emptySet(Order.NAIVE_LINK_ORDER);
    private final List<String> processorNames = new ArrayList<>();
    /** The list of custom javac flags to pass to annotation processors. */
    private final List<String> processorFlags = new ArrayList<>();
    private String ruleKind;
    private Label targetLabel;
    private boolean testOnly = false;

    /**
     * Creates a Builder from an owner and a build configuration.
     */
    public Builder(ActionOwner owner, AnalysisEnvironment analysisEnvironment,
        ArtifactFactory artifactFactory, BuildConfiguration configuration,
        JavaSemantics semantics) {
      this.owner = owner;
      this.analysisEnvironment = analysisEnvironment;
      this.artifactFactory = artifactFactory;
      this.configuration = configuration;
      this.semantics = semantics;
    }

    /**
     * Creates a Builder from a rule context, deriving the owner, analysis environment, artifact
     * factory, and configuration from it.
     */
    public Builder(final RuleContext ruleContext, JavaSemantics semantics) {
      this(ruleContext.getActionOwner(),
          ruleContext.getAnalysisEnvironment(),
          new ArtifactFactory() {
            @Override
            public Artifact create(PathFragment rootRelativePath, Root root) {
              return ruleContext.getDerivedArtifact(rootRelativePath, root);
            }
          },
          ruleContext.getConfiguration(),
          semantics);
    }

    public JavaCompileAction build() {
      // TODO(bazel-team): all the params should be calculated before getting here, and the various
      // aggregation code below should go away.
      final String pathSeparator = configuration.getHostPathSeparator();
      // Canonicalize javac options to share identical strings across actions.
      final List<String> internedJcopts = new ArrayList<>();
      for (String jcopt : javacOpts) {
        internedJcopts.add(StringCanonicalizer.intern(jcopt));
      }

      // Invariant: if strictJavaDeps is OFF, then directJars and
      // dependencyArtifacts are ignored
      if (strictJavaDeps == BuildConfiguration.StrictDepsMode.OFF) {
        directJars = NestedSetBuilder.emptySet(Order.NAIVE_LINK_ORDER);
        compileTimeDependencyArtifacts = NestedSetBuilder.emptySet(Order.STABLE_ORDER);
      }

      // Invariant: if java_classpath is set to 'off', dependencyArtifacts are ignored
      JavaConfiguration javaConfiguration = configuration.getFragment(JavaConfiguration.class);
      if (javaConfiguration.getReduceJavaClasspath() == JavaClasspathMode.OFF) {
        compileTimeDependencyArtifacts = NestedSetBuilder.emptySet(Order.STABLE_ORDER);
      }

      if (paramFile == null) {
        // Default: place the params file next to the output jar.
        paramFile = artifactFactory.create(
            ParameterFile.derivePath(outputJar.getRootRelativePath()),
            configuration.getBinDirectory(targetLabel.getPackageIdentifier().getRepository()));
      }

      Preconditions.checkState(javaExecutable != null, owner);

      // Collect the non-null outputs, plus any explicitly supplied additional outputs.
      ImmutableList.Builder<Artifact> outputsBuilder = ImmutableList.<Artifact>builder()
          .addAll(
              new ArrayList<>(Collections2.filter(Arrays.asList(
                  outputJar,
                  metadata,
                  gensrcOutputJar,
                  manifestProtoOutput,
                  outputDepsProto), Predicates.notNull())));
      if (additionalOutputs != null) {
        outputsBuilder.addAll(additionalOutputs);
      }
      ImmutableList<Artifact> outputs = outputsBuilder.build();

      CustomCommandLine paramFileContents = buildParamFileContents(internedJcopts);
      Action parameterFileWriteAction = new ParameterFileWriteAction(owner, paramFile,
          paramFileContents, ParameterFile.ParameterFileType.UNQUOTED, ISO_8859_1);
      analysisEnvironment.registerAction(parameterFileWriteAction);

      CustomMultiArgv spawnCommandLineBase = spawnCommandLineBase(
          javaExecutable,
          javaBuilderJar,
          instrumentationJars,
          javacJvmOpts,
          semantics.getJavaBuilderMainClass(),
          pathSeparator);

      // The actual params-file-based command line executed for a compile action.
      CommandLine javaBuilderCommandLine = CustomCommandLine.builder()
          .add(spawnCommandLineBase)
          .addPaths("@%s", paramFile.getExecPath())
          .build();

      NestedSet<Artifact> tools =
          NestedSetBuilder.<Artifact>stableOrder()
              .add(langtoolsJar)
              .addTransitive(toolsJars)
              .add(javaBuilderJar)
              .addAll(instrumentationJars)
              .build();

      NestedSet<Artifact> inputs =
          NestedSetBuilder.<Artifact>stableOrder()
              .addTransitive(classpathEntries)
              .addTransitive(compileTimeDependencyArtifacts)
              .addTransitive(processorPath)
              .addAll(sourceJars)
              .addAll(sourceFiles)
              .addAll(javabaseInputs)
              .addAll(bootclasspathEntries)
              .addAll(sourcePathEntries)
              .addAll(extdirInputs)
              .add(paramFile)
              .addTransitive(tools)
              .build();

      return new JavaCompileAction(
          owner,
          tools,
          inputs,
          outputs,
          paramFileContents,
          javaBuilderCommandLine,
          classDirectory,
          outputJar,
          classpathEntries,
          bootclasspathEntries,
          sourcePathEntries,
          extdirInputs,
          processorPath,
          processorNames,
          sourceJars,
          sourceFiles,
          internedJcopts,
          directJars,
          executionInfo,
          strictJavaDeps,
          compileTimeDependencyArtifacts,
          buildProgressMessage());
    }

    // Assembles the JavaBuilder flags that are written into the params file.
    private CustomCommandLine buildParamFileContents(Collection<String> javacOpts) {
      checkNotNull(classDirectory, "classDirectory should not be null");
      checkNotNull(tempDirectory, "tempDirectory should not be null");

      CustomCommandLine.Builder result = CustomCommandLine.builder();

      result.add("--classdir").addPath(classDirectory);
      result.add("--tempdir").addPath(tempDirectory);
      if (outputJar != null) {
        result.addExecPath("--output", outputJar);
      }
      if (sourceGenDirectory != null) {
        result.add("--sourcegendir").addPath(sourceGenDirectory);
      }
      if (gensrcOutputJar != null) {
        result.addExecPath("--generated_sources_output", gensrcOutputJar);
      }
      if (manifestProtoOutput != null) {
        result.addExecPath("--output_manifest_proto", manifestProtoOutput);
      }
      if (compressJar) {
        result.add("--compress_jar");
      }
      if (outputDepsProto != null) {
        result.addExecPath("--output_deps_proto", outputDepsProto);
      }
if (!extdirInputs.isEmpty()) { result.addExecPaths("--extclasspath", extdirInputs); } if (!bootclasspathEntries.isEmpty()) { result.addExecPaths("--bootclasspath", bootclasspathEntries); } if (!sourcePathEntries.isEmpty()) { result.addExecPaths("--sourcepath", sourcePathEntries); } if (!processorPath.isEmpty()) { result.addExecPaths("--processorpath", processorPath); } if (!processorNames.isEmpty()) { result.add("--processors", processorNames); } if (!processorFlags.isEmpty()) { result.add("--javacopts", processorFlags); } if (!sourceJars.isEmpty()) { result.addExecPaths("--source_jars", sourceJars); } if (!sourceFiles.isEmpty()) { result.addExecPaths("--sources", sourceFiles); } if (!javacOpts.isEmpty()) { result.add("--javacopts", javacOpts); } if (ruleKind != null) { result.add("--rule_kind"); result.add(ruleKind); } if (targetLabel != null) { result.add("--target_label"); if (targetLabel.getPackageIdentifier().getRepository().isDefault() || targetLabel.getPackageIdentifier().getRepository().isMain()) { result.add(targetLabel.toString()); } else { // @-prefixed strings will be assumed to be filenames and expanded by // {@link JavaLibraryBuildRequest}, so add an extra &at; to escape it. result.add("@" + targetLabel); } } if (testOnly) { result.add("--testonly"); } if (!classpathEntries.isEmpty()) { result.addExecPaths("--classpath", classpathEntries); } // strict_java_deps controls whether the mapping from jars to targets is // written out and whether we try to minimize the compile-time classpath. 
if (strictJavaDeps != BuildConfiguration.StrictDepsMode.OFF) { result.add("--strict_java_deps"); result.add(strictJavaDeps.toString()); result.add(new JarsToTargetsArgv(classpathEntries, directJars)); if (configuration.getFragment(JavaConfiguration.class).getReduceJavaClasspath() == JavaClasspathMode.JAVABUILDER) { result.add("--reduce_classpath"); if (!compileTimeDependencyArtifacts.isEmpty()) { result.addExecPaths("--deps_artifacts", compileTimeDependencyArtifacts); } } } if (metadata != null) { result.add("--post_processor"); result.addExecPath(JACOCO_INSTRUMENTATION_PROCESSOR, metadata); result.addPath( configuration .getCoverageMetadataDirectory(targetLabel.getPackageIdentifier().getRepository()) .getExecPath()); result.add("-*Test"); result.add("-*TestCase"); } return result.build(); } private String buildProgressMessage() { StringBuilder sb = new StringBuilder("Building "); sb.append(outputJar.prettyPrint()); sb.append(" ("); boolean first = true; first = appendCount(sb, first, sourceFiles.size(), "source file"); first = appendCount(sb, first, sourceJars.size(), "source jar"); sb.append(")"); addProcessorNames(sb); return sb.toString(); } private void addProcessorNames(StringBuilder sb) { if (processorNames.isEmpty()) { return; } List<String> shortNames = new ArrayList<>(); for (String name : processorNames) { // Annotation processor names are qualified class names. Omit the package part for the // progress message, e.g. `com.google.Foo` -> `Foo`. int idx = name.lastIndexOf('.'); String shortName = idx != -1 ? name.substring(idx + 1) : name; shortNames.add(shortName); } sb.append(" and running annotation processors ("); Joiner.on(", ").appendTo(sb, shortNames); sb.append(")"); return; } /** * Append an input count to the progress message, e.g. "2 source jars". If an input count has * already been appended, prefix with ", ". 
     */
    private static boolean appendCount(StringBuilder sb, boolean first, int count, String name) {
      if (count > 0) {
        if (!first) {
          sb.append(", ");
        } else {
          first = false;
        }
        sb.append(count).append(' ').append(name);
        if (count > 1) {
          // Naive pluralization; the names passed in ("source file", "source jar") pluralize with 's'.
          sb.append('s');
        }
      }
      // Returns the updated "first" flag: false once anything has been appended.
      return first;
    }

    public Builder setParameterFile(Artifact paramFile) {
      this.paramFile = paramFile;
      return this;
    }

    public Builder setJavaExecutable(PathFragment javaExecutable) {
      this.javaExecutable = javaExecutable;
      return this;
    }

    public Builder setJavaBaseInputs(Iterable<Artifact> javabaseInputs) {
      this.javabaseInputs = ImmutableList.copyOf(javabaseInputs);
      return this;
    }

    public Builder setOutputJar(Artifact outputJar) {
      this.outputJar = outputJar;
      return this;
    }

    public Builder setGensrcOutputJar(Artifact gensrcOutputJar) {
      this.gensrcOutputJar = gensrcOutputJar;
      return this;
    }

    public Builder setManifestProtoOutput(Artifact manifestProtoOutput) {
      this.manifestProtoOutput = manifestProtoOutput;
      return this;
    }

    public Builder setOutputDepsProto(Artifact outputDepsProto) {
      this.outputDepsProto = outputDepsProto;
      return this;
    }

    public Builder setAdditionalOutputs(Collection<Artifact> outputs) {
      this.additionalOutputs = outputs;
      return this;
    }

    public Builder setMetadata(Artifact metadata) {
      this.metadata = metadata;
      return this;
    }

    public Builder setSourceFiles(ImmutableSet<Artifact> sourceFiles) {
      this.sourceFiles = sourceFiles;
      return this;
    }

    public Builder addSourceJars(Collection<Artifact> sourceJars) {
      this.sourceJars.addAll(sourceJars);
      return this;
    }

    /**
     * Sets the strictness of Java dependency checking, see {@link
     * com.google.devtools.build.lib.analysis.config.BuildConfiguration.StrictDepsMode}.
     */
    public Builder setStrictJavaDeps(BuildConfiguration.StrictDepsMode strictDeps) {
      strictJavaDeps = strictDeps;
      return this;
    }

    /** Sets the jar artifacts provided by direct dependencies (replaces any previous value).
*/ public Builder setDirectJars(NestedSet<Artifact> directJars) { this.directJars = checkNotNull(directJars, "directJars must not be null"); return this; } public Builder setCompileTimeDependencyArtifacts(NestedSet<Artifact> dependencyArtifacts) { checkNotNull(compileTimeDependencyArtifacts, "dependencyArtifacts must not be null"); this.compileTimeDependencyArtifacts = dependencyArtifacts; return this; } public Builder setJavacOpts(Iterable<String> copts) { this.javacOpts = ImmutableList.copyOf(copts); return this; } public Builder setJavacJvmOpts(ImmutableList<String> opts) { this.javacJvmOpts = opts; return this; } public Builder setJavacExecutionInfo(ImmutableMap<String, String> executionInfo) { this.executionInfo = executionInfo; return this; } public Builder setCompressJar(boolean compressJar) { this.compressJar = compressJar; return this; } public Builder setClasspathEntries(NestedSet<Artifact> classpathEntries) { this.classpathEntries = classpathEntries; return this; } public Builder setBootclasspathEntries(Iterable<Artifact> bootclasspathEntries) { this.bootclasspathEntries = ImmutableList.copyOf(bootclasspathEntries); return this; } public Builder setSourcePathEntries(Iterable<Artifact> sourcePathEntries) { this.sourcePathEntries = ImmutableList.copyOf(sourcePathEntries); return this; } public Builder setExtdirInputs(Iterable<Artifact> extdirEntries) { this.extdirInputs = ImmutableList.copyOf(extdirEntries); return this; } /** * Sets the directory where source files generated by annotation processors should be stored. 
     */
    public Builder setSourceGenDirectory(PathFragment sourceGenDirectory) {
      this.sourceGenDirectory = sourceGenDirectory;
      return this;
    }

    public Builder setTempDirectory(PathFragment tempDirectory) {
      this.tempDirectory = tempDirectory;
      return this;
    }

    public Builder setClassDirectory(PathFragment classDirectory) {
      this.classDirectory = classDirectory;
      return this;
    }

    public Builder setProcessorPaths(NestedSet<Artifact> processorPaths) {
      this.processorPath = processorPaths;
      return this;
    }

    public Builder addProcessorNames(Collection<String> processorNames) {
      this.processorNames.addAll(processorNames);
      return this;
    }

    public Builder addProcessorFlags(Collection<String> processorFlags) {
      this.processorFlags.addAll(processorFlags);
      return this;
    }

    public Builder setLangtoolsJar(Artifact langtoolsJar) {
      this.langtoolsJar = langtoolsJar;
      return this;
    }

    /** Sets the tools jars. */
    public Builder setToolsJars(NestedSet<Artifact> toolsJars) {
      checkNotNull(toolsJars, "toolsJars must not be null");
      this.toolsJars = toolsJars;
      return this;
    }

    public Builder setJavaBuilderJar(Artifact javaBuilderJar) {
      this.javaBuilderJar = javaBuilderJar;
      return this;
    }

    public Builder setInstrumentationJars(Iterable<Artifact> instrumentationJars) {
      this.instrumentationJars = ImmutableList.copyOf(instrumentationJars);
      return this;
    }

    public Builder setRuleKind(String ruleKind) {
      this.ruleKind = ruleKind;
      return this;
    }

    public Builder setTargetLabel(Label targetLabel) {
      this.targetLabel = targetLabel;
      return this;
    }

    public Builder setTestOnly(boolean testOnly) {
      this.testOnly = testOnly;
      return this;
    }
  }
}
/*
 * Copyright 2014-2016 CyberVision, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.kaaproject.kaa.server.appenders.kafka.appender;

import org.apache.avro.generic.GenericRecord;
import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.kaaproject.kaa.common.avro.GenericAvroConverter;
import org.kaaproject.kaa.common.dto.logs.LogAppenderDto;
import org.kaaproject.kaa.server.appenders.kafka.config.gen.KafkaConfig;
import org.kaaproject.kaa.server.common.log.shared.appender.AbstractLogAppender;
import org.kaaproject.kaa.server.common.log.shared.appender.LogDeliveryCallback;
import org.kaaproject.kaa.server.common.log.shared.appender.LogEvent;
import org.kaaproject.kaa.server.common.log.shared.appender.LogEventPack;
import org.kaaproject.kaa.server.common.log.shared.avro.gen.RecordHeader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Log appender that decodes incoming Kaa log event packs from Avro and hands
 * them to a {@code LogEventDao} for delivery to a Kafka topic. Appending is
 * done asynchronously on a fixed-size worker pool; per-second throughput
 * statistics are reported by a dedicated scheduler thread.
 */
public class KafkaLogAppender extends AbstractLogAppender<KafkaConfig> {

    private static final Logger LOG = LoggerFactory.getLogger(KafkaLogAppender.class);
    private static final int MAX_CALLBACK_THREAD_POOL_SIZE = 10;

    // Worker pool that performs the actual append work; sized in initFromConfiguration().
    private ExecutorService executor;
    // Emits the once-per-second statistics log line.
    private ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1);
    // Counters are reset every second by the statistics task.
    private AtomicInteger kafkaSuccessLogCount = new AtomicInteger();
    private AtomicInteger kafkaFailureLogCount = new AtomicInteger();
    private AtomicInteger inputLogCount = new AtomicInteger();

    private LogEventDao logEventDao;
    private String topicName;
    private boolean closed = false;

    // Avro converters are not thread-safe, so each worker thread keeps its own
    // schema-string -> converter cache.
    private ThreadLocal<Map<String, GenericAvroConverter<GenericRecord>>> converters =
            new ThreadLocal<Map<String, GenericAvroConverter<GenericRecord>>>() {
                @Override
                protected Map<String, GenericAvroConverter<GenericRecord>> initialValue() {
                    return new HashMap<String, GenericAvroConverter<GenericRecord>>();
                }
            };

    /**
     * Instantiates a new KafkaLogAppender and starts the once-per-second
     * statistics reporting task.
     */
    public KafkaLogAppender() {
        super(KafkaConfig.class);
        scheduler.scheduleWithFixedDelay(new Runnable() {
            @Override
            public void run() {
                long second = System.currentTimeMillis() / 1000;
                LOG.info(
                        "[{}] Received {} log record count, {} success kafka callbacks, {} failure kafka callbacks / second.",
                        second, inputLogCount.getAndSet(0), kafkaSuccessLogCount.getAndSet(0),
                        kafkaFailureLogCount.getAndSet(0));
            }
        }, 0L, 1L, TimeUnit.SECONDS);
    }

    /**
     * Stops the appender: closes the DAO and shuts down both executors.
     * Idempotent; subsequent appends are rejected via the {@code closed} flag.
     */
    @Override
    public void close() {
        LOG.info("Try to stop kafka log appender...");
        if (!closed) {
            closed = true;
            if (logEventDao != null) {
                logEventDao.close();
            }
            if (executor != null) {
                executor.shutdownNow();
            }
            if (scheduler != null) {
                scheduler.shutdownNow();
            }
        }
        // Fixed typo: "stoped" -> "stopped".
        LOG.info("Kafka log appender stopped.");
    }

    /**
     * Asynchronously converts the log event pack to DTOs and saves them via the
     * DAO. Delivery status is reported through {@code listener}; appends on a
     * closed appender immediately signal a connection error.
     */
    @Override
    public void doAppend(final LogEventPack logEventPack, final RecordHeader header,
            final LogDeliveryCallback listener) {
        if (!closed) {
            executor.submit(new Runnable() {
                @Override
                public void run() {
                    try {
                        LOG.debug("[{}] appending {} logs to kafka collection", topicName,
                                logEventPack.getEvents().size());
                        GenericAvroConverter<GenericRecord> eventConverter =
                                getConverter(logEventPack.getLogSchema().getSchema());
                        GenericAvroConverter<GenericRecord> headerConverter =
                                getConverter(header.getSchema().toString());
                        List<KafkaLogEventDto> dtoList =
                                generateKafkaLogEvent(logEventPack, header, eventConverter);
                        LOG.debug("[{}] saving {} objects", topicName, dtoList.size());
                        if (!dtoList.isEmpty()) {
                            int logCount = dtoList.size();
                            inputLogCount.getAndAdd(logCount);
                            logEventDao.save(dtoList, eventConverter, headerConverter,
                                    new LogAppenderCallback(listener, kafkaSuccessLogCount, kafkaFailureLogCount));
                            LOG.debug("[{}] appended {} logs to kafka collection", topicName,
                                    logEventPack.getEvents().size());
                        } else {
                            listener.onInternalError();
                        }
                    } catch (Exception ex) {
                        LOG.warn("Got exception. Can't process log events", ex);
                        listener.onInternalError();
                    }
                }
            });
        } else {
            LOG.info("Attempted to append to closed appender named [{}].", getName());
            listener.onConnectionError();
        }
    }

    /**
     * Initializes the DAO, worker pool and target topic from the appender
     * configuration. The pool size is capped at {@code MAX_CALLBACK_THREAD_POOL_SIZE}.
     */
    @Override
    protected void initFromConfiguration(LogAppenderDto appender, KafkaConfig configuration) {
        LOG.info("Initializing new appender instance using {}", configuration);
        try {
            logEventDao = new KafkaLogEventDao(configuration);
            int executorPoolSize = Math.min(configuration.getExecutorThreadPoolSize(), MAX_CALLBACK_THREAD_POOL_SIZE);
            executor = Executors.newFixedThreadPool(executorPoolSize);
            topicName = configuration.getTopic();
            LOG.info("Kafka log appender initialized");
        } catch (Exception ex) {
            LOG.error("Failed to init kafka log appender: ", ex);
        }
    }

    /**
     * Decodes every non-null log event in the pack into a {@link KafkaLogEventDto}.
     * Events that are null or carry no data are skipped.
     *
     * @throws IOException if Avro decoding of an event fails
     */
    protected List<KafkaLogEventDto> generateKafkaLogEvent(LogEventPack logEventPack, RecordHeader header,
            GenericAvroConverter<GenericRecord> eventConverter) throws IOException {
        LOG.debug("Generate LogEventDto objects from LogEventPack [{}] and header [{}]", logEventPack, header);
        List<KafkaLogEventDto> events = new ArrayList<>(logEventPack.getEvents().size());
        try {
            for (LogEvent logEvent : logEventPack.getEvents()) {
                LOG.debug("Convert log events [{}] to dto objects.", logEvent);
                // BUGFIX: the original used non-short-circuit '|', which evaluates
                // logEvent.getLogData() even when logEvent is null -> NPE.
                if (logEvent == null || logEvent.getLogData() == null) {
                    continue;
                }
                LOG.trace("Avro record converter [{}] with log data [{}]", eventConverter, logEvent.getLogData());
                GenericRecord decodedLog = eventConverter.decodeBinary(logEvent.getLogData());
                events.add(new KafkaLogEventDto(header, decodedLog));
            }
        } catch (IOException ex) {
            LOG.error("Unexpected IOException while decoding LogEvents", ex);
            throw ex;
        }
        return events;
    }

    /**
     * Gets (or lazily creates) the per-thread converter for the given schema.
     *
     * @param schema the Avro schema as a string
     * @return the converter for that schema, cached per thread
     */
    private GenericAvroConverter<GenericRecord> getConverter(String schema) {
        LOG.trace("Get converter for schema [{}]", schema);
        Map<String, GenericAvroConverter<GenericRecord>> converterMap = converters.get();
        GenericAvroConverter<GenericRecord> genAvroConverter = converterMap.get(schema);
        if (genAvroConverter == null) {
            LOG.trace("Create new converter for schema [{}]", schema);
            genAvroConverter = new GenericAvroConverter<GenericRecord>(schema);
            converterMap.put(schema, genAvroConverter);
            // Note: no converters.set(...) needed - converterMap is the live map
            // already stored in the ThreadLocal.
        }
        LOG.trace("Get converter [{}] from map.", genAvroConverter);
        return genAvroConverter;
    }

    /**
     * Kafka producer callback that forwards the delivery result to the Kaa
     * {@link LogDeliveryCallback} and updates the per-second counters.
     */
    private static final class LogAppenderCallback implements Callback {

        private final LogDeliveryCallback callback;
        private final AtomicInteger kafkaSuccessLogCount;
        private final AtomicInteger kafkaFailureLogCount;
        // Each callback instance accounts for exactly one saved batch.
        private final int size;

        private LogAppenderCallback(LogDeliveryCallback callback, AtomicInteger kafkaSuccessLogCount,
                AtomicInteger kafkaFailureLogCount) {
            this.callback = callback;
            this.kafkaSuccessLogCount = kafkaSuccessLogCount;
            this.kafkaFailureLogCount = kafkaFailureLogCount;
            this.size = 1;
        }

        @Override
        public void onCompletion(RecordMetadata record, Exception ex) {
            if (ex == null) {
                kafkaSuccessLogCount.getAndAdd(size);
                callback.onSuccess();
            } else {
                kafkaFailureLogCount.getAndAdd(size);
                LOG.warn("Failed to store record", ex);
                // IOException from the producer is treated as a transport problem.
                if (ex instanceof IOException) {
                    callback.onConnectionError();
                } else {
                    callback.onInternalError();
                }
            }
        }
    }
}
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.segment;

import com.fasterxml.jackson.annotation.JsonProperty;
import io.druid.data.input.impl.TimestampSpec;
import io.druid.guice.annotations.PublicApi;
import io.druid.java.util.common.granularity.Granularity;
import io.druid.query.aggregation.AggregatorFactory;

import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Per-segment metadata: a free-form key/value container plus the aggregators,
 * timestamp spec, query granularity and rollup flag the segment was built with.
 * All fields except the container may legitimately be null (unknown).
 */
@PublicApi
public class Metadata
{
  // container is used for arbitrary key-value pairs in segment metadata e.g.
  // kafka firehose uses it to store commit offset
  private final Map<String, Object> container;
  @Nullable
  private final AggregatorFactory[] aggregators;
  @Nullable
  private final TimestampSpec timestampSpec;
  @Nullable
  private final Granularity queryGranularity;
  @Nullable
  private final Boolean rollup;

  // Jackson creator; a null container is replaced with an empty concurrent map
  // so put/putAll below are always safe.
  public Metadata(
      @JsonProperty("container") @Nullable Map<String, Object> container,
      @JsonProperty("aggregators") @Nullable AggregatorFactory[] aggregators,
      @JsonProperty("timestampSpec") @Nullable TimestampSpec timestampSpec,
      @JsonProperty("queryGranularity") @Nullable Granularity queryGranularity,
      @JsonProperty("rollup") @Nullable Boolean rollup
  )
  {
    this.container = container == null ? new ConcurrentHashMap<>() : container;
    this.aggregators = aggregators;
    this.timestampSpec = timestampSpec;
    this.queryGranularity = queryGranularity;
    this.rollup = rollup;
  }

  @JsonProperty
  public Map<String, Object> getContainer()
  {
    return container;
  }

  @JsonProperty
  @Nullable
  public AggregatorFactory[] getAggregators()
  {
    return aggregators;
  }

  @JsonProperty
  @Nullable
  public TimestampSpec getTimestampSpec()
  {
    return timestampSpec;
  }

  @JsonProperty
  @Nullable
  public Granularity getQueryGranularity()
  {
    return queryGranularity;
  }

  @JsonProperty
  @Nullable
  public Boolean isRollup()
  {
    return rollup;
  }

  // Merges all entries of 'other' into the container; null is a no-op.
  public Metadata putAll(@Nullable Map<String, Object> other)
  {
    if (other != null) {
      container.putAll(other);
    }
    return this;
  }

  public Object get(String key)
  {
    return container.get(key);
  }

  // Stores a single entry; null values are silently dropped (the concurrent
  // container cannot hold nulls).
  public Metadata put(String key, @Nullable Object value)
  {
    if (value != null) {
      container.put(key, value);
    }
    return this;
  }

  // arbitrary key-value pairs from the metadata just follow the semantics of last one wins if same
  // key exists in multiple input Metadata containers
  // for others e.g. Aggregators, appropriate merging is done
  //
  // Implementation note: each *ToMerge list is nulled out as soon as one input
  // Metadata is null, signalling "unknown" - the merged result must then not
  // claim a value for that field. This null-as-sentinel flow is order-sensitive.
  @Nullable
  public static Metadata merge(
      @Nullable List<Metadata> toBeMerged,
      @Nullable AggregatorFactory[] overrideMergedAggregators
  )
  {
    if (toBeMerged == null || toBeMerged.size() == 0) {
      return null;
    }

    boolean foundSomeMetadata = false;
    Map<String, Object> mergedContainer = new HashMap<>();
    // When an override is supplied, aggregators are not collected at all.
    List<AggregatorFactory[]> aggregatorsToMerge = overrideMergedAggregators == null ? new ArrayList<>() : null;

    List<TimestampSpec> timestampSpecsToMerge = new ArrayList<>();
    List<Granularity> gransToMerge = new ArrayList<>();
    List<Boolean> rollupToMerge = new ArrayList<>();

    for (Metadata metadata : toBeMerged) {
      if (metadata != null) {
        foundSomeMetadata = true;
        if (aggregatorsToMerge != null) {
          aggregatorsToMerge.add(metadata.getAggregators());
        }
        // Null timestamp specs are skipped rather than poisoning the merge.
        if (timestampSpecsToMerge != null && metadata.getTimestampSpec() != null) {
          timestampSpecsToMerge.add(metadata.getTimestampSpec());
        }
        if (gransToMerge != null) {
          gransToMerge.add(metadata.getQueryGranularity());
        }
        if (rollupToMerge != null) {
          rollupToMerge.add(metadata.isRollup());
        }
        mergedContainer.putAll(metadata.container);
      } else {
        //if metadata and hence aggregators and queryGranularity for some segment being merged are unknown then
        //final merged segment should not have same in metadata
        aggregatorsToMerge = null;
        timestampSpecsToMerge = null;
        gransToMerge = null;
        rollupToMerge = null;
      }
    }

    if (!foundSomeMetadata) {
      return null;
    }

    final AggregatorFactory[] mergedAggregators = aggregatorsToMerge == null ?
                                                  overrideMergedAggregators :
                                                  AggregatorFactory.mergeAggregators(aggregatorsToMerge);

    final TimestampSpec mergedTimestampSpec = timestampSpecsToMerge == null ?
                                              null :
                                              TimestampSpec.mergeTimestampSpec(timestampSpecsToMerge);

    final Granularity mergedGranularity = gransToMerge == null ?
                                          null :
                                          Granularity.mergeGranularities(gransToMerge);

    // rollup is kept only when every input agrees on the same non-null value;
    // any null or disagreement collapses it to null (unknown).
    Boolean rollup = null;
    if (rollupToMerge != null && !rollupToMerge.isEmpty()) {
      rollup = rollupToMerge.get(0);
      for (Boolean r : rollupToMerge) {
        if (r == null) {
          rollup = null;
          break;
        } else if (!r.equals(rollup)) {
          rollup = null;
          break;
        } else {
          rollup = r;
        }
      }
    }

    return new Metadata(
        mergedContainer,
        mergedAggregators,
        mergedTimestampSpec,
        mergedGranularity,
        rollup
    );
  }

  @Override
  public boolean equals(Object o)
  {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    final Metadata metadata = (Metadata) o;
    return Objects.equals(container, metadata.container) &&
           Arrays.equals(aggregators, metadata.aggregators) &&
           Objects.equals(timestampSpec, metadata.timestampSpec) &&
           Objects.equals(queryGranularity, metadata.queryGranularity) &&
           Objects.equals(rollup, metadata.rollup);
  }

  @Override
  public int hashCode()
  {
    return Objects.hash(container, Arrays.hashCode(aggregators), timestampSpec, queryGranularity, rollup);
  }

  @Override
  public String toString()
  {
    return "Metadata{" +
           "container=" + container +
           ", aggregators=" + Arrays.toString(aggregators) +
           ", timestampSpec=" + timestampSpec +
           ", queryGranularity=" + queryGranularity +
           ", rollup=" + rollup +
           '}';
  }
}
/* Copyright 2015 Samsung Electronics Co., LTD
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.gearvrf.asynchronous;

import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;

import android.graphics.BitmapFactory;
import android.opengl.GLES20;

/**
 * Implement this class to use compressed texture formats that GVRF does not
 * support.
 *
 * A {@link GVRCompressedTextureLoader} contains the logic to detect a
 * particular file type, and to parse the header. It is an abstract class (not
 * an interface) so that it can contain a protected method (
 * {@code CompressedTexture()}) that calls the private {@code CompressedTexture}
 * constructor: this limits the chances that someone will create an instance
 * with invalid value, while still allowing apps to add new loaders without
 * having to add them to this package.
 *
 * <p>
 * The general data flow is
 * <ul>
 * <li>The internal load method loads the file into memory
 * <li>If one and only one {@linkplain #sniff(byte[], Reader) sniff()} method
 * returns {@code true}, the data is passed to the corresponding
 * {@linkplain #parse(byte[], Reader) parse()} method
 * <li>The {@code parse()} method extracts GL parameters, and uses
 * {@code CompressedTexture()} to return an internal {@code CompressedTexture}
 * instance
 * <li>The internal load method passes that {@code CompressedTexture} to a
 * GL-thread callback, that converts it to a texture and passes that texture to
 * the app's {@link org.gearvrf.GVRAndroidResource.BitmapTextureCallback
 * BitmapTextureCallback}
 * </ul>
 *
 * @since 1.6.1
 */
public abstract class GVRCompressedTextureLoader {
    protected GVRCompressedTextureLoader() {
    }

    /**
     * Bytes of header data that we need to
     * {@link #sniff(byte[], Reader)} or {@link #parse(byte[], Reader)}.
     *
     * When we <em>know</em> that a file contains a compressed texture, we can
     * simply load the whole thing into a {@code byte[]}, and pass the offset of
     * the actual data to
     * {@link #CompressedTexture(int, int, int, int, int, byte[], int, int)}.
     * But, when a file may contain either an Android
     * {@link android.graphics.Bitmap Bitmap} or a compressed texture, we don't
     * want to load the whole file into memory:
     * {@link BitmapFactory#decodeStream(InputStream)} is more memory-efficient
     * than {@link BitmapFactory#decodeByteArray(byte[], int, int)}.
     *
     * @return Number of bytes of header data needed to successfully sniff or
     *         parse the file format.
     *
     * @since 1.6.6
     */
    public abstract int headerLength();

    /**
     * Does this byte array contain an instance of 'my' compressed texture? The
     * {@link CompressedTexture#load(InputStream) load()} methods will call all
     * registered Loader's sniffers: if one and only one returns {@code true},
     * the load() method will return a {@code CompressedTexture}.
     *
     * <p>
     * <em>Note:</em> This routine needs to be very fast! The
     * {@link CompressedTexture#load(InputStream) load()} routine will call all
     * registered sniffers, rather than looking at (possibly invalid) file
     * extensions, or asking the user for a (possibly invalid) hint.
     *
     * @param data
     *            A compressed texture file's contents
     * @param reader
     *            A data reader, pointing to data[0]
     * @return Whether or not this data is in 'my' format
     */
    public abstract boolean sniff(byte[] data, Reader reader);

    /**
     * Parse the header, and return a {@link CompressedTexture}. This will only
     * be called if the loader's {@link #sniff(byte[], Reader)} function
     * returned {@code true}.
     *
     * @param data
     *            A compressed texture file's contents: this loader's
     *            {@link #sniff(byte[], Reader)} function has already returned
     *            {@code true}.
     * @param reader
     *            A data reader, pointing to data[0]
     * @return A {@code CompressedTexture}, from
     *         {@link #CompressedTexture(int, int, int, int, int, byte[], int, int)}
     */
    public abstract CompressedTexture parse(byte[] data, Reader reader);

    /**
     * Provides external parsers access to the internal
     * {@code CompressedTexture} constructor.
     *
     * The {@code CompressedTexture} class represents a texture file, loaded
     * into memory; it's what your {@link #parse(byte[], Reader)} method needs
     * to return.
     *
     * <p>
     * The first four parameters are passed directly to
     * {@code glCompressedTexImage2D}; the names are from <a href=
     * "https://www.khronos.org/opengles/sdk/docs/man/xhtml/glCompressedTexImage2D.xml"
     * >https://www.khronos.org/opengles/sdk/docs/man/xhtml/
     * glCompressedTexImage2D.xml</a>
     *
     * @param internalformat
     *            The
     *            {@link GLES20#glCompressedTexImage2D(int, int, int, int, int, int, int, java.nio.Buffer)
     *            glCompressedTexImage2D()} <code>internalformat</code>
     *            parameter.
     * @param width
     *            The
     *            {@link GLES20#glCompressedTexImage2D(int, int, int, int, int, int, int, java.nio.Buffer)
     *            glCompressedTexImage2D()} <code>width</code> parameter.
     * @param height
     *            The
     *            {@link GLES20#glCompressedTexImage2D(int, int, int, int, int, int, int, java.nio.Buffer)
     *            glCompressedTexImage2D()} <code>height</code> parameter.
     * @param imageSize
     *            The
     *            {@link GLES20#glCompressedTexImage2D(int, int, int, int, int, int, int, java.nio.Buffer)
     *            glCompressedTexImage2D()} <code>imageSize</code> parameter.
     * @param levels
     *            The number of mipmap levels
     * @param data
     *            The {@code byte[]} passed to {@link #parse(byte[], Reader)}
     * @param dataOffset
     *            Header length - offset of first byte of texture data
     * @param dataBytes
     *            Number of bytes of texture data
     * @return An internal buffer that the GL thread can use to create a
     *         {@link GVRCompressedTexture}
     */
    protected CompressedTexture CompressedTexture(int internalformat,
            int width, int height, int imageSize, int levels, byte[] data,
            int dataOffset, int dataBytes) {
        // Wrap (not copy) the relevant slice of the already-loaded file.
        ByteBuffer buffer = ByteBuffer.wrap(data, dataOffset, dataBytes);
        return new CompressedTexture(internalformat, width, height, imageSize,
                levels, buffer);
    }

    /**
     * Register a loader with the 'sniffer'.
     *
     * 'Factory loaders' are pre-registered. To load a format we don't support,
     * create a {@link GVRCompressedTextureLoader} descendant. Then, before
     * trying to load any files in that format, create an instance and call
     * {@link #register()}:
     *
     * <pre>
     *
     * new MyCompressedFormat().register();
     * </pre>
     */
    public void register() {
        synchronized (loaders) {
            loaders.add(this);

            // Recompute the maximum over all loaders, so load() knows how many
            // header bytes it must buffer before sniffing.
            maximumHeaderLength = 0;
            for (GVRCompressedTextureLoader loader : loaders) {
                int headerLength = loader.headerLength();
                if (headerLength > maximumHeaderLength) {
                    maximumHeaderLength = headerLength;
                }
            }
        }
    }

    static List<GVRCompressedTextureLoader> getLoaders() {
        return loaders;
    }

    private static final List<GVRCompressedTextureLoader> loaders = new ArrayList<GVRCompressedTextureLoader>();
    static int maximumHeaderLength = 0;

    /*
     * We can (and do) expect apps to register any custom loaders before calling
     * one of the load() methods, but we can't have the 'factory loaders'
     * register themselves in their own static initializers: If the only
     * reference to a loader is in its own class, Java may never call its
     * initializer.
     */
    static {
        new AdaptiveScalableTextureCompression().register();
        new EricssonTextureCompression2().register();
        new KTX().register();
    }

    /** Utility class for reading big- and little-endian numbers from a header */
    protected static final class Reader {
        private final byte[] data;
        // private final int length;
        private int readPointer;

        /** Wrap a Reader around a byte array */
        protected Reader(byte[] data) {
            this.data = data;
            // this.length = data.length;
            this.readPointer = 0;
        }

        private byte read() {
            return data[readPointer++];
        }

        protected static final int INTEGER_BYTES = Integer.SIZE / Byte.SIZE;
        protected static final int SHORT_BYTES = Short.SIZE / Byte.SIZE;

        /**
         * Read an int
         *
         * @param bytes
         *            Should be in range 1..4. Not checked, and bad things can
         *            happen if you pass invalid values!
         * @return A little-endian number from the byte array
         */
        protected int read(int bytes) {
            return readLE(bytes);
        }

        /**
         * Read a little-endian int
         *
         * @param bytes
         *            Should be in range 1..4. Not checked, and bad things can
         *            happen if you pass invalid values!
         * @return A little-endian number from the byte array
         */
        protected int readLE(int bytes) {
            int result = 0;
            for (int index = 0; index < bytes; ++index) {
                result |= ((int) read() & 0xff) << (index * Byte.SIZE);
            }
            return result;
        }

        /*
         * readBE() is slightly cheaper than readLE(): readBE() takes 4
         * shift-left-by-8 ops to read an integer, while readLE() takes a
         * shift-by-0, a shift-by-8, a shift-by-16, and a shift-by-24.
         *
         * This suggests that it may make sense to implement readLE() as
         * Integer.reverseBytes(readBE(bytes)) - but simple benchmarking shows
         * that this is actually a bit slower. (Integer.reverseBytes() is
         * cleverly implemented, but it still takes 4 shifts, 3 |s, and 2 &s.)
         */

        /**
         * Read a big-endian int
         *
         * @param bytes
         *            Should be in range 1..4. Not checked, and bad things can
         *            happen if you pass invalid values!
         * @return A big-endian number from the byte array
         */
        protected int readBE(int bytes) {
            int result = 0;
            for (int index = 0; index < bytes; ++index) {
                result = (result << Byte.SIZE) | ((int) read() & 0xff);
            }
            return result;
        }

        /**
         * Read a long
         *
         * @param bytes
         *            Should be in range 1..8. Not checked, and bad things can
         *            happen if you pass invalid values!
         * @return A little-endian number from the byte array
         * @deprecated Sniffers need to be very fast!
         */
        // FIX: the javadoc @deprecated tag was present but the @Deprecated
        // annotation was missing, so the compiler could not warn callers.
        @Deprecated
        protected long readLong(int bytes) {
            long result = 0L;
            for (int index = 0; index < bytes; ++index) {
                result |= ((long) read() & 0xffL) << (index * Byte.SIZE);
            }
            return result;
        }

        /** Advance the read pointer */
        protected void skip(int bytes) {
            readPointer += bytes;
        }

        /** Set the read pointer to the start of the stream */
        protected void reset() {
            readPointer = 0;
        }

        /**
         * Get the current value of the read pointer. In conjunction with
         * {@link #setPosition(int)}, this can be used to 'read ahead'.
         */
        protected int getPosition() {
            return readPointer;
        }

        /**
         * Set the value of the read pointer. This can be used to return to a
         * previous {@link #getPosition()} after 'reading ahead'.
         */
        protected void setPosition(int position) {
            readPointer = position;
        }
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. package com.azure.resourcemanager.network; import com.azure.resourcemanager.network.models.ApplicationSecurityGroup; import com.azure.resourcemanager.network.models.Network; import com.azure.resourcemanager.network.models.NetworkInterface; import com.azure.resourcemanager.network.models.NetworkInterfaces; import com.azure.resourcemanager.network.models.Networks; import com.azure.resourcemanager.network.models.NicIpConfiguration; import com.azure.resourcemanager.resources.fluentcore.arm.ResourceUtils; import com.azure.resourcemanager.resources.models.ResourceGroup; import com.azure.resourcemanager.resources.models.ResourceGroups; import com.azure.core.management.Region; import com.azure.resourcemanager.resources.fluentcore.model.Creatable; import com.azure.resourcemanager.resources.fluentcore.model.CreatedResources; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; public class NetworkInterfaceOperationsTests extends NetworkManagementTest { @Test @SuppressWarnings({"unchecked", "rawtypes"}) public void canUseMultipleIPConfigs() throws Exception { String networkName = generateRandomResourceName("net", 15); String[] nicNames = new String[3]; for (int i = 0; i < nicNames.length; i++) { nicNames[i] = generateRandomResourceName("nic", 15); } Network network = networkManager .networks() .define(networkName) .withRegion(Region.US_EAST) .withNewResourceGroup(rgName) .withAddressSpace("10.0.0.0/27") .withSubnet("subnet1", "10.0.0.0/28") .withSubnet("subnet2", 
"10.0.0.16/28") .create(); List<Creatable<NetworkInterface>> nicDefinitions = Arrays .asList( // 0 - NIC that starts with one IP config and ends with two (Creatable<NetworkInterface>) (networkManager .networkInterfaces() .define(nicNames[0]) .withRegion(Region.US_EAST) .withNewResourceGroup(rgName) .withExistingPrimaryNetwork(network) .withSubnet("subnet1") .withPrimaryPrivateIPAddressDynamic()), // 1 - NIC that starts with two IP configs and ends with one networkManager .networkInterfaces() .define(nicNames[1]) .withRegion(Region.US_EAST) .withNewResourceGroup(rgName) .withExistingPrimaryNetwork(network) .withSubnet("subnet1") .withPrimaryPrivateIPAddressDynamic() .defineSecondaryIPConfiguration("nicip2") .withExistingNetwork(network) .withSubnet("subnet1") .withPrivateIpAddressDynamic() .attach(), // 2 - NIC that starts with two IP configs and ends with two networkManager .networkInterfaces() .define(nicNames[2]) .withRegion(Region.US_EAST) .withNewResourceGroup(rgName) .withExistingPrimaryNetwork(network) .withSubnet("subnet1") .withPrimaryPrivateIPAddressDynamic() .defineSecondaryIPConfiguration("nicip2") .withExistingNetwork(network) .withSubnet("subnet1") .withPrivateIpAddressDynamic() .attach()); // Create the NICs in parallel CreatedResources<NetworkInterface> createdNics = networkManager.networkInterfaces().create(nicDefinitions); NetworkInterface[] nics = new NetworkInterface[nicDefinitions.size()]; for (int i = 0; i < nicDefinitions.size(); i++) { nics[i] = createdNics.get(nicDefinitions.get(i).key()); } NicIpConfiguration primaryIPConfig, secondaryIPConfig; NetworkInterface nic; // Verify NIC0 nic = nics[0]; Assertions.assertNotNull(nic); primaryIPConfig = nic.primaryIPConfiguration(); Assertions.assertNotNull(primaryIPConfig); Assertions.assertTrue("subnet1".equalsIgnoreCase(primaryIPConfig.subnetName())); Assertions.assertTrue(network.id().equalsIgnoreCase(primaryIPConfig.networkId())); // Verify NIC1 nic = nics[1]; Assertions.assertNotNull(nic); 
Assertions.assertEquals(2, nic.ipConfigurations().size()); primaryIPConfig = nic.primaryIPConfiguration(); Assertions.assertNotNull(primaryIPConfig); Assertions.assertTrue("subnet1".equalsIgnoreCase(primaryIPConfig.subnetName())); Assertions.assertTrue(network.id().equalsIgnoreCase(primaryIPConfig.networkId())); secondaryIPConfig = nic.ipConfigurations().get("nicip2"); Assertions.assertNotNull(secondaryIPConfig); Assertions.assertTrue("subnet1".equalsIgnoreCase(primaryIPConfig.subnetName())); Assertions.assertTrue(network.id().equalsIgnoreCase(secondaryIPConfig.networkId())); // Verify NIC2 nic = nics[2]; Assertions.assertNotNull(nic); Assertions.assertEquals(2, nic.ipConfigurations().size()); primaryIPConfig = nic.primaryIPConfiguration(); Assertions.assertNotNull(primaryIPConfig); Assertions.assertTrue("subnet1".equalsIgnoreCase(primaryIPConfig.subnetName())); Assertions.assertTrue(network.id().equalsIgnoreCase(primaryIPConfig.networkId())); secondaryIPConfig = nic.ipConfigurations().get("nicip2"); Assertions.assertNotNull(secondaryIPConfig); Assertions.assertTrue("subnet1".equalsIgnoreCase(secondaryIPConfig.subnetName())); Assertions.assertTrue(network.id().equalsIgnoreCase(secondaryIPConfig.networkId())); nic = null; List<Mono<NetworkInterface>> nicUpdates = Arrays .asList( // Update NIC0 nics[0] .update() .defineSecondaryIPConfiguration("nicip2") .withExistingNetwork(network) .withSubnet("subnet1") .withPrivateIpAddressDynamic() .attach() .applyAsync(), // Update NIC2 nics[1] .update() .withoutIPConfiguration("nicip2") .updateIPConfiguration("primary") .withSubnet("subnet2") .parent() .applyAsync(), // Update NIC3 nics[2] .update() .withoutIPConfiguration("nicip2") .defineSecondaryIPConfiguration("nicip3") .withExistingNetwork(network) .withSubnet("subnet1") .withPrivateIpAddressDynamic() .attach() .applyAsync()); List<NetworkInterface> updatedNics = Flux .mergeDelayError(32, (Mono<NetworkInterface>[]) nicUpdates.toArray(new Mono[0])) .collectList() .block(); 
// Verify updated NICs for (NetworkInterface n : updatedNics) { Assertions.assertNotNull(n); if (n.id().equalsIgnoreCase(nics[0].id())) { // Verify NIC0 Assertions.assertEquals(2, n.ipConfigurations().size()); primaryIPConfig = n.primaryIPConfiguration(); Assertions.assertNotNull(primaryIPConfig); Assertions.assertTrue("subnet1".equalsIgnoreCase(primaryIPConfig.subnetName())); Assertions.assertTrue(network.id().equalsIgnoreCase(primaryIPConfig.networkId())); secondaryIPConfig = n.ipConfigurations().get("nicip2"); Assertions.assertNotNull(secondaryIPConfig); Assertions.assertTrue("subnet1".equalsIgnoreCase(secondaryIPConfig.subnetName())); Assertions.assertTrue(network.id().equalsIgnoreCase(secondaryIPConfig.networkId())); } else if (n.id().equals(nics[1].id())) { // Verify NIC1 Assertions.assertEquals(1, n.ipConfigurations().size()); primaryIPConfig = n.primaryIPConfiguration(); Assertions.assertNotNull(primaryIPConfig); Assertions.assertNotEquals("nicip2", primaryIPConfig.name()); Assertions.assertTrue("subnet2".equalsIgnoreCase(primaryIPConfig.subnetName())); Assertions.assertTrue(network.id().equalsIgnoreCase(primaryIPConfig.networkId())); } else if (n.id().equals(nics[2].id())) { // Verify NIC Assertions.assertEquals(2, n.ipConfigurations().size()); primaryIPConfig = n.primaryIPConfiguration(); Assertions.assertNotNull(primaryIPConfig); Assertions.assertNotEquals("nicip2", primaryIPConfig.name()); Assertions.assertNotEquals("nicip3", primaryIPConfig.name()); Assertions.assertTrue("subnet1".equalsIgnoreCase(primaryIPConfig.subnetName())); Assertions.assertTrue(network.id().equalsIgnoreCase(primaryIPConfig.networkId())); secondaryIPConfig = n.ipConfigurations().get("nicip3"); Assertions.assertNotNull(secondaryIPConfig); Assertions.assertTrue("subnet1".equalsIgnoreCase(secondaryIPConfig.subnetName())); Assertions.assertTrue(network.id().equalsIgnoreCase(secondaryIPConfig.networkId())); } else { Assertions.assertTrue(false, "Unrecognized NIC ID"); } } } @Test 
public void canCreateBatchOfNetworkInterfaces() throws Exception { ResourceGroups resourceGroups = resourceManager.resourceGroups(); Networks networks = networkManager.networks(); NetworkInterfaces networkInterfaces = networkManager.networkInterfaces(); Creatable<ResourceGroup> resourceGroupCreatable = resourceGroups.define(rgName).withRegion(Region.US_EAST); final String vnetName = "vnet1212"; Creatable<Network> networkCreatable = networks .define(vnetName) .withRegion(Region.US_EAST) .withNewResourceGroup(resourceGroupCreatable) .withAddressSpace("10.0.0.0/28"); // Prepare a batch of nics // final String nic1Name = "nic1"; Creatable<NetworkInterface> networkInterface1Creatable = networkInterfaces .define(nic1Name) .withRegion(Region.US_EAST) .withNewResourceGroup(resourceGroupCreatable) .withNewPrimaryNetwork(networkCreatable) .withPrimaryPrivateIPAddressStatic("10.0.0.5"); final String nic2Name = "nic2"; Creatable<NetworkInterface> networkInterface2Creatable = networkInterfaces .define(nic2Name) .withRegion(Region.US_EAST) .withNewResourceGroup(resourceGroupCreatable) .withNewPrimaryNetwork(networkCreatable) .withPrimaryPrivateIPAddressStatic("10.0.0.6"); final String nic3Name = "nic3"; Creatable<NetworkInterface> networkInterface3Creatable = networkInterfaces .define(nic3Name) .withRegion(Region.US_EAST) .withNewResourceGroup(resourceGroupCreatable) .withNewPrimaryNetwork(networkCreatable) .withPrimaryPrivateIPAddressStatic("10.0.0.7"); final String nic4Name = "nic4"; Creatable<NetworkInterface> networkInterface4Creatable = networkInterfaces .define(nic4Name) .withRegion(Region.US_EAST) .withNewResourceGroup(resourceGroupCreatable) .withNewPrimaryNetwork(networkCreatable) .withPrimaryPrivateIPAddressStatic("10.0.0.8"); @SuppressWarnings("unchecked") Collection<NetworkInterface> batchNics = networkInterfaces .create( networkInterface1Creatable, networkInterface2Creatable, networkInterface3Creatable, networkInterface4Creatable) .values(); 
Assertions.assertTrue(batchNics.size() == 4); HashMap<String, Boolean> found = new LinkedHashMap<>(); for (NetworkInterface nic : batchNics) { if (nic.name().equalsIgnoreCase(nic1Name)) { found.put(nic1Name, true); } if (nic.name().equalsIgnoreCase(nic2Name)) { found.put(nic2Name, true); } if (nic.name().equalsIgnoreCase(nic3Name)) { found.put(nic3Name, true); } if (nic.name().equalsIgnoreCase(nic4Name)) { found.put(nic4Name, true); } } Assertions.assertTrue(found.size() == 4); } @Test public void canCreateNicWithApplicationSecurityGroup() { Network network = networkManager .networks() .define("vnet1") .withRegion(Region.US_EAST) .withNewResourceGroup(rgName) .withAddressSpace("10.0.0.0/27") .withSubnet("subnet1", "10.0.0.0/28") .withSubnet("subnet2", "10.0.0.16/28") .create(); ApplicationSecurityGroup asg1 = networkManager.applicationSecurityGroups().define("asg1") .withRegion(Region.US_EAST) .withExistingResourceGroup(rgName) .create(); NetworkInterface nic = networkManager.networkInterfaces().define("nic1") .withRegion(Region.US_EAST) .withExistingResourceGroup(rgName) .withExistingPrimaryNetwork(network) .withSubnet("subnet1") .withPrimaryPrivateIPAddressDynamic() .withExistingApplicationSecurityGroup(asg1) .create(); List<ApplicationSecurityGroup> applicationSecurityGroups = nic.primaryIPConfiguration().listAssociatedApplicationSecurityGroups(); Assertions.assertEquals(1, applicationSecurityGroups.size()); Assertions.assertEquals("asg1", applicationSecurityGroups.iterator().next().name()); ApplicationSecurityGroup asg2 = networkManager.applicationSecurityGroups().define("asg2") .withRegion(Region.US_EAST) .withExistingResourceGroup(rgName) .create(); nic.update() .withoutApplicationSecurityGroup(asg1.name()) .withExistingApplicationSecurityGroup(asg2) .defineSecondaryIPConfiguration("nicip2") .withExistingNetwork(network) .withSubnet("subnet1") .withPrivateIpAddressDynamic() .attach() .apply(); applicationSecurityGroups = 
nic.primaryIPConfiguration().listAssociatedApplicationSecurityGroups(); Assertions.assertEquals(1, applicationSecurityGroups.size()); Assertions.assertEquals("asg2", applicationSecurityGroups.iterator().next().name()); nic.update() .withoutApplicationSecurityGroup(asg1.name()) .withExistingApplicationSecurityGroup(asg1) .apply(); Assertions.assertEquals(2, nic.ipConfigurations().get("nicip2").innerModel().applicationSecurityGroups().size()); Assertions.assertEquals( new HashSet<>(Arrays.asList("asg1", "asg2")), nic.ipConfigurations().get("nicip2").innerModel().applicationSecurityGroups().stream().map(inner -> ResourceUtils.nameFromResourceId(inner.id())).collect(Collectors.toSet())); if (!isPlaybackMode()) { // avoid concurrent request in playback applicationSecurityGroups = nic.ipConfigurations().get("nicip2").listAssociatedApplicationSecurityGroups(); Assertions.assertEquals(2, applicationSecurityGroups.size()); Assertions.assertEquals( new HashSet<>(Arrays.asList("asg1", "asg2")), applicationSecurityGroups.stream().map(ApplicationSecurityGroup::name).collect(Collectors.toSet())); } } @Test @Disabled("Deadlock from CountDownLatch") public void canDeleteNetworkWithServiceCallBack() { String vnetName = generateRandomResourceName("vnet", 15); networkManager .networks() .define(vnetName) .withRegion(Region.US_EAST) .withNewResourceGroup(rgName) .withAddressSpace("172.16.0.0/16") .defineSubnet("Front-end") .withAddressPrefix("172.16.1.0/24") .attach() .defineSubnet("Back-end") .withAddressPrefix("172.16.3.0/24") .attach() .create(); // TODO: Fix deadlock final CountDownLatch latch = new CountDownLatch(1); final AtomicInteger counter = new AtomicInteger(0); networkManager .networks() .deleteByResourceGroupAsync(rgName, vnetName) .doOnSuccess( aVoid -> { counter.incrementAndGet(); latch.countDown(); }) .doOnError(throwable -> latch.countDown()); try { latch.await(); } catch (InterruptedException exception) { throw new RuntimeException(exception); } 
Assertions.assertEquals(counter.intValue(), 1); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.sis.measure;

import java.util.List;
import java.util.Collections;
import javax.measure.Unit;
import javax.measure.quantity.Angle;
import javax.measure.UnitConverter;
import org.apache.sis.util.ArgumentChecks;
import org.apache.sis.util.resources.Errors;
import org.apache.sis.util.resources.Vocabulary;

import static org.apache.sis.math.MathFunctions.truncate;


/**
 * A converter from decimal degrees to sexagesimal degrees. Sexagesimal degrees are pseudo-unit
 * in the <cite>sign - degrees - decimal point - minutes (two digits) - integer seconds (two digits) -
 * fraction of seconds (any precision)</cite> format.
 *
 * <p>When possible, Apache SIS always handles angles in radians, decimal degrees or any other proportional units.
 * Sexagesimal angles are considered a string representation issue (handled by {@link AngleFormat}) rather than a
 * unit issue. Unfortunately, this pseudo-unit is extensively used in the EPSG database, so we have to support it.</p>
 *
 * <div class="section">Immutability and thread safety</div>
 * This class and all inner classes are immutable, and thus inherently thread-safe.
 *
 * @author  Martin Desruisseaux (IRD, Geomatys)
 * @version 0.8
 * @since   0.3
 * @module
 */
class SexagesimalConverter extends AbstractConverter {
    /**
     * Serial number for compatibility with different versions.
     */
    private static final long serialVersionUID = -2119974989555436361L;

    /**
     * Small tolerance factor when comparing numbers close to 1.
     * For comparing numbers other than 1, multiply by the number magnitude.
     */
    static final double EPS = 1E-10;

    /**
     * Pseudo-unit for sexagesimal degree. Numbers in this pseudo-unit have the following format:
     *
     * <cite>sign - degrees - decimal point - minutes (two digits) - fraction of minutes (any precision)</cite>.
     *
     * Using this unit is loosely equivalent to formatting decimal degrees with the
     * {@code "D.MMm"} {@link AngleFormat} pattern.
     *
     * <p>This unit is non-linear and not practical for computation. Consequently, it should be
     * avoided as much as possible. This pseudo-unit is defined only because used in the EPSG
     * database (code 9111).</p>
     */
    static final ConventionalUnit<Angle> DM;

    /**
     * Pseudo-unit for sexagesimal degree. Numbers in this pseudo-unit have the following format:
     *
     * <cite>sign - degrees - decimal point - minutes (two digits) - integer seconds (two digits) -
     * fraction of seconds (any precision)</cite>.
     *
     * Using this unit is loosely equivalent to formatting decimal degrees with the
     * {@code "D.MMSSs"} {@link AngleFormat} pattern.
     *
     * <p>This unit is non-linear and not practical for computation. Consequently, it should be
     * avoided as much as possible. This pseudo-unit is defined only because extensively used in
     * the EPSG database (code 9110).</p>
     */
    static final Unit<Angle> DMS;

    /**
     * Pseudo-unit for degree - minute - second.
     * Numbers in this pseudo-unit have the following format:
     *
     * <cite>signed degrees (integer) - arc-minutes (integer) - arc-seconds
     * (real, any precision)</cite>.
     *
     * Using this unit is loosely equivalent to formatting decimal degrees with the
     * {@code "DMMSS.s"} {@link AngleFormat} pattern.
     *
     * <p>This unit is non-linear and not practical for computation. Consequently, it should be
     * avoided as much as possible. This pseudo-unit is defined only because extensively used in
     * EPSG database (code 9107).</p>
     */
    static final Unit<Angle> DMS_SCALED;

    // The three pseudo-units all share the radian system unit; each converter first
    // unpacks the sexagesimal encoding (via Inverse) and then scales degrees to radians.
    static {
        final SystemUnit<Angle> rad = (SystemUnit<Angle>) Units.RADIAN;
        final UnitConverter toRadian = Units.DEGREE.getConverterTo(rad);
        DM = new ConventionalUnit<>(rad, new ConcatenatedConverter(
                new SexagesimalConverter(false, 100).inverse(), toRadian), "D.M", UnitRegistry.OTHER, (short) 9111);
        DMS = new ConventionalUnit<>(rad, new ConcatenatedConverter(
                new SexagesimalConverter(true, 10000).inverse(), toRadian), "D.MS", UnitRegistry.OTHER, (short) 9110);
        DMS_SCALED = new ConventionalUnit<>(rad, new ConcatenatedConverter(
                new SexagesimalConverter(true, 1).inverse(), toRadian), "DMS", UnitRegistry.OTHER, (short) 9107);
    }

    /**
     * {@code true} if the seconds field is present.
     */
    final boolean hasSeconds;

    /**
     * The value to divide DMS unit by.
     * For "degree minute second" (EPSG code 9107), this is 1.
     * For "sexagesimal degree" (EPSG code 9110), this is 10000.
     */
    final double divider;

    /**
     * The inverse of this converter.
     */
    private final UnitConverter inverse;

    /**
     * Constructs a converter for sexagesimal units.
     *
     * @param hasSeconds  {@code true} if the seconds field is present.
     * @param divider     the value to divide DMS unit by.
     *                    For "degree minute second" (EPSG code 9107), this is 1.
     *                    For "sexagesimal degree" (EPSG code 9110), this is 10000.
     */
    private SexagesimalConverter(final boolean hasSeconds, final double divider) {
        this.hasSeconds = hasSeconds;
        this.divider    = divider;
        this.inverse    = new Inverse(this);
    }

    /**
     * Constructs a converter for sexagesimal units.
     * This constructor is for {@link Inverse} usage only.
     * It copies the configuration of the given converter and links back to it,
     * so that {@code inverse().inverse() == this}.
     */
    private SexagesimalConverter(final SexagesimalConverter inverse) {
        this.hasSeconds = inverse.hasSeconds;
        this.divider    = inverse.divider;
        this.inverse    = inverse;
    }

    /**
     * Returns {@code false} since this converter is not an identity function.
     */
    @Override
    public boolean isIdentity() {
        return false;
    }

    /**
     * Returns {@code false} since the conversion is non-linear.
     */
    @Override
    public boolean isLinear() {
        return false;
    }

    /**
     * Returns a collection containing only {@code this} since this conversion is not
     * a concatenation of other converters.
     */
    @Override
    public List<? extends UnitConverter> getConversionSteps() {
        return Collections.singletonList(this);
    }

    /**
     * Returns the inverse of this converter.
     */
    @Override
    public final UnitConverter inverse() {
        return inverse;
    }

    /**
     * Performs a conversion from fractional degrees to sexagesimal degrees.
     * The fractional part of the input is repeatedly scaled by 60 to extract
     * minutes (and optionally seconds), then the fields are packed into the
     * decimal digits of a single number and scaled by {@link #divider}.
     */
    @Override
    public double convert(double angle) {
        final double deg = truncate(angle);
        angle = (angle - deg) * 60;                     // minutes (with fraction)
        if (hasSeconds) {
            final double min = truncate(angle);
            angle  = (angle - min) * 60;                // seconds
            angle += (deg*100 + min)*100;               // pack as DDMMSS.s
        } else {
            angle += deg * 100;                         // pack as DDMM.m
        }
        return angle / divider;
    }

    /**
     * Performs a conversion from fractional degrees to sexagesimal degrees.
     * This method delegates to the version working on {@code double} primitive type,
     * so it may not provide the accuracy normally required by this method contract.
     */
    @Override
    public final Number convert(final Number value) {
        return convert(value.doubleValue());
    }

    /**
     * Considers this converter as non-derivable. Actually it would be possible to provide a derivative value
     * for input values other than the discontinuities points, but for now we presume that it is less dangerous
     * to return NaN every time, so the user can not miss that this function is not derivable everywhere.
     */
    @Override
    public final double derivative(double value) {
        return Double.NaN;
    }

    /**
     * Concatenates this converter with another converter. The resulting converter is equivalent to first converting
     * by the specified converter (right converter), and then converting by this converter (left converter).
     */
    @Override
    public UnitConverter concatenate(final UnitConverter converter) {
        ArgumentChecks.ensureNonNull("converter", converter);
        if (equals(converter.inverse())) {
            return LinearConverter.IDENTITY;            // converter followed by its inverse collapses to identity
        }
        return new ConcatenatedConverter(converter, this);
    }

    /**
     * Compares this converter with the specified object.
     * Note: comparing {@link #divider} is sufficient because {@code getClass()} already
     * distinguishes forward converters from {@link Inverse} instances.
     */
    @Override
    public final boolean equals(final Object object) {
        return object != null && object.getClass() == getClass() &&
                ((SexagesimalConverter) object).divider == divider;
    }

    /**
     * Returns a hash value for this converter.
     */
    @Override
    public final int hashCode() {
        return ((int) divider) ^ getClass().hashCode();
    }

    /**
     * The inverse of {@link SexagesimalConverter}, i.e. the converter from sexagesimal degrees to decimal degrees.
     */
    private static final class Inverse extends SexagesimalConverter {
        /**
         * Serial number for compatibility with different versions.
         */
        private static final long serialVersionUID = -1928146841653975281L;

        /**
         * Constructs a converter.
         */
        public Inverse(final SexagesimalConverter inverse) {
            super(inverse);
        }

        /**
         * Performs a conversion from sexagesimal degrees to fractional degrees.
         * Unpacks the degree, minute and second fields from the decimal digits of the
         * input, validates the minute and second ranges (tolerating values that are
         * within {@link #EPS} of a full carry, e.g. 60 encoded as 100 by rounding),
         * then recombines them as {@code deg + min/60 + sec/3600}.
         *
         * @throws IllegalArgumentException If the given angle can not be converted.
         */
        @Override
        public double convert(final double angle) throws IllegalArgumentException {
            double deg,min,sec;
            if (hasSeconds) {
                sec = angle * divider;
                deg = truncate(sec/10000); sec -= 10000*deg;
                min = truncate(sec/  100); sec -=   100*min;
            } else {
                sec = 0;
                min = angle * divider;
                deg = truncate(min / 100);
                min -= deg * 100;
            }
            if (min <= -60 || min >= 60) {              // Do not enter for NaN
                if (Math.abs(Math.abs(min) - 100) <= (EPS * 100)) {
                    // A rounded carry: treat the minutes field as a whole extra degree.
                    if (min >= 0) deg++; else deg--;
                    min = 0;
                } else {
                    throw illegalField(angle, min, Vocabulary.Keys.AngularMinutes);
                }
            }
            if (sec <= -60 || sec >= 60) {              // Do not enter for NaN
                if (Math.abs(Math.abs(sec) - 100) <= (EPS * 100)) {
                    // A rounded carry: treat the seconds field as a whole extra minute.
                    if (sec >= 0) min++; else min--;
                    sec = 0;
                } else {
                    throw illegalField(angle, sec, Vocabulary.Keys.AngularSeconds);
                }
            }
            return (sec/60 + min)/60 + deg;
        }

        /**
         * Creates an exception for an illegal field.
         *
         * @param  value  the user-supplied angle value.
         * @param  field  the value of the illegal field.
         * @param  unit   the vocabulary key for the field (minutes or seconds).
         * @return the exception to throw.
         */
        private static IllegalArgumentException illegalField(final double value, final double field, final short unit) {
            return new IllegalArgumentException(Errors.format(Errors.Keys.IllegalArgumentField_4,
                    "angle", value, Vocabulary.format(unit), field));
        }
    }
}
package php.runtime;

import php.runtime.env.Environment;
import php.runtime.env.TraceInfo;
import php.runtime.invoke.Invoker;
import php.runtime.lang.BaseWrapper;
import php.runtime.lang.ForeachIterator;
import php.runtime.lang.IObject;
import php.runtime.lang.StdClass;
import php.runtime.lang.spl.Traversable;
import php.runtime.memory.*;
import php.runtime.memory.helper.UndefinedMemory;
import php.runtime.memory.helper.VariadicMemory;
import php.runtime.memory.support.MemoryOperation;
import php.runtime.reflection.support.ReflectionUtils;

import java.util.HashMap;
import java.util.Map;

/**
 * Base class for all runtime values of the PHP VM.
 * Each concrete subclass (LongMemory, DoubleMemory, StringMemory, ArrayMemory,
 * ObjectMemory, ...) represents one PHP value type; this class defines the common
 * type tag, the shared singleton constants (NULL/TRUE/FALSE/...), the conversion
 * protocol and the index-access protocol. Instances are generally treated as
 * immutable; writable slots are represented by reference subclasses.
 */
abstract public class Memory implements Comparable<Memory> {

    /** PHP-level type tag of a value, with mappings to/from Java classes. */
    public enum Type {
        NULL, BOOL, INT, DOUBLE, STRING, ARRAY, OBJECT, REFERENCE, KEY_VALUE;

        /**
         * Returns the Java class used to represent this PHP type
         * (primitive types for scalars, Memory subclasses otherwise),
         * or {@code null} for NULL.
         */
        public Class toClass(){
            if (this == DOUBLE) return Double.TYPE;
            else if (this == INT) return Long.TYPE;
            else if (this == STRING) return String.class;
            else if (this == BOOL) return Boolean.TYPE;
            else if (this == ARRAY) return ArrayMemory.class;
            else if (this == OBJECT) return ObjectMemory.class;
            else if (this == REFERENCE) return Memory.class;
            else if (this == KEY_VALUE) return KeyValueMemory.class;

            return null;
        }

        /**
         * Inverse of {@link #toClass()}: maps a Java class to the PHP type tag,
         * falling back to REFERENCE for any unknown class.
         */
        public static Type valueOf(Class clazz){
            if (clazz == Long.TYPE)
                return INT;
            if (clazz == Double.TYPE)
                return DOUBLE;
            if (clazz == String.class)
                return STRING;
            if (clazz == Boolean.TYPE)
                return BOOL;
            if (clazz == ArrayMemory.class)
                return ARRAY;
            if (clazz == ObjectMemory.class)
                return OBJECT;
            if (clazz == KeyValueMemory.class)
                return KEY_VALUE;

            return REFERENCE;
        }

        /** Returns the PHP-visible type name (as used by gettype()-style output). */
        @Override
        public String toString(){
            switch (this){
                case ARRAY: return "array";
                case BOOL: return "boolean";
                case DOUBLE: return "float";
                case INT: return "integer";
                case NULL: return "NULL";
                case OBJECT: return "object";
                case STRING: return "string";
                default:
                    return "unknown";
            }
        }

        // Lookup table from PHP type-name spellings (including aliases such as
        // "bool"/"boolean", "int"/"integer"/"long") to the Type constant.
        protected final static Map<String, Type> TYPE_MAP = new HashMap<String, Type>(){{
            put("array", ARRAY);
            put("bool", BOOL);
            put("boolean", BOOL);
            put("double", DOUBLE);
            put("float", DOUBLE);
            put("int", INT);
            put("integer", INT);
            put("long", INT);
            put("null", NULL);
            put("string", STRING);
            put("object", OBJECT);
        }};

        /** Resolves a (case-insensitive) PHP type name; returns {@code null} if unknown. */
        public static Type of(String code) {
            return TYPE_MAP.get(code.toLowerCase());
        }
    }

    // Immutable type tag of this value, assigned once at construction.
    public final Type type;

    protected Memory(Type type) {
        this.type = type;
    }

    // Shared singleton values; identity comparison against these is used throughout the VM.
    public static final Memory NULL = NullMemory.INSTANCE;
    public static final Memory UNDEFINED = UndefinedMemory.INSTANCE;
    public static final Memory FALSE = FalseMemory.INSTANCE;
    public static final Memory TRUE = TrueMemory.INSTANCE;

    // Cached small integer constants to avoid re-allocating common values.
    public static final Memory CONST_INT_0 = new LongMemory(0);
    public static final Memory CONST_INT_M1 = new LongMemory(-1);
    public static final Memory CONST_INT_1 = new LongMemory(1);
    public static final Memory CONST_INT_2 = new LongMemory(2);
    public static final Memory CONST_INT_3 = new LongMemory(3);
    public static final Memory CONST_INT_4 = new LongMemory(4);
    public static final Memory CONST_INT_5 = new LongMemory(5);

    public static final Memory CONST_DOUBLE_0 = new DoubleMemory(0.0);
    public static final Memory CONST_DOUBLE_1 = new DoubleMemory(1.0);
    public static final Memory CONST_DOUBLE_NAN = new DoubleMemory(Double.NaN);

    public static final Memory CONST_EMPTY_STRING = new StringMemory("");

    public boolean isNull(){
        return type == Type.NULL;
    }

    // Compares the dereferenced value against the UNDEFINED singleton by identity.
    public boolean isUndefined(){
        return toValue() == UNDEFINED;
    }

    public boolean isShortcut(){
        return false;
    }

    // ---- conversion protocol: each subclass defines how it coerces to the scalar types ----
    abstract public long toLong();
    public int toInteger(){ return (int)toLong(); }

    abstract public double toDouble();
    abstract public boolean toBoolean();
    abstract public Memory toNumeric();
    abstract public String toString();

    /** Value used when this slot is unset; the base implementation yields NULL. */
    public Memory toUnset() { return NULL; }

    public String toBinaryString(){
        return toString();
    }

    public Memory toBinary() {
        return new BinaryMemory(toString());
    }

    public float toFloat() { return (float) toDouble(); }

    /** Wraps this scalar in a one-element array, PHP (array)$x style. */
    public Memory toArray() {
        ArrayMemory result = new ArrayMemory();
        result.add(toImmutable());
        return result.toConstant();
    }

    /** Wraps this scalar in a stdClass with a single "scalar" property, PHP (object)$x style. */
    public Memory toObject(Environment env) {
        StdClass stdClass = new StdClass(env);
        stdClass.getProperties().refOfIndex("scalar").assign(toImmutable());
        return new ObjectMemory(stdClass);
    }

    /**
     * Unwraps the underlying IObject and casts it to the requested class,
     * translating a failed cast into a descriptive ClassCastException.
     */
    @SuppressWarnings("unchecked")
    public <T extends IObject> T toObject(Class<T> clazz) {
        try {
            return clazz.cast( toValue(ObjectMemory.class).value );
        } catch (ClassCastException e) {
            if (!(this instanceof ObjectMemory)) {
                throw new ClassCastException(
                        "Cannot convert '" + toString() + "' to an instance of " + ReflectionUtils.getClassName(clazz) + " class"
                );
            } else {
                throw new ClassCastException(
                        "Cannot convert instance of " + toValue(ObjectMemory.class).getReflection().getName() + " class"
                                + " to an instance of " + ReflectionUtils.getClassName(clazz)
                );
            }
        }
    }

    // Resolves this value's string form as an enum constant of the given class.
    public <T extends Enum> T toEnum(Class<T> clazz) {
        return (T) Enum.valueOf(clazz, toString());
    }

    /**
     * Interprets this value as a PHP callable, or returns {@code null}
     * if it is not callable in the given environment.
     */
    public Invoker toInvoker(Environment env) {
        Invoker invoker = Invoker.valueOf(env, null, this);

        if (invoker != null) {
            invoker.setTrace(env.trace());
            return invoker;
        }

        return null;
    }

    // Base implementation mirrors PHP: cloning a non-object is an error.
    public Memory clone(Environment env, TraceInfo trace) throws Throwable {
        env.error(trace, "__clone method called on non-object");
        return NULL;
    }

    public Type getRealType(){
        return type;
    }

    /** First character of the string form, or '\0' for an empty string; numeric cast otherwise. */
    public char toChar(){
        switch (type){
            case STRING:
                String tmp = toString();
                if (tmp.isEmpty())
                    return '\0';
                else
                    return tmp.charAt(0);
            default:
                return (char)toLong();
        }
    }

    // Identity-based pointer surrogate (Object.hashCode), bypassing any value-based override.
    public int getPointer(boolean absolute){ return super.hashCode(); }
    public int getPointer(){ return super.hashCode(); }

    // ---- key => value pair builders (this value as the key, argument as the value) ----
    public Memory newKeyValue(Memory memory){ return new KeyValueMemory(this.toValue(), memory); }
    public Memory newKeyValue(long memory){ return new KeyValueMemory(this.toValue(), LongMemory.valueOf(memory)); }
    public Memory newKeyValue(double memory){ return new KeyValueMemory(this.toValue(), new DoubleMemory(memory)); }
    public Memory newKeyValue(boolean memory){ return new KeyValueMemory(this.toValue(), memory ? TRUE : FALSE); }
    public Memory newKeyValue(String memory){ return new KeyValueMemory(this.toValue(), new StringMemory(memory)); }

    // ---- key => value pair builders (argument as the key, this value as the value) ----
    public Memory newKeyValueRight(Memory memory){ return new KeyValueMemory(memory, this.toValue()); }
    public Memory newKeyValueRight(long memory){ return new KeyValueMemory(LongMemory.valueOf(memory), this.toValue()); }
    public Memory newKeyValueRight(double memory){ return new KeyValueMemory(new DoubleMemory(memory), this.toValue()); }
    public Memory newKeyValueRight(boolean memory){ return new KeyValueMemory(memory ? TRUE : FALSE, this.toValue()); }
    public Memory newKeyValueRight(String memory){ return new KeyValueMemory(new StringMemory(memory), this.toValue()); }

    // ---- type predicates ----
    public boolean isObject() { return type == Type.OBJECT; }
    public boolean isClosure() { return false; }
    public boolean isResource() { return false; }
    public boolean isArray(){ return type == Type.ARRAY; }
    // Traversable: a real array, or an object implementing the SPL Traversable interface.
    public boolean isTraversable() { return isArray() || instanceOf(Traversable.class.getName()); }
    public boolean isString() { return type == Type.STRING; }
    public boolean isNumber() { return type == Type.INT || type == Type.DOUBLE; }
    public boolean isReference() { return false; }

    // <value>[index] — read access; the base class yields NULL for every index type,
    // array-like subclasses override the trace-taking variants.
    final public Memory valueOfIndex(Memory index) { return valueOfIndex(null, index); }
    public Memory valueOfIndex(TraceInfo trace, Memory index) { return NULL; }

    public Memory valueOfIndex(TraceInfo trace, long index) { return NULL; }
    final public Memory valueOfIndex(long index) { return valueOfIndex(null, index); }

    public Memory valueOfIndex(TraceInfo trace, double index) { return NULL; }
    final public Memory valueOfIndex(double index) { return valueOfIndex(null, index); }

    public Memory valueOfIndex(TraceInfo trace, String index) { return NULL; }
    final public Memory valueOfIndex(String index) { return valueOfIndex(null, index); }

    public Memory valueOfIndex(TraceInfo trace, boolean index) { return NULL; }
    final public Memory valueOfIndex(boolean index) { return valueOfIndex(null, index); }
final public Memory refOfIndex(Memory index){ return refOfIndex(null, index); } public Memory refOfIndex(TraceInfo trace, Memory index) { return NULL; } public Memory refOfIndexAsShortcut(TraceInfo trace, Memory index) { return refOfIndex(trace, index); } public Memory refOfIndex(TraceInfo trace, long index) { return NULL; } final public Memory refOfIndex(long index) { return refOfIndex(null, index); } public Memory refOfIndex(TraceInfo trace, double index) { return NULL; } final public Memory refOfIndex(double index) { return refOfIndex(null, index); } public Memory refOfIndex(TraceInfo trace, String index) { return NULL; } final public Memory refOfIndex(String index) { return refOfIndex(null, index); } public Memory refOfIndex(TraceInfo trace, boolean index) { return NULL; } final public Memory refOfIndex(boolean index) { return refOfIndex(null, index); } public Memory refOfPush(TraceInfo trace) { return new ReferenceMemory(); } final public Memory refOfPush() { return refOfPush(null); } public void unsetOfIndex(TraceInfo trace, Memory index) { } public Memory issetOfIndex(TraceInfo trace, Memory index) { return NULL; } public Memory emptyOfIndex(TraceInfo trace, Memory index) { return issetOfIndex(trace, index); } // INC DEC abstract public Memory inc(); abstract public Memory dec(); // NEGATIVE abstract public Memory negative(); // CONCAT public String concat(Memory memory){ return toString() + memory.toString(); } public String concat(long value) { return toString() + value; } public String concat(double value) { return toString() + new DoubleMemory(value).toString(); } public String concat(boolean value) { return toString() + boolToString(value); } public String concat(String value) { return toString() + value; } // PLUS abstract public Memory plus(Memory memory); public Memory plus(long value){ return new LongMemory(toLong() + value); } public Memory plus(double value){ return new DoubleMemory(toDouble() + value); } public Memory plus(boolean value){ return 
new LongMemory(toLong() + (value ? 1 : 0)); } public Memory plus(String value){ return plus(StringMemory.toNumeric(value)); } // MINUS abstract public Memory minus(Memory memory); public Memory minus(long value){ return new LongMemory(toLong() - value); } public Memory minus(double value){ return new DoubleMemory(toDouble() - value); } public Memory minus(boolean value){ return new LongMemory(toLong() - (value ? 1 : 0)); } public Memory minus(String value){ return minus(StringMemory.toNumeric(value)); } // MUL abstract public Memory mul(Memory memory); public Memory mul(long value){ return new LongMemory(toLong() * value); } public Memory mul(double value){ return new DoubleMemory(toDouble() * value); } public Memory mul(boolean value){ return LongMemory.valueOf(toLong() * (value ? 1 : 0));} public Memory mul(String value){ return mul(StringMemory.toNumeric(value)); } // POW abstract public Memory pow(Memory memory); public Memory pow(long value) { Memory real = toNumeric(); if (real instanceof LongMemory) { double result = Math.pow(real.toLong(), value); if (result > Long.MAX_VALUE) { return new DoubleMemory(result); } return new LongMemory((long) result); } return new DoubleMemory(Math.pow(real.toDouble(), value)); } public Memory pow(double value) { return new DoubleMemory(Math.pow(toDouble(), value)); } public Memory pow(boolean value) { Memory real = toNumeric(); if (real instanceof LongMemory) { return value ? real.toImmutable() : Memory.CONST_INT_1; } return value ? 
real.toImmutable() : Memory.CONST_DOUBLE_1; } public Memory pow(String value) { return pow(StringMemory.toNumeric(value)); } // DIV abstract public Memory div(Memory memory); public Memory div(long value){ if(value==0) return FALSE; return new DoubleMemory(toDouble() / value); } public Memory div(double value){ if(value==0.0) return FALSE; return new DoubleMemory(toDouble() / value); } public Memory div(boolean value){ if(!value) return FALSE; return LongMemory.valueOf(toLong()); } public Memory div(String value){ return div(StringMemory.toNumeric(value)); } // MOD public Memory mod(Memory memory) { long t = memory.toLong(); if (t == 0) return FALSE; return LongMemory.valueOf(toLong() % t); } public Memory mod(long value){ if (value==0) return FALSE; return LongMemory.valueOf(toLong() % value); } public Memory mod(double value){ return mod((long)value); } public Memory mod(boolean value){ if (!value) return FALSE; return LongMemory.valueOf(toLong() % 1); } public Memory mod(String value){ return mod(StringMemory.toNumeric(value, true, CONST_INT_0)); } // NOT public boolean not(){ return !toBoolean(); } private static boolean _xor(boolean... 
args) { boolean r = false; for (boolean b : args) { r = r ^ b; } return r; } public boolean xor(Memory value) { return _xor(toBoolean(), value.toBoolean()); } public boolean xor(long value) { return _xor(toBoolean(), value != 0); } public boolean xor(double value) { return _xor(toBoolean(), OperatorUtils.toBoolean(value)); } public boolean xor(boolean value) { return _xor(toBoolean(), value); } public boolean xor(String value) { return _xor(toBoolean(), OperatorUtils.toBoolean(value)); } // EQUAL abstract public boolean equal(Memory memory); public boolean equal(long value){ return toLong() == value; } public boolean equal(double value) { return DoubleMemory.almostEqual(toDouble(), value); } public boolean equal(boolean value) { return toBoolean() == value; } public boolean equal(String value) { return equal(StringMemory.toNumeric(value)); } // IDENTICAL abstract public boolean identical(Memory memory); public boolean identical(long value) { return type == Type.INT && toLong() == value; } public boolean identical(double value) { return type == Type.DOUBLE && DoubleMemory.almostEqual(toDouble(), value); } public boolean identical(boolean value) { return type == Type.BOOL && value ? 
toImmutable() == TRUE : toImmutable() == FALSE; } public boolean identical(String value) { return type == Type.STRING && toString().equals(value); } // NOT EQUAL abstract public boolean notEqual(Memory memory); public boolean notEqual(long value){ return toLong() != value; } public boolean notEqual(double value) { return toDouble() != value; } public boolean notEqual(boolean value) { return toBoolean() != value; } public boolean notEqual(String value) { return !toString().equals(value); } // NOT IDENTICAL public boolean notIdentical(Memory memory) { return !identical(memory); } public boolean notIdentical(long memory) { return !identical(memory); } public boolean notIdentical(double memory) { return !identical(memory); } public boolean notIdentical(boolean memory) { return !identical(memory); } public boolean notIdentical(String memory) { return !identical(memory); } // SMALLER abstract public boolean smaller(Memory memory); public boolean smaller(long value) { return toDouble() < value; } public boolean smaller(double value) { return toDouble() < value; } public boolean smaller(boolean value) { return toDouble() < (value ? 1 : 0); } public boolean smaller(String value) { return this.smaller(StringMemory.toNumeric(value)); } // SMALLER EQ abstract public boolean smallerEq(Memory memory); public boolean smallerEq(long value) { return toDouble() <= value; } public boolean smallerEq(double value) { return toDouble() <= value; } public boolean smallerEq(boolean value) { return toDouble() <= (value ? 1 : 0); } public boolean smallerEq(String value) { return this.smallerEq(StringMemory.toNumeric(value)); } // GREATER abstract public boolean greater(Memory memory); public boolean greater(long value) { return toDouble() > value; } public boolean greater(double value) { return toDouble() > value; } public boolean greater(boolean value) { return toDouble() > (value ? 
1 : 0); } public boolean greater(String value) { return this.smaller(StringMemory.toNumeric(value)); } // GREATER EQ abstract public boolean greaterEq(Memory memory); public boolean greaterEq(long value) { return toDouble() >= value; } public boolean greaterEq(double value) { return toDouble() >= value; } public boolean greaterEq(boolean value) { return toDouble() >= (value ? 1 : 0); } public boolean greaterEq(String value) { return this.greaterEq(StringMemory.toNumeric(value)); } // BIT & public Memory bitAnd(Memory memory) { return LongMemory.valueOf( toLong() & memory.toLong() ); } public Memory bitAnd(long memory) { return LongMemory.valueOf( toLong() & memory ); } public Memory bitAnd(double memory) { return LongMemory.valueOf( toLong() & (long)memory ); } public Memory bitAnd(boolean memory) { return LongMemory.valueOf( toLong() & (memory ? 1 : 0) ); } public Memory bitAnd(String memory) { return LongMemory.valueOf( toLong() & StringMemory.toNumeric(memory).toLong() ); } // BIT | public Memory bitOr(Memory memory) { return LongMemory.valueOf( toLong() | memory.toLong() ); } public Memory bitOr(long memory) { return LongMemory.valueOf( toLong() | memory ); } public Memory bitOr(double memory) { return LongMemory.valueOf( toLong() | (long)memory ); } public Memory bitOr(boolean memory) { return LongMemory.valueOf( toLong() | (memory ? 1 : 0) ); } public Memory bitOr(String memory) { return LongMemory.valueOf( toLong() | StringMemory.toNumeric(memory).toLong() ); } // BIT XOR ^ public Memory bitXor(Memory memory) { return LongMemory.valueOf( toLong() ^ memory.toLong() ); } public Memory bitXor(long memory) { return LongMemory.valueOf( toLong() ^ memory ); } public Memory bitXor(double memory) { return LongMemory.valueOf( toLong() ^ (long)memory ); } public Memory bitXor(boolean memory) { return LongMemory.valueOf( toLong() ^ (memory ? 
1 : 0) ); }

// Bitwise XOR, string operand: coerce to a numeric memory first.
public Memory bitXor(String memory) { return LongMemory.valueOf( toLong() ^ StringMemory.toNumeric(memory).toLong() ); }

// Bitwise NOT (~).
public Memory bitNot(){ return LongMemory.valueOf(~toLong()); }

// Shift right (>>); every operand shape is reduced to long.
public Memory bitShr(Memory memory) { return LongMemory.valueOf( toLong() >> memory.toLong() ); }
public Memory bitShr(long memory) { return LongMemory.valueOf( toLong() >> memory ); }
public Memory bitShr(double memory) { return LongMemory.valueOf( toLong() >> (long) memory ); }
public Memory bitShr(boolean memory) { return LongMemory.valueOf( toLong() >> (memory ? 1 : 0) ); }
public Memory bitShr(String memory) { return LongMemory.valueOf( toLong() >> StringMemory.toNumeric(memory).toLong() ); }

// Shift left (<<).
public Memory bitShl(Memory memory) { return LongMemory.valueOf( toLong() << memory.toLong() ); }
public Memory bitShl(long memory) { return LongMemory.valueOf( toLong() << memory ); }
public Memory bitShl(double memory) { return LongMemory.valueOf( toLong() << (long) memory ); }
public Memory bitShl(boolean memory) { return LongMemory.valueOf( toLong() << (memory ? 1 : 0) ); }
public Memory bitShl(String memory) { return LongMemory.valueOf( toLong() << StringMemory.toNumeric(memory).toLong() ); }

// ASSIGN — the base implementation rejects assignment outright.
// NOTE(review): presumably mutable/reference memory subclasses override these; confirm.
public Memory assign(Memory memory){ throw new RuntimeException("Invalid assign `memory` to " + type); }
public Memory assign(long value){ throw new RuntimeException("Invalid assign `long` to " + type); }
public Memory assign(double value) { throw new RuntimeException("Invalid assign `double` to " + type); }
public Memory assign(boolean value) { throw new RuntimeException("Invalid assign `bool` to " + type); }
public Memory assign(String value){ throw new RuntimeException("Invalid assign `string` to " + type); }
public Memory assignRef(Memory memory){ throw new RuntimeException("Invalid assignRef `memory` to " + type); }

// "Right" variants treat this object as the right-hand operand of the assignment.
public Memory assignRight(Memory memory) { return memory.assign(this); }
public Memory assignRefRight(Memory memory) { return memory.assignRef(this); }

// Assigning a raw object wraps it in an ObjectMemory first.
public Memory assign(IObject object) { return this.assign(new ObjectMemory(object)); }

// Compound assignment: concatenation.
public Memory assignConcat(Memory memory) { return assign(concat(memory)); }
public Memory assignConcat(long memory) { return assign(concat(memory)); }
public Memory assignConcat(double memory) { return assign(concat(memory)); }
public Memory assignConcat(boolean memory) { return assign(concat(memory)); }
public Memory assignConcat(String memory) { return assign(concat(memory)); }
public Memory assignConcatRight(Memory memory) { return memory.assign(memory.concat(this)); }

// Compound assignment: addition.
public Memory assignPlus(Memory memory) { return assign(plus(memory)); }
public Memory assignPlus(long memory) { return assign(plus(memory)); }
public Memory assignPlus(double memory) { return assign(plus(memory)); }
public Memory assignPlus(boolean memory) { return assign(plus(memory)); }
public Memory assignPlus(String memory) { return assign(plus(memory)); }
public Memory assignPlusRight(Memory memory) { return memory.assign(memory.plus(this)); }

// Compound assignment: subtraction.
public Memory assignMinus(Memory memory) { return assign(minus(memory)); }
public Memory assignMinus(long memory) { return assign(minus(memory)); }
public Memory assignMinus(double memory) { return assign(minus(memory)); }
public Memory assignMinus(boolean memory) { return assign(minus(memory)); }
public Memory assignMinus(String memory) { return assign(minus(memory)); }
public Memory assignMinusRight(Memory memory) { return memory.assign(memory.minus(this)); }

// Compound assignment: multiplication.
public Memory assignMul(Memory memory) { return assign(mul(memory)); }
public Memory assignMul(long memory) { return assign(mul(memory)); }
public Memory assignMul(double memory) { return assign(mul(memory)); }
public Memory assignMul(boolean memory) { return assign(mul(memory)); }
public Memory assignMul(String memory) { return assign(mul(memory)); }
public Memory assignMulRight(Memory memory) { return memory.assign(memory.mul(this)); }

// Compound assignment: exponentiation.
public Memory assignPow(Memory memory) { return assign(pow(memory)); }
public Memory assignPow(long memory) { return assign(pow(memory)); }
public Memory assignPow(double memory) { return assign(pow(memory)); }
public Memory assignPow(boolean memory) { return assign(pow(memory)); }
public Memory assignPow(String memory) { return assign(pow(memory)); }
public Memory assignPowRight(Memory memory) { return memory.assign(memory.pow(this)); }

// Compound assignment: division.
public Memory assignDiv(Memory memory) { return assign(div(memory)); }
public Memory assignDiv(long memory) { return assign(div(memory)); }
public Memory assignDiv(double memory) { return assign(div(memory)); }
public Memory assignDiv(boolean memory) { return assign(div(memory)); }
public Memory assignDiv(String memory) { return assign(div(memory)); }
public Memory assignDivRight(Memory memory) { return memory.assign(memory.div(this)); }

// Compound assignment: modulo.
public Memory assignMod(Memory memory) { return assign(mod(memory)); }
public Memory assignMod(long memory) { return assign(mod(memory)); }
public Memory assignMod(double memory) { return assign(mod(memory)); }
public Memory assignMod(boolean memory) { return assign(mod(memory)); }
public Memory assignMod(String memory) { return assign(mod(memory)); }
public Memory assignModRight(Memory memory) { return memory.assign(memory.mod(this)); }

// Compound assignment: shift right.
public Memory assignBitShr(Memory memory) { return assign(bitShr(memory)); }
public Memory assignBitShr(long memory) { return assign(bitShr(memory)); }
public Memory assignBitShr(double memory) { return assign(bitShr(memory)); }
public Memory assignBitShr(boolean memory) { return assign(bitShr(memory)); }
public Memory assignBitShr(String memory) { return assign(bitShr(memory)); }
public Memory assignBitShrRight(Memory memory) { return memory.assign(memory.bitShr(this)); }

// Compound assignment: shift left.
public Memory assignBitShl(Memory memory) { return assign(bitShl(memory)); }
public Memory assignBitShl(long memory) { return assign(bitShl(memory)); }
public Memory assignBitShl(double memory) { return assign(bitShl(memory)); }
public Memory assignBitShl(boolean memory) { return assign(bitShl(memory)); }
public Memory assignBitShl(String memory) { return assign(bitShl(memory)); }
public Memory assignBitShlRight(Memory memory) { return memory.assign(memory.bitShl(this)); }

// Compound assignment: bitwise AND.
public Memory assignBitAnd(Memory memory) { return assign(bitAnd(memory)); }
public Memory assignBitAnd(long memory) { return assign(bitAnd(memory)); }
public Memory assignBitAnd(double memory) { return assign(bitAnd(memory)); }
public Memory assignBitAnd(boolean memory) { return assign(bitAnd(memory)); }
public Memory assignBitAnd(String memory) { return assign(bitAnd(memory)); }
public Memory assignBitAndRight(Memory memory) { return memory.assign(memory.bitAnd(this)); }

// Compound assignment: bitwise OR.
public Memory assignBitOr(Memory memory) { return assign(bitOr(memory)); }
public Memory assignBitOr(long memory) { return assign(bitOr(memory)); }
public Memory assignBitOr(double memory) { return assign(bitOr(memory)); }
public Memory assignBitOr(boolean memory) { return assign(bitOr(memory)); }
public Memory assignBitOr(String memory) { return assign(bitOr(memory)); }
public Memory assignBitOrRight(Memory memory) { return memory.assign(memory.bitOr(this)); }

// Compound assignment: bitwise XOR.
public Memory assignBitXor(Memory memory) { return assign(bitXor(memory)); }
public Memory assignBitXor(long memory) { return assign(bitXor(memory)); }
public Memory assignBitXor(double memory) { return assign(bitXor(memory)); }
public Memory assignBitXor(boolean memory) { return assign(bitXor(memory)); }
public Memory assignBitXor(String memory) { return assign(bitXor(memory)); }
public Memory assignBitXorRight(Memory memory) { return memory.assign(memory.bitXor(this)); }

// Lifecycle hooks: no-ops for immutable memory.
public void unset(){ }
public void manualUnset(Environment env) { }

// Immutable memory is its own immutable view.
public Memory toImmutable(){ return this; }
public Memory toImmutable(Environment env, TraceInfo trace){ return toImmutable(); }

// Unchecked cast is the caller's contract: clazz must match the runtime type.
@SuppressWarnings("unchecked")
public <T extends Memory> T toValue(Class<T> clazz){ return (T) this; }
public Memory toValue(){ return this; }
public boolean isImmutable(){ return true; }

/********** RIGHT ******************/
// xRight(v) computes `v x this` — this object is the RIGHT operand.

public Memory minusRight(Memory value){ return value.minus(this); }
public Memory minusRight(long value){ return LongMemory.valueOf(value).minus(this); }
public Memory minusRight(double value){ return new DoubleMemory(value).minus(this); }
public Memory minusRight(boolean value){ return LongMemory.valueOf((value ? 1 : 0)).minus(this); }
public Memory minusRight(String value){ return StringMemory.toNumeric(value).minus(this); }

public Memory divRight(Memory value){ return value.div(this); }
public Memory divRight(long value){ return LongMemory.valueOf(value).div(this); }
public Memory divRight(double value){ return new DoubleMemory(value).div(this); }
// NOTE(review): false/this short-circuits to CONST_INT_0 without consulting this
// memory at all (so 0/0 never reaches div's own zero handling) — confirm intended.
public Memory divRight(boolean value){ if(!value) return CONST_INT_0; else return TRUE.div(this); }
public Memory divRight(String value){ return StringMemory.toNumeric(value).div(this); }

public Memory modRight(Memory value){ return value.mod(this); }
public Memory modRight(long value){ return LongMemory.valueOf(value).mod(this); }
public Memory modRight(double value){ return new DoubleMemory(value).mod(this); }
public Memory modRight(boolean value){ return LongMemory.valueOf((value ? 1 : 0)).mod(this); }
public Memory modRight(String value){ return StringMemory.toNumeric(value).mod(this); }

public Memory powRight(Memory value){ return value.pow(this); }
public Memory powRight(long value){ return LongMemory.valueOf(value).pow(this); }
public Memory powRight(double value){ return new DoubleMemory(value).pow(this); }
public Memory powRight(boolean value){ return LongMemory.valueOf((value ? 1 : 0)).pow(this); }
public Memory powRight(String value){ return StringMemory.toNumeric(value).pow(this); }

public String concatRight(Memory value) { return value.concat(this); }
public String concatRight(long value) { return value + toString(); }
public String concatRight(double value) { return value + toString(); }
public String concatRight(boolean value) { return boolToString(value) + toString(); }
public String concatRight(String value) { return value + toString(); }

// smallerRight(v) == (v < this) == this.greater(v).
// FIX: the primitive overloads below previously delegated to the non-strict
// operator (greaterEq/greater/smallerEq/smaller swapped with their strict
// counterparts), disagreeing with the Memory-typed overload in the same group:
// value.smaller(this) is `value < this`, i.e. this.greater(value), not greaterEq.
public boolean smallerRight(Memory value) { return value.smaller(this); }
public boolean smallerRight(long value) { return this.greater(value); }
public boolean smallerRight(double value) { return this.greater(value); }
public boolean smallerRight(boolean value) { return this.greater(value); }
public boolean smallerRight(String value) { return this.greater(value); }

// smallerEqRight(v) == (v <= this) == this.greaterEq(v).
public boolean smallerEqRight(Memory value) { return value.smallerEq(this); }
public boolean smallerEqRight(long value) { return this.greaterEq(value); }
public boolean smallerEqRight(double value) { return this.greaterEq(value); }
public boolean smallerEqRight(boolean value) { return this.greaterEq(value); }
public boolean smallerEqRight(String value) { return this.greaterEq(value); }

// greaterRight(v) == (v > this) == this.smaller(v).
public boolean greaterRight(Memory value) { return value.greater(this); }
public boolean greaterRight(long value) { return this.smaller(value); }
public boolean greaterRight(double value) { return this.smaller(value); }
public boolean greaterRight(boolean value) { return this.smaller(value); }
public boolean greaterRight(String value) { return this.smaller(value); }

// greaterEqRight(v) == (v >= this) == this.smallerEq(v).
public boolean greaterEqRight(Memory value) { return value.greaterEq(this); }
public boolean greaterEqRight(long value) { return this.smallerEq(value); }
public boolean greaterEqRight(double value) { return this.smallerEq(value); }
public boolean greaterEqRight(boolean value) { return this.smallerEq(value); }
public boolean greaterEqRight(String value) { return this.smallerEq(value); }

public Memory bitShrRight(Memory value){ return value.bitShr(this); }
public Memory bitShrRight(long value){ return new LongMemory(value >> toLong()); }
public Memory bitShrRight(double value){ return new LongMemory((long)value >> toLong()); }
public Memory bitShrRight(boolean value){ return new LongMemory((value ? 1 : 0) >> toLong()); }
public Memory bitShrRight(String value){ return StringMemory.toNumeric(value).bitShr(this); }

public Memory bitShlRight(Memory value){ return value.bitShl(this); }
public Memory bitShlRight(long value){ return new LongMemory(value << toLong()); }
public Memory bitShlRight(double value){ return new LongMemory((long)value << toLong()); }
public Memory bitShlRight(boolean value){ return new LongMemory((value ? 1 : 0) << toLong()); }
public Memory bitShlRight(String value){ return StringMemory.toNumeric(value).bitShl(this); }

// Wrap this memory for argument unpacking.
public Memory unpack() {
    return new VariadicMemory(this);
}

/****************************************************************/
/** Static *****/
// Static forwarders so generated code can dispatch on the left operand.
public static Memory assignRight(Memory value, Memory memory){ return memory.assign(value); }
public static Memory assignRight(long value, Memory memory){ return memory.assign(value); }
public static Memory assignRight(double value, Memory memory){ return memory.assign(value); }
public static Memory assignRight(boolean value, Memory memory){ return memory.assign(value); }
public static Memory assignRight(String value, Memory memory){ return memory.assign(value); }

public static Memory assignRefRight(Memory value, Memory memory) { return memory.assignRef(value); }

public static Memory assignConcatRight(Memory value, Memory memory){ return memory.assignConcat(value); }
public static Memory assignConcatRight(long value, Memory memory){ return memory.assignConcat(value); }
public static Memory assignConcatRight(double value, Memory memory){ return memory.assignConcat(value); }
public static Memory assignConcatRight(boolean value, Memory memory){ return memory.assignConcat(value); }
public static Memory assignConcatRight(String value, Memory memory){ return memory.assignConcat(value); }

public static Memory assignPlusRight(Memory value, Memory memory){ return memory.assignPlus(value); }
public static Memory assignPlusRight(long value, Memory memory){ return memory.assignPlus(value); }
public static Memory assignPlusRight(double value, Memory memory){ return memory.assignPlus(value); }
public static Memory assignPlusRight(boolean value, Memory memory){ return memory.assignPlus(value); }
public static Memory assignPlusRight(String value, Memory memory){ return memory.assignPlus(value); }

public static Memory assignMinusRight(Memory value, Memory memory){ return memory.assignMinus(value); }
public static Memory assignMinusRight(long value, Memory memory){ return memory.assignMinus(value); }
public static Memory assignMinusRight(double value, Memory memory){ return memory.assignMinus(value); }
public static Memory assignMinusRight(boolean value, Memory memory){ return memory.assignMinus(value); }
public static Memory assignMinusRight(String value, Memory memory){ return memory.assignMinus(value); }

public static Memory assignMulRight(Memory value, Memory memory){ return memory.assignMul(value); }
public static Memory assignMulRight(long value, Memory memory){ return memory.assignMul(value); }
public static Memory assignMulRight(double value, Memory memory){ return memory.assignMul(value); }
public static Memory assignMulRight(boolean value, Memory memory){ return memory.assignMul(value); }
public static Memory assignMulRight(String value, Memory memory){ return memory.assignMul(value); }

public static Memory assignPowRight(Memory value, Memory memory){ return memory.assignPow(value); }
public static Memory assignPowRight(long value, Memory memory){ return memory.assignPow(value); }
public static Memory assignPowRight(double value, Memory memory){ return memory.assignPow(value); }
public static Memory assignPowRight(boolean value, Memory memory){ return memory.assignPow(value); }
public static Memory assignPowRight(String value, Memory memory){ return memory.assignPow(value); }

public static Memory assignDivRight(Memory value, Memory memory){ return memory.assignDiv(value); }
public static Memory assignDivRight(long value, Memory memory){ return memory.assignDiv(value); }
public static Memory assignDivRight(double value, Memory memory){ return memory.assignDiv(value); }
public static Memory assignDivRight(boolean value, Memory memory){ return memory.assignDiv(value); }
public static Memory assignDivRight(String value, Memory memory){ return memory.assignDiv(value); }

public static Memory assignModRight(Memory value, Memory memory){ return memory.assignMod(value); }
public static Memory assignModRight(long value, Memory memory){ return memory.assignMod(value); }
public static Memory assignModRight(double value, Memory memory){ return memory.assignMod(value); }
public static Memory assignModRight(boolean value, Memory memory){ return memory.assignMod(value); }
public static Memory assignModRight(String value, Memory memory){ return memory.assignMod(value); }

public static Memory assignBitShrRight(Memory value, Memory memory){ return memory.assignBitShr(value); }
public static Memory assignBitShrRight(long value, Memory memory){ return memory.assignBitShr(value); }
public static Memory assignBitShrRight(double value, Memory memory){ return memory.assignBitShr(value); }
public static Memory assignBitShrRight(boolean value, Memory memory){ return memory.assignBitShr(value); }
public static Memory assignBitShrRight(String value, Memory memory){ return memory.assignBitShr(value); }

public static Memory assignBitShlRight(Memory value, Memory memory){ return memory.assignBitShl(value); }
public static Memory assignBitShlRight(long value, Memory memory){ return memory.assignBitShl(value); }
public static Memory assignBitShlRight(double value, Memory memory){ return memory.assignBitShl(value); }
public static Memory assignBitShlRight(boolean value, Memory memory){ return memory.assignBitShl(value); }
public static Memory assignBitShlRight(String value, Memory memory){ return memory.assignBitShl(value); }

public static Memory assignBitAndRight(Memory value, Memory memory){ return memory.assignBitAnd(value); }
public static Memory assignBitAndRight(long value, Memory memory){ return memory.assignBitAnd(value); }
public static Memory assignBitAndRight(double value, Memory memory){ return memory.assignBitAnd(value); }
public static Memory assignBitAndRight(boolean value, Memory memory){ return memory.assignBitAnd(value); }
public static Memory assignBitAndRight(String value, Memory memory){ return memory.assignBitAnd(value); }

public static Memory assignBitOrRight(Memory value, Memory memory){ return memory.assignBitOr(value); }
public static Memory assignBitOrRight(long value, Memory memory){ return memory.assignBitOr(value); }
public static Memory assignBitOrRight(double value, Memory memory){ return memory.assignBitOr(value); }
public static Memory assignBitOrRight(boolean value, Memory memory){ return memory.assignBitOr(value); }
public static Memory assignBitOrRight(String value, Memory memory){ return memory.assignBitOr(value); }

public static Memory assignBitXorRight(Memory value, Memory memory){ return memory.assignBitXor(value); }
public static Memory assignBitXorRight(long value, Memory memory){ return memory.assignBitXor(value); }
public static Memory assignBitXorRight(double value, Memory memory){ return memory.assignBitXor(value); }
public static Memory assignBitXorRight(boolean value, Memory memory){ return memory.assignBitXor(value); }
public static Memory assignBitXorRight(String value, Memory memory){ return memory.assignBitXor(value); }

////

/**
 * Converts a memory value into a plain Java object: wrapped objects are
 * unwrapped, scalar kinds map to their Java counterparts, arrays to
 * ArrayMemory, and any other kind falls through unchanged.
 */
public static Object unwrap(Environment env, Memory memory) {
    if (memory.isObject()) {
        IObject iObject = memory.toValue(ObjectMemory.class).value;
        if (iObject instanceof BaseWrapper) {
            return ((BaseWrapper) iObject).getWrappedObject();
        } else {
            return iObject;
        }
    } else {
        switch (memory.getRealType()) {
            case BOOL:   return memory.toBoolean();
            case INT:    return memory.toLong();
            case DOUBLE: return memory.toDouble();
            case STRING: return memory.toString();
            case NULL:   return null;
            case ARRAY:  return memory.toValue(ArrayMemory.class);
        }
        // No scalar mapping — hand back the memory itself.
        return memory;
    }
}

/**
 * Wraps a plain Java object back into a memory value via the registered
 * MemoryOperation for its class; unknown classes (and null) become NULL.
 */
@SuppressWarnings("unchecked")
public static Memory wrap(Environment env, Object o) {
    if (o == null) {
        return NULL;
    }
    MemoryOperation operation = MemoryOperation.get(o.getClass(), null);
    if (operation != null) {
        return operation.unconvertNoThow(env, env.trace(), o);
    } else {
        return NULL;
    }
}

// PHP-style boolean-to-string: true -> "1", false -> "".
public static String boolToString(boolean value){
    return value ? "1" : "";
}

abstract public byte[] getBinaryBytes();

// Iteration support: non-iterable kinds return null.
public ForeachIterator getNewIterator(Environment env, boolean getReferences, boolean getKeyReferences){
    return null;
}

final public ForeachIterator getNewIterator(Environment env) {
    return getNewIterator(env, false, false);
}

// instanceof checks: false by default; object memories override.
public boolean instanceOf(String className, String lowerClassName){
    return false;
}

public boolean instanceOf(Class<? extends IObject> clazz){
    return instanceOf(ReflectionUtils.getClassName(clazz));
}

public boolean instanceOf(String name){
    return false;
}

/** Natural ordering derived from the greater/smaller operators. */
@Override
public int compareTo(Memory o) {
    if (greater(o)) return 1;
    if (smaller(o)) return -1;
    return 0;
}
}
/** * The MIT License (MIT) * * Copyright (c) 2011-2016 Incapture Technologies LLC * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/
package rapture.kernel;

import org.apache.commons.lang3.text.WordUtils;
import org.apache.log4j.Logger;

import rapture.common.*;
import rapture.common.api.UserApi;
import rapture.common.exception.ExceptionToString;
import rapture.common.exception.RaptureException;
import rapture.common.exception.RaptureExceptionFactory;
import rapture.common.model.RaptureUser;
import rapture.common.model.RaptureUserStorage;
import rapture.common.version.ApiVersion;
import rapture.repo.Repository;
import rapture.server.ServerApiVersion;
import rapture.util.IDGenerator;

import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.HttpURLConnection;
import java.util.ArrayList;
import java.util.List;

import static rapture.common.Scheme.DOCUMENT;

/**
 * Kernel-side implementation of {@link UserApi}: self-service operations for the
 * calling user (password/email/description changes, preferences stored in the
 * settings repository, API-key management, and entitlement checks).
 */
public class UserApiImpl extends KernelBase implements UserApi {
    private static Logger log = Logger.getLogger(UserApiImpl.class);

    public static final String AUTOID = "id";

    public UserApiImpl(Kernel raptureKernel) {
        super(raptureKernel);
    }

    /**
     * Changes the calling user's password after verifying the old password hash.
     *
     * @throws RaptureException HTTP 401 if the old hash does not match,
     *         HTTP 400 if the user record cannot be found.
     */
    @Override
    public RaptureUser changeMyPassword(CallingContext context, String oldHashPassword, String newHashPassword) {
        RaptureUser usr = Kernel.getAdmin().getTrusted().getUser(context, context.getUser());
        if (usr != null) {
            // NOTE(review): String.equals is not a constant-time comparison; if the
            // timing of this check is attacker-observable, consider a constant-time
            // comparison of the hashes — confirm threat model.
            if (usr.getHashPassword().equals(oldHashPassword)) {
                usr.setHashPassword(newHashPassword);
                RaptureUserStorage.add(usr, context.getUser(), "Updated my password");
                return usr;
            } else {
                throw RaptureExceptionFactory.create(HttpURLConnection.HTTP_UNAUTHORIZED, "Bad Password");
            }
        } else {
            throw RaptureExceptionFactory.create(HttpURLConnection.HTTP_BAD_REQUEST, "Could not find user record");
        }
    }

    /** Returns the user record for the calling context, or throws HTTP 400. */
    @Override
    public RaptureUser getWhoAmI(CallingContext context) {
        RaptureUser usr = Kernel.getAdmin().getTrusted().getUser(context, context.getUser());
        if (usr != null) {
            return usr;
        } else {
            throw RaptureExceptionFactory.create(HttpURLConnection.HTTP_BAD_REQUEST, "Could not find this user");
        }
    }

    /** Updates the calling user's email address and persists the record. */
    @Override
    public RaptureUser changeMyEmail(CallingContext context, String newAddress) {
        RaptureUser usr = Kernel.getAdmin().getTrusted().getUser(context, context.getUser());
        if (usr != null) {
            usr.setEmailAddress(newAddress);
            RaptureUserStorage.add(usr, context.getUser(), "Updated my email");
            return usr;
        } else {
            throw RaptureExceptionFactory.create(HttpURLConnection.HTTP_BAD_REQUEST, "Could not find this user");
        }
    }

    /** Updates the calling user's description and persists the record. */
    @Override
    public RaptureUser updateMyDescription(CallingContext context, String description) {
        RaptureUser usr = Kernel.getAdmin().getTrusted().getUser(context, context.getUser());
        if (usr != null) {
            usr.setDescription(description);
            RaptureUserStorage.add(usr, context.getUser(), "Updated my description");
            return usr;
        } else {
            throw RaptureExceptionFactory.create(HttpURLConnection.HTTP_BAD_REQUEST, "Could not find user record");
        }
    }

    @Override
    public void logoutUser(CallingContext context) {
        // Remove cookie, or actually remove session
        CallingContextStorage.deleteByFields(context.getContext(), context.getUser(), "Logout user");
    }

    // Preferences live in the settings repo under preference/[user]/[category]/[name].
    private String getPreferenceKey(String user, String category, String name) {
        return String.format("preference/%s/%s/%s", user, category, name);
    }

    private String getPreferenceCatPrefix(String user) {
        return String.format("preference/%s", user);
    }

    private String getPreferenceCatPrefix(String user, String category) {
        return String.format("preference/%s/%s", user, category);
    }

    @Override
    public void storePreference(CallingContext context, String category, String name, String content) {
        // Preferences are stored in the settings repo, in
        // preference/[user]/[category]/[name]
        getSettingsRepo().addDocument(getPreferenceKey(context.getUser(), category, name), content, context.getUser(),
                "Store preference", false);
    }

    @Override
    public String getPreference(CallingContext context, String category, String name) {
        return getSettingsRepo().getDocument(getPreferenceKey(context.getUser(), category, name));
    }

    @Override
    public void removePreference(CallingContext context, String category, String name) {
        getSettingsRepo().removeDocument(getPreferenceKey(context.getUser(), category, name), context.getUser(),
                "Removed preference");
    }

    /** Lists the calling user's preference categories (folders under the user's prefix). */
    @Override
    public List<String> getPreferenceCategories(CallingContext context) {
        List<RaptureFolderInfo> categories = getSettingsRepo().getChildren(getPreferenceCatPrefix(context.getUser()));
        List<String> ret = new ArrayList<String>(categories == null ? 0 : categories.size());
        if (categories == null) {
            return ret;
        }
        for (RaptureFolderInfo cat : categories) {
            if (cat.isFolder()) {
                ret.add(cat.getName());
            }
        }
        return ret;
    }

    /** Lists preference names (non-folder children) within one category. */
    @Override
    public List<String> getPreferencesInCategory(CallingContext context, String category) {
        List<RaptureFolderInfo> preferences = getSettingsRepo().getChildren(getPreferenceCatPrefix(context.getUser(), category));
        // FIX: guard against a null child list, consistent with getPreferenceCategories();
        // previously this dereferenced preferences.size() and could throw NPE.
        if (preferences == null) {
            return new ArrayList<String>();
        }
        List<String> ret = new ArrayList<String>(preferences.size());
        for (RaptureFolderInfo pref : preferences) {
            if (!pref.isFolder()) {
                ret.add(pref.getName());
            }
        }
        return ret;
    }

    @Override
    public ApiVersion getServerApiVersion(final CallingContext context) {
        return ServerApiVersion.getApiVersion();
    }

    /**
     * Resolves an API call name (either a raw entitlement path starting with "/",
     * an "api.method" pair, or an "sdk.api.method" triple resolved reflectively)
     * to an entitlement string, substitutes the dynamic part from callParam, and
     * checks whether the calling context is entitled.
     */
    @Override
    public Boolean isPermitted(CallingContext context, String apiCallOrEntitlement, String callParam) {
        String entitlementString = null;
        if (apiCallOrEntitlement.startsWith("/")) {
            entitlementString = apiCallOrEntitlement;
        } else {
            String[] elements = apiCallOrEntitlement.split("\\.");
            if (elements.length == 2) {
                String key = WordUtils.capitalize(elements[0] + "_" + elements[1]);
                try {
                    entitlementString = EntitlementSet.valueOf(key).getPath();
                } catch (Exception e) {
                    log.warn("Method " + apiCallOrEntitlement + " unknown");
                }
            } else if (elements.length == 3) {
                // If not in rapture.common then we need to use reflection here instead
                // Expect <sdkname>.<api>.<method>
                // EntitlementSet is rapture.<sdkname>.server.EntitlementSet
                String entitlementSetClassName = "rapture." + elements[0] + ".server.EntitlementSet";
                String key = WordUtils.capitalize(elements[1] + "_" + elements[2]);
                try {
                    Class<?> entitlementSetClass = Class.forName(entitlementSetClassName);
                    try {
                        Method entitlementSetValueOf = entitlementSetClass.getMethod("valueOf", String.class);
                        Object entitlement = entitlementSetValueOf.invoke(entitlementSetClass, key);
                        Method entitlementGetPath = entitlement.getClass().getMethod("getPath", (Class<?>[]) null);
                        Object entitlementStringObject = entitlementGetPath.invoke(entitlement, (Object[]) null);
                        if (entitlementStringObject != null) entitlementString = entitlementStringObject.toString();
                    } catch (NoSuchMethodException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
                        log.warn("Method " + apiCallOrEntitlement + " unknown " + e.getMessage());
                        log.trace(ExceptionToString.format(e));
                    }
                } catch (ClassNotFoundException | SecurityException e) {
                    log.warn("Cannot find Class " + entitlementSetClassName);
                }
            } else {
                log.warn("Illegal method name format " + apiCallOrEntitlement);
            }
        }

        if (entitlementString == null) return false;

        int i1 = entitlementString.indexOf("$");
        if (i1 > 0) {
            if (callParam == null) callParam = "";
            // Skip the scheme separator and any leading slashes in callParam.
            // FIX: the original `while (callParam.charAt(++i2) == '/');` had no bound
            // check and threw StringIndexOutOfBoundsException when callParam was empty
            // or consisted only of slashes after the colon.
            int i2 = callParam.indexOf(":");
            while (++i2 < callParam.length() && callParam.charAt(i2) == '/') {
                // advance past leading '/' characters
            }
            entitlementString = entitlementString.substring(0, i1) + callParam.substring(i2);
        }
        try {
            Kernel.getKernel().validateContext(context, entitlementString, null);
            return true;
        } catch (RaptureException e) {
            // Expected if not entitled
            log.trace(ExceptionToString.format(e));
            return false;
        }
    }

    /** Creates a new API key for the given app, records it on the user, and returns it. */
    @Override
    public String addApiKey(CallingContext context, String appKey) {
        String apiKey = IDGenerator.getUUID();
        APIKeyDefinition def = new APIKeyDefinition();
        def.setAppKey(appKey);
        def.setUserId(context.getUser());
        def.setApiKey(apiKey);
        String newPath = def.getAppKey() + "/" + apiKey;
        RaptureUser user = RaptureUserStorage.readByFields(context.getUser());
        user.getApiKeys().add(newPath);
        RaptureUserStorage.add(user, context.getUser(), "Added api key");
        APIKeyDefinitionStorage.add(def, context.getUser(), "New API Key");
        return apiKey;
    }

    @Override
    public List<String> getApiKeyPairs(CallingContext context) {
        RaptureUser user = RaptureUserStorage.readByFields(context.getUser());
        return user.getApiKeys();
    }

    /** Removes an "appKey/apiKey" pair from the user and deletes its definition. */
    @Override
    public void revokeApiKey(CallingContext context, String appKey, String apiKey) {
        String testKey = appKey + "/" + apiKey;
        RaptureUser user = RaptureUserStorage.readByFields(context.getUser());
        if (user.getApiKeys().contains(testKey)) {
            user.getApiKeys().remove(testKey);
            RaptureUserStorage.add(user, context.getUser(), "Revoked api key");
            APIKeyDefinitionStorage.deleteByFields(appKey, apiKey, context.getUser(), "Revoke api key");
        } else {
            throw RaptureExceptionFactory.create(HttpURLConnection.HTTP_BAD_REQUEST, "No api key for this user");
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package java.lang;

import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.ObjectStreamField;
import java.io.Serializable;

/**
 * A modifiable {@link CharSequence sequence of characters} for use in creating
 * strings, where all accesses are synchronized. This class has mostly been replaced
 * by {@link StringBuilder} because this synchronization is rarely useful. This
 * class is mainly used to interact with legacy APIs that expose it.
 *
 * <p>For particularly complex string-building needs, consider {@link java.util.Formatter}.
 *
 * <p>The majority of the modification methods on this class return {@code
 * this} so that method calls can be chained together. For example:
 * {@code new StringBuffer("a").append("b").append("c").toString()}.
 *
 * @see CharSequence
 * @see Appendable
 * @see StringBuilder
 * @see String
 * @see String#format
 * @since 1.0
 */
public final class StringBuffer extends AbstractStringBuilder implements
        Appendable, Serializable, CharSequence {

    private static final long serialVersionUID = 3388685877147921107L;

    // Pins the serialized form to the historical {count, shared, value} field
    // triple so streams written by older VMs stay readable; the actual storage
    // lives in AbstractStringBuilder, hence the explicit field list here.
    private static final ObjectStreamField serialPersistentFields[] = {
        new ObjectStreamField("count", int.class),
        new ObjectStreamField("shared", boolean.class),
        new ObjectStreamField("value", char[].class),
    };

    /**
     * Constructs a new StringBuffer using the default capacity which is 16.
     */
    public StringBuffer() {
        super();
    }

    /**
     * Constructs a new StringBuffer using the specified capacity.
     *
     * @param capacity
     *            the initial capacity.
     */
    public StringBuffer(int capacity) {
        super(capacity);
    }

    /**
     * Constructs a new StringBuffer containing the characters in the specified
     * string. The capacity of the new buffer will be the length of the
     * {@code String} plus the default capacity.
     *
     * @param string
     *            the string content with which to initialize the new instance.
     * @throws NullPointerException
     *            if {@code string} is {@code null}.
     */
    public StringBuffer(String string) {
        super(string);
    }

    /**
     * Constructs a StringBuffer and initializes it with the content from the
     * specified {@code CharSequence}. The capacity of the new buffer will be
     * the length of the {@code CharSequence} plus the default capacity.
     *
     * @param cs
     *            the content to initialize the instance.
     * @throws NullPointerException
     *            if {@code cs} is {@code null}.
     * @since 1.5
     */
    public StringBuffer(CharSequence cs) {
        // Snapshot the sequence up front; a mutable CharSequence cannot change
        // underneath us after construction.
        super(cs.toString());
    }

    /**
     * Adds the string representation of the specified boolean to the end of
     * this StringBuffer.
     * <p>
     * If the argument is {@code true} the string {@code "true"} is appended,
     * otherwise the string {@code "false"} is appended.
     *
     * @param b
     *            the boolean to append.
     * @return this StringBuffer.
     * @see String#valueOf(boolean)
     */
    public StringBuffer append(boolean b) {
        // Not synchronized itself: delegates to the synchronized append(String).
        return append(b ? "true" : "false");
    }

    /**
     * Adds the specified character to the end of this buffer.
     *
     * @param ch
     *            the character to append.
     * @return this StringBuffer.
     * @see String#valueOf(char)
     */
    public synchronized StringBuffer append(char ch) {
        append0(ch);
        return this;
    }

    /**
     * Adds the string representation of the specified double to the end of this
     * StringBuffer.
     *
     * @param d
     *            the double to append.
     * @return this StringBuffer.
     * @see String#valueOf(double)
     */
    public StringBuffer append(double d) {
        // NOTE(review): not synchronized here; presumably RealToString appends
        // via the synchronized append(String)/append(char[]) methods — confirm.
        RealToString.getInstance().appendDouble(this, d);
        return this;
    }

    /**
     * Adds the string representation of the specified float to the end of this
     * StringBuffer.
     *
     * @param f
     *            the float to append.
     * @return this StringBuffer.
     * @see String#valueOf(float)
     */
    public StringBuffer append(float f) {
        RealToString.getInstance().appendFloat(this, f);
        return this;
    }

    /**
     * Adds the string representation of the specified integer to the end of
     * this StringBuffer.
     *
     * @param i
     *            the integer to append.
     * @return this StringBuffer.
     * @see String#valueOf(int)
     */
    public StringBuffer append(int i) {
        IntegralToString.appendInt(this, i);
        return this;
    }

    /**
     * Adds the string representation of the specified long to the end of this
     * StringBuffer.
     *
     * @param l
     *            the long to append.
     * @return this StringBuffer.
     * @see String#valueOf(long)
     */
    public StringBuffer append(long l) {
        IntegralToString.appendLong(this, l);
        return this;
    }

    /**
     * Adds the string representation of the specified object to the end of this
     * StringBuffer.
     * <p>
     * If the specified object is {@code null} the string {@code "null"} is
     * appended, otherwise the objects {@code toString} is used to get its
     * string representation.
     *
     * @param obj
     *            the object to append (may be null).
     * @return this StringBuffer.
     * @see String#valueOf(Object)
     */
    public synchronized StringBuffer append(Object obj) {
        if (obj == null) {
            appendNull();
        } else {
            append0(obj.toString());
        }
        return this;
    }

    /**
     * Adds the specified string to the end of this buffer.
     * <p>
     * If the specified string is {@code null} the string {@code "null"} is
     * appended, otherwise the contents of the specified string is appended.
     *
     * @param string
     *            the string to append (may be null).
     * @return this StringBuffer.
     */
    public synchronized StringBuffer append(String string) {
        append0(string);
        return this;
    }

    /**
     * Adds the specified StringBuffer to the end of this buffer.
     * <p>
     * If the specified StringBuffer is {@code null} the string {@code "null"}
     * is appended, otherwise the contents of the specified StringBuffer is
     * appended.
     *
     * @param sb
     *            the StringBuffer to append (may be null).
     * @return this StringBuffer.
     *
     * @since 1.4
     */
    public synchronized StringBuffer append(StringBuffer sb) {
        if (sb == null) {
            appendNull();
        } else {
            // Lock the source buffer too so we copy a consistent snapshot of
            // its backing array. Locks are taken this-then-sb; two threads
            // appending each buffer to the other can in principle deadlock
            // (standard JDK behavior for this method).
            synchronized (sb) {
                append0(sb.getValue(), 0, sb.length());
            }
        }
        return this;
    }

    /**
     * Adds the character array to the end of this buffer.
     *
     * @param chars
     *            the character array to append.
     * @return this StringBuffer.
     * @throws NullPointerException
     *            if {@code chars} is {@code null}.
     */
    public synchronized StringBuffer append(char[] chars) {
        append0(chars);
        return this;
    }

    /**
     * Adds the specified sequence of characters to the end of this buffer.
     *
     * @param chars
     *            the character array to append.
     * @param start
     *            the starting offset.
     * @param length
     *            the number of characters.
     * @return this StringBuffer.
     * @throws ArrayIndexOutOfBoundsException
     *             if {@code length < 0} , {@code start < 0} or {@code start +
     *             length > chars.length}.
     * @throws NullPointerException
     *            if {@code chars} is {@code null}.
     */
    public synchronized StringBuffer append(char[] chars, int start, int length) {
        append0(chars, start, length);
        return this;
    }

    /**
     * Appends the specified CharSequence to this buffer.
     * <p>
     * If the specified CharSequence is {@code null} the string {@code "null"}
     * is appended, otherwise the contents of the specified CharSequence is
     * appended.
     *
     * @param s
     *            the CharSequence to append.
     * @return this StringBuffer.
     * @since 1.5
     */
    public synchronized StringBuffer append(CharSequence s) {
        if (s == null) {
            appendNull();
        } else {
            append0(s, 0, s.length());
        }
        return this;
    }

    /**
     * Appends the specified subsequence of the CharSequence to this buffer.
     * <p>
     * If the specified CharSequence is {@code null}, then the string {@code
     * "null"} is used to extract a subsequence.
     *
     * @param s
     *            the CharSequence to append.
     * @param start
     *            the inclusive start index.
     * @param end
     *            the exclusive end index.
     * @return this StringBuffer.
     * @throws IndexOutOfBoundsException
     *             if {@code start} or {@code end} are negative, {@code start}
     *             is greater than {@code end} or {@code end} is greater than
     *             the length of {@code s}.
     * @since 1.5
     */
    public synchronized StringBuffer append(CharSequence s, int start, int end) {
        append0(s, start, end);
        return this;
    }

    /**
     * Appends the string representation of the specified Unicode code point to
     * the end of this buffer.
     * <p>
     * The code point is converted to a {@code char[]} as defined by
     * {@link Character#toChars(int)}.
     *
     * @param codePoint
     *            the Unicode code point to encode and append.
     * @return this StringBuffer.
     * @see Character#toChars(int)
     * @since 1.5
     */
    public StringBuffer appendCodePoint(int codePoint) {
        // Supplementary code points become a 2-char surrogate pair; delegates
        // to the synchronized append(char[]).
        return append(Character.toChars(codePoint));
    }

    @Override
    public synchronized char charAt(int index) {
        return super.charAt(index);
    }

    @Override
    public synchronized int codePointAt(int index) {
        return super.codePointAt(index);
    }

    @Override
    public synchronized int codePointBefore(int index) {
        return super.codePointBefore(index);
    }

    @Override
    public synchronized int codePointCount(int beginIndex, int endIndex) {
        return super.codePointCount(beginIndex, endIndex);
    }

    /**
     * Deletes a range of characters.
     *
     * @param start
     *            the offset of the first character.
     * @param end
     *            the offset one past the last character.
     * @return this StringBuffer.
     * @throws StringIndexOutOfBoundsException
     *             if {@code start < 0}, {@code start > end} or {@code end >
     *             length()}.
     */
    public synchronized StringBuffer delete(int start, int end) {
        delete0(start, end);
        return this;
    }

    /**
     * Deletes the character at the specified offset.
     *
     * @param location
     *            the offset of the character to delete.
     * @return this StringBuffer.
     * @throws StringIndexOutOfBoundsException
     *             if {@code location < 0} or {@code location >= length()}
     */
    public synchronized StringBuffer deleteCharAt(int location) {
        deleteCharAt0(location);
        return this;
    }

    @Override
    public synchronized void ensureCapacity(int min) {
        super.ensureCapacity(min);
    }

    /**
     * Copies the requested sequence of characters to the {@code char[]} passed
     * starting at {@code idx}.
     *
     * @param start
     *            the starting offset of characters to copy.
     * @param end
     *            the ending offset of characters to copy.
     * @param buffer
     *            the destination character array.
     * @param idx
     *            the starting offset in the character array.
     * @throws IndexOutOfBoundsException
     *             if {@code start < 0}, {@code end > length()}, {@code start >
     *             end}, {@code index < 0}, {@code end - start > buffer.length -
     *             index}
     */
    @Override
    public synchronized void getChars(int start, int end, char[] buffer, int idx) {
        super.getChars(start, end, buffer, idx);
    }

    @Override
    public synchronized int indexOf(String subString, int start) {
        return super.indexOf(subString, start);
    }

    /**
     * Inserts the character into this buffer at the specified offset.
     *
     * @param index
     *            the index at which to insert.
     * @param ch
     *            the character to insert.
     * @return this buffer.
     * @throws ArrayIndexOutOfBoundsException
     *             if {@code index < 0} or {@code index > length()}.
     */
    public synchronized StringBuffer insert(int index, char ch) {
        insert0(index, ch);
        return this;
    }

    /**
     * Inserts the string representation of the specified boolean into this
     * buffer at the specified offset.
     *
     * @param index
     *            the index at which to insert.
     * @param b
     *            the boolean to insert.
     * @return this buffer.
     * @throws StringIndexOutOfBoundsException
     *             if {@code index < 0} or {@code index > length()}.
     */
    public StringBuffer insert(int index, boolean b) {
        // Unsynchronized primitive overloads all funnel into the synchronized
        // insert(int, String).
        return insert(index, b ? "true" : "false");
    }

    /**
     * Inserts the string representation of the specified integer into this
     * buffer at the specified offset.
     *
     * @param index
     *            the index at which to insert.
     * @param i
     *            the integer to insert.
     * @return this buffer.
     * @throws StringIndexOutOfBoundsException
     *             if {@code index < 0} or {@code index > length()}.
     */
    public StringBuffer insert(int index, int i) {
        return insert(index, Integer.toString(i));
    }

    /**
     * Inserts the string representation of the specified long into this buffer
     * at the specified offset.
     *
     * @param index
     *            the index at which to insert.
     * @param l
     *            the long to insert.
     * @return this buffer.
     * @throws StringIndexOutOfBoundsException
     *             if {@code index < 0} or {@code index > length()}.
     */
    public StringBuffer insert(int index, long l) {
        return insert(index, Long.toString(l));
    }

    /**
     * Inserts the string representation of the specified into this buffer
     * double at the specified offset.
     *
     * @param index
     *            the index at which to insert.
     * @param d
     *            the double to insert.
     * @return this buffer.
     * @throws StringIndexOutOfBoundsException
     *             if {@code index < 0} or {@code index > length()}.
     */
    public StringBuffer insert(int index, double d) {
        return insert(index, Double.toString(d));
    }

    /**
     * Inserts the string representation of the specified float into this buffer
     * at the specified offset.
     *
     * @param index
     *            the index at which to insert.
     * @param f
     *            the float to insert.
     * @return this buffer.
     * @throws StringIndexOutOfBoundsException
     *             if {@code index < 0} or {@code index > length()}.
     */
    public StringBuffer insert(int index, float f) {
        return insert(index, Float.toString(f));
    }

    /**
     * Inserts the string representation of the specified object into this
     * buffer at the specified offset.
     * <p>
     * If the specified object is {@code null}, the string {@code "null"} is
     * inserted, otherwise the objects {@code toString} method is used to get
     * its string representation.
     *
     * @param index
     *            the index at which to insert.
     * @param obj
     *            the object to insert (may be null).
     * @return this buffer.
     * @throws StringIndexOutOfBoundsException
     *             if {@code index < 0} or {@code index > length()}.
     */
    public StringBuffer insert(int index, Object obj) {
        return insert(index, obj == null ? "null" : obj.toString());
    }

    /**
     * Inserts the string into this buffer at the specified offset.
     * <p>
     * If the specified string is {@code null}, the string {@code "null"} is
     * inserted, otherwise the contents of the string is inserted.
     *
     * @param index
     *            the index at which to insert.
     * @param string
     *            the string to insert (may be null).
     * @return this buffer.
     * @throws StringIndexOutOfBoundsException
     *             if {@code index < 0} or {@code index > length()}.
     */
    public synchronized StringBuffer insert(int index, String string) {
        insert0(index, string);
        return this;
    }

    /**
     * Inserts the character array into this buffer at the specified offset.
     *
     * @param index
     *            the index at which to insert.
     * @param chars
     *            the character array to insert.
     * @return this buffer.
     * @throws StringIndexOutOfBoundsException
     *             if {@code index < 0} or {@code index > length()}.
     * @throws NullPointerException
     *            if {@code chars} is {@code null}.
     */
    public synchronized StringBuffer insert(int index, char[] chars) {
        insert0(index, chars);
        return this;
    }

    /**
     * Inserts the specified subsequence of characters into this buffer at the
     * specified index.
     *
     * @param index
     *            the index at which to insert.
     * @param chars
     *            the character array to insert.
     * @param start
     *            the starting offset.
     * @param length
     *            the number of characters.
     * @return this buffer.
     * @throws NullPointerException
     *             if {@code chars} is {@code null}.
     * @throws StringIndexOutOfBoundsException
     *             if {@code length < 0}, {@code start < 0}, {@code start +
     *             length > chars.length}, {@code index < 0} or {@code index >
     *             length()}
     */
    public synchronized StringBuffer insert(int index, char[] chars, int start,
            int length) {
        insert0(index, chars, start, length);
        return this;
    }

    /**
     * Inserts the specified CharSequence into this buffer at the specified
     * index.
     * <p>
     * If the specified CharSequence is {@code null}, the string {@code "null"}
     * is inserted, otherwise the contents of the CharSequence.
     *
     * @param index
     *            The index at which to insert.
     * @param s
     *            The char sequence to insert.
     * @return this buffer.
     * @throws IndexOutOfBoundsException
     *             if {@code index < 0} or {@code index > length()}.
     * @since 1.5
     */
    public synchronized StringBuffer insert(int index, CharSequence s) {
        insert0(index, s == null ? "null" : s.toString());
        return this;
    }

    /**
     * Inserts the specified subsequence into this buffer at the specified
     * index.
     * <p>
     * If the specified CharSequence is {@code null}, the string {@code "null"}
     * is inserted, otherwise the contents of the CharSequence.
     *
     * @param index
     *            The index at which to insert.
     * @param s
     *            The char sequence to insert.
     * @param start
     *            The inclusive start index in the char sequence.
     * @param end
     *            The exclusive end index in the char sequence.
     * @return this buffer.
     * @throws IndexOutOfBoundsException
     *             if {@code index} is negative or greater than the current
     *             length, {@code start} or {@code end} are negative, {@code
     *             start} is greater than {@code end} or {@code end} is greater
     *             than the length of {@code s}.
     * @since 1.5
     */
    public synchronized StringBuffer insert(int index, CharSequence s,
            int start, int end) {
        insert0(index, s, start, end);
        return this;
    }

    @Override
    public synchronized int lastIndexOf(String subString, int start) {
        return super.lastIndexOf(subString, start);
    }

    @Override
    public synchronized int offsetByCodePoints(int index, int codePointOffset) {
        return super.offsetByCodePoints(index, codePointOffset);
    }

    /**
     * Replaces the characters in the specified range with the contents of the
     * specified string.
     *
     * @param start
     *            the inclusive begin index.
     * @param end
     *            the exclusive end index.
     * @param string
     *            the string that will replace the contents in the range.
     * @return this buffer.
     * @throws StringIndexOutOfBoundsException
     *             if {@code start} or {@code end} are negative, {@code start}
     *             is greater than {@code end} or {@code end} is greater than
     *             the length of {@code s}.
     */
    public synchronized StringBuffer replace(int start, int end, String string) {
        replace0(start, end, string);
        return this;
    }

    /**
     * Reverses the order of characters in this buffer.
     *
     * @return this buffer.
     */
    public synchronized StringBuffer reverse() {
        reverse0();
        return this;
    }

    @Override
    public synchronized void setCharAt(int index, char ch) {
        super.setCharAt(index, ch);
    }

    @Override
    public synchronized void setLength(int length) {
        super.setLength(length);
    }

    @Override
    public synchronized CharSequence subSequence(int start, int end) {
        // Returns a String (which implements CharSequence): an immutable
        // snapshot rather than a view, so later edits don't affect it.
        return super.substring(start, end);
    }

    @Override
    public synchronized String substring(int start) {
        return super.substring(start);
    }

    @Override
    public synchronized String substring(int start, int end) {
        return super.substring(start, end);
    }

    @Override
    public synchronized String toString() {
        return super.toString();
    }

    @Override
    public synchronized void trimToSize() {
        super.trimToSize();
    }

    // Writes the legacy {count, shared, value} serial form declared in
    // serialPersistentFields; "shared" is always written as false since the
    // reconstructed buffer never shares its array with a String.
    private synchronized void writeObject(ObjectOutputStream out) throws IOException {
        ObjectOutputStream.PutField fields = out.putFields();
        fields.put("count", length());
        fields.put("shared", false);
        fields.put("value", getValue());
        out.writeFields();
    }

    // Not synchronized: the object being deserialized is confined to this
    // thread until readObject returns, so no lock is needed.
    // NOTE(review): count/value are taken from the stream unvalidated —
    // presumably AbstractStringBuilder.set() range-checks them; confirm.
    private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
        ObjectInputStream.GetField fields = in.readFields();
        int count = fields.get("count", 0);
        char[] value = (char[]) fields.get("value", null);
        set(value, count);
    }
}
package edu.ufl.bmi.ontology;

import java.io.FileReader;
import java.io.IOException;
import java.io.LineNumberReader;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.semanticweb.owlapi.model.OWLDataFactory;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLNamedIndividual;

import edu.ufl.bmi.misc.IriLookup;

/**
 * This class is a pseudocompiler that takes an instruction set file and stages
 * appropriate instances of various instruction classes to carry out the
 * instruction in code.
 *
 * Here, "F2" means format 2 for instruction set syntax. This version of the
 * syntax is more data driven, meaning that if a field is either not present
 * or has a blank value, the instructions associated with that field are
 * skipped.
 *
 * <p>File grammar: a section header is either {@code [element]} (single-valued)
 * or {@code foreach [element]} (multi-valued); every following non-blank,
 * non-{@code #} line is an instruction of the form
 * {@code instruction-type : tab-delimited-fields} belonging to that section.
 */
public class RdfConversionInstructionSetF2Compiler {
    String fileName;
    IriLookup iriMap;
    OWLDataFactory odf;
    HashMap<String, HashMap<String, OWLNamedIndividual>> searchIndexes;
    IriRepository iriRepository;
    String iriRepositoryPrefix;
    String uniqueIdFieldName;
    String iriPrefix;

    // Section header forms: "[element]" and "foreach [element]".
    static final String VARIABLE_PATTERN = "\\[(.*)\\]";
    static final String FOR_EACH_VARIABLE_PATTERN = "foreach \\[(.*)\\]";

    /**
     * Creates a compiler over the given instruction-set file.
     *
     * @param fName               path of the instruction-set file to compile
     * @param iriMap              lookup of IRI handles used by instructions
     * @param odf                 OWL data factory for building axioms
     * @param searchIndexes       per-field indexes of already-known individuals
     * @param iriRepository       repository for minting/querying instance IRIs
     * @param iriRepositoryPrefix prefix under which IRIs are stored
     * @param uniqueIdFieldName   name of the unique-identifier field in the data
     * @param iriPrefix           prefix for newly minted IRIs
     */
    public RdfConversionInstructionSetF2Compiler(String fName, IriLookup iriMap, OWLDataFactory odf,
            HashMap<String, HashMap<String, OWLNamedIndividual>> searchIndexes, IriRepository iriRepository,
            String iriRepositoryPrefix, String uniqueIdFieldName, String iriPrefix) {
        this.fileName = fName;
        this.iriMap = iriMap;
        this.odf = odf;
        this.searchIndexes = searchIndexes;
        this.iriRepository = iriRepository;
        this.iriRepositoryPrefix = iriRepositoryPrefix;
        this.uniqueIdFieldName = uniqueIdFieldName;
        this.iriPrefix = iriPrefix;
    }

    /**
     * Parses the instruction-set file and returns an executor staged with one
     * instruction set per section header.
     *
     * @return the staged executor (I/O failures are logged; the executor is
     *         still returned, possibly partially populated)
     * @throws ParseException if a line cannot be parsed as an instruction
     */
    public RdfConversionInstructionSetExecutor compile() throws ParseException {
        RdfConversionInstructionSetExecutor rcise = new RdfConversionInstructionSetExecutor();
        // try-with-resources: the original leaked the reader on every path.
        try (FileReader fr = new FileReader(fileName);
                LineNumberReader lnr = new LineNumberReader(fr)) {
            String line;
            ArrayList<RdfConversionInstruction> instructionList = null;
            Pattern variablePattern = Pattern.compile(VARIABLE_PATTERN);
            Pattern foreachPattern = Pattern.compile(FOR_EACH_VARIABLE_PATTERN);
            String elementName = "";
            boolean multiple = false;
            while ((line = lnr.readLine()) != null) {
                line = line.trim();  // ignore any leading and trailing whitespace
                // skip all blank lines and comment lines
                if (line.length() == 0 || line.startsWith("#")) continue;

                Matcher mVariable = variablePattern.matcher(line);
                boolean plainHeader = mVariable.matches();
                Matcher m = plainHeader ? mVariable : foreachPattern.matcher(line);

                // if the line is a section header, we're starting a new section
                if (plainHeader || m.matches()) {
                    // save away the previous section's instruction set (none on
                    // the first header, when elementName is still empty)
                    if (elementName.length() > 0) {
                        storeInstructionSet(rcise, elementName, instructionList, multiple);
                    }
                    // FIX: record the new section's header type unconditionally.
                    // Previously this assignment sat inside the save branch, so a
                    // "foreach" header for the FIRST section was never recorded
                    // and that section compiled as single-valued.
                    multiple = !plainHeader;
                    // prepare a new instruction list for the next section
                    instructionList = new ArrayList<RdfConversionInstruction>();
                    // the element name is in group 1 of either header pattern
                    elementName = m.group(1).trim();
                    System.out.println("ELEMENT NAME IS " + elementName);
                } else {
                    // FIX: the original tested contains("\\[") — the literal
                    // two-char sequence backslash-bracket — so this diagnostic
                    // never fired.
                    if (line.contains("[")) System.err.println("line has [ but pattern did not match.");
                    // FIX: an instruction before any header used to NPE on
                    // instructionList.add(); fail with a parse error instead.
                    if (instructionList == null)
                        throw new ParseException("instruction appears before any element header at line "
                                + lnr.getLineNumber() + ": " + line, 0);
                    // an instruction is two parts -- instruction type : instruction content
                    String[] flds = line.split(Pattern.quote(":"), 2);
                    // FIX: a line with no ':' used to log and then crash with
                    // ArrayIndexOutOfBoundsException on flds[1].
                    if (flds.length == 1)
                        throw new ParseException("instruction is missing ':' separator at line "
                                + lnr.getLineNumber() + ": " + line, 0);
                    String instructionType = flds[0].trim();
                    String instruction = flds[1].trim();
                    RdfConversionInstruction rci = compileInstruction(instructionType, instruction);
                    instructionList.add(rci);
                }
            }
            // flush the final section (guarded: an empty file stages nothing)
            if (elementName.length() > 0) {
                storeInstructionSet(rcise, elementName, instructionList, multiple);
            }
        } catch (IOException ioe) {
            ioe.printStackTrace();
        }
        rcise.initializeVariables(this.iriMap, this.odf);
        return rcise;
    }

    // Wraps the accumulated instructions for one section in the right
    // instruction-set type and registers it with the executor.
    private boolean storeInstructionSet(RdfConversionInstructionSetExecutor rcise, String elementName,
            ArrayList<RdfConversionInstruction> instructionList, boolean multiple) {
        RdfConversionInstructionSet s = (multiple)
                ? new RdfConversionMultipleValueConversionInstructionSet(elementName, instructionList)
                : new RdfConversionInstructionSet(instructionList);
        boolean added = rcise.addInstructionSetForElement(elementName, s);
        if (!added) {
            System.err.println("instructions for element " + elementName + " were not added to "
                    + "the instruction set execution engine.");
        }
        return added;
    }

    /**
     * Compiles a single instruction line into its staged instruction object.
     *
     * @param instructionType the token before the first ':' on the line
     * @param instruction     the tab-delimited field payload after the ':'
     * @return the staged instruction
     * @throws ParseException if the type is unknown or the field count is wrong
     */
    public RdfConversionInstruction compileInstruction(String instructionType, String instruction)
            throws ParseException {
        String[] flds = instruction.split(Pattern.quote("\t"));
        if (instructionType.equals("annotation")) {
            if (flds.length != 3)
                throw new ParseException(
                        "annotation instruction must have three, tab delimited fields: " + instruction, 1);
            String variableName = flds[0].trim();
            String annotationPropertyTxt = flds[1].trim();
            String annotationValueInstruction = flds[2].trim();
            RdfConversionAnnotationInstruction rcai = new RdfConversionAnnotationInstruction(iriMap, odf,
                    variableName, annotationPropertyTxt, annotationValueInstruction);
            return rcai;
        } else if (instructionType.equals("new-individual")) {
            // Optional fields 5/6 carry an "iri=" source and/or a creation
            // condition, in either order depending on count.
            if (flds.length != 4 && flds.length != 5 && flds.length != 6)
                throw new ParseException(
                        "new individual instruction must have four, tab-delimited fields: " + instruction, 2);
            String variableName = flds[0].trim();
            String classIriTxt = flds[1].trim();
            String annotationPropertyTxt = flds[2].trim();
            String annotationValueInstruction = flds[3].trim();
            RdfConversionNewIndividualInstruction rcnii = new RdfConversionNewIndividualInstruction(
                    iriMap, odf, variableName, classIriTxt, annotationPropertyTxt,
                    annotationValueInstruction, iriRepository, iriRepositoryPrefix, uniqueIdFieldName);
            if (flds.length == 5) {
                String fieldFive = flds[4].trim();
                if (fieldFive.startsWith("iri=")) {
                    String iriTxt = fieldFive.substring(4);
                    System.out.println("individual IRI assignment = " + iriTxt);
                    rcnii.setIriSourceVariableName(iriTxt);
                } else {
                    rcnii.setCreationConditionLogic(fieldFive);
                }
            } else if (flds.length == 6) {
                String creationConditionLogic = flds[4].trim();
                String iriField = flds[5].trim();
                String iriTxt = (iriField.startsWith("iri=")) ? iriField.substring(4) : "";
                rcnii.setCreationConditionLogic(creationConditionLogic);
                rcnii.setIriSourceVariableName(iriTxt);
            }
            return rcnii;
        } else if (instructionType.equals("data-property-expression")) {
            if (flds.length != 4)
                throw new ParseException(
                        "data property expression instruction must have four, tab-delimited fields: "
                                + instruction, 3);
            String variableName = flds[0].trim();
            String dataPropertyIriTxt = flds[1].trim();
            String dataValueInstruction = flds[2].trim();
            String dataType = flds[3].trim();
            RdfConversionDataInstruction rcdi = new RdfConversionDataInstruction(iriMap, odf, variableName,
                    dataPropertyIriTxt, dataValueInstruction, dataType);
            return rcdi;
        } else if (instructionType.equals("object-property-expression")) {
            if (flds.length != 3)
                throw new ParseException(
                        "object property expression instructions require three, tab-delimited fields.", 4);
            String sourceVariableName = flds[0].trim();
            String objectPropertyIriTxt = flds[1].trim();
            String targetVariableName = flds[2].trim();
            RdfConversionObjectPropertylInstruction rcopi = new RdfConversionObjectPropertylInstruction(
                    iriMap, odf, sourceVariableName, objectPropertyIriTxt, targetVariableName);
            return rcopi;
        } else if (instructionType.equals("lookup-individual")) {
            if (flds.length != 2)
                throw new ParseException(
                        "lookup individual instructions must have two, tab-delimited fields: " + instruction, 5);
            String variableName = flds[0].trim();
            // field names arrive wrapped in [...]; strip the brackets
            String searchFieldName = flds[1].trim().replace("[", "").replace("]", "");
            RdfConversionLookupInstruction rclii = new RdfConversionLookupInstruction(iriMap, odf,
                    variableName, searchFieldName, searchIndexes);
            return rclii;
        } else if (instructionType.equals("lookup-individual-by-element-value")) {
            if (flds.length != 2)
                throw new ParseException(
                        "lookup-individual-by-element-value instuctions must have two, tab-delimited fields: "
                                + instruction, 9);
            String variableName = flds[0].trim();
            String lookupFieldName = flds[1].trim().replace("[", "").replace("]", "");
            RdfConversionLookupByElementValueInstruction rclbevi = new RdfConversionLookupByElementValueInstruction(
                    iriMap, odf, variableName, lookupFieldName, searchIndexes);
            return rclbevi;
        } else if (instructionType.equals("class-assertion-expression")) {
            if (flds.length != 2)
                throw new ParseException(
                        "class assertion expressions must have two, tab-delimited fields: " + instruction, 7);
            String variableName = flds[0].trim();
            String classIriHandle = flds[1].trim();
            RdfClassAssertionInstruction rcai = new RdfClassAssertionInstruction(iriMap, odf, variableName,
                    classIriHandle);
            return rcai;
        } else if (instructionType.equals("query-individual")) {
            if (flds.length != 4 && flds.length != 5)
                throw new ParseException(
                        "query individual expressions must have four or five, tab-delimited fields."
                                + instruction, 8);
            String variableName = flds[0].trim();            // e.g., affiliation-org
            String rowTypeName = flds[1].trim();             // e.g., organization
            String externalFileFieldName = flds[2].trim();   // e.g., ID
            String lookupValueFieldName = flds[3].trim();    // e.g., [OrganizationAffiliationID]
            RdfConversionQueryIndividualInstruction rcqii = new RdfConversionQueryIndividualInstruction(
                    iriMap, odf, variableName, iriRepository, iriRepositoryPrefix, externalFileFieldName,
                    rowTypeName, iriPrefix, lookupValueFieldName);
            if (flds.length == 5)
                rcqii.setLookupVariableName(flds[4].trim());
            return rcqii;
        } else if (instructionType.equals("query-individual-by-attribute-value")) {
            if (flds.length != 5 && flds.length != 6)
                throw new ParseException(
                        "query individual expressions must have four or five, tab-delimited fields."
                                + instruction, 10);
            String variableName = flds[0].trim();            // e.g., affiliation-org
            String rowTypeName = flds[1].trim();             // e.g., organization
            String externalFileFieldName = flds[2].trim();   // e.g., ID
            String lookupValueFieldName = flds[3].trim();    // e.g., [OrganizationAffiliationID]
            String lookupUniqueFieldName = flds[4].trim();
            String searchInstructions = (flds.length == 6) ? flds[5].trim() : null;
            RdfConversionQueryIndividualByAttributeValueInstruction rcqibav =
                    new RdfConversionQueryIndividualByAttributeValueInstruction(
                            iriMap, odf, variableName, iriRepository, iriRepositoryPrefix,
                            externalFileFieldName, rowTypeName, iriPrefix, lookupValueFieldName,
                            lookupUniqueFieldName, searchInstructions);
            return rcqibav;
        } else if (instructionType.equals("lookup-mapping-to-individual")) {
            if (flds.length != 4)
                throw new ParseException(
                        "mapping lookup instructions must have four, tab-delimited fields.", 11);
            String variableName = flds[0].trim();
            String lookupValueFieldName = flds[1].trim();
            String mappingRdfFileName = flds[2].trim();
            String sparqlQueryTemplate = flds[3].trim();
            RdfConversionLookupMappingToIndividualInstruction rclmtii =
                    new RdfConversionLookupMappingToIndividualInstruction(
                            iriMap, odf, variableName, lookupValueFieldName, searchIndexes,
                            mappingRdfFileName, sparqlQueryTemplate);
            return rclmtii;
        } else {
            throw new ParseException("don't understand instruction type of " + instructionType, 6);
        }
    }
}
package net.ripe.db.whois;

import com.google.common.collect.Maps;
import net.ripe.db.whois.api.MailUpdatesTestSupport;
import net.ripe.db.whois.api.httpserver.Audience;
import net.ripe.db.whois.api.httpserver.JettyConfig;
import net.ripe.db.whois.api.mail.dequeue.MessageDequeue;
import net.ripe.db.whois.common.Slf4JLogConfiguration;
import net.ripe.db.whois.common.Stub;
import net.ripe.db.whois.common.TestDateTimeProvider;
import net.ripe.db.whois.common.dao.RpslObjectDao;
import net.ripe.db.whois.common.dao.RpslObjectInfo;
import net.ripe.db.whois.common.dao.RpslObjectUpdateDao;
import net.ripe.db.whois.common.dao.TagsDao;
import net.ripe.db.whois.common.dao.jdbc.DatabaseHelper;
import net.ripe.db.whois.common.dao.jdbc.IndexDao;
import net.ripe.db.whois.common.dao.jdbc.domain.ObjectTypeIds;
import net.ripe.db.whois.common.domain.IpRanges;
import net.ripe.db.whois.common.domain.User;
import net.ripe.db.whois.common.iptree.IpTreeUpdater;
import net.ripe.db.whois.common.rpsl.ObjectType;
import net.ripe.db.whois.common.rpsl.RpslObject;
import net.ripe.db.whois.common.source.SourceAwareDataSource;
import net.ripe.db.whois.common.source.SourceContext;
import net.ripe.db.whois.common.support.DummyWhoisClient;
import net.ripe.db.whois.query.QueryServer;
import net.ripe.db.whois.scheduler.task.unref.UnrefCleanup;
import net.ripe.db.whois.update.dao.PendingUpdateDao;
import net.ripe.db.whois.update.dns.DnsGateway;
import net.ripe.db.whois.update.dns.DnsGatewayStub;
import net.ripe.db.whois.update.mail.MailGateway;
import net.ripe.db.whois.update.mail.MailSenderStub;
import org.joda.time.LocalDateTime;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.util.FileCopyUtils;

import javax.mail.MessagingException;
import javax.mail.internet.MimeMessage;
import javax.sql.DataSource;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import java.io.*;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.nio.charset.Charset;
import java.util.Collection;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static net.ripe.db.whois.common.domain.CIString.ciString;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;

// TODO [AK] Integrate in BaseSpec
/**
 * End-to-end test fixture that boots the whole whois application from
 * {@code applicationContext-whois-test.xml}, exposes the interesting beans to
 * tests, and provides helpers for driving the mail, syncupdates (HTTP) and
 * query interfaces.
 *
 * Lifecycle: {@link #start()} once, {@link #reset()} between tests,
 * {@link #stop()} at the end.
 */
public class WhoisFixture {
    // Extracts the charset parameter from a Content-Type header, e.g.
    // "text/plain;charset=UTF-8" -> "UTF-8".
    private static final Pattern CHARSET_PATTERN = Pattern.compile(".*;charset=(.*)");

    private ClassPathXmlApplicationContext applicationContext;

    protected MailSenderStub mailSender;
    protected MailUpdatesTestSupport mailUpdatesTestSupport;
    protected RpslObjectDao rpslObjectDao;
    protected RpslObjectUpdateDao rpslObjectUpdateDao;
    protected TagsDao tagsDao;
    protected PendingUpdateDao pendingUpdateDao;
    protected MailGateway mailGateway;
    protected MessageDequeue messageDequeue;
    protected DataSource whoisDataSource;
    protected DnsGateway dnsGateway;
    protected IpRanges ipRanges;
    protected TestDateTimeProvider testDateTimeProvider;
    protected JettyConfig jettyConfig;
    protected Map<String, Stub> stubs;
    protected DatabaseHelper databaseHelper;
    protected IpTreeUpdater ipTreeUpdater;
    protected SourceContext sourceContext;
    protected UnrefCleanup unrefCleanup;
    protected IndexDao indexDao;

    // Source instance appended to the syncupdates URL path.
    private static final String SYNCUPDATES_INSTANCE = "TEST";
    // Charset used for URL-encoding and for the syncupdates POST body.
    private static final String CHARSET = "ISO-8859-1";

    static {
        // Configure logging and the system properties the application context
        // reads at startup, before any bean is created.
        Slf4JLogConfiguration.init();

        System.setProperty("application.version", "0.1-TEST");
        System.setProperty("mail.dequeue.threads", "2");
        System.setProperty("mail.dequeue.interval", "10");
        System.setProperty("whois.maintainers.power", "RIPE-NCC-HM-MNT");
        System.setProperty("whois.maintainers.enduser", "RIPE-NCC-END-MNT");
        System.setProperty("whois.maintainers.alloc", "RIPE-NCC-HM-MNT,RIPE-NCC-HM2-MNT");
        System.setProperty("whois.maintainers.enum", "RIPE-GII-MNT,RIPE-NCC-MNT");
        System.setProperty("whois.maintainers.dbm", "RIPE-NCC-LOCKED-MNT,RIPE-DBM-MNT");
        System.setProperty("unrefcleanup.enabled", "true");
        System.setProperty("unrefcleanup.deletes", "true");
        System.setProperty("nrtm.enabled", "false");
    }

    /**
     * Boots the Spring context, grabs all beans used by tests, initializes the
     * database schema and starts the whois server.
     */
    public void start() throws Exception {
        applicationContext = new ClassPathXmlApplicationContext("applicationContext-whois-test.xml");

        mailSender = applicationContext.getBean(MailSenderStub.class);
        mailUpdatesTestSupport = applicationContext.getBean(MailUpdatesTestSupport.class);
        rpslObjectDao = applicationContext.getBean(RpslObjectDao.class);
        rpslObjectUpdateDao = applicationContext.getBean(RpslObjectUpdateDao.class);
        tagsDao = applicationContext.getBean(TagsDao.class);
        pendingUpdateDao = applicationContext.getBean(PendingUpdateDao.class);
        mailGateway = applicationContext.getBean(MailGateway.class);
        dnsGateway = applicationContext.getBean(DnsGateway.class);
        messageDequeue = applicationContext.getBean(MessageDequeue.class);
        whoisDataSource = applicationContext.getBean(SourceAwareDataSource.class);
        ipRanges = applicationContext.getBean(IpRanges.class);
        testDateTimeProvider = applicationContext.getBean(TestDateTimeProvider.class);
        jettyConfig = applicationContext.getBean(JettyConfig.class);
        stubs = applicationContext.getBeansOfType(Stub.class);
        databaseHelper = applicationContext.getBean(DatabaseHelper.class);
        ipTreeUpdater = applicationContext.getBean(IpTreeUpdater.class);
        sourceContext = applicationContext.getBean(SourceContext.class);
        unrefCleanup = applicationContext.getBean(UnrefCleanup.class);
        indexDao = applicationContext.getBean(IndexDao.class);

        databaseHelper.setup();
        applicationContext.getBean(WhoisServer.class).start();

        initData();
    }

    /** Inserts the override users every test run expects to exist. */
    private void initData() {
        databaseHelper.insertUser(User.createWithPlainTextPassword("dbase1", "override1", ObjectType.values()));
        databaseHelper.insertUser(User.createWithPlainTextPassword("dbase2", "override2", ObjectType.values()));
    }

    /**
     * Restores a clean state between tests: fresh schema and seed data,
     * rebuilt IP trees, localhost trusted, and all stubs reset.
     */
    public void reset() {
        databaseHelper.setup();
        initData();
        ipTreeUpdater.rebuild();
        ipRanges.setTrusted("127.0.0.1", "0:0:0:0:0:0:0:1");
        for (final Stub stub : stubs.values()) {
            stub.reset();
        }
    }

    public void stop() {
        applicationContext.getBean(WhoisServer.class).stop();
    }

    public void dumpSchema() throws Exception {
        DatabaseHelper.dumpSchema(whoisDataSource);
    }

    /** Queues a raw mail update message; returns the generated message id. */
    public String send(final String content) {
        return mailUpdatesTestSupport.insert(content);
    }

    public String send(final String subject, final String body) {
        return mailUpdatesTestSupport.insert(subject, body);
    }

    /** Blocks until a mail addressed to {@code to} has been sent by whois. */
    public MimeMessage getMessage(final String to) throws MessagingException {
        return mailSender.getMessage(to);
    }

    public boolean anyMoreMessages() {
        return mailSender.anyMoreMessages();
    }

    public void createRpslObjects(final Collection<RpslObject> rpslObjects) {
        databaseHelper.addObjects(rpslObjects);
    }

    public void deleteRpslObject(final RpslObject rpslObject) {
        final RpslObjectInfo byKey = rpslObjectDao.findByKey(rpslObject.getType(), rpslObject.getKey().toString());
        rpslObjectUpdateDao.deleteObject(byKey.getObjectId(), byKey.getKey());
    }

    /**
     * Sends a syncupdates request against this fixture's Jetty instance.
     *
     * @param data         the update payload (may be null/empty)
     * @param isHelp/isDiff/isNew/isRedirect map onto the HELP/DIFF/NEW/REDIRECT parameters
     * @param doPost       POST form-encoded body when true, otherwise GET with query string
     * @param responseCode the HTTP status the caller expects (asserted)
     * @return the response body, decoded per the response's declared charset
     */
    public String syncupdate(final String data, final boolean isHelp, final boolean isDiff, final boolean isNew, final boolean isRedirect,
                             final boolean doPost, final int responseCode) throws IOException {
        return syncupdate(jettyConfig, data, isHelp, isDiff, isNew, isRedirect, doPost, responseCode);
    }

    public static String syncupdate(final JettyConfig jettyConfig, final String data, final boolean isHelp, final boolean isDiff,
                                    final boolean isNew, final boolean isRedirect, final boolean doPost, final int responseCode) throws IOException {
        if (doPost) {
            return doPostRequest(getSyncupdatesUrl(jettyConfig, null), getQuery(data, isHelp, isDiff, isNew, isRedirect), responseCode);
        } else {
            return doGetRequest(getSyncupdatesUrl(jettyConfig, getQuery(data, isHelp, isDiff, isNew, isRedirect)), responseCode);
        }
    }

    /** POSTs a JSON payload to the internal ACL API. */
    public String aclPost(final String path, final String apiKey, final String data, final int responseCode) throws IOException {
        Map<String, String> properties = Maps.newLinkedHashMap();
        properties.put(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON);
        properties.put(HttpHeaders.CONTENT_LENGTH, Integer.toString(data.length()));

        return doPostRequest(getAclUrl(path, apiKey), data, properties, responseCode);
    }

    /** True if the DNS gateway stub saw (and consumed) a check for {@code key}. */
    public boolean dnsCheckedFor(final String key) {
        return ((DnsGatewayStub) dnsGateway).getCheckedUpdates().remove(ciString(key));
    }

    public void setRipeRanges(final String... ripeRanges) {
        ipRanges.setTrusted(ripeRanges);
    }

    public void setTime(LocalDateTime dateTime) {
        testDateTimeProvider.setTime(dateTime);
    }

    /** Checks the "last" table for a live (sequence_id != 0) object with the given type/pkey. */
    public boolean objectExists(final ObjectType objectType, final String pkey) {
        return 1 == new JdbcTemplate(whoisDataSource).queryForInt("" +
                "select count(*) " +
                "from last " +
                "where object_type = ? " +
                "and pkey = ? " +
                "and sequence_id != 0 ",
                ObjectTypeIds.getId(objectType), pkey);
    }

    private static String getSyncupdatesUrl(final JettyConfig jettyConfig, final String query) {
        final StringBuilder builder = new StringBuilder();
        builder.append("http://localhost:");
        builder.append(jettyConfig.getPort(Audience.PUBLIC));
        builder.append("/whois/syncupdates/");
        builder.append(SYNCUPDATES_INSTANCE);
        if (query != null && query.length() > 0) {
            builder.append("?");
            builder.append(query);
        }
        return builder.toString();
    }

    private String getAclUrl(final String path, final String apiKey) {
        final StringBuilder builder = new StringBuilder();
        builder.append("http://localhost:");
        builder.append(jettyConfig.getPort(Audience.INTERNAL));
        builder.append("/api/acl/");
        builder.append(path);
        builder.append("?apiKey=");
        builder.append(apiKey);
        return builder.toString();
    }

    /** Builds the DATA/HELP/DIFF/NEW/REDIRECT query string, with '&' separators only between present params. */
    private static String getQuery(final String data, final boolean isHelp, final boolean isDiff, final boolean isNew, final boolean isRedirect) {
        final StringBuilder builder = new StringBuilder();
        int params = 0;
        if ((data != null) && (data.length() > 0)) {
            builder.append("DATA=");
            builder.append(encode(data));
            params++;
        }
        if (isHelp) {
            builder.append(params > 0 ? "&" : "");
            builder.append("HELP=yes");
            params++;
        }
        if (isDiff) {
            builder.append(params > 0 ? "&" : "");
            builder.append("DIFF=yes");
            params++;
        }
        if (isNew) {
            builder.append(params > 0 ? "&" : "");
            builder.append("NEW=yes");
            params++;
        }
        if (isRedirect) {
            builder.append(params > 0 ? "&" : "");
            builder.append("REDIRECT=yes");
        }
        return builder.toString();
    }

    private static String doGetRequest(final String url, final int responseCode) throws IOException {
        HttpURLConnection connection = (HttpURLConnection) (new URL(url)).openConnection();
        assertThat(connection.getResponseCode(), is(responseCode));
        return readResponse(connection);
    }

    private static String doPostRequest(final String url, final String data, final int responseCode) throws IOException {
        Map<String, String> properties = Maps.newLinkedHashMap();
        // ISO-8859-1 encodes every char as exactly one byte, so the char count
        // equals the byte count and Content-Length is correct here.
        properties.put(HttpHeaders.CONTENT_LENGTH, Integer.toString(data.length()));
        properties.put(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_FORM_URLENCODED + "; charset=" + CHARSET);

        return doPostRequest(url, data, properties, responseCode);
    }

    private static String doPostRequest(final String url, final String data, final Map<String, String> properties, final int responseCode) throws IOException {
        HttpURLConnection connection = (HttpURLConnection) (new URL(url)).openConnection();
        for (Map.Entry<String, String> entry : properties.entrySet()) {
            connection.setRequestProperty(entry.getKey(), entry.getValue());
        }

        connection.setDoInput(true);
        connection.setDoOutput(true);

        // FIX: encode the body with the charset we advertise (ISO-8859-1)
        // instead of the platform default, which may differ and produce bytes
        // inconsistent with the Content-Type/Content-Length headers.
        Writer writer = new OutputStreamWriter(connection.getOutputStream(), CHARSET);
        writer.write(data);
        writer.close();

        assertThat(connection.getResponseCode(), is(responseCode));
        return readResponse(connection);
    }

    /** Reads the (error or normal) response body using the charset declared in Content-Type, if any. */
    private static String readResponse(final HttpURLConnection connection) throws IOException {
        final InputStream inputStream;
        if (connection.getResponseCode() == HttpURLConnection.HTTP_OK) {
            inputStream = connection.getInputStream();
        } else {
            inputStream = connection.getErrorStream();
        }

        // FIX: getContentType() returns null when the header is absent; guard
        // against the resulting NPE and fall back to the default charset.
        final String contentType = connection.getContentType();
        final Matcher matcher = contentType == null ? null : CHARSET_PATTERN.matcher(contentType);
        final String charsetName = (matcher != null && matcher.matches()) ? matcher.group(1) : Charset.defaultCharset().name();
        final byte[] bytes = FileCopyUtils.copyToByteArray(inputStream);
        return new String(bytes, charsetName);
    }

    private static String encode(final String data) {
        try {
            return URLEncoder.encode(data, CHARSET);
        } catch (UnsupportedEncodingException e) {
            throw new IllegalStateException(e);
        }
    }

    public DatabaseHelper getDatabaseHelper() {
        return databaseHelper;
    }

    public TagsDao getTagsDao() {
        return tagsDao;
    }

    public PendingUpdateDao getPendingUpdateDao() {
        return pendingUpdateDao;
    }

    public RpslObjectDao getRpslObjectDao() {
        return rpslObjectDao;
    }

    public String query(final String query) {
        return DummyWhoisClient.query(QueryServer.port, query);
    }

    public void reloadTrees() {
        ipTreeUpdater.update();
    }

    public SourceContext getSourceContext() {
        return sourceContext;
    }

    public void unrefCleanup() {
        unrefCleanup.run();
    }

    public void setIpRanges(String... ranges) {
        ipRanges.setTrusted(ranges);
    }

    public void rebuildIndexes() {
        indexDao.rebuild();
    }
}
/*
 * Copyright (c) 2007-2011 Madhav Vaidyanathan
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 */

package com.midisheetmusic;

import java.io.Serializable;

/**
 * @class TimeSignature
 * The TimeSignature class represents
 * - The time signature of the song, such as 4/4, 3/4, or 6/8 time, and
 * - The number of pulses per quarter note
 * - The number of microseconds per quarter note
 * <p/>
 * In midi files, all time is measured in "pulses". Each note has
 * a start time (measured in pulses), and a duration (measured in
 * pulses). This class is used mainly to convert pulse durations
 * (like 120, 240, etc) into note durations (half, quarter, eighth, etc).
 */
public class TimeSignature implements Serializable {
    private int numerator;      // Numerator of the time signature
    private int denominator;    // Denominator of the time signature
    private int quarternote;    // Number of pulses per quarter note
    private int measure;        // Number of pulses per measure
    private int tempo;          // Number of microseconds per quarter note

    /**
     * Create a new time signature, with the given numerator,
     * denominator, pulses per quarter note, and tempo.
     *
     * @throws MidiFileException if any of numerator, denominator or
     *         quarternote is not positive.
     */
    public TimeSignature(int numerator, int denominator, int quarternote, int tempo) {
        if (numerator <= 0 || denominator <= 0 || quarternote <= 0) {
            throw new MidiFileException("Invalid time signature", 0);
        }

        /* Midi File gives wrong time signature sometimes */
        if (numerator == 5) {
            numerator = 4;
        }

        this.numerator = numerator;
        this.denominator = denominator;
        this.quarternote = quarternote;
        this.tempo = tempo;

        // Pulses per beat (one denominator-note).
        // FIX: the previous expression quarternote / (denominator / 4)
        // divided by zero for denominators below 4 (e.g. an x/1 signature);
        // the equivalent quarternote * 4 / denominator produces the same
        // values for denominators 2, 4, 8, 16, ... while staying defined
        // for denominator 1.
        int beat = quarternote * 4 / denominator;

        measure = numerator * beat;
    }

    /**
     * Convert a note duration into a stem duration. Dotted durations
     * are converted into their non-dotted equivalents.
     */
    public static NoteDuration GetStemDuration(NoteDuration dur) {
        if (dur == NoteDuration.DottedHalf)
            return NoteDuration.Half;
        else if (dur == NoteDuration.DottedQuarter)
            return NoteDuration.Quarter;
        else if (dur == NoteDuration.DottedEighth)
            return NoteDuration.Eighth;
        else
            return dur;
    }

    /** Get the numerator of the time signature */
    public int getNumerator() {
        return numerator;
    }

    /** Get the denominator of the time signature */
    public int getDenominator() {
        return denominator;
    }

    /** Get the number of pulses per quarter note */
    public int getQuarter() {
        return quarternote;
    }

    /** Get the number of pulses per measure */
    public int getMeasure() {
        return measure;
    }

    /** Get the number of microseconds per quarter note */
    public int getTempo() {
        return tempo;
    }

    /** Return which measure the given time (in pulses) belongs to. */
    public int GetMeasure(int time) {
        return time / measure;
    }

    /**
     * Given a duration in pulses, return the closest note duration.
     */
    public NoteDuration GetNoteDuration(int duration) {
        int whole = quarternote * 4;

        /**
         1       = 32/32    3/4   = 24/32
         1/2     = 16/32    3/8   = 12/32
         1/4     =  8/32    3/16  =  6/32
         1/8     =  4/32 = 8/64   triplet = 5.33/64
         1/16    =  2/32 = 4/64
         1/32    =  1/32 = 2/64
         **/
        // Thresholds sit roughly halfway between adjacent durations, so a
        // slightly short or long pulse count still maps to the nearest note.
        if (duration >= 28 * whole / 32)
            return NoteDuration.Whole;
        else if (duration >= 20 * whole / 32)
            return NoteDuration.DottedHalf;
        else if (duration >= 14 * whole / 32)
            return NoteDuration.Half;
        else if (duration >= 10 * whole / 32)
            return NoteDuration.DottedQuarter;
        else if (duration >= 7 * whole / 32)
            return NoteDuration.Quarter;
        else if (duration >= 5 * whole / 32)
            return NoteDuration.DottedEighth;
        else if (duration >= 6 * whole / 64)
            return NoteDuration.Eighth;
        else if (duration >= 5 * whole / 64)
            return NoteDuration.Triplet;
        else if (duration >= 3 * whole / 64)
            return NoteDuration.Sixteenth;
        else
            return NoteDuration.ThirtySecond;
    }

    /**
     * Return the time period (in pulses) the the given duration spans
     */
    public int DurationToTime(NoteDuration dur) {
        int eighth = quarternote / 2;
        int sixteenth = eighth / 2;

        switch (dur) {
            case Whole:
                return quarternote * 4;
            case DottedHalf:
                return quarternote * 3;
            case Half:
                return quarternote * 2;
            case DottedQuarter:
                return 3 * eighth;
            case Quarter:
                return quarternote;
            case DottedEighth:
                return 3 * sixteenth;
            case Eighth:
                return eighth;
            case Triplet:
                return quarternote / 3;
            case Sixteenth:
                return sixteenth;
            case ThirtySecond:
                return sixteenth / 2;
            default:
                return 0;
        }
    }

    @Override
    public String toString() {
        return String.format("TimeSignature=%1$s/%2$s quarter=%3$s tempo=%4$s",
                numerator, denominator, quarternote, tempo);
    }
}
package org.endeavour.enterprise.endpoints;

import org.apache.commons.lang3.StringUtils;
import org.endeavourhealth.common.security.SecurityUtils;
import org.endeavourhealth.core.terminology.Snomed;
import org.endeavourhealth.core.terminology.SnomedCode;
import org.endeavourhealth.enterprise.core.DefinitionItemType;
import org.endeavourhealth.enterprise.core.DependencyType;
import org.endeavourhealth.enterprise.core.database.models.*;
import org.endeavourhealth.enterprise.core.database.models.data.*;
import org.endeavourhealth.enterprise.core.json.*;
import org.endeavourhealth.enterprise.core.querydocument.QueryDocumentSerializer;
import org.endeavourhealth.enterprise.core.querydocument.models.*;
import org.endeavourhealth.core.terminology.TerminologyService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.ws.rs.*;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.SecurityContext;
import java.util.*;

/**
 * REST endpoint for managing library items (queries, code sets, reports) and
 * for browsing concepts/terms used when building them.
 *
 * NOTE(review): orgUuid is hard-coded to the same constant in several methods
 * below — presumably a single-organisation placeholder; confirm before reuse.
 */
@Path("/library")
public final class LibraryEndpoint extends AbstractItemEndpoint {
    private static final Logger LOG = LoggerFactory.getLogger(LibraryEndpoint.class);

    /**
     * Lists the contents of a folder: each child item joined with its latest
     * audit record and, for reports, its latest cohort run.
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    @Path("/getFolderContents")
    public Response getFolderContents(@Context SecurityContext sc, @QueryParam("folderUuid") String uuidStr) throws Exception {
        super.setLogbackMarkers(sc);

        String folderUuid = uuidStr;
        String orgUuid = "B6FF900D-8FCD-43D8-AF37-5DB3A87A6EF6";

        LOG.trace("GettingFolderContents for folder {}", folderUuid);

        JsonFolderContentsList ret = new JsonFolderContentsList();

        // Children are items with an "is contained within" dependency on the folder.
        List<ActiveItemEntity> childActiveItems = ActiveItemEntity.retrieveDependentItems(orgUuid, folderUuid, (short)DependencyType.IsContainedWithin.getValue());

        // Batch-load audits, items and report runs, indexed for the join below.
        HashMap<String, AuditEntity> hmAuditsByAuditUuid = new HashMap<>();
        List<AuditEntity> audits = AuditEntity.retrieveForActiveItems(childActiveItems);
        for (AuditEntity audit: audits) {
            hmAuditsByAuditUuid.put(audit.getAuditUuid(), audit);
        }

        HashMap<String, ItemEntity> hmItemsByItemUuid = new HashMap<>();
        List<ItemEntity> items = ItemEntity.retrieveForActiveItems(childActiveItems);
        for (ItemEntity item: items) {
            hmItemsByItemUuid.put(item.getItemUuid(), item);
        }

        HashMap<String, CohortResultEntity> hmReportsByItemUuid = new HashMap<>();
        List<CohortResultEntity> reports = ItemEntity.retrieveForReports(childActiveItems);
        for (CohortResultEntity report: reports) {
            hmReportsByItemUuid.put(report.getQueryItemUuid(), report);
        }

        for (int i = 0; i < childActiveItems.size(); i++) {
            ActiveItemEntity activeItem = childActiveItems.get(i);
            ItemEntity item = hmItemsByItemUuid.get(activeItem.getItemUuid());
            Short itemType = activeItem.getItemTypeId();

            AuditEntity audit = hmAuditsByAuditUuid.get(item.getAuditUuid());
            CohortResultEntity cohort = hmReportsByItemUuid.get(activeItem.getItemUuid());

            JsonFolderContent c = new JsonFolderContent(activeItem, item, audit, cohort);
            ret.addContent(c);

            // Only Report items get extra decoration; the other known types
            // are intentionally passed through unchanged.
            if (itemType == DefinitionItemType.Query.getValue()) {
            } else if (itemType == DefinitionItemType.Test.getValue()) {
            } else if (itemType == DefinitionItemType.DataSource.getValue()) {
            } else if (itemType == DefinitionItemType.CodeSet.getValue()) {
            } else if (itemType == DefinitionItemType.Report.getValue()) {
                LibraryItem libraryItem = QueryDocumentSerializer.readLibraryItemFromXml(item.getXmlContent());
                Report report = libraryItem.getReport();
                if (report != null && report.getLastRunDate() != null) {
                    // NOTE(review): a lastRunDate in the future is treated as
                    // "still running" — confirm that is the intended convention.
                    if (new Date(report.getLastRunDate()).after(new Date()))
                        c.setIsRunning(true);
                    else
                        c.setIsRunning(false);
                    c.setLastRun(new Date(report.getLastRunDate()));
                }
            } else {
                //throw new RuntimeException("Unexpected content " + item + " in folder");
            }
        }

        if (ret.getContents() != null) {
            Collections.sort(ret.getContents());
        }

        clearLogbackMarkers();

        return Response
                .ok()
                .entity(ret)
                .build();
    }

    /** Returns the latest version of a single library item, deserialized from its XML. */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    @Path("/getLibraryItem")
    public Response getLibraryItem(@Context SecurityContext sc, @QueryParam("uuid") String uuidStr) throws Exception {
        super.setLogbackMarkers(sc);

        String libraryItemUuid = uuidStr;

        LOG.trace("GettingLibraryItem for UUID {}", libraryItemUuid);

        ItemEntity item = ItemEntity.retrieveLatestForUUid(libraryItemUuid);
        String xml = item.getXmlContent();

        LibraryItem ret = QueryDocumentSerializer.readLibraryItemFromXml(xml);

        clearLogbackMarkers();

        return Response
                .ok()
                .entity(ret)
                .build();
    }

    /**
     * Creates or updates a library item. The item type is derived from which
     * content (query / code set / report) is present; with no content the call
     * is treated as a rename of an existing item.
     */
    @POST
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    @Path("/saveLibraryItem")
    public Response saveLibraryItem(@Context SecurityContext sc, LibraryItem libraryItem) throws Exception {
        super.setLogbackMarkers(sc);

        String userUuid = SecurityUtils.getCurrentUserId(sc).toString();
        String orgUuid = "B6FF900D-8FCD-43D8-AF37-5DB3A87A6EF6";

        String libraryItemUuid = parseUuidFromStr(libraryItem.getUuid());
        String name = libraryItem.getName();
        String description = libraryItem.getDescription();
        String folderUuid = parseUuidFromStr(libraryItem.getFolderUuid());

        Query query = libraryItem.getQuery();
        CodeSet codeSet = libraryItem.getCodeSet();
        Report report = libraryItem.getReport();

        LOG.trace(String.format("SavingLibraryItem UUID %s, Name %s FolderUuid %s", libraryItemUuid, name, folderUuid));

        QueryDocument doc = new QueryDocument();
        doc.getLibraryItem().add(libraryItem);

        //work out the item type (query, test etc.) from the content passed up
        Short type = null;
        if (query != null) {
            type = (short)DefinitionItemType.Query.getValue();
        } else if (codeSet != null) {
            type = (short) DefinitionItemType.CodeSet.getValue();
        } else if (report != null) {
            type = (short) DefinitionItemType.Report.getValue();
        } else {
            //if we've been passed no proper content, we might just be wanting to rename an existing item,
            //so work out the type from what's on the DB already
            if (libraryItemUuid == null) {
                throw new BadRequestException("Can't save LibraryItem without some content (e.g. query, test etc.)");
            }
            ActiveItemEntity activeItem = ActiveItemEntity.retrieveForItemUuid(libraryItemUuid);
            type = activeItem.getItemTypeId();
            doc = null; //clear this, because we don't want to overwrite what's on the DB with an empty query doc
        }

        // No UUID supplied means we are inserting; generate one and echo it back.
        boolean inserting = libraryItemUuid == null;
        if (inserting) {
            libraryItemUuid = UUID.randomUUID().toString();
            libraryItem.setUuid(libraryItemUuid.toString());
        }

        super.saveItem(inserting, libraryItemUuid, orgUuid, userUuid, type.intValue(), name, description, doc, folderUuid);

        //return the UUID of the libraryItem
        LibraryItem ret = new LibraryItem();
        ret.setUuid(libraryItemUuid.toString());

        clearLogbackMarkers();

        return Response
                .ok()
                .entity(ret)
                .build();
    }

    /** Soft-deletes a library item; the response reports any dependents blocking deletion. */
    @POST
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    @Path("/deleteLibraryItem")
    public Response deleteLibraryItem(@Context SecurityContext sc, LibraryItem libraryItem) throws Exception {
        super.setLogbackMarkers(sc);

        String libraryItemUuid = parseUuidFromStr(libraryItem.getUuid());
        String userUuid = SecurityUtils.getCurrentUserId(sc).toString();;
        String orgUuid = "B6FF900D-8FCD-43D8-AF37-5DB3A87A6EF6";

        LOG.trace("DeletingLibraryItem UUID {}", libraryItemUuid);

        JsonDeleteResponse ret = deleteItem(libraryItemUuid, orgUuid, userUuid);

        clearLogbackMarkers();

        return Response
                .ok()
                .entity(ret)
                .build();
    }

    /**
     * Resolves display names for a batch of item UUIDs; failures for an
     * individual item map that UUID to "Error!" rather than failing the call.
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    @Path("/getLibraryItemNames")
    public Response getLibraryItemNames(@Context SecurityContext sc, @QueryParam("itemUuids") List<String> itemUuids) {
        super.setLogbackMarkers(sc);

        // NOTE(review): this trace call has no {} placeholder, so itemUuids is
        // never rendered in the log message.
        LOG.trace("getLibraryItemNames", itemUuids);

        Map<String, String> names = new HashMap<>();

        for (String itemUuid : itemUuids) {
            try {
                ItemEntity item = ItemEntity.retrieveLatestForUUid(itemUuid);
                names.put(itemUuid, item.getTitle());
            } catch (Exception e) {
                names.put(itemUuid, "Error!");
                LOG.error("Error loading name for library item " + itemUuid);
            }
        }

        return Response
                .ok()
                .entity(names)
                .build();
    }

    /** Lists the names of the items a report library item depends on ("uses"). */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    @Path("/getContentNamesForReportLibraryItem")
    public Response getContentNamesForReportLibraryItem(@Context SecurityContext sc, @QueryParam("uuid") String uuidStr) throws Exception {
        super.setLogbackMarkers(sc);

        String itemUuid = uuidStr;

        LOG.trace("getContentNamesforReportLibraryItem for UUID {}", itemUuid);

        JsonFolderContentsList ret = new JsonFolderContentsList();

        ActiveItemEntity activeItem = ActiveItemEntity.retrieveForItemUuid(itemUuid);
        List<ItemDependencyEntity> dependentItems = ItemDependencyEntity.retrieveForActiveItemType(activeItem, (short)DependencyType.Uses.getValue());

        for (ItemDependencyEntity dependentItem: dependentItems) {
            String dependentItemUuid = dependentItem.getDependentItemUuid();
            ItemEntity item = ItemEntity.retrieveLatestForUUid(dependentItemUuid);

            JsonFolderContent content = new JsonFolderContent(item, null, null);
            ret.addContent(content);
        }

        clearLogbackMarkers();

        return Response
                .ok()
                .entity(ret)
                .build();
    }

    /** Moves library items between folders; the heavy lifting is in the superclass. */
    @POST
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    @Path("/moveLibraryItems")
    public Response moveLibraryItems(@Context SecurityContext sc, JsonMoveItems parameters) throws Exception {
        super.setLogbackMarkers(sc);

        String userUuid = SecurityUtils.getCurrentUserId(sc).toString();;
        String orgUuid = "B6FF900D-8FCD-43D8-AF37-5DB3A87A6EF6";

        LOG.trace("moveLibraryItems");

        super.moveItems(userUuid, orgUuid, parameters);

        clearLogbackMarkers();

        return Response
                .ok()
                .build();
    }

    /**
     * Returns child concepts of a concept, skipping duplicate definitions and
     * resource/unit concept types (type ids "1" and "3").
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    @Path("/getConceptChildren")
    public Response getConceptChildren(@Context SecurityContext sc, @QueryParam("id") String id) throws Exception {
        super.setLogbackMarkers(sc);

        List<Object[]> concepts = ConceptEntity.findConceptChildren(id);

        List<JsonCode> ret = new ArrayList<>();

        String prevDefinition = "";
        for (Object[] conceptEntity: concepts) {
            // Positional columns from the native query; null-safe where the
            // column is nullable.
            String conceptId = conceptEntity[0].toString();
            String definition = conceptEntity[1].toString();
            String parentType = conceptEntity[2]==null?"":conceptEntity[2].toString();
            String parentTypeId = conceptEntity[3]==null?"":conceptEntity[3].toString();
            String baseType = conceptEntity[4]==null?"":conceptEntity[4].toString();
            String baseTypeId = conceptEntity[5]==null?"":conceptEntity[5].toString();
            String dataTypeId = conceptEntity[6].toString();
            String conceptTypeId = conceptEntity[7].toString();
            String present = conceptEntity[8].toString();
            String units = conceptEntity[9]==null?"":conceptEntity[9].toString();

            // Rows arrive ordered, so adjacent duplicates can be collapsed.
            if (definition.equals(prevDefinition))
                continue;
            prevDefinition = definition;

            if (conceptTypeId.equals("1")|| // don't show resource or unit types
                    conceptTypeId.equals("3")) {
                continue;
            }

            JsonCode code = new JsonCode();
            code.setId(conceptId);
            code.setLabel(definition);
            code.setDataType(dataTypeId);
            code.setParentType(parentType);
            code.setBaseType(baseType);
            code.setPresent(present);
            code.setUnits(units);

            ret.add(code);
        }

        clearLogbackMarkers();

        return Response
                .ok()
                .entity(ret)
                .build();
    }

    /**
     * Returns parent concepts of a concept, with the same resource/unit-type
     * filtering applied to both the concept and its parent.
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    @Path("/getConceptParents")
    public Response getConceptParents(@Context SecurityContext sc, @QueryParam("id") String id) throws Exception {
        super.setLogbackMarkers(sc);

        List<Object[]> concepts = ConceptEntity.findConceptParents(id);

        List<JsonCode> ret = new ArrayList<>();

        String prevDefinition = "";
        for (Object[] conceptEntity: concepts) {
            String conceptId = conceptEntity[0].toString();
            String definition = conceptEntity[1].toString();
            String parentType = conceptEntity[2]==null?"":conceptEntity[2].toString();
            String parentTypeId = conceptEntity[3]==null?"":conceptEntity[3].toString();
            String baseType = conceptEntity[4]==null?"":conceptEntity[4].toString();
            String baseTypeId = conceptEntity[5]==null?"":conceptEntity[5].toString();
            String dataTypeId = conceptEntity[6].toString();
            String conceptTypeId = conceptEntity[7].toString();
            String parentConceptTypeId = conceptEntity[8]==null?"":conceptEntity[8].toString();
            String present = conceptEntity[9]==null?"":conceptEntity[9].toString();
            String units = conceptEntity[10]==null?"":conceptEntity[10].toString();

            // Here de-duplication is on the parent's name, since the parent is
            // what gets returned.
            if (parentType.equals(prevDefinition))
                continue;
            prevDefinition = parentType;

            if (conceptTypeId.equals("1")|| // don't show resource or unit types
                    conceptTypeId.equals("3")||
                    parentConceptTypeId.equals("1")|| // don't show resource or unit types
                    parentConceptTypeId.equals("3")) {
                continue;
            }

            JsonCode code = new JsonCode();
            code.setId(parentTypeId);
            code.setLabel(parentType);
            code.setDataType(dataTypeId);
            code.setParentType(parentType);
            code.setBaseType(baseType);
            code.setPresent(present);
            code.setUnits(units);

            ret.add(code);
        }

        clearLogbackMarkers();

        return Response
                .ok()
                .entity(ret)
                .build();
    }

    /** Free-text concept search; same row shape and filtering as getConceptChildren. */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    @Path("/getConcepts")
    public Response getConcepts(@Context SecurityContext sc, @QueryParam("term") String term) throws Exception {
        super.setLogbackMarkers(sc);

        List<Object[]> concepts = ConceptEntity.findConcept(term);

        List<JsonCode> results = new ArrayList<>();

        String prevDefinition = "";
        for (Object[] conceptEntity: concepts) {
            String conceptId = conceptEntity[0].toString();
            String definition = conceptEntity[1].toString();
            String parentType = conceptEntity[2]==null?"":conceptEntity[2].toString();
            String parentTypeId = conceptEntity[3]==null?"":conceptEntity[3].toString();
            String baseType = conceptEntity[4]==null?"":conceptEntity[4].toString();
            String baseTypeId = conceptEntity[5]==null?"":conceptEntity[5].toString();
            String dataTypeId = conceptEntity[6].toString();
            String conceptTypeId = conceptEntity[7].toString();
            String present = conceptEntity[8].toString();
            String units = conceptEntity[9]==null?"":conceptEntity[9].toString();

            if (definition.equals(prevDefinition))
                continue;
            prevDefinition = definition;

            if (conceptTypeId.equals("1")|| // don't show resource or unit types
                    conceptTypeId.equals("3")) {
                continue;
            }

            JsonCode code = new JsonCode();
            code.setId(conceptId);
            code.setLabel(definition);
            code.setDataType(dataTypeId);
            code.setParentType(parentType);
            code.setBaseType(baseType);
            code.setPresent(present);
            code.setUnits(units);

            results.add(code);
        }

        clearLogbackMarkers();

        return Response
                .ok()
                .entity(results)
                .build();
    }

    /**
     * Term search: either in the Snomed-mapped terms or the raw terms table,
     * selected by the string flag snomed=="true".
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    @Path("/getTerms")
    public Response getTerms(@Context SecurityContext sc, @QueryParam("term") String term, @QueryParam("snomed") String snomed) throws Exception {
        super.setLogbackMarkers(sc);

        List<Object[]> terms = null;
        if (snomed.equals("true"))
            terms = TermsEntity.findSnomedTerms(term);
        else
            terms = TermsEntity.findTerms(term);

        List<JsonTerm> results = new ArrayList<>();

        for (Object[] termEntity: terms) {
            String originalTerm = termEntity[0].toString();
            String snomedTerm = termEntity[1]==null?"":termEntity[1].toString();
            String snomedConceptId = termEntity[2]==null?"":termEntity[2].toString();
            String originalCode = termEntity[3]==null?"":termEntity[3].toString();
            String recordType = termEntity[4].toString();

            JsonTerm jTerm = new JsonTerm();
            jTerm.setOriginalTerm(originalTerm);
            jTerm.setSnomedTerm(snomedTerm);
            jTerm.setSnomedConceptId(snomedConceptId);
            jTerm.setOriginalCode(originalCode);
            jTerm.setRecordType(recordType);

            results.add(jTerm);
        }

        clearLogbackMarkers();

        return Response
                .ok()
                .entity(results)
                .build();
    }

    /** Child terms of a code; rows mapped identically to getTerms. */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    @Path("/getTermChildren")
    public Response getTermChildren(@Context SecurityContext sc, @QueryParam("code") String code) throws Exception {
        super.setLogbackMarkers(sc);

        List<Object[]> terms = TermsEntity.getTermChildren(code);

        List<JsonTerm> results = new ArrayList<>();

        for (Object[] termEntity: terms) {
            String originalTerm = termEntity[0].toString();
            String snomedTerm = termEntity[1]==null?"":termEntity[1].toString();
            String snomedConceptId = termEntity[2]==null?"":termEntity[2].toString();
            String originalCode = termEntity[3]==null?"":termEntity[3].toString();
            String recordType = termEntity[4].toString();

            JsonTerm jTerm = new JsonTerm();
            jTerm.setOriginalTerm(originalTerm);
            jTerm.setSnomedTerm(snomedTerm);
            jTerm.setSnomedConceptId(snomedConceptId);
            jTerm.setOriginalCode(originalCode);
            jTerm.setRecordType(recordType);

            results.add(jTerm);
        }

        clearLogbackMarkers();

        return Response
                .ok()
                .entity(results)
                .build();
    }

    /** Parent terms of a code; rows mapped identically to getTerms. */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    @Path("/getTermParents")
    public Response getTermParents(@Context SecurityContext sc, @QueryParam("code") String code) throws Exception {
        super.setLogbackMarkers(sc);

        List<Object[]> terms = TermsEntity.getTermParents(code);

        List<JsonTerm> results = new ArrayList<>();

        for (Object[] termEntity: terms) {
            String originalTerm = termEntity[0].toString();
            String snomedTerm = termEntity[1]==null?"":termEntity[1].toString();
            String snomedConceptId = termEntity[2]==null?"":termEntity[2].toString();
            String originalCode = termEntity[3]==null?"":termEntity[3].toString();
            String recordType = termEntity[4].toString();

            JsonTerm jTerm = new JsonTerm();
            jTerm.setOriginalTerm(originalTerm);
            jTerm.setSnomedTerm(snomedTerm);
            jTerm.setSnomedConceptId(snomedConceptId);
            jTerm.setOriginalCode(originalCode);
            jTerm.setRecordType(recordType);

            results.add(jTerm);
        }

        clearLogbackMarkers();

        return Response
                .ok()
                .entity(results)
                .build();
    }

    /** Lists all code-set items visible to the current user/organisation. */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    @Path("/getCodeSets")
    public Response getCodeSets(@Context SecurityContext sc) throws Exception {
        super.setLogbackMarkers(sc);

        String userUuid = SecurityUtils.getCurrentUserId(sc).toString();
        String orgUuid = "B6FF900D-8FCD-43D8-AF37-5DB3A87A6EF6";

        List<JsonFolderContent> ret = new ArrayList<>();

        ActiveItemEntity aI = new ActiveItemEntity();
        List<ActiveItemEntity> activeItems = aI.retrieveActiveItemCodeSets(userUuid, orgUuid);

        for (ActiveItemEntity activeItem: activeItems) {
            ItemEntity item = ItemEntity.retrieveForActiveItem(activeItem);
            AuditEntity audit = AuditEntity.retrieveForUuid(item.getAuditUuid());

            JsonFolderContent content = new JsonFolderContent(activeItem, item, audit, null);
            ret.add(content);
        }

        clearLogbackMarkers();

        return Response
                .ok()
                .entity(ret)
                .build();
    }

    /** Expands Read-code inclusion/exclusion lists into Snomed concepts. */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    @Path("/getConceptsFromRead")
    public Response getConceptsFromRead(@Context SecurityContext sc, @QueryParam("inclusions") String inclusions, @QueryParam("exclusions") String exclusions) throws Exception {
        super.setLogbackMarkers(sc);
        return processReadLists(inclusions, exclusions);
    }

    // Expands both code lists to include their children, subtracts the
    // exclusions, then (continues beyond this excerpt) maps the remainder to
    // Snomed codes.
    private static Response processReadLists(String inclusions, String exclusions) throws Exception {
        List<String> inclusionCodes = convertStringToList(inclusions);
        List<String> exclusionCodes = new ArrayList<>();
        if (exclusions != null && !exclusions.isEmpty()) {
            exclusionCodes = convertStringToList(exclusions);
        }

        List<String> allCodes = findChildCodes(inclusionCodes);

        List<String> excludedCodes = new ArrayList<>();
        if (exclusionCodes.size() > 0) {
            excludedCodes = findChildCodes(exclusionCodes);
        }

        removeExcludedCodes(allCodes, excludedCodes);

        List<SnomedCode> snomedCodes =
getSnomedFromReadList(allCodes); List<JsonCode> codes = generateResultSet(snomedCodes); clearLogbackMarkers(); return Response .ok() .entity(codes) .build(); } private static List<String> convertStringToList(String codeString) throws Exception { codeString = codeString.replaceAll(" ", ""); String[] codes = codeString.split(","); return Arrays.asList(codes); } private static void removeExcludedCodes(List<String> includedCodes, List<String> excludedCodes) throws Exception { includedCodes.removeAll(excludedCodes); } private static List<String> findChildCodes(List<String> codes) throws Exception { List<Long> parents = new ArrayList<>(); List<String> childCodes = new ArrayList<>(); for(String code : codes) { if (code.endsWith("%")) { String formattedCode = code.replace("%",""); formattedCode = padCode(formattedCode); childCodes.add(formattedCode); parents.add(EmisCsvCodeMapEntity.findCodeIdFromReadCode(formattedCode)); while (parents.size() > 0) { parents = getChildren(parents, childCodes); System.out.println(childCodes); } } else { childCodes.add(code); } } return childCodes; } private static String padCode(String code) throws Exception { return StringUtils.rightPad(code, 5, "."); } private static List<Long> getChildren(List<Long> parents, List<String> children) throws Exception { List<EmisCsvCodeMapEntity> codeMaps = EmisCsvCodeMapEntity.findChildCodes(parents); parents.clear(); for (EmisCsvCodeMapEntity code : codeMaps) { children.add(code.getReadCode()); parents.add(code.getCodeId()); } return parents; } private static List<SnomedCode> getSnomedFromReadList(List<String> readCodes) throws Exception { removeSynonyms(readCodes); List<SnomedCode> snomedCodes = new ArrayList<>(); for (String code : readCodes) { System.out.println("getting the snomed for the following code : " + code); try { snomedCodes.add(TerminologyService.translateRead2ToSnomed(code)); } catch (Exception e) { System.out.println("unable to find snomed for code : " + code); } } return snomedCodes; } 
private static void removeSynonyms(List<String> readCodes) throws Exception { for (Iterator<String> iterator = readCodes.iterator(); iterator.hasNext();) { String code = iterator.next(); if (code.contains("-")) { iterator.remove(); } } } private static List<JsonCode> generateResultSet(List<SnomedCode> snomedCodes) throws Exception { List<JsonCode> codes = new ArrayList<>(); for (SnomedCode snomed : snomedCodes) { JsonCode code = new JsonCode(); code.setId(snomed.getConceptCode()); code.setLabel(snomed.getTerm()); code.setDataType("11"); code.setParentType(""); code.setBaseType("Observation"); code.setPresent("1"); code.setUnits(""); codes.add(code); } return codes; } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.falcon.entity; import org.apache.commons.lang3.StringUtils; import org.apache.falcon.FalconException; import org.apache.falcon.entity.store.ConfigurationStore; import org.apache.falcon.entity.v0.EntityType; import org.apache.falcon.entity.v0.cluster.Cluster; import org.apache.falcon.entity.v0.cluster.ClusterLocationType; import org.apache.falcon.entity.v0.cluster.Interface; import org.apache.falcon.entity.v0.cluster.Interfacetype; import org.apache.falcon.entity.v0.cluster.Location; import org.apache.falcon.entity.v0.cluster.Property; import org.apache.falcon.hadoop.HadoopClientFactory; import org.apache.falcon.security.SecurityUtil; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.HashMap; import java.util.List; import java.util.Map; /** * Helper to get end points relating to the cluster. 
*/ public final class ClusterHelper { public static final String DEFAULT_BROKER_IMPL_CLASS = "org.apache.activemq.ActiveMQConnectionFactory"; public static final String WORKINGDIR = "working"; public static final String NO_USER_BROKER_URL = "NA"; public static final String EMPTY_DIR_NAME = "EMPTY_DIR_DONT_DELETE"; private static final Logger LOG = LoggerFactory.getLogger(ClusterHelper.class); private ClusterHelper() { } public static Cluster getCluster(String cluster) throws FalconException { return ConfigurationStore.get().get(EntityType.CLUSTER, cluster); } public static Configuration getConfiguration(Cluster cluster) { Configuration conf = new Configuration(); final String storageUrl = getStorageUrl(cluster); conf.set(HadoopClientFactory.FS_DEFAULT_NAME_KEY, storageUrl); final String executeEndPoint = getMREndPoint(cluster); conf.set(HadoopClientFactory.MR_JT_ADDRESS_KEY, executeEndPoint); conf.set(HadoopClientFactory.YARN_RM_ADDRESS_KEY, executeEndPoint); if (cluster.getProperties() != null) { for (Property prop : cluster.getProperties().getProperties()) { conf.set(prop.getName(), prop.getValue()); } } return conf; } public static Configuration getConfiguration(String storageUrl, String executeEndPoint, String kerberosPrincipal) { Configuration conf = new Configuration(); conf.set(HadoopClientFactory.FS_DEFAULT_NAME_KEY, storageUrl); conf.set(HadoopClientFactory.MR_JT_ADDRESS_KEY, executeEndPoint); conf.set(HadoopClientFactory.YARN_RM_ADDRESS_KEY, executeEndPoint); if (StringUtils.isNotBlank(kerberosPrincipal)) { conf.set(SecurityUtil.NN_PRINCIPAL, kerberosPrincipal); } return conf; } public static String getOozieUrl(Cluster cluster) { return getInterface(cluster, Interfacetype.WORKFLOW).getEndpoint(); } public static String getStorageUrl(Cluster cluster) { return getNormalizedUrl(cluster, Interfacetype.WRITE); } public static String getReadOnlyStorageUrl(Cluster cluster) { return getNormalizedUrl(cluster, Interfacetype.READONLY); } public static String 
getMREndPoint(Cluster cluster) { return getInterface(cluster, Interfacetype.EXECUTE).getEndpoint(); } public static String getRegistryEndPoint(Cluster cluster) { final Interface catalogInterface = getInterface(cluster, Interfacetype.REGISTRY); return catalogInterface == null ? null : catalogInterface.getEndpoint(); } public static String getMessageBrokerUrl(Cluster cluster) { final Interface messageInterface = getInterface(cluster, Interfacetype.MESSAGING); return messageInterface == null ? NO_USER_BROKER_URL : messageInterface.getEndpoint(); } public static String getSparkMasterEndPoint(Cluster cluster) { final Interface sparkInterface = getInterface(cluster, Interfacetype.SPARK); return sparkInterface == null ? null : sparkInterface.getEndpoint(); } public static String getMessageBrokerImplClass(Cluster cluster) { if (cluster.getProperties() != null) { for (Property prop : cluster.getProperties().getProperties()) { if (prop.getName().equals("brokerImplClass")) { return prop.getValue(); } } } return DEFAULT_BROKER_IMPL_CLASS; } public static Interface getInterface(Cluster cluster, Interfacetype type) { if (cluster.getInterfaces() == null) { return null; } for (Interface interf : cluster.getInterfaces().getInterfaces()) { if (interf.getType() == type) { return interf; } } return null; } private static String getNormalizedUrl(Cluster cluster, Interfacetype type) { String normalizedUrl = getInterface(cluster, type).getEndpoint(); if (normalizedUrl.endsWith("///")){ return normalizedUrl; } String normalizedPath = new Path(normalizedUrl + "/").toString(); return normalizedPath.substring(0, normalizedPath.length() - 1); } public static Location getLocation(Cluster cluster, ClusterLocationType clusterLocationType) { if (cluster.getLocations() == null) { return null; } for (Location loc : cluster.getLocations().getLocations()) { if (loc.getName().equals(clusterLocationType)) { return loc; } } //Mocking the working location FALCON-910 if 
(clusterLocationType.equals(ClusterLocationType.WORKING)) { Location staging = getLocation(cluster, ClusterLocationType.STAGING); if (staging != null) { Location working = new Location(); working.setName(ClusterLocationType.WORKING); working.setPath(staging.getPath().charAt(staging.getPath().length() - 1) == '/' ? staging.getPath().concat(WORKINGDIR) : staging.getPath().concat("/").concat(WORKINGDIR)); return working; } } return null; } /** * Parsed the cluster object and checks for the working location. * * @param cluster * @return */ public static boolean checkWorkingLocationExists(Cluster cluster) { for (Location loc : cluster.getLocations().getLocations()) { if (loc.getName().equals(ClusterLocationType.WORKING)) { return true; } } return false; } public static String getPropertyValue(Cluster cluster, String propName) { if (cluster.getProperties() != null) { for (Property prop : cluster.getProperties().getProperties()) { if (prop.getName().equals(propName)) { return prop.getValue(); } } } return null; } public static Map<String, String> getHiveProperties(Cluster cluster) { if (cluster.getProperties() != null) { List<Property> properties = cluster.getProperties().getProperties(); if (properties != null && !properties.isEmpty()) { Map<String, String> hiveProperties = new HashMap<String, String>(); for (Property prop : properties) { if (prop.getName().startsWith("hive.")) { hiveProperties.put(prop.getName(), prop.getValue()); } } return hiveProperties; } } return null; } public static String getEmptyDir(Cluster cluster) { return getStorageUrl(cluster) + getLocation(cluster, ClusterLocationType.STAGING).getPath() + "/" + EMPTY_DIR_NAME; } public static boolean matchInterface(final Cluster oldEntity, final Cluster newEntity, final Interfacetype interfaceType) { Interface oldInterface = getInterface(oldEntity, interfaceType); Interface newInterface = getInterface(newEntity, interfaceType); String oldEndpoint = (oldInterface == null) ? 
null : oldInterface.getEndpoint(); String newEndpoint = (newInterface == null) ? null : newInterface.getEndpoint(); LOG.debug("Verifying if Interfaces match for cluster {} : Old - {}, New - {}", interfaceType.name(), oldEndpoint, newEndpoint); return StringUtils.isBlank(oldEndpoint) && StringUtils.isBlank(newEndpoint) || StringUtils.isNotBlank(oldEndpoint) && oldEndpoint.equalsIgnoreCase(newEndpoint); } public static boolean matchLocations(final Cluster oldEntity, final Cluster newEntity, final ClusterLocationType locationType) { Location oldLocation = getLocation(oldEntity, locationType); Location newLocation = getLocation(newEntity, locationType); String oldLocationPath = (oldLocation == null) ? null : oldLocation.getPath(); String newLocationPath = (newLocation == null) ? null : newLocation.getPath(); LOG.debug("Verifying if Locations match {} : Old - {}, New - {}", locationType.name(), oldLocationPath, newLocationPath); return StringUtils.isBlank(oldLocationPath) && StringUtils.isBlank(newLocationPath) || StringUtils.isNotBlank(oldLocationPath) && oldLocationPath.equalsIgnoreCase(newLocationPath); } public static boolean matchProperties(final Cluster oldEntity, final Cluster newEntity) { Map<String, String> oldProps = getClusterProperties(oldEntity); Map<String, String> newProps = getClusterProperties(newEntity); return oldProps.equals(newProps); } private static Map<String, String> getClusterProperties(final Cluster cluster) { Map<String, String> returnProps = new HashMap<String, String>(); if (cluster.getProperties() != null) { for (Property prop : cluster.getProperties().getProperties()) { returnProps.put(prop.getName(), prop.getValue()); } } return returnProps; } }
// ex: se sts=4 sw=4 expandtab: /* * Yeti language compiler java bytecode generator. * * Copyright (c) 2007-2013 Madis Janson * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. The name of the author may not be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package yjs.lang.compiler; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.IdentityHashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import yeti.lang.Tag; import yeti.renamed.asm3.AnnotationVisitor; import yeti.renamed.asm3.Attribute; import yeti.renamed.asm3.ByteVector; import yeti.renamed.asm3.ClassReader; import yeti.renamed.asm3.ClassVisitor; import yeti.renamed.asm3.ClassWriter; import yeti.renamed.asm3.FieldVisitor; import yeti.renamed.asm3.Label; import yeti.renamed.asm3.MethodVisitor; import yeti.renamed.asm3.Opcodes; /* * Encoding: * * 00 - format identifier * Follows type description * 00 XX XX - free type variable XXXX * XX, where XX is 01..08 - * primitives (same as YType.type UNIT - MAP_MARKER) * 09 x.. y.. - Function x -> y * 0A e.. i.. t.. - MAP<e,i,t> * 0B <requiredMembers...> FF <allowedMembers...> FF - Struct * 0C <requiredMembers...> FF <allowedMembers...> FF - Variant * 0C F9 ... - Variant with FL_ANY_CASE flag * (0B | 0C) F9? F8 ... - Variant or struct with FL_FLEX_TYPEDEF flag * 0D XX XX <param...> FF - java type * 0E e.. FF - java array e[] * FA XX XX <parameters...> FF - opaque type instance (X is "module:name") * FB XX XX - non-free type variable XXXX * FC ... - mutable field type * FD ... - the following type variable is ORDERED * FE XX XX - reference to non-primitive type * Follows list of type definitions * <typeDef name * typeDef array of type descriptions (FF) * ...> * FF * Follows utf8 encoded direct field mapping. 
 * XX XX XX XX - length
 * 'F' fieldName 00 function-class 00
 * 'P' fieldName 00 - property (field mapping as null)
 */
// Class-file attribute carrying a Yeti module's type signature, encoded with
// the binary format documented in the header comment above. Encoding and
// decoding share the byte tags below; keep them in sync with that comment.
class TypeAttr extends Attribute {
    // Byte tags of the encoding (see the format description above).
    static final byte END = -1;
    static final byte REF = -2;
    static final byte ORDERED = -3;
    static final byte MUTABLE = -4;
    static final byte TAINTED = -5;
    static final byte OPAQUE = -6;
    static final byte ANYCASE = -7;
    static final byte SMART = -8;

    final ModuleType moduleType;
    private ByteVector encoded; // cached result of write(), built lazily
    final Compiler compiler;

    TypeAttr(ModuleType mt, Compiler ctx) {
        super("YetiModuleType");
        moduleType = mt;
        compiler = ctx;
    }

    // Serializes YTypes into the attribute byte vector. Stateful: 'refs' gives
    // non-primitive types back-reference ids, 'vars' numbers type variables,
    // 'opaque' maps opaque type ids to "module:name" strings.
    private static final class EncodeType {
        ClassWriter cw;
        ByteVector buf = new ByteVector();
        Map refs = new IdentityHashMap();
        Map vars = new IdentityHashMap();
        Map opaque = new HashMap();

        // Writes a member map as (type, nameUTF8)* END; mutable fields are
        // prefixed with the MUTABLE tag.
        void writeMap(Map m) {
            if (m != null)
                for (Iterator i = m.entrySet().iterator(); i.hasNext();) {
                    Map.Entry e = (Map.Entry) i.next();
                    YType t = (YType) e.getValue();
                    if (t.field == YetiType.FIELD_MUTABLE)
                        buf.putByte(MUTABLE);
                    write(t);
                    int name = cw.newUTF8((String) e.getKey());
                    buf.putShort(name);
                }
            buf.putByte(END);
        }

        // Writes each element followed by the END terminator.
        void writeArray(YType[] param) {
            for (int i = 0; i < param.length; ++i)
                write(param[i]);
            buf.putByte(END);
        }

        // Writes one (dereferenced) type. Order of checks matters: variables,
        // primitives, back-references, opaque types, then structural types.
        void write(YType type) {
            type = type.deref();
            if (type.type == YetiType.VAR) {
                Integer id = (Integer) vars.get(type);
                if (id == null) {
                    // First time this var is seen: assign the next id and emit
                    // the ORDERED prefix before the var if required.
                    vars.put(type, id = new Integer(vars.size()));
                    if (id.intValue() > 0x7fff)
                        throw new RuntimeException("Too many type parameters");
                    if ((type.flags & YetiType.FL_ORDERED_REQUIRED) != 0)
                        buf.putByte(ORDERED);
                }
                buf.putByte((type.flags & YetiType.FL_TAINTED_VAR) == 0
                        ? YetiType.VAR : TAINTED);
                buf.putShort(id.intValue());
                return;
            }
            if (type.type < YetiType.PRIMITIVES.length &&
                YetiType.PRIMITIVES[type.type] != null) {
                // primitives
                buf.putByte(type.type);
                return;
            }
            Integer id = (Integer) refs.get(type);
            if (id != null) {
                // Already emitted — write a back-reference instead.
                if (id.intValue() > 0x7fff)
                    throw new RuntimeException("Too many type parts");
                buf.putByte(REF);
                buf.putShort(id.intValue());
                return;
            }
            refs.put(type, new Integer(refs.size()));
            if (type.type >= YetiType.OPAQUE_TYPES) {
                // Opaque type: identified by its "module:name" string.
                Object idstr = opaque.get(new Integer(type.type));
                if (idstr == null)
                    idstr = type.requiredMembers.keySet().toArray()[0];
                buf.putByte(OPAQUE);
                buf.putShort(cw.newUTF8(idstr.toString()));
                writeArray(type.param);
                return;
            }
            buf.putByte(type.type);
            if (type.type == YetiType.FUN) {
                write(type.param[0]);
                write(type.param[1]);
            } else if (type.type == YetiType.MAP) {
                writeArray(type.param);
            } else if (type.type == YetiType.STRUCT ||
                       type.type == YetiType.VARIANT) {
                if ((type.allowedMembers == null ||
                     type.allowedMembers.isEmpty()) &&
                    (type.requiredMembers == null ||
                     type.requiredMembers.isEmpty()))
                    throw new CompileException(0, 0,
                        type.type == YetiType.STRUCT
                            ? "Internal error: empty struct"
                            : "Internal error: empty variant");
                if ((type.flags & YetiType.FL_ANY_CASE) != 0)
                    buf.putByte(ANYCASE);
                if ((type.flags & YetiType.FL_FLEX_TYPEDEF) != 0)
                    buf.putByte(SMART);
                writeMap(type.allowedMembers);
                writeMap(type.requiredMembers);
            } else if (type.type == YetiType.JAVA) {
                buf.putShort(cw.newUTF8(type.javaType.description));
                writeArray(type.param);
            } else if (type.type == YetiType.JAVA_ARRAY) {
                write(type.param[0]);
            } else {
                throw new RuntimeException("Unknown type: " + type.type);
            }
        }

        // Writes (nameUTF8, typeArray)* END for the module's typedefs.
        void writeTypeDefs(Map typeDefs) {
            for (Iterator i = typeDefs.entrySet().iterator(); i.hasNext();) {
                Map.Entry e = (Map.Entry) i.next();
                buf.putShort(cw.newUTF8((String) e.getKey()));
                writeArray((YType[]) e.getValue());
            }
            buf.putByte(END);
        }
    }

    // Deserializes the format written by EncodeType. The cursor 'p' advances
    // through cr.b; 'refs' replays back-references, 'vars' shares variables.
    private static final class DecodeType {
        private static final int VAR_DEPTH = 1;
        final ClassReader cr;
        final byte[] in;   // raw class file bytes
        final char[] buf;  // scratch buffer for readUTF8
        int p;             // read cursor into 'in'
        final int end;     // exclusive end of this attribute's bytes
        final Map vars = new HashMap();
        final List refs = new ArrayList();
        final Map opaqueTypes; // compiler-wide registry, shared across modules

        DecodeType(ClassReader cr, int off, int len, char[] buf,
                   Map opaqueTypes) {
            this.cr = cr;
            in = cr.b;
            p = off;
            end = p + len;
            this.buf = buf;
            this.opaqueTypes = opaqueTypes;
        }

        // Reads a member map; a bare END means "no map" (null), matching
        // writeMap's encoding of a null map as just the terminator.
        Map readMap() {
            if (in[p] == END) {
                ++p;
                return null;
            }
            Map res = new IdentityHashMap();
            while (in[p] != END) {
                YType t = read();
                res.put(cr.readUTF8(p, buf).intern(), t);
                p += 2;
            }
            ++p;
            return res;
        }

        // Reads types until the END terminator.
        YType[] readArray() {
            List param = new ArrayList();
            while (in[p] != END)
                param.add(read());
            ++p;
            return (YType[]) param.toArray(new YType[param.size()]);
        }

        // Reads one type; mirrors EncodeType.write byte-for-byte.
        YType read() {
            YType t;
            int tv;
            if (p >= end)
                throw new RuntimeException("Invalid type description");
            switch (tv = in[p++]) {
            case YetiType.VAR:
            case TAINTED: {
                Integer var = new Integer(cr.readUnsignedShort(p));
                p += 2;
                if ((t = (YType) vars.get(var)) == null)
                    vars.put(var, t = new YType(VAR_DEPTH));
                if (tv == TAINTED)
                    t.flags |= YetiType.FL_TAINTED_VAR;
                return t;
            }
            case ORDERED:
                // ORDERED prefixes the variable it applies to.
                t = read();
                t.flags |= YetiType.FL_ORDERED_REQUIRED;
                return t;
            case REF: {
                int v = cr.readUnsignedShort(p);
                p += 2;
                if (refs.size() <= v)
                    throw new RuntimeException("Illegal type reference");
                return (YType) refs.get(v);
            }
            case MUTABLE:
                return YetiType.fieldRef(1, read(), YetiType.FIELD_MUTABLE);
            }
            if (tv < YetiType.PRIMITIVES.length && tv > 0)
                return YetiType.PRIMITIVES[tv];
            // Register before reading children so self-references resolve.
            t = new YType(tv, null);
            refs.add(t);
            if (t.type == YetiType.FUN) {
                t.param = new YType[2];
                t.param[0] = read();
                t.param[1] = read();
            } else if (tv == YetiType.MAP) {
                t.param = readArray();
            } else if (tv == YetiType.STRUCT || tv == YetiType.VARIANT) {
                if (in[p] == ANYCASE) {
                    t.flags |= YetiType.FL_ANY_CASE;
                    ++p;
                }
                if (in[p] == SMART) {
                    t.flags |= YetiType.FL_FLEX_TYPEDEF;
                    ++p;
                }
                t.allowedMembers = readMap();
                t.requiredMembers = readMap();
                // Rebuild param array: slot 0 is a fresh var, then the union
                // of allowed and required member types.
                Map param;
                if (t.allowedMembers == null) {
                    if ((param = t.requiredMembers) == null)
                        param = new IdentityHashMap();
                } else if (t.requiredMembers == null) {
                    param = t.allowedMembers;
                } else {
                    param = new IdentityHashMap(t.allowedMembers);
                    param.putAll(t.requiredMembers);
                }
                t.param = new YType[param.size() + 1];
                t.param[0] = new YType(VAR_DEPTH);
                Iterator i = param.values().iterator();
                for (int n = 1; i.hasNext(); ++n)
                    t.param[n] = (YType) i.next();
            } else if (tv == YetiType.JAVA) {
                t.javaType = JavaType.fromDescription(cr.readUTF8(p, buf));
                p += 2;
                t.param = readArray();
            } else if (tv == YetiType.JAVA_ARRAY) {
                t.param = new YType[] { read() };
            } else if (tv == OPAQUE) {
                String idstr = cr.readUTF8(p, buf);
                p += 2;
                // Opaque ids are assigned globally per compiler run; reuse an
                // existing id for the same "module:name" string.
                synchronized (opaqueTypes) {
                    YType old = (YType) opaqueTypes.get(idstr);
                    if (old != null) {
                        t.type = old.type;
                    } else {
                        t.type = opaqueTypes.size() + YetiType.OPAQUE_TYPES;
                        opaqueTypes.put(idstr, t);
                    }
                }
                t.requiredMembers =
                    Collections.singletonMap(idstr, YetiType.NO_TYPE);
                t.param = readArray();
            } else {
                throw new RuntimeException("Unknown type id: " + tv);
            }
            return t;
        }

        // Reads (nameUTF8, typeArray)* END into a name->YType[] map.
        Map readTypeDefs() {
            Map result = new HashMap();
            while (in[p] != END) {
                String name = cr.readUTF8(p, buf);
                p += 2;
                result.put(name.intern(), readArray());
            }
            ++p;
            return result;
        }
    }

    // ASM hook: decodes the attribute bytes back into a ModuleType.
    protected Attribute read(ClassReader cr, int off, int len, char[] buf,
                             int codeOff, Label[] labels) {
        int hdr = 3;
        switch (cr.b[off]) {
        case 0:
            hdr = 1; // version 0 has only version in header
        case 1:      // deliberate fall-through: version 1 keeps hdr = 3
            break;
        default:
            throw new RuntimeException("Unknown type encoding: " + cr.b[off]);
        }
        DecodeType decoder = new DecodeType(cr, off + hdr, len - hdr, buf,
                                            compiler.opaqueTypes);
        YType t = decoder.read();
        Map typeDefs = decoder.readTypeDefs();
        // hdr != 1 means encoding version >= 1, which implies direct fields.
        return new TypeAttr(new ModuleType(t, typeDefs, hdr != 1, -1),
                            compiler);
    }

    // ASM hook: encodes the module type, caching the result in 'encoded'.
    protected ByteVector write(ClassWriter cw, byte[] code, int len,
                               int maxStack, int maxLocals) {
        if (encoded != null) {
            return encoded;
        }
        EncodeType enc = new EncodeType();
        // Pre-register opaque typedefs so they encode as "module:name".
        Iterator i = moduleType.typeDefs.entrySet().iterator();
        while (i.hasNext()) {
            Map.Entry e = (Map.Entry) i.next();
            YType[] def = (YType[]) e.getValue();
            YType t = def[def.length - 1];
            if (t.type >= YetiType.OPAQUE_TYPES && t.requiredMembers == null)
                enc.opaque.put(new Integer(t.type),
                               moduleType.name + ':' + e.getKey());
        }
        enc.cw = cw;
        enc.buf.putByte(1); // encoding version
        enc.buf.putShort(0);
        enc.write(moduleType.type);
        enc.writeTypeDefs(moduleType.typeDefs);
        return encoded = enc.buf;
    }
}

// Compile-time description of a Yeti module: its type, typedefs, and
// bookkeeping flags used by the compiler and the JS backend.
class ModuleType extends YetiParser.Node {
    final YType type;
    final Map typeDefs;
    final boolean directFields;
    Scope typeScope;
    String topDoc;
    String name;
    boolean deprecated;
    boolean fromClass;
    boolean hasSource;
    long lastModified;
    private YType[] free; // cached free type variables of 'type'
    boolean isModule;
    JSCode jsCode;
    final JSSym jsModuleVar = new JSSym();

    ModuleType(YType type, Map typeDefs, boolean directFields, int depth) {
        this.typeDefs = typeDefs;
        this.directFields = directFields;
        this.type = copy(depth, type);
    }

    // Copies a type with its free variables re-bound at the given depth;
    // depth == -1 means no copy. 'free' is computed once and cached.
    YType copy(int depth, YType t) {
        if (t == null)
            t = type;
        if (depth == -1)
            return t;
        if (free == null) {
            List freeVars = new ArrayList();
            YetiType.getAllTypeVar(freeVars, null, t, false);
            free = (YType[]) freeVars.toArray(new YType[freeVars.size()]);
        }
        return YetiType.copyType(t, YetiType.createFreeVars(free, depth),
                                 new IdentityHashMap());
    }

    // Returns the module type as a Yeti runtime Tag value.
    Tag yetiType() {
        return TypeDescr.yetiType(type,
            typeScope != null ? TypePattern.toPattern(typeScope, true)
                              : TypePattern.toPattern(typeDefs), null);
    }
}

// Minimal ASM visitor that extracts the YetiModuleType attribute (and the
// deprecated flag) from a compiled module class; everything else is ignored.
class YetiTypeVisitor implements ClassVisitor {
    TypeAttr typeAttr;
    private boolean deprecated;

    public void visit(int version, int access, String name, String signature,
                      String superName, String[] interfaces) {
        deprecated = (access & Opcodes.ACC_DEPRECATED) != 0;
    }

    public void visitEnd() {
    }

    public AnnotationVisitor visitAnnotation(String desc, boolean visible) {
        return null;
    }

    public void visitAttribute(Attribute attr) {
        // Identity compare is safe: ASM attribute type strings are the
        // constants passed to the Attribute constructor.
        if (attr.type == "YetiModuleType") {
            if (typeAttr != null)
                throw new RuntimeException(
                    "Multiple YetiModuleType attributes are forbidden");
            typeAttr = (TypeAttr) attr;
        }
    }

    public FieldVisitor visitField(int access, String name, String desc,
                                   String signature, Object value) {
        return null;
    }

    public void visitInnerClass(String name, String outerName,
                                String innerName, int access) {
    }

    public MethodVisitor visitMethod(int access, String name, String desc,
                                     String signature, String[] exceptions) {
        return null;
    }

    public void visitOuterClass(String owner, String name, String desc) {
    }

    public void visitSource(String source, String debug) {
    }
}
/* * This file is part of TechReborn, licensed under the MIT License (MIT). * * Copyright (c) 2018 TechReborn * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ package techreborn.world; import com.google.common.base.Predicate; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import net.minecraft.block.state.IBlockState; import net.minecraft.block.state.pattern.BlockMatcher; import net.minecraft.init.Blocks; import net.minecraft.util.math.BlockPos; import net.minecraft.world.World; import net.minecraft.world.biome.Biome; import net.minecraft.world.chunk.IChunkProvider; import net.minecraft.world.gen.IChunkGenerator; import net.minecraft.world.gen.feature.WorldGenMinable; import net.minecraftforge.common.BiomeDictionary; import net.minecraftforge.fml.common.IWorldGenerator; import org.apache.commons.io.FileUtils; import reborncore.common.misc.ChunkCoord; import techreborn.Core; import techreborn.init.ModBlocks; import techreborn.world.config.OreConfig; import techreborn.world.config.WorldGenConfig; import java.io.File; import java.io.IOException; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.List; import java.util.Random; /** * Created by modmuss50 on 11/03/2016. 
 */
// World generator for TechReborn ores and rubber trees. Ore settings are read
// from an ores.json config file, falling back to (and back-filled from) the
// defaults built in init(). NOTE(review): "neather" is a long-standing typo
// for "nether" baked into the config schema — renaming would break saved
// configs, so it is documented rather than fixed.
public class TechRebornWorldGen implements IWorldGenerator {
    public static RubberTreeGenerator treeGenerator = new RubberTreeGenerator();
    public final TechRebornRetroGen retroGen = new TechRebornRetroGen();
    public File configFile;            // set externally before load() is called
    public boolean jsonInvalid = false; // true forces a re-save of the config
    public WorldGenConfig config;       // active config (loaded or default)
    WorldGenConfig defaultConfig;       // built-in defaults, see init()

    // Builds the default ore tables for overworld, nether and end.
    // OreConfig args appear to be (state, veinSize, veinsPerChunk, minY, maxY)
    // — TODO confirm against the OreConfig constructor.
    private void init() {
        defaultConfig = new WorldGenConfig();
        defaultConfig.overworldOres = new ArrayList<>();
        defaultConfig.endOres = new ArrayList<>();
        defaultConfig.neatherOres = new ArrayList<>();
        defaultConfig.overworldOres.add(new OreConfig(ModBlocks.ORE.getBlockStateFromName("Galena"), 8, 16, 10, 60));
        defaultConfig.overworldOres.add(new OreConfig(ModBlocks.ORE.getBlockStateFromName("Iridium"), 3, 3, 5, 60));
        defaultConfig.overworldOres.add(new OreConfig(ModBlocks.ORE.getBlockStateFromName("Ruby"), 6, 3, 10, 60));
        defaultConfig.overworldOres.add(new OreConfig(ModBlocks.ORE.getBlockStateFromName("Sapphire"), 6, 3, 10, 60));
        defaultConfig.overworldOres.add(new OreConfig(ModBlocks.ORE.getBlockStateFromName("Bauxite"), 6, 10, 10, 60));
        defaultConfig.overworldOres.add(new OreConfig(ModBlocks.ORE.getBlockStateFromName("Lead"), 6, 16, 20, 60));
        defaultConfig.overworldOres.add(new OreConfig(ModBlocks.ORE.getBlockStateFromName("Silver"), 6, 16, 20, 60));
        defaultConfig.overworldOres.add(new OreConfig(ModBlocks.ORE.getBlockStateFromName("copper"), 8, 16, 20, 60));
        defaultConfig.overworldOres.add(new OreConfig(ModBlocks.ORE.getBlockStateFromName("tin"), 8, 16, 20, 60));
        defaultConfig.neatherOres.add(new OreConfig(ModBlocks.ORE.getBlockStateFromName("Pyrite"), 6, 3, 10, 126));
        defaultConfig.neatherOres.add(new OreConfig(ModBlocks.ORE.getBlockStateFromName("Cinnabar"), 6, 3, 10, 126));
        defaultConfig.neatherOres.add(new OreConfig(ModBlocks.ORE.getBlockStateFromName("Sphalerite"), 6, 3, 10, 126));
        defaultConfig.endOres.add(new OreConfig(ModBlocks.ORE.getBlockStateFromName("Tungsten"), 6, 3, 10, 250));
        defaultConfig.endOres.add(new OreConfig(ModBlocks.ORE.getBlockStateFromName("Sheldonite"), 6, 3, 10, 250));
        defaultConfig.endOres.add(new OreConfig(ModBlocks.ORE.getBlockStateFromName("Peridot"), 6, 3, 10, 250));
        defaultConfig.endOres.add(new OreConfig(ModBlocks.ORE.getBlockStateFromName("Sodalite"), 6, 3, 10, 250));
    }

    // Loads the config from disk if present, otherwise uses defaults; any
    // ores missing from the loaded file are back-filled from the defaults,
    // and the file is (re)written when it was missing or invalid.
    public void load() {
        init();
        if (configFile.exists()) {
            loadFromJson();
        } else {
            config = defaultConfig;
            jsonInvalid = true;
        }
        config.overworldOres.addAll(getMissingOres(config.overworldOres, defaultConfig.overworldOres));
        config.neatherOres.addAll(getMissingOres(config.neatherOres, defaultConfig.neatherOres));
        config.endOres.addAll(getMissingOres(config.endOres, defaultConfig.endOres));
        if (jsonInvalid) {
            save();
        }
    }

    // Returns default ores absent from 'config' (matched by blockName+meta);
    // also refreshes the block state on ores that are present.
    private List<OreConfig> getMissingOres(List<OreConfig> config, List<OreConfig> defaultOres) {
        List<OreConfig> missingOres = new ArrayList<>();
        for (OreConfig defaultOre : defaultOres) {
            boolean hasFoundOre = false;
            for (OreConfig ore : config) {
                if (ore.blockName.equals(defaultOre.blockName) && ore.meta == defaultOre.meta) {
                    hasFoundOre = true;
                    ore.state = defaultOre.state; // Should allow for states to
                    // be saved/loaded
                }
            }
            if (!hasFoundOre) {
                missingOres.add(defaultOre);
            }
        }
        return missingOres;
    }

    // Parses ores.json; on any failure falls back to defaults WITHOUT marking
    // the file for overwrite (so a user's broken file is preserved).
    private void loadFromJson() {
        try {
            Gson gson = new Gson();
            String jsonString = FileUtils.readFileToString(configFile, Charset.defaultCharset());
            config = gson.fromJson(jsonString, WorldGenConfig.class);
            // ArrayUtils.addAll(config.endOres, config.neatherOres, config.overworldOres).stream().forEach(oreConfig -> {
            // if (oreConfig.minYHeight > oreConfig.maxYHeight) {
            // printError(oreConfig.blockName + " ore generation value is invalid, the min y height is bigger than the max y height, this ore value will be disabled in code");
            //
            // oreConfig.minYHeight = -1;
            // oreConfig.maxYHeight = -1;
            // }
            //
            // if (oreConfig.minYHeight < 0 || oreConfig.maxYHeight < 0) {
            // printError(oreConfig.blockName + " ore generation value is invalid, the min y height or the max y height is less than 0, this ore value will be disabled in code");
            // oreConfig.minYHeight = -1;
            // oreConfig.maxYHeight = -1;
            // }
            //
            // });
        } catch (Exception e) {
            Core.logHelper.error(
                "The ores.json file was invalid, bad things are about to happen, I will try and save the world now :");
            config = defaultConfig;
            jsonInvalid = true;
            Core.logHelper.error(
                "The ores.json file was ignored and the default values loaded, you file will NOT be over written");
            e.printStackTrace();
        }
    }

    // Logs a highly visible error banner via the mod's log helper.
    public void printError(String string) {
        Core.logHelper.error("###############-ERROR-####################");
        Core.logHelper.error("");
        Core.logHelper.error(string);
        Core.logHelper.error("");
        Core.logHelper.error("###############-ERROR-####################");
    }

    // Pretty-prints the active config back to ores.json.
    private void save() {
        Gson gson = new GsonBuilder().setPrettyPrinting().create();
        String json = gson.toJson(config);
        try {
            FileUtils.writeStringToFile(configFile, json, Charset.defaultCharset());
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    // Filters a config list down to ores that are enabled and non-degenerate.
    public List<OreConfig> getAllGenOresFromList(List<OreConfig> configList) {
        List<OreConfig> list = new ArrayList<>();
        for (OreConfig config : configList) {
            if (config.veinSize != 0 && config.veinsPerChunk != 0 && config.shouldSpawn) {
                list.add(config);
            }
        }
        return list;
    }

    // Per-chunk generation entry point: picks the ore list and replaceable
    // block predicate by dimension. (Method continues beyond this chunk of
    // the file.)
    @Override
    public void generate(Random random, int chunkX, int chunkZ, World world, IChunkGenerator chunkGenerator,
                         IChunkProvider chunkProvider) {
        // TODO this could be optimised to not run every chunk gen
        if (!config.generateTechRebornFeatures) {
            return;
        }
        boolean genTree = false;
        List<OreConfig> list = new ArrayList<>();
        Predicate<IBlockState> predicate = BlockMatcher.forBlock(Blocks.STONE);
        if (world.provider.isSurfaceWorld()) {
            list.addAll(getAllGenOresFromList(config.overworldOres));
            genTree = true;
        } else if (world.provider.getDimension() == -1) {
            list.addAll(getAllGenOresFromList(config.neatherOres));
            predicate = BlockMatcher.forBlock(Blocks.NETHERRACK);
        } else if (world.provider.getDimension() == 1) {
list.addAll(getAllGenOresFromList(config.endOres)); predicate = BlockMatcher.forBlock(Blocks.END_STONE); } if (!list.isEmpty() && config.generateOres) { int xPos, yPos, zPos; for (OreConfig ore : list) { WorldGenMinable worldGenMinable = new WorldGenMinable(ore.state, ore.veinSize, predicate); if (ore.state != null) { for (int i = 0; i < ore.veinsPerChunk; i++) { xPos = chunkX * 16 + random.nextInt(16); if (ore.maxYHeight == -1 || ore.minYHeight == -1) { continue; } yPos = ore.minYHeight + random.nextInt(ore.maxYHeight - ore.minYHeight); zPos = chunkZ * 16 + random.nextInt(16); BlockPos pos = new BlockPos(xPos, yPos, zPos); if (ore.veinSize < 4){ // Workaround for small veins for (int j = 1; j < ore.veinSize; j++) { // standard worldgen offset is added here like in WorldGenMinable#generate BlockPos smallVeinPos = pos.add(8, 0, 8); smallVeinPos.add(random.nextInt(2), random.nextInt(2), random.nextInt(2)); IBlockState blockState = world.getBlockState(smallVeinPos); if (blockState.getBlock().isReplaceableOreGen(blockState, world, smallVeinPos, predicate)) { world.setBlockState(smallVeinPos, ore.state, 2); } } } else { try { worldGenMinable.generate(world, random, pos); } catch (ArrayIndexOutOfBoundsException e) { Core.logHelper.error("Something bad is happening during world gen the ore " + ore.blockNiceName + " caused a crash when generating. 
Report this to the TechReborn devs with a log"); } } } } } } if (genTree && config.rubberTreeConfig.shouldSpawn) { int chance = config.rubberTreeConfig.chance; boolean isValidSpawn = false; Biome biomeGenBase = world.getBiomeForCoordsBody(new BlockPos(chunkX * 16, 72, chunkZ * 16)); if (BiomeDictionary.hasType(biomeGenBase, BiomeDictionary.Type.SWAMP)) { // TODO check the config file for bounds on this, might cause issues chance -= random.nextInt(10) + 10; isValidSpawn = true; } if (BiomeDictionary.hasType(biomeGenBase, BiomeDictionary.Type.FOREST) || BiomeDictionary.hasType(biomeGenBase, BiomeDictionary.Type.JUNGLE)) { chance -= random.nextInt(5) + 3; isValidSpawn = true; } if (!isValidSpawn) { return; } if (chance <= 0) { chance = 1; } if (random.nextInt(chance) == 0) { int x = chunkX * 16; int z = chunkZ * 16; for (int i = 0; i < config.rubberTreeConfig.clusterSize; i++) { treeGenerator.generate(world, random, new BlockPos(x, 72, z)); } } retroGen.markChunk(ChunkCoord.of(chunkX, chunkZ)); } } }
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.snowball;

import javax.annotation.Generated;

import com.amazonaws.services.snowball.model.*;

/**
 * Abstract implementation of {@code AmazonSnowballAsync}. Convenient method forms pass through to the corresponding
 * overload that takes a request object and an {@code AsyncHandler}, which throws an
 * {@code UnsupportedOperationException}.
 */
// NOTE(review): this class is code-generated (see @Generated below) — do not hand-edit the
// method bodies; regenerate instead. Every operation follows the same pattern:
//   * the single-argument overload delegates to the two-argument overload with a null handler;
//   * the two-argument overload throws UnsupportedOperationException, so subclasses only need
//     to override the handler-taking form to support an operation.
// Types are deliberately fully qualified (java.util.concurrent.Future,
// java.lang.UnsupportedOperationException, com.amazonaws.handlers.AsyncHandler) to avoid any
// collision with the wildcard model.* import above.
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AbstractAmazonSnowballAsync extends AbstractAmazonSnowball implements AmazonSnowballAsync {

    // Not directly instantiable; extend and override the operations you support.
    protected AbstractAmazonSnowballAsync() {
    }

    @Override
    public java.util.concurrent.Future<CancelClusterResult> cancelClusterAsync(CancelClusterRequest request) {
        return cancelClusterAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<CancelClusterResult> cancelClusterAsync(CancelClusterRequest request,
            com.amazonaws.handlers.AsyncHandler<CancelClusterRequest, CancelClusterResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<CancelJobResult> cancelJobAsync(CancelJobRequest request) {
        return cancelJobAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<CancelJobResult> cancelJobAsync(CancelJobRequest request,
            com.amazonaws.handlers.AsyncHandler<CancelJobRequest, CancelJobResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<CreateAddressResult> createAddressAsync(CreateAddressRequest request) {
        return createAddressAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<CreateAddressResult> createAddressAsync(CreateAddressRequest request,
            com.amazonaws.handlers.AsyncHandler<CreateAddressRequest, CreateAddressResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<CreateClusterResult> createClusterAsync(CreateClusterRequest request) {
        return createClusterAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<CreateClusterResult> createClusterAsync(CreateClusterRequest request,
            com.amazonaws.handlers.AsyncHandler<CreateClusterRequest, CreateClusterResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<CreateJobResult> createJobAsync(CreateJobRequest request) {
        return createJobAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<CreateJobResult> createJobAsync(CreateJobRequest request,
            com.amazonaws.handlers.AsyncHandler<CreateJobRequest, CreateJobResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<CreateLongTermPricingResult> createLongTermPricingAsync(CreateLongTermPricingRequest request) {
        return createLongTermPricingAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<CreateLongTermPricingResult> createLongTermPricingAsync(CreateLongTermPricingRequest request,
            com.amazonaws.handlers.AsyncHandler<CreateLongTermPricingRequest, CreateLongTermPricingResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<CreateReturnShippingLabelResult> createReturnShippingLabelAsync(CreateReturnShippingLabelRequest request) {
        return createReturnShippingLabelAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<CreateReturnShippingLabelResult> createReturnShippingLabelAsync(CreateReturnShippingLabelRequest request,
            com.amazonaws.handlers.AsyncHandler<CreateReturnShippingLabelRequest, CreateReturnShippingLabelResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<DescribeAddressResult> describeAddressAsync(DescribeAddressRequest request) {
        return describeAddressAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<DescribeAddressResult> describeAddressAsync(DescribeAddressRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeAddressRequest, DescribeAddressResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<DescribeAddressesResult> describeAddressesAsync(DescribeAddressesRequest request) {
        return describeAddressesAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<DescribeAddressesResult> describeAddressesAsync(DescribeAddressesRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeAddressesRequest, DescribeAddressesResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<DescribeClusterResult> describeClusterAsync(DescribeClusterRequest request) {
        return describeClusterAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<DescribeClusterResult> describeClusterAsync(DescribeClusterRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeClusterRequest, DescribeClusterResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<DescribeJobResult> describeJobAsync(DescribeJobRequest request) {
        return describeJobAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<DescribeJobResult> describeJobAsync(DescribeJobRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeJobRequest, DescribeJobResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<DescribeReturnShippingLabelResult> describeReturnShippingLabelAsync(DescribeReturnShippingLabelRequest request) {
        return describeReturnShippingLabelAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<DescribeReturnShippingLabelResult> describeReturnShippingLabelAsync(DescribeReturnShippingLabelRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeReturnShippingLabelRequest, DescribeReturnShippingLabelResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<GetJobManifestResult> getJobManifestAsync(GetJobManifestRequest request) {
        return getJobManifestAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<GetJobManifestResult> getJobManifestAsync(GetJobManifestRequest request,
            com.amazonaws.handlers.AsyncHandler<GetJobManifestRequest, GetJobManifestResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<GetJobUnlockCodeResult> getJobUnlockCodeAsync(GetJobUnlockCodeRequest request) {
        return getJobUnlockCodeAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<GetJobUnlockCodeResult> getJobUnlockCodeAsync(GetJobUnlockCodeRequest request,
            com.amazonaws.handlers.AsyncHandler<GetJobUnlockCodeRequest, GetJobUnlockCodeResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<GetSnowballUsageResult> getSnowballUsageAsync(GetSnowballUsageRequest request) {
        return getSnowballUsageAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<GetSnowballUsageResult> getSnowballUsageAsync(GetSnowballUsageRequest request,
            com.amazonaws.handlers.AsyncHandler<GetSnowballUsageRequest, GetSnowballUsageResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<GetSoftwareUpdatesResult> getSoftwareUpdatesAsync(GetSoftwareUpdatesRequest request) {
        return getSoftwareUpdatesAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<GetSoftwareUpdatesResult> getSoftwareUpdatesAsync(GetSoftwareUpdatesRequest request,
            com.amazonaws.handlers.AsyncHandler<GetSoftwareUpdatesRequest, GetSoftwareUpdatesResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<ListClusterJobsResult> listClusterJobsAsync(ListClusterJobsRequest request) {
        return listClusterJobsAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<ListClusterJobsResult> listClusterJobsAsync(ListClusterJobsRequest request,
            com.amazonaws.handlers.AsyncHandler<ListClusterJobsRequest, ListClusterJobsResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<ListClustersResult> listClustersAsync(ListClustersRequest request) {
        return listClustersAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<ListClustersResult> listClustersAsync(ListClustersRequest request,
            com.amazonaws.handlers.AsyncHandler<ListClustersRequest, ListClustersResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<ListCompatibleImagesResult> listCompatibleImagesAsync(ListCompatibleImagesRequest request) {
        return listCompatibleImagesAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<ListCompatibleImagesResult> listCompatibleImagesAsync(ListCompatibleImagesRequest request,
            com.amazonaws.handlers.AsyncHandler<ListCompatibleImagesRequest, ListCompatibleImagesResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<ListJobsResult> listJobsAsync(ListJobsRequest request) {
        return listJobsAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<ListJobsResult> listJobsAsync(ListJobsRequest request,
            com.amazonaws.handlers.AsyncHandler<ListJobsRequest, ListJobsResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<ListLongTermPricingResult> listLongTermPricingAsync(ListLongTermPricingRequest request) {
        return listLongTermPricingAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<ListLongTermPricingResult> listLongTermPricingAsync(ListLongTermPricingRequest request,
            com.amazonaws.handlers.AsyncHandler<ListLongTermPricingRequest, ListLongTermPricingResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<UpdateClusterResult> updateClusterAsync(UpdateClusterRequest request) {
        return updateClusterAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<UpdateClusterResult> updateClusterAsync(UpdateClusterRequest request,
            com.amazonaws.handlers.AsyncHandler<UpdateClusterRequest, UpdateClusterResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<UpdateJobResult> updateJobAsync(UpdateJobRequest request) {
        return updateJobAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<UpdateJobResult> updateJobAsync(UpdateJobRequest request,
            com.amazonaws.handlers.AsyncHandler<UpdateJobRequest, UpdateJobResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<UpdateJobShipmentStateResult> updateJobShipmentStateAsync(UpdateJobShipmentStateRequest request) {
        return updateJobShipmentStateAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<UpdateJobShipmentStateResult> updateJobShipmentStateAsync(UpdateJobShipmentStateRequest request,
            com.amazonaws.handlers.AsyncHandler<UpdateJobShipmentStateRequest, UpdateJobShipmentStateResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<UpdateLongTermPricingResult> updateLongTermPricingAsync(UpdateLongTermPricingRequest request) {
        return updateLongTermPricingAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<UpdateLongTermPricingResult> updateLongTermPricingAsync(UpdateLongTermPricingRequest request,
            com.amazonaws.handlers.AsyncHandler<UpdateLongTermPricingRequest, UpdateLongTermPricingResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.mapred.lib.db;

import java.io.IOException;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.List;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobConfigurable;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapreduce.Job;

/**
 * Old-API (org.apache.hadoop.mapred) compatibility shim over the new-API
 * {@link org.apache.hadoop.mapreduce.lib.db.DBInputFormat}. All real work is
 * delegated to the superclass; the nested types below adapt splits and record
 * readers between the two APIs.
 */
@InterfaceAudience.Public
@InterfaceStability.Stable
@SuppressWarnings("deprecation")
public class DBInputFormat<T extends DBWritable>
    extends org.apache.hadoop.mapreduce.lib.db.DBInputFormat<T>
    implements InputFormat<LongWritable, T>, JobConfigurable {

  /**
   * A RecordReader that reads records from a SQL table, exposed through the
   * old-API {@link RecordReader} interface. Keys are record numbers
   * ({@link LongWritable}), values are {@link DBWritable}s.
   */
  protected class DBRecordReader extends
      org.apache.hadoop.mapreduce.lib.db.DBRecordReader<T> implements
      RecordReader<LongWritable, T> {
    /**
     * Kept for compatibility with M/R 1.x: pulls the connection and
     * configuration from the enclosing format's inherited protected fields.
     *
     * @param split The InputSplit to read data for
     * @throws SQLException
     */
    protected DBRecordReader(DBInputSplit split, Class<T> inputClass,
        JobConf job) throws SQLException {
      super(split, inputClass, job, connection, dbConf, conditions,
          fieldNames, tableName);
    }

    /**
     * Fully-parameterized form used by the new code path.
     *
     * @param split The InputSplit to read data for
     * @throws SQLException
     */
    protected DBRecordReader(DBInputSplit split, Class<T> inputClass,
        JobConf job, Connection conn, DBConfiguration dbConfig, String cond,
        String [] fields, String table) throws SQLException {
      super(split, inputClass, job, conn, dbConfig, cond, fields, table);
    }

    /** {@inheritDoc} */
    public LongWritable createKey() {
      return new LongWritable();
    }

    /** {@inheritDoc} */
    public T createValue() {
      return super.createValue();
    }

    /** {@inheritDoc} */
    public long getPos() throws IOException {
      return super.getPos();
    }

    /** {@inheritDoc} */
    public boolean next(LongWritable key, T value) throws IOException {
      return super.next(key, value);
    }
  }

  /**
   * Old-API RecordReader that simply forwards every call to a wrapped
   * new-API record reader.
   */
  private static class DBRecordReaderWrapper<T extends DBWritable>
      implements RecordReader<LongWritable, T> {

    // The new-API reader every method delegates to.
    private org.apache.hadoop.mapreduce.lib.db.DBRecordReader<T> delegate;

    public DBRecordReaderWrapper(
        org.apache.hadoop.mapreduce.lib.db.DBRecordReader<T> inner) {
      this.delegate = inner;
    }

    public void close() throws IOException {
      delegate.close();
    }

    public LongWritable createKey() {
      return new LongWritable();
    }

    public T createValue() {
      return delegate.createValue();
    }

    public float getProgress() throws IOException {
      return delegate.getProgress();
    }

    public long getPos() throws IOException {
      return delegate.getPos();
    }

    public boolean next(LongWritable key, T value) throws IOException {
      return delegate.next(key, value);
    }
  }

  /**
   * A Class that does nothing, implementing DBWritable.
   */
  public static class NullDBWritable extends
      org.apache.hadoop.mapreduce.lib.db.DBInputFormat.NullDBWritable
      implements DBWritable, Writable {
  }

  /**
   * An InputSplit that spans a contiguous range of rows.
   */
  protected static class DBInputSplit extends
      org.apache.hadoop.mapreduce.lib.db.DBInputFormat.DBInputSplit
      implements InputSplit {
    /**
     * Default Constructor.
     */
    public DBInputSplit() {
    }

    /**
     * Convenience Constructor.
     * @param start the index of the first row to select
     * @param end the index of the last row to select
     */
    public DBInputSplit(long start, long end) {
      super(start, end);
    }
  }

  /** {@inheritDoc} */
  public void configure(JobConf job) {
    super.setConf(job);
  }

  /** {@inheritDoc} */
  public RecordReader<LongWritable, T> getRecordReader(InputSplit split,
      JobConf job, Reporter reporter) throws IOException {
    // Wrap the new-API reader in a shim class to bridge the API differences.
    org.apache.hadoop.mapreduce.lib.db.DBInputFormat.DBInputSplit newApiSplit =
        (org.apache.hadoop.mapreduce.lib.db.DBInputFormat.DBInputSplit) split;
    org.apache.hadoop.mapreduce.lib.db.DBRecordReader<T> newApiReader =
        (org.apache.hadoop.mapreduce.lib.db.DBRecordReader<T>)
            createDBRecordReader(newApiSplit, job);
    return new DBRecordReaderWrapper<T>(newApiReader);
  }

  /** {@inheritDoc} */
  public InputSplit[] getSplits(JobConf job, int chunks) throws IOException {
    // Compute splits with the new API, then convert each one to the old type.
    List<org.apache.hadoop.mapreduce.InputSplit> newApiSplits =
        super.getSplits(Job.getInstance(job));
    InputSplit[] oldApiSplits = new InputSplit[newApiSplits.size()];
    for (int idx = 0; idx < oldApiSplits.length; idx++) {
      org.apache.hadoop.mapreduce.lib.db.DBInputFormat.DBInputSplit source =
          (org.apache.hadoop.mapreduce.lib.db.DBInputFormat.DBInputSplit)
              newApiSplits.get(idx);
      oldApiSplits[idx] = new DBInputSplit(source.getStart(), source.getEnd());
    }
    return oldApiSplits;
  }

  /**
   * Initializes the map-part of the job with the appropriate input settings.
   *
   * @param job The job
   * @param inputClass the class object implementing DBWritable, which is the
   * Java object holding tuple fields.
   * @param tableName The table to read data from
   * @param conditions The condition which to select data with, eg. '(updated >
   * 20070101 AND length > 0)'
   * @param orderBy the fieldNames in the orderBy clause.
   * @param fieldNames The field names in the table
   * @see #setInput(JobConf, Class, String, String)
   */
  public static void setInput(JobConf job, Class<? extends DBWritable> inputClass,
      String tableName, String conditions, String orderBy, String... fieldNames) {
    job.setInputFormat(DBInputFormat.class);
    DBConfiguration dbConfiguration = new DBConfiguration(job);
    dbConfiguration.setInputClass(inputClass);
    dbConfiguration.setInputTableName(tableName);
    dbConfiguration.setInputFieldNames(fieldNames);
    dbConfiguration.setInputConditions(conditions);
    dbConfiguration.setInputOrderBy(orderBy);
  }

  /**
   * Initializes the map-part of the job with the appropriate input settings.
   *
   * @param job The job
   * @param inputClass the class object implementing DBWritable, which is the
   * Java object holding tuple fields.
   * @param inputQuery the input query to select fields. Example :
   * "SELECT f1, f2, f3 FROM Mytable ORDER BY f1"
   * @param inputCountQuery the input query that returns the number of records in
   * the table.
   * Example : "SELECT COUNT(f1) FROM Mytable"
   * @see #setInput(JobConf, Class, String, String, String, String...)
   */
  public static void setInput(JobConf job,
      Class<? extends DBWritable> inputClass,
      String inputQuery, String inputCountQuery) {
    job.setInputFormat(DBInputFormat.class);
    DBConfiguration dbConfiguration = new DBConfiguration(job);
    dbConfiguration.setInputClass(inputClass);
    dbConfiguration.setInputQuery(inputQuery);
    dbConfiguration.setInputCountQuery(inputCountQuery);
  }
}
package slacknotifications.teamcity.extension; import jetbrains.buildServer.controllers.BaseController; import jetbrains.buildServer.serverSide.SBuildServer; import jetbrains.buildServer.serverSide.SProject; import jetbrains.buildServer.serverSide.auth.Permission; import jetbrains.buildServer.serverSide.settings.ProjectSettingsManager; import jetbrains.buildServer.users.SUser; import jetbrains.buildServer.web.openapi.PluginDescriptor; import jetbrains.buildServer.web.openapi.WebControllerManager; import jetbrains.buildServer.web.util.SessionUser; import org.jetbrains.annotations.Nullable; import org.springframework.web.servlet.ModelAndView; import slacknotifications.SlackNotification; import slacknotifications.teamcity.BuildState; import slacknotifications.teamcity.BuildStateEnum; import slacknotifications.teamcity.TeamCityIdResolver; import slacknotifications.teamcity.extension.bean.ProjectSlackNotificationsBean; import slacknotifications.teamcity.extension.bean.ProjectSlackNotificationsBeanJsonSerialiser; import slacknotifications.teamcity.payload.SlackNotificationPayloadManager; import slacknotifications.teamcity.settings.SlackNotificationContentConfig; import slacknotifications.teamcity.settings.SlackNotificationMainSettings; import slacknotifications.teamcity.settings.SlackNotificationProjectSettings; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.util.HashMap; import java.util.HashSet; import java.util.Set; public class SlackNotificationAjaxEditPageController extends BaseController { protected static final String BEFORE_FINISHED = "BeforeFinished"; protected static final String BUILD_INTERRUPTED = "BuildInterrupted"; protected static final String BUILD_STARTED = "BuildStarted"; protected static final String BUILD_BROKEN = "BuildBroken"; protected static final String BUILD_FIXED = "BuildFixed"; protected static final String BUILD_FAILED = "BuildFailed"; protected static final String BUILD_SUCCESSFUL = 
"BuildSuccessful"; private final WebControllerManager myWebManager; private final SlackNotificationMainSettings myMainSettings; private SBuildServer myServer; private ProjectSettingsManager mySettings; private final String myPluginPath; private final SlackNotificationPayloadManager myManager; public SlackNotificationAjaxEditPageController(SBuildServer server, WebControllerManager webManager, ProjectSettingsManager settings, SlackNotificationProjectSettings whSettings, SlackNotificationPayloadManager manager, PluginDescriptor pluginDescriptor, SlackNotificationMainSettings mainSettings) { super(server); myWebManager = webManager; myServer = server; mySettings = settings; myPluginPath = pluginDescriptor.getPluginResourcesPath(); myManager = manager; myMainSettings = mainSettings; } public void register(){ myWebManager.registerController("/slacknotifications/ajaxEdit.html", this); } protected static void checkAndAddBuildState(HttpServletRequest r, BuildState state, BuildStateEnum myBuildState, String varName){ if ((r.getParameter(varName) != null) && (r.getParameter(varName).equalsIgnoreCase("on"))){ state.enable(myBuildState); } else { state.disable(myBuildState);; } } protected static void checkAndAddBuildStateIfEitherSet(HttpServletRequest r, BuildState state, BuildStateEnum myBuildState, String varName, String otherVarName){ if ((r.getParameter(varName) != null) && (r.getParameter(varName).equalsIgnoreCase("on"))){ state.enable(myBuildState); } else if ((r.getParameter(otherVarName) != null) && (r.getParameter(otherVarName).equalsIgnoreCase("on"))){ state.enable(myBuildState); } else { state.disable(myBuildState);; } } @Nullable protected ModelAndView doHandle(HttpServletRequest request, HttpServletResponse response) throws Exception { HashMap<String,Object> params = new HashMap<String,Object>(); SUser myUser = SessionUser.getUser(request); SProject myProject = null; SlackNotificationProjectSettings projSettings = null; if 
(request.getMethod().equalsIgnoreCase("post")){ if ((request.getParameter("projectId") != null) && request.getParameter("projectId").startsWith("project")){ projSettings = (SlackNotificationProjectSettings) mySettings.getSettings(request.getParameter("projectId"), "slackNotifications"); myProject = this.myServer.getProjectManager().findProjectById(request.getParameter("projectId")); if ((projSettings != null) && (myProject != null) && (myUser.isPermissionGrantedForProject(myProject.getProjectId(), Permission.EDIT_PROJECT))){ if ((request.getParameter("submitAction") != null ) && (request.getParameter("submitAction").equals("removeSlackNotification")) && (request.getParameter("removedSlackNotificationId") != null)){ projSettings.deleteSlackNotification(request.getParameter("removedSlackNotificationId"), myProject.getProjectId()); if(projSettings.updateSuccessful()){ myProject.persist(); params.put("messages", "<errors />"); } else { params.put("messages", "<errors><error id=\"messageArea\">The slacknotifications was not found. 
Have the SlackNotifications been edited on disk or by another user?</error></errors>"); } } else if ((request.getParameter("submitAction") != null ) && (request.getParameter("submitAction").equals("updateSlackNotification"))){ if((request.getParameter("channel") != null ) && (request.getParameter("channel").length() > 0 )){ if (request.getParameter("slackNotificationId") != null){ Boolean enabled = false; Boolean mentionChannelEnabled = false; Boolean mentionSlackUserEnabled = false; Boolean buildTypeAll = false; Boolean buildTypeSubProjects = false; SlackNotificationContentConfig content = new SlackNotificationContentConfig(); Set<String> buildTypes = new HashSet<String>(); if ((request.getParameter("slackNotificationsEnabled") != null ) && (request.getParameter("slackNotificationsEnabled").equalsIgnoreCase("on"))){ enabled = true; } if ((request.getParameter("mentionChannelEnabled") != null ) && (request.getParameter("mentionChannelEnabled").equalsIgnoreCase("on"))){ mentionChannelEnabled = true; } if ((request.getParameter("mentionSlackUserEnabled") != null ) && (request.getParameter("mentionSlackUserEnabled").equalsIgnoreCase("on"))){ mentionSlackUserEnabled = true; } content.setEnabled((request.getParameter("customContentEnabled") != null ) && (request.getParameter("customContentEnabled").equalsIgnoreCase("on"))); if (content.isEnabled()){ if ((request.getParameter("maxCommitsToDisplay") != null ) && (request.getParameter("maxCommitsToDisplay").length() > 0)){ content.setMaxCommitsToDisplay(convertToInt(request.getParameter("maxCommitsToDisplay"), SlackNotificationContentConfig.DEFAULT_MAX_COMMITS)); } content.setShowBuildAgent((request.getParameter("showBuildAgent") != null ) && (request.getParameter("showBuildAgent").equalsIgnoreCase("on"))); content.setShowCommits((request.getParameter("showCommits") != null ) && (request.getParameter("showCommits").equalsIgnoreCase("on"))); content.setShowCommitters((request.getParameter("showCommitters") != null) && 
(request.getParameter("showCommitters").equalsIgnoreCase("on"))); content.setShowElapsedBuildTime((request.getParameter("showElapsedBuildTime") != null) && (request.getParameter("showElapsedBuildTime").equalsIgnoreCase("on"))); content.setShowFailureReason((request.getParameter("showFailureReason") != null) && (request.getParameter("showFailureReason").equalsIgnoreCase("on"))); if ((request.getParameter("botName") != null ) && (request.getParameter("botName").length() > 0)){ content.setBotName(request.getParameter("botName")); } if ((request.getParameter("iconUrl") != null ) && (request.getParameter("iconUrl").length() > 0)){ content.setIconUrl(request.getParameter("iconUrl")); } } BuildState states = new BuildState(); checkAndAddBuildState(request, states, BuildStateEnum.BUILD_SUCCESSFUL, BUILD_SUCCESSFUL); checkAndAddBuildState(request, states, BuildStateEnum.BUILD_FAILED, BUILD_FAILED); checkAndAddBuildState(request, states, BuildStateEnum.BUILD_FIXED, BUILD_FIXED); checkAndAddBuildState(request, states, BuildStateEnum.BUILD_BROKEN, BUILD_BROKEN); checkAndAddBuildState(request, states, BuildStateEnum.BUILD_STARTED, BUILD_STARTED); checkAndAddBuildState(request, states, BuildStateEnum.BUILD_INTERRUPTED, BUILD_INTERRUPTED); checkAndAddBuildState(request, states, BuildStateEnum.BEFORE_BUILD_FINISHED, BEFORE_FINISHED); checkAndAddBuildStateIfEitherSet(request, states, BuildStateEnum.BUILD_FINISHED, BUILD_SUCCESSFUL, BUILD_FAILED); checkAndAddBuildState(request, states, BuildStateEnum.RESPONSIBILITY_CHANGED, "ResponsibilityChanged"); if ((request.getParameter("buildTypeSubProjects") != null ) && (request.getParameter("buildTypeSubProjects").equalsIgnoreCase("on"))){ buildTypeSubProjects = true; } if ((request.getParameter("buildTypeAll") != null ) && (request.getParameter("buildTypeAll").equalsIgnoreCase("on"))){ buildTypeAll = true; } else { if (request.getParameterValues("buildTypeId") != null){ String[] types = request.getParameterValues("buildTypeId"); for 
(String string : types) { buildTypes.add(string); } } } if (request.getParameter("slackNotificationId").equals("new")){ projSettings.addNewSlackNotification(myProject.getProjectId(),request.getParameter("channel"), request.getParameter("team"), enabled, states, buildTypeAll, buildTypeSubProjects, buildTypes, mentionChannelEnabled, mentionSlackUserEnabled); if(projSettings.updateSuccessful()){ myProject.persist(); params.put("messages", "<errors />"); } else { params.put("message", "<errors><error id=\"\">" + projSettings.getUpdateMessage() + "</error>"); } } else { projSettings.updateSlackNotification(myProject.getProjectId(),request.getParameter("slackNotificationId"), request.getParameter("channel"), enabled, states, buildTypeAll, buildTypeSubProjects, buildTypes, mentionChannelEnabled, mentionSlackUserEnabled, content); if(projSettings.updateSuccessful()){ myProject.persist(); params.put("messages", "<errors />"); } else { params.put("message", "<errors><error id=\"\">" + projSettings.getUpdateMessage() + "</error>"); } } } // TODO Need to handle slackNotificationId being null } else { if ((request.getParameter("channel") == null ) || (request.getParameter("channel").length() == 0)){ params.put("messages", "<errors><error id=\"emptySlackNotificationChannel\">Please enter a channel.</error></errors>"); } } } } else { params.put("messages", "<errors><error id=\"messageArea\">You do not appear to have permission to edit SlackNotifications.</error></errors>"); } } } if (request.getMethod().equalsIgnoreCase("get") && request.getParameter("projectId") != null && request.getParameter("projectId").startsWith("project")){ SlackNotificationProjectSettings projSettings1 = (SlackNotificationProjectSettings) mySettings.getSettings(request.getParameter("projectId"), "slackNotifications"); SProject project = this.myServer.getProjectManager().findProjectById(request.getParameter("projectId")); String message = projSettings1.getSlackNotificationsAsString(); 
// GET branch (continued): expose the freshly loaded project settings to the view model.
            params.put("haveProject", "true");
            params.put("messages", message);
            params.put("projectId", project.getProjectId());
            params.put("projectExternalId", TeamCityIdResolver.getExternalProjectId(project));
            params.put("projectName", project.getName());
            params.put("slackNotificationCount", projSettings1.getSlackNotificationsCount());
            if (projSettings1.getSlackNotificationsCount() == 0) {
                params.put("noSlackNotifications", "true");
                params.put("slackNotifications", "false");
            } else {
                params.put("noSlackNotifications", "false");
                params.put("slackNotifications", "true");
                // FIX: consistently use projSettings1 (loaded above for this GET request).
                // The original read the projSettings variable here, which is only assigned on
                // the POST path and is therefore null or stale when handling a plain GET —
                // the adjacent lines (message, slackNotificationCount) already use projSettings1.
                params.put("slackNotificationList", projSettings1.getSlackNotificationsAsList());
                params.put("slackNotificationsDisabled", !projSettings1.isEnabled());
                params.put("slackNotificationsEnabledAsChecked", projSettings1.isEnabledAsChecked());
                params.put("projectSlackNotificationsAsJson",
                        ProjectSlackNotificationsBeanJsonSerialiser.serialise(
                                ProjectSlackNotificationsBean.build(projSettings1, project, myMainSettings)));
            }
        } else {
            params.put("haveProject", "false");
        }
        return new ModelAndView(myPluginPath + "SlackNotification/ajaxEdit.jsp", params);
    }

    /**
     * Parses {@code s} as a decimal integer.
     *
     * @param s the string to parse (may be null — parseInt then throws and the default is used)
     * @param defaultValue the value returned when {@code s} is not a valid integer
     * @return the parsed value, or {@code defaultValue} on any {@link NumberFormatException}
     */
    private int convertToInt(String s, int defaultValue) {
        try {
            return Integer.parseInt(s);
        } catch (NumberFormatException e) {
            // Best-effort parse: the caller supplies a sensible default for malformed form input.
            return defaultValue;
        }
    }
}
package org.chain;

import org.chain.filters.Action;
import org.chain.filters.ManySelector;
import org.chain.filters.Selector;
import org.chain.filters.WhereComparator;

import java.util.Collection;
import java.util.Comparator;
import java.util.NoSuchElementException;

/**
 * Engine contract for the chain collection API. Operations fall into three families:
 * modification (build new collections from existing ones), query (selection, predicates
 * and counts) and access (retrieve individual elements or sub-ranges). Every operation
 * takes the {@link Collection} to operate on as its first argument and, per the
 * per-method contracts below, returns a new collection rather than mutating the input.
 *
 * @param <T> the element type of the collections operated on
 */
interface ChainEngine<T> {

    // --- modification operations ---

    /**
     * Performs an action using each element in the {@link Collection}.
     * @param collection The collection to operate on.
     * @param action The action or modification to perform on each element.
     */
    void each(final Collection<T> collection, final Action<T> action);

    /**
     * Sorts the {@link Collection} based on the {@link Comparator} that is passed in.
     * <br/>{ [ 3, 1, 4, 2 ] } => { [ 1, 2, 3, 4 ] }
     * @param collection The collection to operate on.
     * @param comparator Defines the order to sort the list by.
     * @return A new sorted version of the {@link Collection}.
     */
    Collection<T> sort(final Collection<T> collection, final Comparator<T> comparator);

    /**
     * Reverses the order of the {@link Collection}.
     * <br/>{ [ 1, 2, 3, 4 ] } => { [ 4, 3, 2, 1 ] }
     * @param collection The collection to operate on.
     * @return A new reversed order version of the {@link Collection}.
     */
    Collection<T> reverse(final Collection<T> collection);

    /**
     * Appends a {@link Collection} to the end of the first {@link Collection}.
     * <br/>{ [ 1, 2 ] } concatenate { [ 3, 4 ] } => { [ 1, 2, 3, 4 ] }
     * @param collection1 The collection to operate on.
     * @param collection2 The {@link Collection} to append to the end of the first {@link Collection}.
     * @return A new {@link Collection} that is a concatenated version of the two {@link Collection}s.
     */
    Collection<T> concatenate(final Collection<T> collection1, final Collection<T> collection2);

    /**
     * Builds a new {@link Collection} that contains all distinct elements from both {@link Collection}s.
     * <br/>{ [ 1, 2, 3 ] } union { [ 2, 3, 4 ] } => { [ 1, 2, 3, 4 ] }
     * @param collection1 The collection to operate on.
     * @param collection2 The {@link Collection} to union with the first {@link Collection}.
     * @param comparator Defines how to check if an element is distinct.
     * @return A new {@link Collection} containing all distinct elements from both {@link Collection}s.
     */
    Collection<T> union(final Collection<T> collection1, final Collection<T> collection2, final Comparator<T> comparator);

    /**
     * Builds a new {@link Collection} that contains all elements found in both {@link Collection}s.
     * <br/>{ [ 1, 2, 3 ] } intersect { [ 2, 3, 4 ] } => { [ 2, 3 ] }
     * @param collection1 The collection to operate on.
     * @param collection2 The {@link Collection} to intersect with the first {@link Collection}.
     * @param comparator Defines how to check if an element is found in both {@link Collection}s.
     * @return A new {@link Collection} containing all elements found in both {@link Collection}s.
     */
    Collection<T> intersect(final Collection<T> collection1, final Collection<T> collection2, final Comparator<T> comparator);

    /**
     * Builds a new {@link Collection} that contains all elements found in only one of the two
     * {@link Collection}s (the symmetric difference).
     * <br/>{ [ 1, 2, 3 ] } diverge { [ 2, 3, 4 ] } => { [ 1, 4 ] }
     * @param collection1 The collection to operate on.
     * @param collection2 The {@link Collection} to diverge with the first {@link Collection}.
     * @param comparator Defines how to check if an element is found in both {@link Collection}s.
     * @return A new {@link Collection} containing all elements not found in both {@link Collection}s.
     */
    Collection<T> diverge(Collection<T> collection1, Collection<T> collection2, Comparator<T> comparator);

    /**
     * Removes duplicate elements, keeping only the distinct ones.
     * <br/>{ [ 1, 1, 2, 2, 3 ] } => { [ 1, 2, 3 ] }
     * @param collection The collection to operate on.
     * @param comparator Defines how to check if an element is already present in the result.
     * @return A new {@link Collection} containing only distinct elements.
     */
    Collection<T> distinct(final Collection<T> collection, final Comparator<T> comparator);

    // --- query operations ---

    /**
     * Selects a field from each element and concatenates them all into a new {@link Collection}.
     * <br/>{ [ {a:1}, {a:2}, {a:3}, {a:4} ] } select(a) => { [ 1, 2, 3, 4 ] }
     * @param collection The collection to operate on.
     * @param selector Defines which field to select from each element.
     * @param <TD> The type of the field to be selected from each element.
     * @return A new {@link Collection} containing all of the fields that were selected from each element.
     */
    <TD> Collection<TD> select(final Collection<T> collection, final Selector<T, TD> selector);

    /**
     * Selects a field containing a {@link Collection} from each element and concatenates all
     * subelements into a new flattened {@link Collection}.
     * <br/>{ [ {a:[1, 2]}, {a:[3, 4]} ] } select(a) => { [ 1, 2, 3, 4 ] }
     * @param collection The collection to operate on.
     * @param selector Defines which field to select from each element.
     * @param <TD> The element type of the collection-valued field selected from each element.
     * @return A new {@link Collection} containing all of the subelements that were selected from each element.
     */
    <TD> Collection<TD> selectMany(final Collection<T> collection, final ManySelector<T, TD> selector);

    /**
     * Selects all elements that meet the condition specified into a new {@link Collection}.
     * <br/>{ [ 1, 2, 3, 4 ] } where(a >= 2) => { [ 2, 3, 4 ] }
     * @param collection The collection to operate on.
     * @param comparator Defines the condition to be met for each element.
     * @return A new {@link Collection} containing all elements that meet the condition specified.
     */
    Collection<T> where(final Collection<T> collection, final WhereComparator<T> comparator);

    /**
     * Checks if the {@link Collection} is either null or empty.
     * <br/>{ [ 1, 2, 3, 4 ] } isNullOrEmpty => false
     * <br/>{ [ ] } isNullOrEmpty => true
     * <br/>{ } isNullOrEmpty => true
     * @param collection The collection to operate on.
     * @return True if the {@link Collection} is null or contains zero elements.
     */
    Boolean isNullOrEmpty(final Collection<T> collection);

    /**
     * Checks if all elements meet the condition specified in the {@link Collection}.
     * <br/>{ [ 1, 2, 3, 4 ] } all(a >= 0) => true
     * <br/>{ [ 1, 2, 3, 4 ] } all(a >= 2) => false
     * @param collection The collection to operate on.
     * @param comparator Defines the condition to be met for each element.
     * @return True if all elements meet the condition specified in the {@link Collection}.
     */
    Boolean all(final Collection<T> collection, final WhereComparator<T> comparator);

    /**
     * Checks if any element meets the condition specified in the {@link Collection}.
     * <br/>{ [ 1, 2, 3, 4 ] } any(a >= 2) => true
     * <br/>{ [ 1, 2, 3, 4 ] } any(a >= 200) => false
     * @param collection The collection to operate on.
     * @param comparator Defines the condition to be met for each element.
     * @return True if any element meets the condition specified in the {@link Collection}.
     */
    Boolean any(final Collection<T> collection, final WhereComparator<T> comparator);

    /**
     * Checks if any elements are in the {@link Collection}.
     * <br/>{ [ 1, 2, 3, 4 ] } any => true
     * <br/>{ [ ] } any => false
     * @param collection The collection to operate on.
     * @return True if any elements are in the {@link Collection}.
     */
    Boolean any(final Collection<T> collection);

    /**
     * Checks if no elements meet the condition specified in the {@link Collection}.
     * <br/>{ [ 1, 2, 3, 4 ] } none(a >= 2) => false
     * <br/>{ [ 1, 2, 3, 4 ] } none(a >= 200) => true
     * @param collection The collection to operate on.
     * @param comparator Defines the condition to be met for each element.
     * @return True if no elements meet the condition specified in the {@link Collection}.
     */
    Boolean none(final Collection<T> collection, final WhereComparator<T> comparator);

    /**
     * Checks if the {@link Collection} has no elements.
     * <br/>{ [ 1, 2, 3, 4 ] } none => false
     * <br/>{ [ ] } none => true
     * @param collection The collection to operate on.
     * @return True if no elements are in the {@link Collection}.
     */
    Boolean none(final Collection<T> collection);

    /**
     * Counts how many elements meet the condition specified in the {@link Collection}.
     * <br/>{ [ 1, 2, 3, 4 ] } count(a >= 2) => 3
     * <br/>{ [ 1, 2, 3, 4 ] } count(a >= 200) => 0
     * @param collection The collection to operate on.
     * @param comparator Defines the condition to be met for each element.
     * @return How many elements meet the condition specified in the {@link Collection}.
     */
    Integer count(final Collection<T> collection, final WhereComparator<T> comparator);

    /**
     * Counts how many elements are in the {@link Collection}.
     * <br/>{ [ 1, 2, 3, 4 ] } count => 4
     * @param collection The collection to operate on.
     * @return How many elements are in the {@link Collection}.
     */
    Integer count(final Collection<T> collection);

    // --- access operations ---

    /**
     * <br/>{ [ 1, 2, 3, 4 ] } first => { 1 }
     * <br/>{ [ ] } first => NoSuchElementException
     * <br/>{ } first => NoSuchElementException
     * @param collection The collection to operate on.
     * @return The first element from the collection.
     * @throws NoSuchElementException If the collection is null or empty.
     */
    T first(final Collection<T> collection);

    /**
     * <br/>{ [ 1, 2, 3, 4 ] } firstOrNull => { 1 }
     * <br/>{ [ ] } firstOrNull => { null }
     * @param collection The collection to operate on.
     * @return The first element of the {@link Collection} or null if the {@link Collection} is empty.
     */
    T firstOrNull(final Collection<T> collection);

    /**
     * Searches the {@link Collection} for the first element that meets the condition.
     * <br/>{ [ 1, 2, 3, 4 ] } first(a > 2) => { 3 }
     * <br/>{ [ 1, 2, 3, 4 ] } first(a > 200) => NoSuchElementException
     * <br/>{ [ ] } first(a > 2) => NoSuchElementException
     * @param collection The collection to operate on.
     * @param comparator Defines the condition to be met for the element to be found.
     * @return The first element that meets the condition specified.
     * @throws NoSuchElementException If no matching element found.
     */
    T first(final Collection<T> collection, final WhereComparator<T> comparator);

    /**
     * Searches the {@link Collection} for the first element that meets the condition, and
     * returns null if no match found.
     * <br/>{ [ 1, 2, 3, 4 ] } firstOrNull(a > 2) => { 3 }
     * <br/>{ [ 1, 2, 3, 4 ] } firstOrNull(a > 200) => { null }
     * <br/>{ [ ] } firstOrNull(a > 2) => { null }
     * @param collection The collection to operate on.
     * @param comparator Defines the condition to be met for the element to be found.
     * @return The first element that meets the condition specified, or null if no match found.
     */
    T firstOrNull(final Collection<T> collection, final WhereComparator<T> comparator);

    /**
     * <br/>{ [ 1, 2, 3, 4 ] } last => { 4 }
     * <br/>{ [ ] } last => NoSuchElementException
     * <br/>{ } last => NoSuchElementException
     * @param collection The collection to operate on.
     * @return The last element from the collection.
     * @throws NoSuchElementException If the collection is null or empty.
     */
    T last(final Collection<T> collection);

    /**
     * <br/>{ [ 1, 2, 3, 4 ] } lastOrNull => { 4 }
     * <br/>{ [ ] } lastOrNull => { null }
     * @param collection The collection to operate on.
     * @return The last element of the {@link Collection} or null if the {@link Collection} is empty.
     */
    T lastOrNull(final Collection<T> collection);

    /**
     * Searches the {@link Collection} for the last element that meets the condition.
     * <br/>{ [ 1, 2, 3, 4 ] } last(a > 2) => { 4 }
     * <br/>{ [ 1, 2, 3, 4 ] } last(a > 200) => NoSuchElementException
     * <br/>{ [ ] } last(a > 2) => NoSuchElementException
     * <br/>{ } last(a > 2) => NoSuchElementException
     * @param collection The collection to operate on.
     * @param comparator Defines the condition to be met for the element to be found.
     * @return The last element that meets the condition specified.
     * @throws NoSuchElementException If no matching element found.
     */
    T last(final Collection<T> collection, final WhereComparator<T> comparator);

    /**
     * Searches the {@link Collection} for the last element that meets the condition, and
     * returns null if no match found.
     * <br/>{ [ 1, 2, 3, 4 ] } lastOrNull(a > 2) => { 4 }
     * <br/>{ [ 1, 2, 3, 4 ] } lastOrNull(a > 200) => { null }
     * <br/>{ [ ] } lastOrNull(a > 2) => { null }
     * @param collection The collection to operate on.
     * @param comparator Defines the condition to be met for the element to be found.
     * @return The last element that meets the condition specified, or null if no match found.
     */
    T lastOrNull(final Collection<T> collection, final WhereComparator<T> comparator);

    /**
     * <br/>{ [ 1, 2, 3, 4 ] } at(2) => { 3 }
     * @param collection The collection to operate on.
     * @param index The zero-based position of the element to return.
     * @return The element from the collection at the given index.
     */
    T at(final Collection<T> collection, final int index);

    /**
     * Skips the given number of elements in the {@link Collection} and returns the rest in a new {@link Collection}.
     * <br/>{ [ 1, 2, 3, 4 ] } skip(2) => { [ 3, 4 ] }
     * @param collection The collection to operate on.
     * @param numberToSkip The number of elements to drop from the front of the {@link Collection}.
     * @return A new {@link Collection} containing all elements with the specified number removed from the front of the array.
     */
    Collection<T> skip(final Collection<T> collection, final int numberToSkip);

    /**
     * Takes the given number of elements from the front of the {@link Collection}.
     * <br/>{ [ 1, 2, 3, 4 ] } take(2) => { [ 1, 2 ] }
     * @param collection The collection to operate on.
     * @param numberToTake The number of elements to keep from the front of the {@link Collection}.
     * @return A new {@link Collection} containing the given number of elements from the front of the {@link Collection}.
     */
    Collection<T> take(final Collection<T> collection, final int numberToTake);
}
/*
 * Copyright 2019 The Error Prone Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.errorprone.bugpatterns.time;

import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.errorprone.BugPattern.SeverityLevel.ERROR;
import static com.google.errorprone.matchers.Matchers.anyOf;
import static com.google.errorprone.matchers.Matchers.packageStartsWith;
import static com.google.errorprone.matchers.method.MethodMatchers.instanceMethod;
import static com.google.errorprone.matchers.method.MethodMatchers.staticMethod;
import static java.time.temporal.ChronoField.DAY_OF_MONTH;
import static java.time.temporal.ChronoField.DAY_OF_WEEK;
import static java.time.temporal.ChronoField.DAY_OF_YEAR;
import static java.time.temporal.ChronoField.EPOCH_DAY;
import static java.time.temporal.ChronoField.HOUR_OF_DAY;
import static java.time.temporal.ChronoField.MINUTE_OF_HOUR;
import static java.time.temporal.ChronoField.MONTH_OF_YEAR;
import static java.time.temporal.ChronoField.NANO_OF_DAY;
import static java.time.temporal.ChronoField.NANO_OF_SECOND;
import static java.time.temporal.ChronoField.SECOND_OF_DAY;
import static java.time.temporal.ChronoField.SECOND_OF_MINUTE;
import static java.time.temporal.ChronoField.YEAR;

import com.google.auto.value.AutoValue;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import com.google.errorprone.BugPattern;
import com.google.errorprone.VisitorState;
import com.google.errorprone.bugpatterns.BugChecker;
import com.google.errorprone.bugpatterns.BugChecker.MethodInvocationTreeMatcher;
import com.google.errorprone.matchers.Description;
import com.google.errorprone.matchers.Matcher;
import com.google.errorprone.util.ASTHelpers;
import com.sun.source.tree.ExpressionTree;
import com.sun.source.tree.MethodInvocationTree;
import com.sun.tools.javac.code.Type;
import java.time.DateTimeException;
import java.time.temporal.ChronoField;
import java.util.Arrays;
import java.util.List;

/**
 * This checker errors on calls to {@code java.time} methods using values that are guaranteed to
 * throw a {@link DateTimeException}.
 *
 * @author kak@google.com (Kurt Alfred Kluever)
 */
@BugPattern(
    name = "InvalidJavaTimeConstant",
    summary =
        "This checker errors on calls to java.time methods using values that are guaranteed to "
            + "throw a DateTimeException.",
    severity = ERROR)
public final class InvalidJavaTimeConstant extends BugChecker
    implements MethodInvocationTreeMatcher {

  /**
   * A method matcher paired with the {@link ChronoField} of each of the method's parameters, in
   * declaration order, so a matched call's arguments can be range-checked positionally.
   */
  @AutoValue
  abstract static class MatcherWithUnits {
    abstract Matcher<ExpressionTree> matcher();

    abstract ImmutableList<ChronoField> units();
  }

  /** One formal parameter: its type name plus the {@link ChronoField} its value represents. */
  @AutoValue
  abstract static class Param {
    abstract String type();

    abstract ChronoField unit();
  }

  // Shorthand factories for the three parameter shapes used by the java.time APIs below.
  // NOTE: the AutoValue_* constructors are generated from the @AutoValue classes above.
  private static Param intP(ChronoField unit) {
    return new AutoValue_InvalidJavaTimeConstant_Param("int", unit);
  }

  private static Param longP(ChronoField unit) {
    return new AutoValue_InvalidJavaTimeConstant_Param("long", unit);
  }

  private static Param monthP(ChronoField unit) {
    return new AutoValue_InvalidJavaTimeConstant_Param("java.time.Month", unit);
  }

  /** The checked API surface (factory and wither methods) of one {@code java.time} class. */
  @AutoValue
  abstract static class JavaTimeType {
    abstract String className();

    abstract ImmutableList<MatcherWithUnits> methods();

    public static Builder builder() {
      return new AutoValue_InvalidJavaTimeConstant_JavaTimeType.Builder();
    }

    @AutoValue.Builder
    public abstract static class Builder {
      public abstract Builder setClassName(String className);

      abstract String className();

      abstract ImmutableList.Builder<MatcherWithUnits> methodsBuilder();

      /** Registers a static method of {@code className()} whose parameters are {@code params}. */
      public Builder addStaticMethod(String methodName, Param... params) {
        methodsBuilder()
            .add(
                new AutoValue_InvalidJavaTimeConstant_MatcherWithUnits(
                    staticMethod()
                        .onClass(className())
                        .named(methodName)
                        .withParameters(getParameterTypes(params)),
                    getParameterUnits(params)));
        return this;
      }

      /** Registers an instance method declared exactly on {@code className()}. */
      public Builder addInstanceMethod(String methodName, Param... params) {
        methodsBuilder()
            .add(
                new AutoValue_InvalidJavaTimeConstant_MatcherWithUnits(
                    instanceMethod()
                        .onExactClass(className())
                        .named(methodName)
                        .withParameters(getParameterTypes(params)),
                    getParameterUnits(params)));
        return this;
      }

      private static ImmutableList<ChronoField> getParameterUnits(Param... params) {
        return Arrays.stream(params).map(p -> p.unit()).collect(toImmutableList());
      }

      private static String[] getParameterTypes(Param... params) {
        return Arrays.stream(params).map(p -> p.type()).toArray(String[]::new);
      }

      public abstract JavaTimeType build();
    }
  }

  private static final JavaTimeType DAY_OF_WEEK_APIS =
      JavaTimeType.builder()
          .setClassName("java.time.DayOfWeek")
          .addStaticMethod("of", intP(DAY_OF_WEEK))
          .build();

  private static final JavaTimeType MONTH_APIS =
      JavaTimeType.builder()
          .setClassName("java.time.Month")
          .addStaticMethod("of", intP(MONTH_OF_YEAR))
          .build();

  private static final JavaTimeType YEAR_APIS =
      JavaTimeType.builder()
          .setClassName("java.time.Year")
          .addStaticMethod("of", intP(YEAR))
          .addInstanceMethod("atDay", intP(DAY_OF_YEAR))
          .addInstanceMethod("atMonth", intP(MONTH_OF_YEAR))
          .build();

  private static final JavaTimeType YEAR_MONTH_APIS =
      JavaTimeType.builder()
          .setClassName("java.time.YearMonth")
          .addStaticMethod("of", intP(YEAR), intP(MONTH_OF_YEAR))
          .addStaticMethod("of", intP(YEAR), monthP(MONTH_OF_YEAR))
          .addInstanceMethod("atDay", intP(DAY_OF_MONTH))
          .addInstanceMethod("withMonth", intP(MONTH_OF_YEAR))
          .addInstanceMethod("withYear", intP(YEAR))
          .build();

  private static final JavaTimeType MONTH_DAY_APIS =
      JavaTimeType.builder()
          .setClassName("java.time.MonthDay")
          .addStaticMethod("of", intP(MONTH_OF_YEAR), intP(DAY_OF_MONTH))
          .addStaticMethod("of", monthP(MONTH_OF_YEAR), intP(DAY_OF_MONTH))
          .addInstanceMethod("atYear", intP(YEAR))
          .addInstanceMethod("withDayOfMonth", intP(DAY_OF_MONTH))
          .addInstanceMethod("withMonth", intP(MONTH_OF_YEAR))
          .build();

  private static final JavaTimeType LOCAL_TIME_APIS =
      JavaTimeType.builder()
          .setClassName("java.time.LocalTime")
          .addStaticMethod("of", intP(HOUR_OF_DAY), intP(MINUTE_OF_HOUR))
          .addStaticMethod("of", intP(HOUR_OF_DAY), intP(MINUTE_OF_HOUR), intP(SECOND_OF_MINUTE))
          .addStaticMethod(
              "of",
              intP(HOUR_OF_DAY),
              intP(MINUTE_OF_HOUR),
              intP(SECOND_OF_MINUTE),
              intP(NANO_OF_SECOND))
          .addStaticMethod("ofNanoOfDay", longP(NANO_OF_DAY))
          .addStaticMethod("ofSecondOfDay", longP(SECOND_OF_DAY))
          .addInstanceMethod("withHour", intP(HOUR_OF_DAY))
          .addInstanceMethod("withMinute", intP(MINUTE_OF_HOUR))
          .addInstanceMethod("withNano", intP(NANO_OF_SECOND))
          .addInstanceMethod("withSecond", intP(SECOND_OF_MINUTE))
          .build();

  private static final JavaTimeType LOCAL_DATE_APIS =
      JavaTimeType.builder()
          .setClassName("java.time.LocalDate")
          .addStaticMethod("of", intP(YEAR), intP(MONTH_OF_YEAR), intP(DAY_OF_MONTH))
          .addStaticMethod("of", intP(YEAR), monthP(MONTH_OF_YEAR), intP(DAY_OF_MONTH))
          .addStaticMethod("ofEpochDay", longP(EPOCH_DAY))
          .addStaticMethod("ofYearDay", intP(YEAR), intP(DAY_OF_YEAR))
          .addInstanceMethod("withDayOfMonth", intP(DAY_OF_MONTH))
          .addInstanceMethod("withDayOfYear", intP(DAY_OF_YEAR))
          .addInstanceMethod("withMonth", intP(MONTH_OF_YEAR))
          .addInstanceMethod("withYear", intP(YEAR))
          .build();

  private static final JavaTimeType LOCAL_DATE_TIME_APIS =
      JavaTimeType.builder()
          .setClassName("java.time.LocalDateTime")
          .addStaticMethod(
              "of",
              intP(YEAR),
              intP(MONTH_OF_YEAR),
              intP(DAY_OF_MONTH),
              intP(HOUR_OF_DAY),
              intP(MINUTE_OF_HOUR))
          .addStaticMethod(
              "of",
              intP(YEAR),
              monthP(MONTH_OF_YEAR),
              intP(DAY_OF_MONTH),
              intP(HOUR_OF_DAY),
              intP(MINUTE_OF_HOUR))
          .addStaticMethod(
              "of",
              intP(YEAR),
              intP(MONTH_OF_YEAR),
              intP(DAY_OF_MONTH),
              intP(HOUR_OF_DAY),
              intP(MINUTE_OF_HOUR),
              intP(SECOND_OF_MINUTE))
          .addStaticMethod(
              "of",
              intP(YEAR),
              monthP(MONTH_OF_YEAR),
              intP(DAY_OF_MONTH),
              intP(HOUR_OF_DAY),
              intP(MINUTE_OF_HOUR),
              intP(SECOND_OF_MINUTE))
          .addStaticMethod(
              "of",
              intP(YEAR),
              intP(MONTH_OF_YEAR),
              intP(DAY_OF_MONTH),
              intP(HOUR_OF_DAY),
              intP(MINUTE_OF_HOUR),
              intP(SECOND_OF_MINUTE),
              intP(NANO_OF_SECOND))
          .addStaticMethod(
              "of",
              intP(YEAR),
              monthP(MONTH_OF_YEAR),
              intP(DAY_OF_MONTH),
              intP(HOUR_OF_DAY),
              intP(MINUTE_OF_HOUR),
              intP(SECOND_OF_MINUTE),
              intP(NANO_OF_SECOND))
          .addInstanceMethod("withDayOfMonth", intP(DAY_OF_MONTH))
          .addInstanceMethod("withDayOfYear", intP(DAY_OF_YEAR))
          .addInstanceMethod("withHour", intP(HOUR_OF_DAY))
          .addInstanceMethod("withMinute", intP(MINUTE_OF_HOUR))
          .addInstanceMethod("withMonth", intP(MONTH_OF_YEAR))
          .addInstanceMethod("withNano", intP(NANO_OF_SECOND))
          .addInstanceMethod("withSecond", intP(SECOND_OF_MINUTE))
          .addInstanceMethod("withYear", intP(YEAR))
          .build();

  /** Index from fully-qualified class name to that class's checked API surface. */
  private static final ImmutableMap<String, JavaTimeType> APIS =
      Maps.uniqueIndex(
          ImmutableList.of(
              LOCAL_TIME_APIS,
              LOCAL_DATE_APIS,
              LOCAL_DATE_TIME_APIS,
              DAY_OF_WEEK_APIS,
              MONTH_APIS,
              YEAR_APIS,
              MONTH_DAY_APIS,
              YEAR_MONTH_APIS),
          JavaTimeType::className);

  /** Matches code inside the JDK itself (or its TCK), which is exempt from this check. */
  private static final Matcher<ExpressionTree> JAVA_MATCHER =
      anyOf(packageStartsWith("java."), packageStartsWith("tck.java."));

  @Override
  public Description matchMethodInvocation(MethodInvocationTree tree, VisitorState state) {
    // Allow the JDK to do whatever it wants (unit tests, etc.)
    if (JAVA_MATCHER.matches(tree, state)) {
      return Description.NO_MATCH;
    }
    // Get the receiver of the method invocation and make sure it's not null
    Type receiverType = ASTHelpers.getReceiverType(tree);
    if (receiverType == null) {
      return Description.NO_MATCH;
    }
    // If the receiver is not one of our java.time types, then return early
    JavaTimeType type = APIS.get(receiverType.toString());
    if (type == null) {
      return Description.NO_MATCH;
    }
    // Otherwise, check the method matchers we have for that type
    for (MatcherWithUnits matcherWithUnits : type.methods()) {
      if (matcherWithUnits.matcher().matches(tree, state)) {
        List<? extends ExpressionTree> arguments = tree.getArguments();
        for (int i = 0; i < arguments.size(); i++) {
          ExpressionTree argument = arguments.get(i);
          // Only compile-time constant arguments can be validated; others are skipped.
          Number constant = ASTHelpers.constValue(argument, Number.class);
          if (constant != null) {
            try {
              // Delegates range validation to the ChronoField for this parameter position;
              // an out-of-range constant throws DateTimeException, which becomes the finding.
              matcherWithUnits.units().get(i).checkValidValue(constant.longValue());
            } catch (DateTimeException invalid) {
              return buildDescription(argument).setMessage(invalid.getMessage()).build();
            }
          }
        }
        // we short-circuit the loop here; only 1 method matcher will ever match, so there's no
        // sense in checking the rest of them
        return Description.NO_MATCH;
      }
    }
    return Description.NO_MATCH;
  }
}
package org.wdrp.core.algorithm.td; import static org.junit.Assert.assertEquals; import java.util.Arrays; import org.junit.Test; import org.wdrp.core.model.TDArc; import org.wdrp.core.model.TDGraph; public class TDDijkstraAlgorithmTest extends TDTestBase { @Test public void testTDEdgeCost() { TDDijkstraAlgorithm a = new TDDijkstraAlgorithm(g1); int cost = a.getEdgeCost(g1.getArc(0, 1), 0); assertEquals(cost, 4); cost = a.getEdgeCost(g1.getArc(0, 1), 1); assertEquals(cost, 5); cost = a.getEdgeCost(g1.getArc(0, 1), 2); assertEquals(cost, 6); cost = a.getEdgeCost(g1.getArc(0, 1), 3); assertEquals(cost, 7); cost = a.getEdgeCost(g1.getArc(0, 1), 4); assertEquals(cost, 8); cost = a.getEdgeCost(g1.getArc(0, 1), 5); assertEquals(cost, 5+5); cost = a.getEdgeCost(g1.getArc(0, 1), 6); assertEquals(cost, 5+6); cost = a.getEdgeCost(g1.getArc(0, 1), 10); assertEquals(cost, 10+9); cost = a.getEdgeCost(g1.getArc(0, 1), 19); assertEquals(cost, 19+4); cost = a.getEdgeCost(g1.getArc(0, 1), 20); assertEquals(cost, Integer.MAX_VALUE); } @Test public void testTDEdgeCost1() { TDGraph tdg = new TDGraph(60,2); tdg.addNode(0); tdg.addNode(1); int[] costs = {10,20}; tdg.addEdge(0, new TDArc(1, costs)); TDDijkstraAlgorithm a = new TDDijkstraAlgorithm(tdg); int cost; cost = a.getEdgeCost(tdg.getArc(0, 1), 0); assertEquals(10, cost); cost = a.getEdgeCost(tdg.getArc(0, 1), 30); assertEquals(40, cost); cost = a.getEdgeCost(tdg.getArc(0, 1), 59); assertEquals(69, cost); cost = a.getEdgeCost(tdg.getArc(0, 1), 60); assertEquals(80, cost); cost = a.getEdgeCost(tdg.getArc(0, 1), 90); assertEquals(110, cost); cost = a.getEdgeCost(tdg.getArc(0, 1), 119); assertEquals(139, cost); cost = a.getEdgeCost(tdg.getArc(0, 1), 120); assertEquals(Integer.MAX_VALUE, cost); cost = a.getEdgeCost(tdg.getArc(0, 1), 130); assertEquals(Integer.MAX_VALUE, cost); cost = a.getEdgeCost(tdg.getArc(0, 1), 180); assertEquals(Integer.MAX_VALUE, cost); } @Test public void computeSPSourceSourceValidDepartureTime() { 
TDDijkstraAlgorithm a = new TDDijkstraAlgorithm(g1); int arrivalTime = a.computeEarliestArrivalTime(0,0,0); assertEquals(arrivalTime,0); } @Test public void computeSPSourceSourceInvalidDepartureTime() { TDDijkstraAlgorithm a = new TDDijkstraAlgorithm(g1); int arrivalTime = a.computeEarliestArrivalTime(0,0,200); assertEquals(arrivalTime, 200); } @Test public void computeSPSourceTargetValidDepartureTime() { TDDijkstraAlgorithm a = new TDDijkstraAlgorithm(g1); int arrivalTime = a.computeEarliestArrivalTime(0,1,0); assertEquals(arrivalTime, 4); assertEquals(a.getVisitedNodes().toString(), "{1, 0}"); } @Test public void computeSPSourceTargetInvalidDepartureTime() { TDDijkstraAlgorithm a = new TDDijkstraAlgorithm(g1); int arrivalTime = a.computeEarliestArrivalTime(0,1,100); assertEquals(arrivalTime,-1); } @Test public void computeSPSourceTargetLateDepartureTime() { TDDijkstraAlgorithm a = new TDDijkstraAlgorithm(g1); int arrivalTime = a.computeEarliestArrivalTime(0,1,19); assertEquals(arrivalTime,23); } @Test public void computeSPSourceTargetValidDepartureTime2() { TDDijkstraAlgorithm a = new TDDijkstraAlgorithm(g1); int arrivalTime = a.computeEarliestArrivalTime(0,2,0); assertEquals(arrivalTime,7); } @Test public void computeSPSourceTargetValidDepartureTime3() { TDDijkstraAlgorithm a = new TDDijkstraAlgorithm(g1); int arrivalTime = a.computeEarliestArrivalTime(0,2,1); assertEquals(arrivalTime,9); } @Test public void computeSPSourceTargetValidDepartureTime4() { TDDijkstraAlgorithm a = new TDDijkstraAlgorithm(g1); int arrivalTime = a.computeEarliestArrivalTime(0,2,4); assertEquals(arrivalTime,12); } @Test public void computeSPSourceTargetValidDepartureTime5() { TDDijkstraAlgorithm a = new TDDijkstraAlgorithm(g1); int arrivalTime = a.computeEarliestArrivalTime(0,5,0); assertEquals(arrivalTime,19); } @Test public void computeSPSourceTargetValidDepartureTime6() { TDDijkstraAlgorithm a = new TDDijkstraAlgorithm(g1); int arrivalTime = a.computeEarliestArrivalTime(0,5,1); 
assertEquals(arrivalTime,25); } @Test public void computeSPSourceTargetValidDepartureTime7() { TDDijkstraAlgorithm a = new TDDijkstraAlgorithm(g1); int arrivalTime = a.computeEarliestArrivalTime(0,5,2); assertEquals(arrivalTime,25); } @Test public void computeSPSourceTargetValidDepartureTime8() { TDDijkstraAlgorithm a = new TDDijkstraAlgorithm(g1); int arrivalTime = a.computeEarliestArrivalTime(0,5,3); assertEquals(arrivalTime,26); } @Test public void computeSPSourceTargetValidDepartureTime9() { TDDijkstraAlgorithm a = new TDDijkstraAlgorithm(g1); int arrivalTime = a.computeEarliestArrivalTime(0,5,5); assertEquals(arrivalTime,-1); } @Test public void computeEATimes() { TDDijkstraAlgorithm a = new TDDijkstraAlgorithm(g1); int[] eaTimes = a.computeEarliestArrivalTimes(0, 5, 0, 3); assertEquals(Arrays.toString(eaTimes), "[19, 25, 25, 26]"); } @Test public void computeBestDepartureTime1() { TDDijkstraAlgorithm a = new TDDijkstraAlgorithm(g1); int bestDepartureTime = a.computeDepartureTime(0, 5, 0, 5); assertEquals(bestDepartureTime, 0); } @Test public void computeBestDepartureTime2() { TDDijkstraAlgorithm a = new TDDijkstraAlgorithm(g1); int bestDepartureTime = a.computeDepartureTime(0, 5, 1, 3); assertEquals(bestDepartureTime, 2); } @Test public void computeBestDepartureTime3() { TDDijkstraAlgorithm a = new TDDijkstraAlgorithm(g1); int bestDepartureTime = a.computeDepartureTime(0, 5, 2, 3); assertEquals(bestDepartureTime, 2); } @Test public void computeEATimeOnTwoMinGraph() { TDDijkstraAlgorithm a = new TDDijkstraAlgorithm(tdGraphTwoMin); int eaTime; eaTime = a.computeEarliestArrivalTime(0, 0, 0); assertEquals(eaTime, 0); eaTime = a.computeEarliestArrivalTime(0, 0, 1); assertEquals(eaTime, 60); eaTime = a.computeEarliestArrivalTime(0, 1, 0); assertEquals(eaTime, 10); eaTime = a.computeEarliestArrivalTime(0, 1, 1); assertEquals(eaTime, 70); eaTime = a.computeEarliestArrivalTime(0, 2, 0); assertEquals(eaTime, 20); eaTime = a.computeEarliestArrivalTime(0, 2, 1); 
assertEquals(eaTime, -1); eaTime = a.computeEarliestArrivalTime(1, 2, 0); assertEquals(eaTime, 10); eaTime = a.computeEarliestArrivalTime(1, 2, 1); assertEquals(eaTime, -1); eaTime = a.computeEarliestArrivalTime(0, 3, 0); assertEquals(eaTime, 15); eaTime = a.computeEarliestArrivalTime(0, 3, 1); assertEquals(eaTime, 75); eaTime = a.computeEarliestArrivalTime(0, 7, 0); assertEquals(eaTime, -1); eaTime = a.computeTravelTime(8, 9, 0); assertEquals(eaTime, 20); eaTime = a.computeTravelTime(8, 9, 1); assertEquals(eaTime, 30); } @Test public void testComputeTravelTime() { TDDijkstraAlgorithm a = new TDDijkstraAlgorithm(g2); int eaTime; eaTime = a.computeTravelTime(0, 5, 0); assertEquals(eaTime, 170); } }
package org.jboss.as.quickstarts.hellosport.rest; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.Consumes; import javax.ws.rs.GET; import javax.ws.rs.PUT; import javax.ws.rs.DELETE; import javax.ws.rs.POST; import javax.ws.rs.Path; import java.lang.annotation.Annotation; import java.util.List; import javax.inject.Inject; import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; import javax.persistence.PersistenceContextType; import javax.persistence.Query; import javax.naming.InitialContext; import javax.transaction.UserTransaction; import org.jboss.as.quickstarts.hellosport.model.Player; import org.jboss.as.quickstarts.hellosport.model.Team; import org.jboss.as.quickstarts.hellosport.rest.SportService; //Dummy comment @Path("/NFL") public class NflService implements SportService{ @SportService(ServiceType.NFL) @PersistenceContext(unitName="NFL", type=PersistenceContextType.EXTENDED) private EntityManager emNfl; @GET() @Path("players") @Produces("application/json") @SportService(ServiceType.NFL) public List<Player> listPlayers() { Query query = emNfl.createQuery("FROM org.jboss.as.quickstarts.hellosport.model.Player order by last_name, first_name"); @SuppressWarnings("unchecked") List <Player> player = query.getResultList(); return player; } @GET() @Path("teams") @Produces("application/json") @SportService(ServiceType.NFL) public List<Team> listTeams() { Query query = emNfl.createQuery("FROM org.jboss.as.quickstarts.hellosport.model.Team order by conference, division, city, name"); @SuppressWarnings("unchecked") List <Team> team = query.getResultList(); return team; } @GET() @Path("team/{teamId}") @Produces("application/json") @SportService(ServiceType.NFL) public List<Player> listTeam(@PathParam("teamId") int iTeamId) { Query query = emNfl.createQuery("FROM org.jboss.as.quickstarts.hellosport.model.Player where team_id = ?1 order by last_name, first_name"); query.setParameter(1,iTeamId); 
@SuppressWarnings("unchecked") List <Player> player = query.getResultList(); return player; } @PUT() @Path("trade/{playerId}/newteam/{teamId}") @Produces("application/json") @SportService(ServiceType.MLB) public Player tradePlayer(@PathParam("playerId") int iPlayerId, @PathParam("teamId") int iTeamId) { Player p = new Player(); UserTransaction utx = null; try { p = emNfl.find(Player.class, iPlayerId); System.out.println("BEFORE: Player id : "+p.getId()+" Player name = "+p.getFirstName()+" "+p.getLastName()+" Position = "+p.getPosition()+" team ID = "+p.getTeamId()); p.setTeamId(iTeamId); utx = (UserTransaction) new InitialContext().lookup("java:comp/UserTransaction"); utx.begin(); emNfl.merge(p); emNfl.flush(); utx.commit(); System.out.println("AFTER: Player id : "+p.getId()+" Player name = "+p.getFirstName()+" "+p.getLastName()+" Position = "+p.getPosition()+" team ID = "+p.getTeamId()); } catch (Exception e) { if (utx == null) {} else { try { utx.rollback(); } catch (Exception ex) { System.out.println("Exception = "+ex.getMessage()); } } System.out.println("Exception = "+e.getMessage()); } return p; } @DELETE() @Path("release/{playerId}") @Produces("application/json") @SportService(ServiceType.MLB) public Player releasePlayer(@PathParam("playerId") int iPlayerId) { Player p = new Player(); UserTransaction utx = null; try { p = emNfl.find(Player.class, iPlayerId); System.out.println("BEFORE: Player id : "+p.getId()+" Player name = "+p.getFirstName()+" "+p.getLastName()+" Position = "+p.getPosition()+" team ID = "+p.getTeamId()); utx = (UserTransaction) new InitialContext().lookup("java:comp/UserTransaction"); utx.begin(); emNfl.remove(emNfl.contains(p) ? 
p : emNfl.merge(p)); emNfl.flush(); utx.commit(); System.out.println("Removed: Player id : "+p.getId()+" Player name = "+p.getFirstName()+" "+p.getLastName()+" Position = "+p.getPosition()+" team ID = "+p.getTeamId()); } catch (Exception e) { if (utx == null) {} else { try { utx.rollback(); } catch (Exception ex) { System.out.println("Exception = "+ex.getMessage()); } } System.out.println("Exception = "+e.getMessage()); } return p; } @POST() @Path("add") @Consumes("application/json") @Produces("application/json") @SportService(ServiceType.MLB) public Player addPlayer(Player p) { UserTransaction utx = null; p.setId(0); try { System.out.println("INPUT: Player id : "+p.getId()+" Player name = "+p.getFirstName()+" "+p.getLastName()+" Position = "+p.getPosition()+" team ID = "+p.getTeamId()); utx = (UserTransaction) new InitialContext().lookup("java:comp/UserTransaction"); utx.begin(); p = emNfl.merge(p); emNfl.flush(); utx.commit(); System.out.println("ADDED: Player id : "+p.getId()+" Player name = "+p.getFirstName()+" "+p.getLastName()+" Position = "+p.getPosition()+" team ID = "+p.getTeamId()); } catch (Exception e) { if (utx == null) {} else { try { utx.rollback(); } catch (Exception ex) { System.out.println("Exception = "+ex.getMessage()); } } System.out.println("Exception = "+e.getMessage()); } return p; } @Override public Class<? extends Annotation> annotationType() { // TODO Auto-generated method stub return null; } @Override public ServiceType value() { // TODO Auto-generated method stub return null; } }
/**
 * This class is generated by jOOQ
 */
package io.cattle.platform.core.model.tables.records;

/**
 * This class is generated by jOOQ.
 *
 * Updatable record for the <code>cattle.instance</code> table; each getter /
 * setter delegates to the positional {@code getValue}/{@code setValue} slot of
 * the corresponding column (indices 0-32).
 *
 * NOTE(review): generated code — do not hand-edit; regenerate with jOOQ if the
 * schema changes.
 */
@javax.annotation.Generated(value = { "http://www.jooq.org", "3.3.0" }, comments = "This class is generated by jOOQ")
@java.lang.SuppressWarnings({ "all", "unchecked", "rawtypes" })
@javax.persistence.Entity
@javax.persistence.Table(name = "instance", schema = "cattle")
public class InstanceRecord extends org.jooq.impl.UpdatableRecordImpl<io.cattle.platform.core.model.tables.records.InstanceRecord> implements io.cattle.platform.db.jooq.utils.TableRecordJaxb, io.cattle.platform.core.model.Instance {

	private static final long serialVersionUID = -79467212;

	/** Setter for <code>cattle.instance.id</code>. */
	@Override
	public void setId(java.lang.Long value) { setValue(0, value); }

	/** Getter for <code>cattle.instance.id</code>. */
	@javax.persistence.Id
	@javax.persistence.Column(name = "id", unique = true, nullable = false, precision = 19)
	@Override
	public java.lang.Long getId() { return (java.lang.Long) getValue(0); }

	/** Setter for <code>cattle.instance.name</code>. */
	@Override
	public void setName(java.lang.String value) { setValue(1, value); }

	/** Getter for <code>cattle.instance.name</code>. */
	@javax.persistence.Column(name = "name", length = 255)
	@Override
	public java.lang.String getName() { return (java.lang.String) getValue(1); }

	/** Setter for <code>cattle.instance.account_id</code>. */
	@Override
	public void setAccountId(java.lang.Long value) { setValue(2, value); }

	/** Getter for <code>cattle.instance.account_id</code>. */
	@javax.persistence.Column(name = "account_id", precision = 19)
	@Override
	public java.lang.Long getAccountId() { return (java.lang.Long) getValue(2); }

	/** Setter for <code>cattle.instance.kind</code>. */
	@Override
	public void setKind(java.lang.String value) { setValue(3, value); }

	/** Getter for <code>cattle.instance.kind</code>. */
	@javax.persistence.Column(name = "kind", nullable = false, length = 255)
	@Override
	public java.lang.String getKind() { return (java.lang.String) getValue(3); }

	/** Setter for <code>cattle.instance.uuid</code>. */
	@Override
	public void setUuid(java.lang.String value) { setValue(4, value); }

	/** Getter for <code>cattle.instance.uuid</code>. */
	@javax.persistence.Column(name = "uuid", unique = true, nullable = false, length = 128)
	@Override
	public java.lang.String getUuid() { return (java.lang.String) getValue(4); }

	/** Setter for <code>cattle.instance.description</code>. */
	@Override
	public void setDescription(java.lang.String value) { setValue(5, value); }

	/** Getter for <code>cattle.instance.description</code>. */
	@javax.persistence.Column(name = "description", length = 1024)
	@Override
	public java.lang.String getDescription() { return (java.lang.String) getValue(5); }

	/** Setter for <code>cattle.instance.state</code>. */
	@Override
	public void setState(java.lang.String value) { setValue(6, value); }

	/** Getter for <code>cattle.instance.state</code>. */
	@javax.persistence.Column(name = "state", nullable = false, length = 128)
	@Override
	public java.lang.String getState() { return (java.lang.String) getValue(6); }

	/** Setter for <code>cattle.instance.created</code>. */
	@Override
	public void setCreated(java.util.Date value) { setValue(7, value); }

	/** Getter for <code>cattle.instance.created</code>. */
	@javax.persistence.Column(name = "created")
	@Override
	public java.util.Date getCreated() { return (java.util.Date) getValue(7); }

	/** Setter for <code>cattle.instance.removed</code>. */
	@Override
	public void setRemoved(java.util.Date value) { setValue(8, value); }

	/** Getter for <code>cattle.instance.removed</code>. */
	@javax.persistence.Column(name = "removed")
	@Override
	public java.util.Date getRemoved() { return (java.util.Date) getValue(8); }

	/** Setter for <code>cattle.instance.remove_time</code>. */
	@Override
	public void setRemoveTime(java.util.Date value) { setValue(9, value); }

	/** Getter for <code>cattle.instance.remove_time</code>. */
	@javax.persistence.Column(name = "remove_time")
	@Override
	public java.util.Date getRemoveTime() { return (java.util.Date) getValue(9); }

	/** Setter for <code>cattle.instance.data</code>. */
	@Override
	public void setData(java.util.Map<String,Object> value) { setValue(10, value); }

	/** Getter for <code>cattle.instance.data</code>. */
	@javax.persistence.Column(name = "data", length = 16777215)
	@Override
	public java.util.Map<String,Object> getData() { return (java.util.Map<String,Object>) getValue(10); }

	/** Setter for <code>cattle.instance.allocation_state</code>. */
	@Override
	public void setAllocationState(java.lang.String value) { setValue(11, value); }

	/** Getter for <code>cattle.instance.allocation_state</code>. */
	@javax.persistence.Column(name = "allocation_state", length = 255)
	@Override
	public java.lang.String getAllocationState() { return (java.lang.String) getValue(11); }

	/** Setter for <code>cattle.instance.compute</code>. */
	@Override
	public void setCompute(java.lang.Long value) { setValue(12, value); }

	/** Getter for <code>cattle.instance.compute</code>. */
	@javax.persistence.Column(name = "compute", precision = 19)
	@Override
	public java.lang.Long getCompute() { return (java.lang.Long) getValue(12); }

	/** Setter for <code>cattle.instance.memory_mb</code>. */
	@Override
	public void setMemoryMb(java.lang.Long value) { setValue(13, value); }

	/** Getter for <code>cattle.instance.memory_mb</code>. */
	@javax.persistence.Column(name = "memory_mb", precision = 19)
	@Override
	public java.lang.Long getMemoryMb() { return (java.lang.Long) getValue(13); }

	/** Setter for <code>cattle.instance.image_id</code>. */
	@Override
	public void setImageId(java.lang.Long value) { setValue(14, value); }

	/** Getter for <code>cattle.instance.image_id</code>. */
	@javax.persistence.Column(name = "image_id", precision = 19)
	@Override
	public java.lang.Long getImageId() { return (java.lang.Long) getValue(14); }

	/** Setter for <code>cattle.instance.offering_id</code>. */
	@Override
	public void setOfferingId(java.lang.Long value) { setValue(15, value); }

	/** Getter for <code>cattle.instance.offering_id</code>. */
	@javax.persistence.Column(name = "offering_id", precision = 19)
	@Override
	public java.lang.Long getOfferingId() { return (java.lang.Long) getValue(15); }

	/** Setter for <code>cattle.instance.hostname</code>. */
	@Override
	public void setHostname(java.lang.String value) { setValue(16, value); }

	/** Getter for <code>cattle.instance.hostname</code>. */
	@javax.persistence.Column(name = "hostname", length = 255)
	@Override
	public java.lang.String getHostname() { return (java.lang.String) getValue(16); }

	/** Setter for <code>cattle.instance.zone_id</code>. */
	@Override
	public void setZoneId(java.lang.Long value) { setValue(17, value); }

	/** Getter for <code>cattle.instance.zone_id</code>. */
	@javax.persistence.Column(name = "zone_id", precision = 19)
	@Override
	public java.lang.Long getZoneId() { return (java.lang.Long) getValue(17); }

	/** Setter for <code>cattle.instance.instance_triggered_stop</code>. */
	@Override
	public void setInstanceTriggeredStop(java.lang.String value) { setValue(18, value); }

	/** Getter for <code>cattle.instance.instance_triggered_stop</code>. */
	@javax.persistence.Column(name = "instance_triggered_stop", length = 128)
	@Override
	public java.lang.String getInstanceTriggeredStop() { return (java.lang.String) getValue(18); }

	/** Setter for <code>cattle.instance.agent_id</code>. */
	@Override
	public void setAgentId(java.lang.Long value) { setValue(19, value); }

	/** Getter for <code>cattle.instance.agent_id</code>. */
	@javax.persistence.Column(name = "agent_id", precision = 19)
	@Override
	public java.lang.Long getAgentId() { return (java.lang.Long) getValue(19); }

	/** Setter for <code>cattle.instance.domain</code>. */
	@Override
	public void setDomain(java.lang.String value) { setValue(20, value); }

	/** Getter for <code>cattle.instance.domain</code>. */
	@javax.persistence.Column(name = "domain", length = 128)
	@Override
	public java.lang.String getDomain() { return (java.lang.String) getValue(20); }

	/** Setter for <code>cattle.instance.first_running</code>. */
	@Override
	public void setFirstRunning(java.util.Date value) { setValue(21, value); }

	/** Getter for <code>cattle.instance.first_running</code>. */
	@javax.persistence.Column(name = "first_running")
	@Override
	public java.util.Date getFirstRunning() { return (java.util.Date) getValue(21); }

	/** Setter for <code>cattle.instance.token</code>. */
	@Override
	public void setToken(java.lang.String value) { setValue(22, value); }

	/** Getter for <code>cattle.instance.token</code>. */
	@javax.persistence.Column(name = "token", length = 255)
	@Override
	public java.lang.String getToken() { return (java.lang.String) getValue(22); }

	/** Setter for <code>cattle.instance.userdata</code>. */
	@Override
	public void setUserdata(java.lang.String value) { setValue(23, value); }

	/** Getter for <code>cattle.instance.userdata</code>. */
	@javax.persistence.Column(name = "userdata", length = 65535)
	@Override
	public java.lang.String getUserdata() { return (java.lang.String) getValue(23); }

	/** Setter for <code>cattle.instance.system_container</code>. */
	@Override
	public void setSystemContainer(java.lang.String value) { setValue(24, value); }

	/** Getter for <code>cattle.instance.system_container</code>. */
	@javax.persistence.Column(name = "system_container", length = 128)
	@Override
	public java.lang.String getSystemContainer() { return (java.lang.String) getValue(24); }

	/** Setter for <code>cattle.instance.registry_credential_id</code>. */
	@Override
	public void setRegistryCredentialId(java.lang.Long value) { setValue(25, value); }

	/** Getter for <code>cattle.instance.registry_credential_id</code>. */
	@javax.persistence.Column(name = "registry_credential_id", precision = 19)
	@Override
	public java.lang.Long getRegistryCredentialId() { return (java.lang.Long) getValue(25); }

	/** Setter for <code>cattle.instance.external_id</code>. */
	@Override
	public void setExternalId(java.lang.String value) { setValue(26, value); }

	/** Getter for <code>cattle.instance.external_id</code>. */
	@javax.persistence.Column(name = "external_id", length = 128)
	@Override
	public java.lang.String getExternalId() { return (java.lang.String) getValue(26); }

	/** Setter for <code>cattle.instance.native_container</code>. */
	@Override
	public void setNativeContainer(java.lang.Boolean value) { setValue(27, value); }

	/** Getter for <code>cattle.instance.native_container</code>. */
	@javax.persistence.Column(name = "native_container", nullable = false, precision = 1)
	@Override
	public java.lang.Boolean getNativeContainer() { return (java.lang.Boolean) getValue(27); }

	/** Setter for <code>cattle.instance.network_container_id</code>. */
	@Override
	public void setNetworkContainerId(java.lang.Long value) { setValue(28, value); }

	/** Getter for <code>cattle.instance.network_container_id</code>. */
	@javax.persistence.Column(name = "network_container_id", precision = 19)
	@Override
	public java.lang.Long getNetworkContainerId() { return (java.lang.Long) getValue(28); }

	/** Setter for <code>cattle.instance.health_state</code>. */
	@Override
	public void setHealthState(java.lang.String value) { setValue(29, value); }

	/** Getter for <code>cattle.instance.health_state</code>. */
	@javax.persistence.Column(name = "health_state", length = 128)
	@Override
	public java.lang.String getHealthState() { return (java.lang.String) getValue(29); }

	/** Setter for <code>cattle.instance.start_count</code>. */
	@Override
	public void setStartCount(java.lang.Long value) { setValue(30, value); }

	/** Getter for <code>cattle.instance.start_count</code>. */
	@javax.persistence.Column(name = "start_count", precision = 19)
	@Override
	public java.lang.Long getStartCount() { return (java.lang.Long) getValue(30); }

	/** Setter for <code>cattle.instance.create_index</code>. */
	@Override
	public void setCreateIndex(java.lang.Long value) { setValue(31, value); }

	/** Getter for <code>cattle.instance.create_index</code>. */
	@javax.persistence.Column(name = "create_index", precision = 19)
	@Override
	public java.lang.Long getCreateIndex() { return (java.lang.Long) getValue(31); }

	/** Setter for <code>cattle.instance.deployment_unit_uuid</code>. */
	@Override
	public void setDeploymentUnitUuid(java.lang.String value) { setValue(32, value); }

	/** Getter for <code>cattle.instance.deployment_unit_uuid</code>. */
	@javax.persistence.Column(name = "deployment_unit_uuid", length = 128)
	@Override
	public java.lang.String getDeploymentUnitUuid() { return (java.lang.String) getValue(32); }

	// -------------------------------------------------------------------------
	// Primary key information
	// -------------------------------------------------------------------------

	/**
	 * {@inheritDoc}
	 */
	@Override
	public org.jooq.Record1<java.lang.Long> key() {
		return (org.jooq.Record1) super.key();
	}

	// -------------------------------------------------------------------------
	// FROM and INTO
	// -------------------------------------------------------------------------

	/**
	 * {@inheritDoc}
	 *
	 * Copies every column of {@code from} into this record, in column order.
	 */
	@Override
	public void from(io.cattle.platform.core.model.Instance from) {
		setId(from.getId());
		setName(from.getName());
		setAccountId(from.getAccountId());
		setKind(from.getKind());
		setUuid(from.getUuid());
		setDescription(from.getDescription());
		setState(from.getState());
		setCreated(from.getCreated());
		setRemoved(from.getRemoved());
		setRemoveTime(from.getRemoveTime());
		setData(from.getData());
		setAllocationState(from.getAllocationState());
		setCompute(from.getCompute());
		setMemoryMb(from.getMemoryMb());
		setImageId(from.getImageId());
		setOfferingId(from.getOfferingId());
		setHostname(from.getHostname());
		setZoneId(from.getZoneId());
		setInstanceTriggeredStop(from.getInstanceTriggeredStop());
		setAgentId(from.getAgentId());
		setDomain(from.getDomain());
		setFirstRunning(from.getFirstRunning());
		setToken(from.getToken());
		setUserdata(from.getUserdata());
		setSystemContainer(from.getSystemContainer());
		setRegistryCredentialId(from.getRegistryCredentialId());
		setExternalId(from.getExternalId());
		setNativeContainer(from.getNativeContainer());
		setNetworkContainerId(from.getNetworkContainerId());
		setHealthState(from.getHealthState());
		setStartCount(from.getStartCount());
		setCreateIndex(from.getCreateIndex());
		setDeploymentUnitUuid(from.getDeploymentUnitUuid());
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public <E extends io.cattle.platform.core.model.Instance> E into(E into) {
		into.from(this);
		return into;
	}

	// -------------------------------------------------------------------------
	// Constructors
	// -------------------------------------------------------------------------

	/**
	 * Create a detached InstanceRecord
	 */
	public InstanceRecord() {
		super(io.cattle.platform.core.model.tables.InstanceTable.INSTANCE);
	}

	/**
	 * Create a detached, initialised InstanceRecord
	 */
	public InstanceRecord(java.lang.Long id, java.lang.String name, java.lang.Long accountId, java.lang.String kind, java.lang.String uuid, java.lang.String description, java.lang.String state, java.util.Date created, java.util.Date removed, java.util.Date removeTime, java.util.Map<String,Object> data, java.lang.String allocationState, java.lang.Long compute, java.lang.Long memoryMb, java.lang.Long imageId, java.lang.Long offeringId, java.lang.String hostname, java.lang.Long zoneId, java.lang.String instanceTriggeredStop, java.lang.Long agentId, java.lang.String domain, java.util.Date firstRunning, java.lang.String token, java.lang.String userdata, java.lang.String systemContainer, java.lang.Long registryCredentialId, java.lang.String externalId, java.lang.Boolean nativeContainer, java.lang.Long networkContainerId, java.lang.String healthState, java.lang.Long startCount, java.lang.Long createIndex, java.lang.String deploymentUnitUuid) {
		super(io.cattle.platform.core.model.tables.InstanceTable.INSTANCE);

		setValue(0, id);
		setValue(1, name);
		setValue(2, accountId);
		setValue(3, kind);
		setValue(4, uuid);
		setValue(5, description);
		setValue(6, state);
		setValue(7, created);
		setValue(8, removed);
		setValue(9, removeTime);
		setValue(10, data);
		setValue(11, allocationState);
		setValue(12, compute);
		setValue(13, memoryMb);
		setValue(14, imageId);
		setValue(15, offeringId);
		setValue(16, hostname);
		setValue(17, zoneId);
		setValue(18, instanceTriggeredStop);
		setValue(19, agentId);
		setValue(20, domain);
		setValue(21, firstRunning);
		setValue(22, token);
		setValue(23, userdata);
		setValue(24, systemContainer);
		setValue(25, registryCredentialId);
		setValue(26, externalId);
		setValue(27, nativeContainer);
		setValue(28, networkContainerId);
		setValue(29, healthState);
		setValue(30, startCount);
		setValue(31, createIndex);
		setValue(32, deploymentUnitUuid);
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.configuration; import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.annotation.docs.Documentation; import org.apache.flink.configuration.description.Description; import static org.apache.flink.configuration.ConfigOptions.key; import static org.apache.flink.configuration.description.LinkElement.link; import static org.apache.flink.configuration.description.TextElement.text; /** * Configuration options for the JobManager. */ @PublicEvolving public class JobManagerOptions { /** * The config parameter defining the network address to connect to * for communication with the job manager. * * <p>This value is only interpreted in setups where a single JobManager with static * name or address exists (simple standalone setups, or container setups with dynamic * service name resolution). It is not used in many high-availability setups, when a * leader-election service (like ZooKeeper) is used to elect and discover the JobManager * leader from potentially multiple standby JobManagers. 
*/ public static final ConfigOption<String> ADDRESS = key("jobmanager.rpc.address") .noDefaultValue() .withDescription("The config parameter defining the network address to connect to" + " for communication with the job manager." + " This value is only interpreted in setups where a single JobManager with static" + " name or address exists (simple standalone setups, or container setups with dynamic" + " service name resolution). It is not used in many high-availability setups, when a" + " leader-election service (like ZooKeeper) is used to elect and discover the JobManager" + " leader from potentially multiple standby JobManagers."); /** * The config parameter defining the network port to connect to * for communication with the job manager. * * <p>Like {@link JobManagerOptions#ADDRESS}, this value is only interpreted in setups where * a single JobManager with static name/address and port exists (simple standalone setups, * or container setups with dynamic service name resolution). * This config option is not used in many high-availability setups, when a * leader-election service (like ZooKeeper) is used to elect and discover the JobManager * leader from potentially multiple standby JobManagers. */ public static final ConfigOption<Integer> PORT = key("jobmanager.rpc.port") .defaultValue(6123) .withDescription("The config parameter defining the network port to connect to" + " for communication with the job manager." + " Like " + ADDRESS.key() + ", this value is only interpreted in setups where" + " a single JobManager with static name/address and port exists (simple standalone setups," + " or container setups with dynamic service name resolution)." + " This config option is not used in many high-availability setups, when a" + " leader-election service (like ZooKeeper) is used to elect and discover the JobManager" + " leader from potentially multiple standby JobManagers."); /** * JVM heap size for the JobManager with memory size. 
*/ @Documentation.CommonOption(position = Documentation.CommonOption.POSITION_MEMORY) public static final ConfigOption<String> JOB_MANAGER_HEAP_MEMORY = key("jobmanager.heap.size") .defaultValue("1024m") .withDescription("JVM heap size for the JobManager."); /** * JVM heap size (in megabytes) for the JobManager. * @deprecated use {@link #JOB_MANAGER_HEAP_MEMORY} */ @Deprecated public static final ConfigOption<Integer> JOB_MANAGER_HEAP_MEMORY_MB = key("jobmanager.heap.mb") .defaultValue(1024) .withDescription("JVM heap size (in megabytes) for the JobManager."); /** * The maximum number of prior execution attempts kept in history. */ public static final ConfigOption<Integer> MAX_ATTEMPTS_HISTORY_SIZE = key("jobmanager.execution.attempts-history-size") .defaultValue(16) .withDeprecatedKeys("job-manager.max-attempts-history-size") .withDescription("The maximum number of prior execution attempts kept in history."); /** * This option specifies the failover strategy, i.e. how the job computation recovers from task failures. * * <p>The options "individual" and "region-legacy" are intentionally not included * as they have some known limitations or issues: * <ul> * <li>"individual" strategy only works when all tasks are not connected, in which case the "region" * failover strategy would also restart failed tasks individually. * <li>"region-legacy" strategy is not able to backtrack missing input result partitions. * </ul> * The new "region" strategy supersedes "individual" and "region-legacy" strategies and should always work. */ public static final ConfigOption<String> EXECUTION_FAILOVER_STRATEGY = key("jobmanager.execution.failover-strategy") .defaultValue("full") .withDescription(Description.builder() .text("This option specifies how the job computation recovers from task failures. " + "Accepted values are:") .list( text("'full': Restarts all tasks to recover the job."), text("'region': Restarts all tasks that could be affected by the task failure. 
" + "More details can be found %s.", link( "../dev/task_failure_recovery.html#restart-pipelined-region-failover-strategy", "here")) ).build()); /** * The location where the JobManager stores the archives of completed jobs. */ public static final ConfigOption<String> ARCHIVE_DIR = key("jobmanager.archive.fs.dir") .noDefaultValue() .withDescription("Dictionary for JobManager to store the archives of completed jobs."); /** * The job store cache size in bytes which is used to keep completed * jobs in memory. */ public static final ConfigOption<Long> JOB_STORE_CACHE_SIZE = key("jobstore.cache-size") .defaultValue(50L * 1024L * 1024L) .withDescription("The job store cache size in bytes which is used to keep completed jobs in memory."); /** * The time in seconds after which a completed job expires and is purged from the job store. */ public static final ConfigOption<Long> JOB_STORE_EXPIRATION_TIME = key("jobstore.expiration-time") .defaultValue(60L * 60L) .withDescription("The time in seconds after which a completed job expires and is purged from the job store."); /** * The timeout in milliseconds for requesting a slot from Slot Pool. */ public static final ConfigOption<Long> SLOT_REQUEST_TIMEOUT = key("slot.request.timeout") .defaultValue(5L * 60L * 1000L) .withDescription("The timeout in milliseconds for requesting a slot from Slot Pool."); /** * The timeout in milliseconds for a idle slot in Slot Pool. */ public static final ConfigOption<Long> SLOT_IDLE_TIMEOUT = key("slot.idle.timeout") // default matches heartbeat.timeout so that sticky allocation is not lost on timeouts for local recovery .defaultValue(HeartbeatManagerOptions.HEARTBEAT_TIMEOUT.defaultValue()) .withDescription("The timeout in milliseconds for a idle slot in Slot Pool."); /** * Config parameter determining the scheduler implementation. 
*/ @Documentation.ExcludeFromDocumentation("SchedulerNG is still in development.") public static final ConfigOption<String> SCHEDULER = key("jobmanager.scheduler") .defaultValue("legacy") .withDescription(Description.builder() .text("Determines which scheduler implementation is used to schedule tasks. Accepted values are:") .list( text("'legacy': legacy scheduler"), text("'ng': new generation scheduler")) .build()); /** * Config parameter controlling whether partitions should already be released during the job execution. */ @Documentation.ExcludeFromDocumentation("User normally should not be expected to deactivate this feature. " + "We aim at removing this flag eventually.") public static final ConfigOption<Boolean> PARTITION_RELEASE_DURING_JOB_EXECUTION = key("jobmanager.partition.release-during-job-execution") .defaultValue(true) .withDescription("Controls whether partitions should already be released during the job execution."); // --------------------------------------------------------------------------------------------- private JobManagerOptions() { throw new IllegalAccessError(); } }
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University // Copyright (c) 2011, 2012 Open Networking Foundation // Copyright (c) 2012, 2013 Big Switch Networks, Inc. // This library was generated by the LoxiGen Compiler. // See the file LICENSE.txt which should have been included in the source distribution // Automatically generated by LOXI from template of_class.java // Do not modify package org.projectfloodlight.openflow.protocol.ver15; import org.projectfloodlight.openflow.protocol.*; import org.projectfloodlight.openflow.protocol.action.*; import org.projectfloodlight.openflow.protocol.actionid.*; import org.projectfloodlight.openflow.protocol.bsntlv.*; import org.projectfloodlight.openflow.protocol.errormsg.*; import org.projectfloodlight.openflow.protocol.meterband.*; import org.projectfloodlight.openflow.protocol.instruction.*; import org.projectfloodlight.openflow.protocol.instructionid.*; import org.projectfloodlight.openflow.protocol.match.*; import org.projectfloodlight.openflow.protocol.stat.*; import org.projectfloodlight.openflow.protocol.oxm.*; import org.projectfloodlight.openflow.protocol.oxs.*; import org.projectfloodlight.openflow.protocol.queueprop.*; import org.projectfloodlight.openflow.types.*; import org.projectfloodlight.openflow.util.*; import org.projectfloodlight.openflow.exceptions.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.List; import com.google.common.collect.ImmutableList; import java.util.Set; import io.netty.buffer.ByteBuf; import com.google.common.hash.PrimitiveSink; import com.google.common.hash.Funnel; class OFQueueStatsEntryVer15 implements OFQueueStatsEntry { private static final Logger logger = LoggerFactory.getLogger(OFQueueStatsEntryVer15.class); // version: 1.5 final static byte WIRE_VERSION = 6; final static int MINIMUM_LENGTH = 48; // maximum OF message length: 16 bit, unsigned final static int MAXIMUM_LENGTH = 0xFFFF; private final static OFPort 
DEFAULT_PORT_NO = OFPort.ANY; private final static long DEFAULT_QUEUE_ID = 0x0L; private final static U64 DEFAULT_TX_BYTES = U64.ZERO; private final static U64 DEFAULT_TX_PACKETS = U64.ZERO; private final static U64 DEFAULT_TX_ERRORS = U64.ZERO; private final static long DEFAULT_DURATION_SEC = 0x0L; private final static long DEFAULT_DURATION_NSEC = 0x0L; private final static List<OFQueueStatsProp> DEFAULT_PROPERTIES = ImmutableList.<OFQueueStatsProp>of(); // OF message fields private final OFPort portNo; private final long queueId; private final U64 txBytes; private final U64 txPackets; private final U64 txErrors; private final long durationSec; private final long durationNsec; private final List<OFQueueStatsProp> properties; // // Immutable default instance final static OFQueueStatsEntryVer15 DEFAULT = new OFQueueStatsEntryVer15( DEFAULT_PORT_NO, DEFAULT_QUEUE_ID, DEFAULT_TX_BYTES, DEFAULT_TX_PACKETS, DEFAULT_TX_ERRORS, DEFAULT_DURATION_SEC, DEFAULT_DURATION_NSEC, DEFAULT_PROPERTIES ); // package private constructor - used by readers, builders, and factory OFQueueStatsEntryVer15(OFPort portNo, long queueId, U64 txBytes, U64 txPackets, U64 txErrors, long durationSec, long durationNsec, List<OFQueueStatsProp> properties) { if(portNo == null) { throw new NullPointerException("OFQueueStatsEntryVer15: property portNo cannot be null"); } if(txBytes == null) { throw new NullPointerException("OFQueueStatsEntryVer15: property txBytes cannot be null"); } if(txPackets == null) { throw new NullPointerException("OFQueueStatsEntryVer15: property txPackets cannot be null"); } if(txErrors == null) { throw new NullPointerException("OFQueueStatsEntryVer15: property txErrors cannot be null"); } if(properties == null) { throw new NullPointerException("OFQueueStatsEntryVer15: property properties cannot be null"); } this.portNo = portNo; this.queueId = U32.normalize(queueId); this.txBytes = txBytes; this.txPackets = txPackets; this.txErrors = txErrors; this.durationSec = 
U32.normalize(durationSec); this.durationNsec = U32.normalize(durationNsec); this.properties = properties; } // Accessors for OF message fields @Override public OFPort getPortNo() { return portNo; } @Override public long getQueueId() { return queueId; } @Override public U64 getTxBytes() { return txBytes; } @Override public U64 getTxPackets() { return txPackets; } @Override public U64 getTxErrors() { return txErrors; } @Override public long getDurationSec() { return durationSec; } @Override public long getDurationNsec() { return durationNsec; } @Override public List<OFQueueStatsProp> getProperties() { return properties; } @Override public OFVersion getVersion() { return OFVersion.OF_15; } public OFQueueStatsEntry.Builder createBuilder() { return new BuilderWithParent(this); } static class BuilderWithParent implements OFQueueStatsEntry.Builder { final OFQueueStatsEntryVer15 parentMessage; // OF message fields private boolean portNoSet; private OFPort portNo; private boolean queueIdSet; private long queueId; private boolean txBytesSet; private U64 txBytes; private boolean txPacketsSet; private U64 txPackets; private boolean txErrorsSet; private U64 txErrors; private boolean durationSecSet; private long durationSec; private boolean durationNsecSet; private long durationNsec; private boolean propertiesSet; private List<OFQueueStatsProp> properties; BuilderWithParent(OFQueueStatsEntryVer15 parentMessage) { this.parentMessage = parentMessage; } @Override public OFPort getPortNo() { return portNo; } @Override public OFQueueStatsEntry.Builder setPortNo(OFPort portNo) { this.portNo = portNo; this.portNoSet = true; return this; } @Override public long getQueueId() { return queueId; } @Override public OFQueueStatsEntry.Builder setQueueId(long queueId) { this.queueId = queueId; this.queueIdSet = true; return this; } @Override public U64 getTxBytes() { return txBytes; } @Override public OFQueueStatsEntry.Builder setTxBytes(U64 txBytes) { this.txBytes = txBytes; this.txBytesSet = 
true; return this; } @Override public U64 getTxPackets() { return txPackets; } @Override public OFQueueStatsEntry.Builder setTxPackets(U64 txPackets) { this.txPackets = txPackets; this.txPacketsSet = true; return this; } @Override public U64 getTxErrors() { return txErrors; } @Override public OFQueueStatsEntry.Builder setTxErrors(U64 txErrors) { this.txErrors = txErrors; this.txErrorsSet = true; return this; } @Override public long getDurationSec() { return durationSec; } @Override public OFQueueStatsEntry.Builder setDurationSec(long durationSec) { this.durationSec = durationSec; this.durationSecSet = true; return this; } @Override public long getDurationNsec() { return durationNsec; } @Override public OFQueueStatsEntry.Builder setDurationNsec(long durationNsec) { this.durationNsec = durationNsec; this.durationNsecSet = true; return this; } @Override public List<OFQueueStatsProp> getProperties() { return properties; } @Override public OFQueueStatsEntry.Builder setProperties(List<OFQueueStatsProp> properties) { this.properties = properties; this.propertiesSet = true; return this; } @Override public OFVersion getVersion() { return OFVersion.OF_15; } @Override public OFQueueStatsEntry build() { OFPort portNo = this.portNoSet ? this.portNo : parentMessage.portNo; if(portNo == null) throw new NullPointerException("Property portNo must not be null"); long queueId = this.queueIdSet ? this.queueId : parentMessage.queueId; U64 txBytes = this.txBytesSet ? this.txBytes : parentMessage.txBytes; if(txBytes == null) throw new NullPointerException("Property txBytes must not be null"); U64 txPackets = this.txPacketsSet ? this.txPackets : parentMessage.txPackets; if(txPackets == null) throw new NullPointerException("Property txPackets must not be null"); U64 txErrors = this.txErrorsSet ? this.txErrors : parentMessage.txErrors; if(txErrors == null) throw new NullPointerException("Property txErrors must not be null"); long durationSec = this.durationSecSet ? 
this.durationSec : parentMessage.durationSec; long durationNsec = this.durationNsecSet ? this.durationNsec : parentMessage.durationNsec; List<OFQueueStatsProp> properties = this.propertiesSet ? this.properties : parentMessage.properties; if(properties == null) throw new NullPointerException("Property properties must not be null"); // return new OFQueueStatsEntryVer15( portNo, queueId, txBytes, txPackets, txErrors, durationSec, durationNsec, properties ); } } static class Builder implements OFQueueStatsEntry.Builder { // OF message fields private boolean portNoSet; private OFPort portNo; private boolean queueIdSet; private long queueId; private boolean txBytesSet; private U64 txBytes; private boolean txPacketsSet; private U64 txPackets; private boolean txErrorsSet; private U64 txErrors; private boolean durationSecSet; private long durationSec; private boolean durationNsecSet; private long durationNsec; private boolean propertiesSet; private List<OFQueueStatsProp> properties; @Override public OFPort getPortNo() { return portNo; } @Override public OFQueueStatsEntry.Builder setPortNo(OFPort portNo) { this.portNo = portNo; this.portNoSet = true; return this; } @Override public long getQueueId() { return queueId; } @Override public OFQueueStatsEntry.Builder setQueueId(long queueId) { this.queueId = queueId; this.queueIdSet = true; return this; } @Override public U64 getTxBytes() { return txBytes; } @Override public OFQueueStatsEntry.Builder setTxBytes(U64 txBytes) { this.txBytes = txBytes; this.txBytesSet = true; return this; } @Override public U64 getTxPackets() { return txPackets; } @Override public OFQueueStatsEntry.Builder setTxPackets(U64 txPackets) { this.txPackets = txPackets; this.txPacketsSet = true; return this; } @Override public U64 getTxErrors() { return txErrors; } @Override public OFQueueStatsEntry.Builder setTxErrors(U64 txErrors) { this.txErrors = txErrors; this.txErrorsSet = true; return this; } @Override public long getDurationSec() { return 
durationSec; } @Override public OFQueueStatsEntry.Builder setDurationSec(long durationSec) { this.durationSec = durationSec; this.durationSecSet = true; return this; } @Override public long getDurationNsec() { return durationNsec; } @Override public OFQueueStatsEntry.Builder setDurationNsec(long durationNsec) { this.durationNsec = durationNsec; this.durationNsecSet = true; return this; } @Override public List<OFQueueStatsProp> getProperties() { return properties; } @Override public OFQueueStatsEntry.Builder setProperties(List<OFQueueStatsProp> properties) { this.properties = properties; this.propertiesSet = true; return this; } @Override public OFVersion getVersion() { return OFVersion.OF_15; } // @Override public OFQueueStatsEntry build() { OFPort portNo = this.portNoSet ? this.portNo : DEFAULT_PORT_NO; if(portNo == null) throw new NullPointerException("Property portNo must not be null"); long queueId = this.queueIdSet ? this.queueId : DEFAULT_QUEUE_ID; U64 txBytes = this.txBytesSet ? this.txBytes : DEFAULT_TX_BYTES; if(txBytes == null) throw new NullPointerException("Property txBytes must not be null"); U64 txPackets = this.txPacketsSet ? this.txPackets : DEFAULT_TX_PACKETS; if(txPackets == null) throw new NullPointerException("Property txPackets must not be null"); U64 txErrors = this.txErrorsSet ? this.txErrors : DEFAULT_TX_ERRORS; if(txErrors == null) throw new NullPointerException("Property txErrors must not be null"); long durationSec = this.durationSecSet ? this.durationSec : DEFAULT_DURATION_SEC; long durationNsec = this.durationNsecSet ? this.durationNsec : DEFAULT_DURATION_NSEC; List<OFQueueStatsProp> properties = this.propertiesSet ? 
this.properties : DEFAULT_PROPERTIES; if(properties == null) throw new NullPointerException("Property properties must not be null"); return new OFQueueStatsEntryVer15( portNo, queueId, txBytes, txPackets, txErrors, durationSec, durationNsec, properties ); } } final static Reader READER = new Reader(); static class Reader implements OFMessageReader<OFQueueStatsEntry> { @Override public OFQueueStatsEntry readFrom(ByteBuf bb) throws OFParseError { int start = bb.readerIndex(); int length = U16.f(bb.readShort()); if(length < MINIMUM_LENGTH) throw new OFParseError("Wrong length: Expected to be >= " + MINIMUM_LENGTH + ", was: " + length); if(bb.readableBytes() + (bb.readerIndex() - start) < length) { // Buffer does not have all data yet bb.readerIndex(start); return null; } if(logger.isTraceEnabled()) logger.trace("readFrom - length={}", length); // pad: 6 bytes bb.skipBytes(6); OFPort portNo = OFPort.read4Bytes(bb); long queueId = U32.f(bb.readInt()); U64 txBytes = U64.ofRaw(bb.readLong()); U64 txPackets = U64.ofRaw(bb.readLong()); U64 txErrors = U64.ofRaw(bb.readLong()); long durationSec = U32.f(bb.readInt()); long durationNsec = U32.f(bb.readInt()); List<OFQueueStatsProp> properties = ChannelUtils.readList(bb, length - (bb.readerIndex() - start), OFQueueStatsPropVer15.READER); OFQueueStatsEntryVer15 queueStatsEntryVer15 = new OFQueueStatsEntryVer15( portNo, queueId, txBytes, txPackets, txErrors, durationSec, durationNsec, properties ); if(logger.isTraceEnabled()) logger.trace("readFrom - read={}", queueStatsEntryVer15); return queueStatsEntryVer15; } } public void putTo(PrimitiveSink sink) { FUNNEL.funnel(this, sink); } final static OFQueueStatsEntryVer15Funnel FUNNEL = new OFQueueStatsEntryVer15Funnel(); static class OFQueueStatsEntryVer15Funnel implements Funnel<OFQueueStatsEntryVer15> { private static final long serialVersionUID = 1L; @Override public void funnel(OFQueueStatsEntryVer15 message, PrimitiveSink sink) { // FIXME: skip funnel of length // skip pad (6 
bytes) message.portNo.putTo(sink); sink.putLong(message.queueId); message.txBytes.putTo(sink); message.txPackets.putTo(sink); message.txErrors.putTo(sink); sink.putLong(message.durationSec); sink.putLong(message.durationNsec); FunnelUtils.putList(message.properties, sink); } } public void writeTo(ByteBuf bb) { WRITER.write(bb, this); } final static Writer WRITER = new Writer(); static class Writer implements OFMessageWriter<OFQueueStatsEntryVer15> { @Override public void write(ByteBuf bb, OFQueueStatsEntryVer15 message) { int startIndex = bb.writerIndex(); // length is length of variable message, will be updated at the end int lengthIndex = bb.writerIndex(); bb.writeShort(U16.t(0)); // pad: 6 bytes bb.writeZero(6); message.portNo.write4Bytes(bb); bb.writeInt(U32.t(message.queueId)); bb.writeLong(message.txBytes.getValue()); bb.writeLong(message.txPackets.getValue()); bb.writeLong(message.txErrors.getValue()); bb.writeInt(U32.t(message.durationSec)); bb.writeInt(U32.t(message.durationNsec)); ChannelUtils.writeList(bb, message.properties); // update length field int length = bb.writerIndex() - startIndex; if (length > MAXIMUM_LENGTH) { throw new IllegalArgumentException("OFQueueStatsEntryVer15: message length (" + length + ") exceeds maximum (0xFFFF)"); } bb.setShort(lengthIndex, length); } } @Override public String toString() { StringBuilder b = new StringBuilder("OFQueueStatsEntryVer15("); b.append("portNo=").append(portNo); b.append(", "); b.append("queueId=").append(queueId); b.append(", "); b.append("txBytes=").append(txBytes); b.append(", "); b.append("txPackets=").append(txPackets); b.append(", "); b.append("txErrors=").append(txErrors); b.append(", "); b.append("durationSec=").append(durationSec); b.append(", "); b.append("durationNsec=").append(durationNsec); b.append(", "); b.append("properties=").append(properties); b.append(")"); return b.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return 
false; if (getClass() != obj.getClass()) return false; OFQueueStatsEntryVer15 other = (OFQueueStatsEntryVer15) obj; if (portNo == null) { if (other.portNo != null) return false; } else if (!portNo.equals(other.portNo)) return false; if( queueId != other.queueId) return false; if (txBytes == null) { if (other.txBytes != null) return false; } else if (!txBytes.equals(other.txBytes)) return false; if (txPackets == null) { if (other.txPackets != null) return false; } else if (!txPackets.equals(other.txPackets)) return false; if (txErrors == null) { if (other.txErrors != null) return false; } else if (!txErrors.equals(other.txErrors)) return false; if( durationSec != other.durationSec) return false; if( durationNsec != other.durationNsec) return false; if (properties == null) { if (other.properties != null) return false; } else if (!properties.equals(other.properties)) return false; return true; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((portNo == null) ? 0 : portNo.hashCode()); result = prime * (int) (queueId ^ (queueId >>> 32)); result = prime * result + ((txBytes == null) ? 0 : txBytes.hashCode()); result = prime * result + ((txPackets == null) ? 0 : txPackets.hashCode()); result = prime * result + ((txErrors == null) ? 0 : txErrors.hashCode()); result = prime * (int) (durationSec ^ (durationSec >>> 32)); result = prime * (int) (durationNsec ^ (durationNsec >>> 32)); result = prime * result + ((properties == null) ? 0 : properties.hashCode()); return result; } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.client; import org.apache.http.client.methods.HttpDelete; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpHead; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; import org.elasticsearch.action.admin.indices.shrink.ResizeType; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequest; import 
org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; import org.elasticsearch.client.indices.AnalyzeRequest; import org.elasticsearch.client.indices.CloseIndexRequest; import org.elasticsearch.client.indices.CreateDataStreamRequest; import org.elasticsearch.client.indices.CreateIndexRequest; import org.elasticsearch.client.indices.DataStreamsStatsRequest; import org.elasticsearch.client.indices.GetDataStreamRequest; import org.elasticsearch.client.indices.DeleteAliasRequest; import org.elasticsearch.client.indices.DeleteComposableIndexTemplateRequest; import org.elasticsearch.client.indices.DeleteDataStreamRequest; import org.elasticsearch.client.indices.FreezeIndexRequest; import org.elasticsearch.client.indices.GetFieldMappingsRequest; import org.elasticsearch.client.indices.GetIndexRequest; import org.elasticsearch.client.indices.GetComposableIndexTemplateRequest; import org.elasticsearch.client.indices.GetIndexTemplatesRequest; import org.elasticsearch.client.indices.GetMappingsRequest; import org.elasticsearch.client.indices.ComposableIndexTemplateExistRequest; import org.elasticsearch.client.indices.IndexTemplatesExistRequest; import org.elasticsearch.client.indices.PutIndexTemplateRequest; import org.elasticsearch.client.indices.PutComposableIndexTemplateRequest; import org.elasticsearch.client.indices.PutMappingRequest; import org.elasticsearch.client.indices.ReloadAnalyzersRequest; import org.elasticsearch.client.indices.ResizeRequest; import org.elasticsearch.client.indices.SimulateIndexTemplateRequest; import org.elasticsearch.client.indices.UnfreezeIndexRequest; import org.elasticsearch.client.indices.rollover.RolloverRequest; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.Strings; import java.io.IOException; import java.util.Locale; final class IndicesRequestConverters { private IndicesRequestConverters() {} static Request putDataStream(CreateDataStreamRequest createDataStreamRequest) { 
String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_data_stream") .addPathPart(createDataStreamRequest.getName()).build(); Request request = new Request(HttpPut.METHOD_NAME, endpoint); return request; } static Request deleteDataStream(DeleteDataStreamRequest deleteDataStreamRequest) { String name = deleteDataStreamRequest.getName(); String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_data_stream").addPathPart(name).build(); Request request = new Request(HttpDelete.METHOD_NAME, endpoint); return request; } static Request getDataStreams(GetDataStreamRequest dataStreamRequest) { final String endpoint = new RequestConverters.EndpointBuilder() .addPathPartAsIs("_data_stream") .addPathPart(dataStreamRequest.getName()) .build(); return new Request(HttpGet.METHOD_NAME, endpoint); } static Request dataStreamsStats(DataStreamsStatsRequest dataStreamsStatsRequest) { String[] expressions = dataStreamsStatsRequest.indices() == null ? Strings.EMPTY_ARRAY : dataStreamsStatsRequest.indices(); final String endpoint = new RequestConverters.EndpointBuilder() .addPathPartAsIs("_data_stream") .addCommaSeparatedPathParts(expressions) .addPathPartAsIs("_stats") .build(); return new Request(HttpGet.METHOD_NAME, endpoint); } static Request deleteIndex(DeleteIndexRequest deleteIndexRequest) { String endpoint = RequestConverters.endpoint(deleteIndexRequest.indices()); Request request = new Request(HttpDelete.METHOD_NAME, endpoint); RequestConverters.Params parameters = new RequestConverters.Params(); parameters.withTimeout(deleteIndexRequest.timeout()); parameters.withMasterTimeout(deleteIndexRequest.masterNodeTimeout()); parameters.withIndicesOptions(deleteIndexRequest.indicesOptions()); request.addParameters(parameters.asMap()); return request; } static Request openIndex(OpenIndexRequest openIndexRequest) { String endpoint = RequestConverters.endpoint(openIndexRequest.indices(), "_open"); Request request = new Request(HttpPost.METHOD_NAME, 
endpoint); RequestConverters.Params parameters = new RequestConverters.Params(); parameters.withTimeout(openIndexRequest.timeout()); parameters.withMasterTimeout(openIndexRequest.masterNodeTimeout()); parameters.withWaitForActiveShards(openIndexRequest.waitForActiveShards()); parameters.withIndicesOptions(openIndexRequest.indicesOptions()); request.addParameters(parameters.asMap()); return request; } static Request closeIndex(CloseIndexRequest closeIndexRequest) { String endpoint = RequestConverters.endpoint(closeIndexRequest.indices(), "_close"); Request request = new Request(HttpPost.METHOD_NAME, endpoint); RequestConverters.Params parameters = new RequestConverters.Params(); parameters.withTimeout(closeIndexRequest.timeout()); parameters.withMasterTimeout(closeIndexRequest.masterNodeTimeout()); parameters.withIndicesOptions(closeIndexRequest.indicesOptions()); parameters.withWaitForActiveShards(closeIndexRequest.waitForActiveShards()); request.addParameters(parameters.asMap()); return request; } static Request createIndex(CreateIndexRequest createIndexRequest) throws IOException { String endpoint = new RequestConverters.EndpointBuilder() .addPathPart(createIndexRequest.index()).build(); Request request = new Request(HttpPut.METHOD_NAME, endpoint); RequestConverters.Params parameters = new RequestConverters.Params(); parameters.withTimeout(createIndexRequest.timeout()); parameters.withMasterTimeout(createIndexRequest.masterNodeTimeout()); parameters.withWaitForActiveShards(createIndexRequest.waitForActiveShards()); request.addParameters(parameters.asMap()); request.setEntity(RequestConverters.createEntity(createIndexRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); return request; } static Request updateAliases(IndicesAliasesRequest indicesAliasesRequest) throws IOException { Request request = new Request(HttpPost.METHOD_NAME, "/_aliases"); RequestConverters.Params parameters = new RequestConverters.Params(); 
parameters.withTimeout(indicesAliasesRequest.timeout()); parameters.withMasterTimeout(indicesAliasesRequest.masterNodeTimeout()); request.addParameters(parameters.asMap()); request.setEntity(RequestConverters.createEntity(indicesAliasesRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); return request; } static Request putMapping(PutMappingRequest putMappingRequest) throws IOException { Request request = new Request(HttpPut.METHOD_NAME, RequestConverters.endpoint(putMappingRequest.indices(), "_mapping")); RequestConverters.Params parameters = new RequestConverters.Params(); parameters.withTimeout(putMappingRequest.timeout()); parameters.withMasterTimeout(putMappingRequest.masterNodeTimeout()); parameters.withIndicesOptions(putMappingRequest.indicesOptions()); request.addParameters(parameters.asMap()); request.setEntity(RequestConverters.createEntity(putMappingRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); return request; } static Request getMappings(GetMappingsRequest getMappingsRequest) { String[] indices = getMappingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getMappingsRequest.indices(); Request request = new Request(HttpGet.METHOD_NAME, RequestConverters.endpoint(indices, "_mapping")); RequestConverters.Params parameters = new RequestConverters.Params(); parameters.withMasterTimeout(getMappingsRequest.masterNodeTimeout()); parameters.withIndicesOptions(getMappingsRequest.indicesOptions()); parameters.withLocal(getMappingsRequest.local()); request.addParameters(parameters.asMap()); return request; } static Request getFieldMapping(GetFieldMappingsRequest getFieldMappingsRequest) { String[] indices = getFieldMappingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getFieldMappingsRequest.indices(); String[] fields = getFieldMappingsRequest.fields() == null ? 
Strings.EMPTY_ARRAY : getFieldMappingsRequest.fields(); String endpoint = new RequestConverters.EndpointBuilder() .addCommaSeparatedPathParts(indices) .addPathPartAsIs("_mapping") .addPathPartAsIs("field") .addCommaSeparatedPathParts(fields) .build(); Request request = new Request(HttpGet.METHOD_NAME, endpoint); RequestConverters.Params parameters = new RequestConverters.Params(); parameters.withIndicesOptions(getFieldMappingsRequest.indicesOptions()); parameters.withIncludeDefaults(getFieldMappingsRequest.includeDefaults()); request.addParameters(parameters.asMap()); return request; } static Request refresh(RefreshRequest refreshRequest) { String[] indices = refreshRequest.indices() == null ? Strings.EMPTY_ARRAY : refreshRequest.indices(); Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_refresh")); RequestConverters.Params parameters = new RequestConverters.Params(); parameters.withIndicesOptions(refreshRequest.indicesOptions()); request.addParameters(parameters.asMap()); return request; } static Request flush(FlushRequest flushRequest) { String[] indices = flushRequest.indices() == null ? Strings.EMPTY_ARRAY : flushRequest.indices(); Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_flush")); RequestConverters.Params parameters = new RequestConverters.Params(); parameters.withIndicesOptions(flushRequest.indicesOptions()); parameters.putParam("wait_if_ongoing", Boolean.toString(flushRequest.waitIfOngoing())); parameters.putParam("force", Boolean.toString(flushRequest.force())); request.addParameters(parameters.asMap()); return request; } static Request forceMerge(ForceMergeRequest forceMergeRequest) { String[] indices = forceMergeRequest.indices() == null ? 
Strings.EMPTY_ARRAY : forceMergeRequest.indices(); Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_forcemerge")); RequestConverters.Params parameters = new RequestConverters.Params(); parameters.withIndicesOptions(forceMergeRequest.indicesOptions()); parameters.putParam("max_num_segments", Integer.toString(forceMergeRequest.maxNumSegments())); parameters.putParam("only_expunge_deletes", Boolean.toString(forceMergeRequest.onlyExpungeDeletes())); parameters.putParam("flush", Boolean.toString(forceMergeRequest.flush())); request.addParameters(parameters.asMap()); return request; } static Request clearCache(ClearIndicesCacheRequest clearIndicesCacheRequest) { String[] indices = clearIndicesCacheRequest.indices() == null ? Strings.EMPTY_ARRAY :clearIndicesCacheRequest.indices(); Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_cache/clear")); RequestConverters.Params parameters = new RequestConverters.Params(); parameters.withIndicesOptions(clearIndicesCacheRequest.indicesOptions()); parameters.putParam("query", Boolean.toString(clearIndicesCacheRequest.queryCache())); parameters.putParam("fielddata", Boolean.toString(clearIndicesCacheRequest.fieldDataCache())); parameters.putParam("request", Boolean.toString(clearIndicesCacheRequest.requestCache())); parameters.putParam("fields", String.join(",", clearIndicesCacheRequest.fields())); request.addParameters(parameters.asMap()); return request; } static Request existsAlias(GetAliasesRequest getAliasesRequest) { if ((getAliasesRequest.indices() == null || getAliasesRequest.indices().length == 0) && (getAliasesRequest.aliases() == null || getAliasesRequest.aliases().length == 0)) { throw new IllegalArgumentException("existsAlias requires at least an alias or an index"); } String[] indices = getAliasesRequest.indices() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.indices(); String[] aliases = getAliasesRequest.aliases() == null ? 
Strings.EMPTY_ARRAY : getAliasesRequest.aliases(); Request request = new Request(HttpHead.METHOD_NAME, RequestConverters.endpoint(indices, "_alias", aliases)); RequestConverters.Params params = new RequestConverters.Params(); params.withIndicesOptions(getAliasesRequest.indicesOptions()); params.withLocal(getAliasesRequest.local()); request.addParameters(params.asMap()); return request; } static Request split(ResizeRequest resizeRequest) throws IOException { if (IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.exists(resizeRequest.getSettings()) == false) { throw new IllegalArgumentException("index.number_of_shards is required for split operations"); } return resize(resizeRequest, ResizeType.SPLIT); } @Deprecated static Request split(org.elasticsearch.action.admin.indices.shrink.ResizeRequest resizeRequest) throws IOException { if (resizeRequest.getResizeType() != ResizeType.SPLIT) { throw new IllegalArgumentException("Wrong resize type [" + resizeRequest.getResizeType() + "] for indices split request"); } return resize(resizeRequest); } static Request shrink(ResizeRequest resizeRequest) throws IOException { return resize(resizeRequest, ResizeType.SHRINK); } @Deprecated static Request shrink(org.elasticsearch.action.admin.indices.shrink.ResizeRequest resizeRequest) throws IOException { if (resizeRequest.getResizeType() != ResizeType.SHRINK) { throw new IllegalArgumentException("Wrong resize type [" + resizeRequest.getResizeType() + "] for indices shrink request"); } return resize(resizeRequest); } static Request clone(ResizeRequest resizeRequest) throws IOException { return resize(resizeRequest, ResizeType.CLONE); } @Deprecated static Request clone(org.elasticsearch.action.admin.indices.shrink.ResizeRequest resizeRequest) throws IOException { if (resizeRequest.getResizeType() != ResizeType.CLONE) { throw new IllegalArgumentException("Wrong resize type [" + resizeRequest.getResizeType() + "] for indices clone request"); } return resize(resizeRequest); } private static 
Request resize(ResizeRequest resizeRequest, ResizeType type) throws IOException { String endpoint = new RequestConverters.EndpointBuilder().addPathPart(resizeRequest.getSourceIndex()) .addPathPartAsIs("_" + type.name().toLowerCase(Locale.ROOT)) .addPathPart(resizeRequest.getTargetIndex()).build(); Request request = new Request(HttpPut.METHOD_NAME, endpoint); RequestConverters.Params params = new RequestConverters.Params(); params.withTimeout(resizeRequest.timeout()); params.withMasterTimeout(resizeRequest.masterNodeTimeout()); params.withWaitForActiveShards(resizeRequest.getWaitForActiveShards()); request.addParameters(params.asMap()); request.setEntity(RequestConverters.createEntity(resizeRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); return request; } @Deprecated private static Request resize(org.elasticsearch.action.admin.indices.shrink.ResizeRequest resizeRequest) throws IOException { String endpoint = new RequestConverters.EndpointBuilder().addPathPart(resizeRequest.getSourceIndex()) .addPathPartAsIs("_" + resizeRequest.getResizeType().name().toLowerCase(Locale.ROOT)) .addPathPart(resizeRequest.getTargetIndexRequest().index()).build(); Request request = new Request(HttpPut.METHOD_NAME, endpoint); RequestConverters.Params params = new RequestConverters.Params(); params.withTimeout(resizeRequest.timeout()); params.withMasterTimeout(resizeRequest.masterNodeTimeout()); params.withWaitForActiveShards(resizeRequest.getTargetIndexRequest().waitForActiveShards()); request.addParameters(params.asMap()); request.setEntity(RequestConverters.createEntity(resizeRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); return request; } static Request rollover(RolloverRequest rolloverRequest) throws IOException { String endpoint = new RequestConverters.EndpointBuilder().addPathPart(rolloverRequest.getAlias()).addPathPartAsIs("_rollover") .addPathPart(rolloverRequest.getNewIndexName()).build(); Request request = new Request(HttpPost.METHOD_NAME, endpoint); 
RequestConverters.Params params = new RequestConverters.Params(); params.withTimeout(rolloverRequest.timeout()); params.withMasterTimeout(rolloverRequest.masterNodeTimeout()); params.withWaitForActiveShards(rolloverRequest.getCreateIndexRequest().waitForActiveShards()); if (rolloverRequest.isDryRun()) { params.putParam("dry_run", Boolean.TRUE.toString()); } request.addParameters(params.asMap()); request.setEntity(RequestConverters.createEntity(rolloverRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); return request; } static Request getSettings(GetSettingsRequest getSettingsRequest) { String[] indices = getSettingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getSettingsRequest.indices(); String[] names = getSettingsRequest.names() == null ? Strings.EMPTY_ARRAY : getSettingsRequest.names(); String endpoint = RequestConverters.endpoint(indices, "_settings", names); Request request = new Request(HttpGet.METHOD_NAME, endpoint); RequestConverters.Params params = new RequestConverters.Params(); params.withIndicesOptions(getSettingsRequest.indicesOptions()); params.withLocal(getSettingsRequest.local()); params.withIncludeDefaults(getSettingsRequest.includeDefaults()); params.withMasterTimeout(getSettingsRequest.masterNodeTimeout()); request.addParameters(params.asMap()); return request; } static Request getIndex(GetIndexRequest getIndexRequest) { String[] indices = getIndexRequest.indices() == null ? 
Strings.EMPTY_ARRAY : getIndexRequest.indices(); String endpoint = RequestConverters.endpoint(indices); Request request = new Request(HttpGet.METHOD_NAME, endpoint); RequestConverters.Params params = new RequestConverters.Params(); params.withIndicesOptions(getIndexRequest.indicesOptions()); params.withLocal(getIndexRequest.local()); params.withIncludeDefaults(getIndexRequest.includeDefaults()); params.withHuman(getIndexRequest.humanReadable()); params.withMasterTimeout(getIndexRequest.masterNodeTimeout()); request.addParameters(params.asMap()); return request; } static Request indicesExist(GetIndexRequest getIndexRequest) { if (getIndexRequest.indices() == null || getIndexRequest.indices().length == 0) { throw new IllegalArgumentException("indices are mandatory"); } String endpoint = RequestConverters.endpoint(getIndexRequest.indices(), ""); Request request = new Request(HttpHead.METHOD_NAME, endpoint); RequestConverters.Params params = new RequestConverters.Params(); params.withLocal(getIndexRequest.local()); params.withHuman(getIndexRequest.humanReadable()); params.withIndicesOptions(getIndexRequest.indicesOptions()); params.withIncludeDefaults(getIndexRequest.includeDefaults()); request.addParameters(params.asMap()); return request; } static Request indexPutSettings(UpdateSettingsRequest updateSettingsRequest) throws IOException { String[] indices = updateSettingsRequest.indices() == null ? 
Strings.EMPTY_ARRAY : updateSettingsRequest.indices(); Request request = new Request(HttpPut.METHOD_NAME, RequestConverters.endpoint(indices, "_settings")); RequestConverters.Params parameters = new RequestConverters.Params(); parameters.withTimeout(updateSettingsRequest.timeout()); parameters.withMasterTimeout(updateSettingsRequest.masterNodeTimeout()); parameters.withIndicesOptions(updateSettingsRequest.indicesOptions()); parameters.withPreserveExisting(updateSettingsRequest.isPreserveExisting()); request.addParameters(parameters.asMap()); request.setEntity(RequestConverters.createEntity(updateSettingsRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); return request; } static Request putTemplate(PutIndexTemplateRequest putIndexTemplateRequest) throws IOException { String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_template") .addPathPart(putIndexTemplateRequest.name()).build(); Request request = new Request(HttpPut.METHOD_NAME, endpoint); RequestConverters.Params params = new RequestConverters.Params(); params.withMasterTimeout(putIndexTemplateRequest.masterNodeTimeout()); if (putIndexTemplateRequest.create()) { params.putParam("create", Boolean.TRUE.toString()); } if (Strings.hasText(putIndexTemplateRequest.cause())) { params.putParam("cause", putIndexTemplateRequest.cause()); } request.addParameters(params.asMap()); request.setEntity(RequestConverters.createEntity(putIndexTemplateRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); return request; } static Request putIndexTemplate(PutComposableIndexTemplateRequest putIndexTemplateRequest) throws IOException { String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_index_template") .addPathPart(putIndexTemplateRequest.name()).build(); Request request = new Request(HttpPut.METHOD_NAME, endpoint); RequestConverters.Params params = new RequestConverters.Params(); params.withMasterTimeout(putIndexTemplateRequest.masterNodeTimeout()); if 
(putIndexTemplateRequest.create()) { params.putParam("create", Boolean.TRUE.toString()); } if (Strings.hasText(putIndexTemplateRequest.cause())) { params.putParam("cause", putIndexTemplateRequest.cause()); } request.addParameters(params.asMap()); request.setEntity(RequestConverters.createEntity(putIndexTemplateRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); return request; } static Request simulateIndexTemplate(SimulateIndexTemplateRequest simulateIndexTemplateRequest) throws IOException { String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_index_template", "_simulate_index") .addPathPart(simulateIndexTemplateRequest.indexName()).build(); Request request = new Request(HttpPost.METHOD_NAME, endpoint); RequestConverters.Params params = new RequestConverters.Params(); params.withMasterTimeout(simulateIndexTemplateRequest.masterNodeTimeout()); PutComposableIndexTemplateRequest putComposableIndexTemplateRequest = simulateIndexTemplateRequest.indexTemplateV2Request(); if (putComposableIndexTemplateRequest != null) { if (putComposableIndexTemplateRequest.create()) { params.putParam("create", Boolean.TRUE.toString()); } if (Strings.hasText(putComposableIndexTemplateRequest.cause())) { params.putParam("cause", putComposableIndexTemplateRequest.cause()); } request.setEntity(RequestConverters.createEntity(putComposableIndexTemplateRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); } request.addParameters(params.asMap()); return request; } static Request validateQuery(ValidateQueryRequest validateQueryRequest) throws IOException { String[] indices = validateQueryRequest.indices() == null ? 
Strings.EMPTY_ARRAY : validateQueryRequest.indices(); String endpoint = RequestConverters.endpoint(indices, "_validate/query"); Request request = new Request(HttpGet.METHOD_NAME, endpoint); RequestConverters.Params params = new RequestConverters.Params(); params.withIndicesOptions(validateQueryRequest.indicesOptions()); params.putParam("explain", Boolean.toString(validateQueryRequest.explain())); params.putParam("all_shards", Boolean.toString(validateQueryRequest.allShards())); params.putParam("rewrite", Boolean.toString(validateQueryRequest.rewrite())); request.addParameters(params.asMap()); request.setEntity(RequestConverters.createEntity(validateQueryRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); return request; } static Request getAlias(GetAliasesRequest getAliasesRequest) { String[] indices = getAliasesRequest.indices() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.indices(); String[] aliases = getAliasesRequest.aliases() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.aliases(); String endpoint = RequestConverters.endpoint(indices, "_alias", aliases); Request request = new Request(HttpGet.METHOD_NAME, endpoint); RequestConverters.Params params = new RequestConverters.Params(); params.withIndicesOptions(getAliasesRequest.indicesOptions()); params.withLocal(getAliasesRequest.local()); request.addParameters(params.asMap()); return request; } static Request getTemplates(GetIndexTemplatesRequest getIndexTemplatesRequest) { final String endpoint = new RequestConverters.EndpointBuilder() .addPathPartAsIs("_template") .addCommaSeparatedPathParts(getIndexTemplatesRequest.names()) .build(); final Request request = new Request(HttpGet.METHOD_NAME, endpoint); final RequestConverters.Params params = new RequestConverters.Params(); params.withLocal(getIndexTemplatesRequest.isLocal()); params.withMasterTimeout(getIndexTemplatesRequest.getMasterNodeTimeout()); request.addParameters(params.asMap()); return request; } static Request 
getIndexTemplates(GetComposableIndexTemplateRequest getIndexTemplatesRequest) { final String endpoint = new RequestConverters.EndpointBuilder() .addPathPartAsIs("_index_template") .addPathPart(getIndexTemplatesRequest.name()) .build(); final Request request = new Request(HttpGet.METHOD_NAME, endpoint); final RequestConverters.Params params = new RequestConverters.Params(); params.withLocal(getIndexTemplatesRequest.isLocal()); params.withMasterTimeout(getIndexTemplatesRequest.getMasterNodeTimeout()); request.addParameters(params.asMap()); return request; } static Request templatesExist(IndexTemplatesExistRequest indexTemplatesExistRequest) { final String endpoint = new RequestConverters.EndpointBuilder() .addPathPartAsIs("_template") .addCommaSeparatedPathParts(indexTemplatesExistRequest.names()) .build(); final Request request = new Request(HttpHead.METHOD_NAME, endpoint); final RequestConverters.Params params = new RequestConverters.Params(); params.withLocal(indexTemplatesExistRequest.isLocal()); params.withMasterTimeout(indexTemplatesExistRequest.getMasterNodeTimeout()); request.addParameters(params.asMap()); return request; } static Request templatesExist(ComposableIndexTemplateExistRequest indexTemplatesExistRequest) { final String endpoint = new RequestConverters.EndpointBuilder() .addPathPartAsIs("_index_template") .addPathPart(indexTemplatesExistRequest.name()) .build(); final Request request = new Request(HttpHead.METHOD_NAME, endpoint); final RequestConverters.Params params = new RequestConverters.Params(); params.withLocal(indexTemplatesExistRequest.isLocal()); params.withMasterTimeout(indexTemplatesExistRequest.getMasterNodeTimeout()); request.addParameters(params.asMap()); return request; } static Request analyze(AnalyzeRequest request) throws IOException { RequestConverters.EndpointBuilder builder = new RequestConverters.EndpointBuilder(); String index = request.index(); if (index != null) { builder.addPathPart(index); } 
builder.addPathPartAsIs("_analyze"); Request req = new Request(HttpGet.METHOD_NAME, builder.build()); req.setEntity(RequestConverters.createEntity(request, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); return req; } static Request freezeIndex(FreezeIndexRequest freezeIndexRequest) { String endpoint = RequestConverters.endpoint(freezeIndexRequest.getIndices(), "_freeze"); Request request = new Request(HttpPost.METHOD_NAME, endpoint); RequestConverters.Params parameters = new RequestConverters.Params(); parameters.withTimeout(freezeIndexRequest.timeout()); parameters.withMasterTimeout(freezeIndexRequest.masterNodeTimeout()); parameters.withIndicesOptions(freezeIndexRequest.indicesOptions()); parameters.withWaitForActiveShards(freezeIndexRequest.getWaitForActiveShards()); request.addParameters(parameters.asMap()); return request; } static Request unfreezeIndex(UnfreezeIndexRequest unfreezeIndexRequest) { String endpoint = RequestConverters.endpoint(unfreezeIndexRequest.getIndices(), "_unfreeze"); Request request = new Request(HttpPost.METHOD_NAME, endpoint); RequestConverters.Params parameters = new RequestConverters.Params(); parameters.withTimeout(unfreezeIndexRequest.timeout()); parameters.withMasterTimeout(unfreezeIndexRequest.masterNodeTimeout()); parameters.withIndicesOptions(unfreezeIndexRequest.indicesOptions()); parameters.withWaitForActiveShards(unfreezeIndexRequest.getWaitForActiveShards()); request.addParameters(parameters.asMap()); return request; } static Request deleteTemplate(DeleteIndexTemplateRequest deleteIndexTemplateRequest) { String name = deleteIndexTemplateRequest.name(); String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_template").addPathPart(name).build(); Request request = new Request(HttpDelete.METHOD_NAME, endpoint); RequestConverters.Params params = new RequestConverters.Params(); params.withMasterTimeout(deleteIndexTemplateRequest.masterNodeTimeout()); request.addParameters(params.asMap()); return request; } 
static Request deleteIndexTemplate(DeleteComposableIndexTemplateRequest deleteIndexTemplateRequest) { String name = deleteIndexTemplateRequest.getName(); String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_index_template").addPathPart(name).build(); Request request = new Request(HttpDelete.METHOD_NAME, endpoint); RequestConverters.Params params = new RequestConverters.Params(); params.withMasterTimeout(deleteIndexTemplateRequest.masterNodeTimeout()); request.addParameters(params.asMap()); return request; } static Request reloadAnalyzers(ReloadAnalyzersRequest reloadAnalyzersRequest) { String endpoint = RequestConverters.endpoint(reloadAnalyzersRequest.getIndices(), "_reload_search_analyzers"); Request request = new Request(HttpPost.METHOD_NAME, endpoint); RequestConverters.Params parameters = new RequestConverters.Params(); parameters.withIndicesOptions(reloadAnalyzersRequest.indicesOptions()); request.addParameters(parameters.asMap()); return request; } static Request deleteAlias(DeleteAliasRequest deleteAliasRequest) { String endpoint = new RequestConverters.EndpointBuilder() .addPathPart(deleteAliasRequest.getIndex()) .addPathPartAsIs("_alias") .addPathPart(deleteAliasRequest.getAlias()).build(); Request request = new Request(HttpDelete.METHOD_NAME, endpoint); RequestConverters.Params parameters = new RequestConverters.Params(); parameters.withTimeout(deleteAliasRequest.timeout()); parameters.withMasterTimeout(deleteAliasRequest.masterNodeTimeout()); request.addParameters(parameters.asMap()); return request; } }
/* * Copyright 2014 MovingBlocks * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.terasology.rendering.nui.layouts; import org.terasology.math.Rect2i; import org.terasology.math.Vector2i; import org.terasology.rendering.nui.BaseInteractionListener; import org.terasology.rendering.nui.Canvas; import org.terasology.rendering.nui.CoreLayout; import org.terasology.rendering.nui.InteractionListener; import org.terasology.rendering.nui.LayoutConfig; import org.terasology.rendering.nui.LayoutHint; import org.terasology.rendering.nui.SubRegion; import org.terasology.rendering.nui.UIWidget; import org.terasology.rendering.nui.widgets.UIScrollbar; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; /** * @author Immortius */ public class ScrollableArea extends CoreLayout { private static final int SCROLL_MULTIPLIER = -42; @LayoutConfig private UIWidget content; @LayoutConfig private boolean stickToBottom; @LayoutConfig private boolean verticalScrollbar = true; @LayoutConfig private boolean horizontalScrollbar; private UIScrollbar verticalBar = new UIScrollbar(true); private UIScrollbar horizontalBar = new UIScrollbar(false); private boolean moveToBottomPending; private boolean moveToTopPending; private InteractionListener scrollListener = new BaseInteractionListener() { @Override public boolean onMouseWheel(int wheelTurns, Vector2i pos) { // If there are two scrollbars, we assume vertical has priority if (verticalScrollbar) { 
verticalBar.setValue(verticalBar.getValue() + wheelTurns * SCROLL_MULTIPLIER); } else if (horizontalScrollbar) { horizontalBar.setValue(horizontalBar.getValue() + wheelTurns * SCROLL_MULTIPLIER); } return true; } }; @Override public void onDraw(Canvas canvas) { int availableWidth = canvas.size().x; int availableHeight = canvas.size().y; // First, try to layout it without any scroll bars Vector2i contentSize = canvas.calculateRestrictedSize(content, new Vector2i(availableWidth, availableHeight)); if (contentSize.x <= availableWidth && contentSize.y <= availableHeight) { canvas.drawWidget(content, Rect2i.createFromMinAndSize(new Vector2i(0, 0), new Vector2i(availableWidth, availableHeight))); return; } // Second, try to layout it just with vertical bar (if supported) if (verticalScrollbar) { int scrollbarWidth = canvas.calculateRestrictedSize(verticalBar, new Vector2i(availableWidth, availableHeight)).x; int scrollbarHeight = canvas.calculateRestrictedSize(verticalBar, new Vector2i(availableWidth, availableHeight)).y; contentSize = canvas.calculateRestrictedSize(content, new Vector2i(availableWidth - scrollbarWidth, availableHeight)); if (horizontalScrollbar && contentSize.x > availableWidth - scrollbarWidth) { if (contentSize.y > availableHeight - scrollbarHeight) { layoutWithBothScrollbars(canvas, contentSize, availableWidth, availableHeight, scrollbarWidth, scrollbarHeight); } else { contentSize = canvas.calculateRestrictedSize(content, new Vector2i(availableWidth, availableHeight - scrollbarHeight)); layoutWithJustHorizontal(canvas, contentSize, availableWidth, availableHeight, scrollbarHeight); } } else { layoutWithJustVertical(canvas, contentSize, availableWidth, availableHeight, scrollbarWidth); } } else if (horizontalScrollbar) { // Well we know that just horizontal is allowed int scrollbarHeight = canvas.calculateRestrictedSize(verticalBar, new Vector2i(availableWidth, availableHeight)).y; availableHeight -= scrollbarHeight; contentSize = 
canvas.calculateRestrictedSize(content, new Vector2i(availableWidth, availableHeight - scrollbarHeight)); layoutWithJustHorizontal(canvas, contentSize, availableWidth, availableHeight, scrollbarHeight); } else { throw new IllegalStateException("ScrollableArea without any scrollbar allowed, what's the point of that?!"); } } private void layoutWithBothScrollbars(Canvas canvas, Vector2i contentSize, int availableWidth, int availableHeight, int scrollbarWidth, int scrollbarHeight) { availableWidth -= scrollbarWidth; availableHeight -= scrollbarHeight; boolean atBottom = verticalBar.getRange() == verticalBar.getValue(); Rect2i contentRegion = Rect2i.createFromMinAndSize(0, 0, availableWidth, availableHeight); verticalBar.setRange(contentSize.y - contentRegion.height()); horizontalBar.setRange(contentSize.x - contentRegion.width()); if ((stickToBottom && atBottom) || moveToBottomPending) { verticalBar.setValue(verticalBar.getRange()); moveToBottomPending = false; } if (moveToTopPending) { verticalBar.setValue(0); moveToTopPending = false; } canvas.addInteractionRegion(scrollListener); canvas.drawWidget(verticalBar, Rect2i.createFromMinAndSize(availableWidth, 0, scrollbarWidth, availableHeight)); canvas.drawWidget(horizontalBar, Rect2i.createFromMinAndSize(0, availableHeight, availableWidth, scrollbarHeight)); try (SubRegion ignored = canvas.subRegion(contentRegion, true)) { canvas.drawWidget(content, Rect2i.createFromMinAndSize(-horizontalBar.getValue(), -verticalBar.getValue(), contentSize.x, contentSize.y)); } } private void layoutWithJustVertical(Canvas canvas, Vector2i contentSize, int availableWidth, int availableHeight, int scrollbarWidth) { availableWidth -= scrollbarWidth; boolean atBottom = verticalBar.getRange() == verticalBar.getValue(); Rect2i contentRegion = Rect2i.createFromMinAndSize(0, 0, availableWidth, availableHeight); verticalBar.setRange(contentSize.y - contentRegion.height()); if ((stickToBottom && atBottom) || moveToBottomPending) { 
verticalBar.setValue(verticalBar.getRange()); moveToBottomPending = false; } if (moveToTopPending) { verticalBar.setValue(0); moveToTopPending = false; } canvas.addInteractionRegion(scrollListener); canvas.drawWidget(verticalBar, Rect2i.createFromMinAndSize(availableWidth, 0, scrollbarWidth, availableHeight)); try (SubRegion ignored = canvas.subRegion(contentRegion, true)) { canvas.drawWidget(content, Rect2i.createFromMinAndSize(0, -verticalBar.getValue(), availableWidth, contentSize.y)); } } private void layoutWithJustHorizontal(Canvas canvas, Vector2i contentSize, int availableWidth, int availableHeight, int scrollbarHeight) { availableHeight -= scrollbarHeight; Rect2i contentRegion = Rect2i.createFromMinAndSize(0, 0, availableWidth, availableHeight); horizontalBar.setRange(contentSize.x - contentRegion.width()); canvas.addInteractionRegion(scrollListener); canvas.drawWidget(horizontalBar, Rect2i.createFromMinAndSize(0, availableHeight, availableWidth, scrollbarHeight)); try (SubRegion ignored = canvas.subRegion(contentRegion, true)) { canvas.drawWidget(content, Rect2i.createFromMinAndSize(-horizontalBar.getValue(), 0, contentSize.x, availableHeight)); } } public void setContent(UIWidget widget) { this.content = widget; } @Override public Vector2i getPreferredContentSize(Canvas canvas, Vector2i sizeHint) { return canvas.calculatePreferredSize(content); } @Override public Vector2i getMaxContentSize(Canvas canvas) { return new Vector2i(Integer.MAX_VALUE, Integer.MAX_VALUE); } @Override public Iterator<UIWidget> iterator() { if (content != null) { return Arrays.asList(content).iterator(); } return Collections.emptyIterator(); } @Override public void addWidget(UIWidget element, LayoutHint hint) { content = element; } public boolean isStickToBottom() { return stickToBottom; } public void setStickToBottom(boolean stickToBottom) { this.stickToBottom = stickToBottom; } public void moveToBottom() { moveToBottomPending = true; } public void moveToTop() { moveToTopPending = 
true; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package io.milton.zsync;

import io.milton.common.RangeUtils;
import io.milton.common.StreamUtils;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.RandomAccessFile;
import java.io.UnsupportedEncodingException;
import java.nio.channels.FileChannel;
import java.util.Enumeration;
import java.util.List;
import java.util.NoSuchElementException;
import org.apache.commons.lang.StringUtils;

import io.milton.http.Range;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * An object that performs the server side operations needed to assemble the
 * file from a ZSync PUT.
 * <p/>
 *
 * These operations consist of copying byte ranges into the new file. The
 * {@link #moveBlocks} method copies ranges from the previous file according to
 * a list of RelocateRanges, while the {@link #sendRanges} method copies the new
 * data received in the upload. Both of these methods are overloaded with
 * versions that accept File rather than byte[] arguments for dealing with large
 * files that should not be loaded into memory all at once.<p/>
 *
 * To assemble the file from a ZSync upload, the server should construct an
 * UploadReader, passing to the constructor the file to be updated and an
 * InputStream containing the upload data. It should then invoke the
 * {@link #assemble()} method, which will return a temporary file that was
 * created.
 *
 * @author Nick
 *
 */
public class UploadReader {

    private static final Logger log = LoggerFactory.getLogger(UploadReader.class);

    /**
     * Copies blocks of data from the in array to the out array.
     *
     * @param in The byte array containing the server's file being replaced
     * @param rlist The List of RelocateRanges received from the upload
     * @param blockSize The block size used in rlist
     * @param out The byte array of the file being assembled
     */
    public static void moveBlocks(byte[] in, List<RelocateRange> rlist, int blockSize, byte[] out) {
        for (RelocateRange reloc : rlist) {
            int startBlock = (int) reloc.getBlockRange().getStart().longValue();
            int finishBlock = (int) reloc.getBlockRange().getFinish().longValue();
            int startByte = startBlock * blockSize;
            int newOffset = (int) reloc.getOffset();
            // Block range is half-open: blocks [startBlock, finishBlock) are copied.
            int numBytes = (finishBlock - startBlock) * blockSize;
            System.arraycopy(in, startByte, out, newOffset, numBytes);
        }
    }

    /**
     * Copies blocks of data from the input File to the output File. For each
     * RelocateRange A-B/C in relocRanges, the block starting at A and ending at
     * B-1 is copied from inFile and written to byte C of outFile.
     *
     * @param inFile The server's File being replaced
     * @param relocRanges The Enumeration of RelocateRanges parsed from the
     * Upload's relocStream
     * @param blocksize The block size used in relocRanges
     * @param outFile The File being assembled
     * @throws IOException
     */
    public static void moveBlocks(File inFile, Enumeration<RelocateRange> relocRanges, int blocksize, File outFile)
            throws IOException {
        /*
         * Because transferFrom can supposedly throw Exceptions when copying large Files,
         * this method invokes moveRange to copy incrementally
         */
        /* The FileChannels should be obtained from a RandomAccessFile rather than a
         * Stream, or the position() method will not work correctly
         */
        FileChannel rc = null;
        FileChannel wc = null;
        try {
            rc = new RandomAccessFile(inFile, "r").getChannel();
            wc = new RandomAccessFile(outFile, "rw").getChannel();
            while (relocRanges.hasMoreElements()) {
                moveRange(rc, relocRanges.nextElement(), blocksize, wc);
            }
        } finally {
            // Close both channels even when one fails to open or a copy throws.
            Util.close(rc);
            Util.close(wc);
        }
    }

    /**
     * Copies a Range of blocks from rc into a new offset of wc.
     *
     * @param rc A FileChannel for the input File
     * @param reloc The RelocateRange specifying the Range to be copied and its
     * new offset
     * @param blockSize The block size used by reloc
     * @param wc The FileChannel for the output File
     * @throws IOException
     */
    private static void moveRange(FileChannel rc, RelocateRange reloc, int blockSize, FileChannel wc)
            throws IOException {
        final long MAX_BUFFER = 16384;           // copy at most this many bytes per transferFrom call
        long startBlock = reloc.getBlockRange().getStart();
        long finishBlock = reloc.getBlockRange().getFinish();
        long bytesLeft = (finishBlock - startBlock) * blockSize; // bytes left to copy
        long readAtOnce = 0;                     // number of bytes to attempt to read
        long bytesRead = 0;                      // number of bytes actually read
        long currOffset = reloc.getOffset();     // current write position

        // Validate the source range before touching the channels.
        if (finishBlock * blockSize > rc.size() || startBlock < 0) {
            throw new RuntimeException("Invalid RelocateRange: Source file does not contain blocks "
                    + reloc.getBlockRange().getRange());
        }

        rc.position(startBlock * blockSize);
        while (bytesLeft > 0) {
            readAtOnce = Math.min(bytesLeft, MAX_BUFFER);
            /* Because transferFrom does not update the write channel's position,
             * it needs to be set manually
             */
            bytesRead = wc.transferFrom(rc, currOffset, readAtOnce);
            bytesLeft -= bytesRead;
            currOffset += bytesRead;
        }
    }

    /**
     * Copies bytes from the in array into Ranges of the out array. The in array
     * is expected to contain the queued bytes in the same order as the ranges
     * List.
     *
     * @param in An array containing the queued bytes corresponding to the
     * ranges List
     * @param ranges The List of target Ranges
     * @param out The byte array for the file being assembled
     */
    public static void sendRanges(byte[] in, List<Range> ranges, byte[] out) {
        int pos = 0;
        for (Range r : ranges) {
            int length = (int) (r.getFinish() - r.getStart());
            System.arraycopy(in, pos, out, r.getStart().intValue(), length);
            pos += length;
        }
    }

    /**
     * Inserts the data from each DataRange into the output File, at the
     * appropriate offset.
     *
     * @param byteRanges The Enumeration of Range/InputStream pairs parsed from
     * the Upload's dataStream
     * @param outFile The output File being assembled
     * @throws IOException
     */
    public static void sendRanges(Enumeration<ByteRange> byteRanges, File outFile) throws IOException {
        final int BUFFER_SIZE = 16384;
        byte[] buffer = new byte[BUFFER_SIZE];
        RandomAccessFile randAccess = null;

        try {
            randAccess = new RandomAccessFile(outFile, "rw");
            while (byteRanges.hasMoreElements()) {
                ByteRange byteRange = byteRanges.nextElement();
                Range range = byteRange.getRange();
                InputStream data = byteRange.getDataQueue();
                sendBytes(data, range, buffer, randAccess);
            }
        } finally {
            Util.close(randAccess);
        }
    }

    /**
     * Reads a number of bytes from the InputStream equal to the size of the
     * specified Range and writes them into that Range of the RandomAccessFile.
     *
     * @param dataIn The InputStream containing the data to be copied
     * @param range The target location in the RandomAccessFile
     * @param buffer A byte array used to transfer data from dataIn to fileOut
     * @param fileOut A RandomAccessFile for the File being assembled
     * @throws IOException
     */
    private static void sendBytes(InputStream dataIn, Range range, byte[] buffer, RandomAccessFile fileOut)
            throws IOException {
        long bytesLeft = (range.getFinish() - range.getStart());
        int bytesRead = 0;
        int readAtOnce = 0;

        fileOut.seek(range.getStart());
        while (bytesLeft > 0) {
            readAtOnce = (int) Math.min(buffer.length, bytesLeft);
            bytesRead = dataIn.read(buffer, 0, readAtOnce);
            /* FIX: check for end-of-stream BEFORE writing. InputStream.read returns -1 at EOF,
             * and the original code passed that -1 to write(), producing an
             * IndexOutOfBoundsException instead of the intended diagnostic below.
             */
            if (bytesRead < 0) {
                throw new RuntimeException("Unable to copy byte Range: " + range.getRange()
                        + ". End of InputStream reached with " + bytesLeft + " bytes left.");
            }
            fileOut.write(buffer, 0, bytesRead);
            bytesLeft -= bytesRead;
        }
    }

    /**
     * Copies the contents of the source file to the destination file and sets
     * the destination file's length.
* * @param inFile The source file * @param outFile The destination file * @param length The desired length of the destination file * @throws IOException */ private static void copyFile(File inFile, File outFile, long length) throws IOException { InputStream fIn = null; OutputStream fOut = null; RandomAccessFile randAccess = null; try { fIn = new FileInputStream(inFile); fOut = new FileOutputStream(outFile); RangeUtils.sendBytes(fIn, fOut, inFile.length()); } finally { StreamUtils.close(fIn); StreamUtils.close(fOut); } try { randAccess = new RandomAccessFile(outFile, "rw"); randAccess.setLength(length); } finally { Util.close(randAccess); } } private File serverCopy; private File uploadedCopy; private Upload uploadData; /** * Constructor that parses the InputStream into an Upload object and * initializes a temporary file that will contain the assembled upload * * @param serverFile The server file to be updated * @param uploadIn A stream containing the ZSync PUT data * @throws IOException */ public UploadReader(File serverFile, InputStream uploadIn) throws IOException { this.serverCopy = serverFile; this.uploadData = Upload.parse(uploadIn); this.uploadedCopy = File.createTempFile("zsync-upload", "newFile"); } /** * Invokes the methods to put together the uploaded file. 
* * @return The assembled File * @throws IOException */ public File assemble() throws IOException { if (uploadData.getBlocksize() <= 0) { throw new RuntimeException("Invalid blocksize specified: " + uploadData.getBlocksize()); } if (uploadData.getFilelength() <= 0) { throw new RuntimeException("Invalid file length specified: " + uploadData.getFilelength()); } if (StringUtils.isBlank(uploadData.getSha1())) { throw new RuntimeException("No SHA1 checksum provided."); } InputStream relocIn = null; InputStream dataIn = null; try { relocIn = uploadData.getRelocStream(); dataIn = uploadData.getDataStream(); Enumeration<RelocateRange> relocEnum = new RelocateParser(relocIn); Enumeration<ByteRange> dataEnum = new ByteRangeParser(dataIn); copyFile(serverCopy, uploadedCopy, uploadData.getFilelength()); moveBlocks(serverCopy, relocEnum, (int) uploadData.getBlocksize(), uploadedCopy); sendRanges(dataEnum, uploadedCopy); } finally { StreamUtils.close(relocIn); StreamUtils.close(dataIn); } return uploadedCopy; } /** * Returns the expected SHA1 checksum String received in the upload * * @return A SHA1 checksum */ public String getChecksum() { return uploadData.getSha1(); } /** * An object that wraps the relocate stream of Upload ( * {@link Upload#getRelocStream} )in an Enumeration of RelocateRanges. The * relocate stream is expected to contain a comma separated list of * RelocateRanges, e.g.<p/> * * 10-20/123, 100-200/789 * <p/> * * A few whitespaces at the beginning or end of the list are ignored, as are * those surrounding the commas. 
* * @author Nick * */ private static class RelocateParser implements Enumeration<RelocateRange> { private InputStream relocIn; private String nextToken; private byte[] COMMA = new byte[1]; /** * Constructs the Enumeration of RelocateRanges from an InputStream * * @param relocIn An InputStream obtained from * {@link Upload#getRelocStream()} */ public RelocateParser(InputStream relocIn) { try { this.relocIn = relocIn; this.COMMA[0] = ",".getBytes(Upload.CHARSET)[0]; this.nextToken = Upload.readToken(relocIn, COMMA, 64); } catch (Exception ex) { throw new RuntimeException(ex); } } @Override public boolean hasMoreElements() { return !StringUtils.isBlank(nextToken); } @Override public RelocateRange nextElement() { if (!this.hasMoreElements()) { throw new NoSuchElementException("No more RelocateRanges"); } try { RelocateRange reloc = RelocateRange.parse(nextToken); nextToken = Upload.readToken(relocIn, COMMA, 64); return reloc; } catch (Exception ex) { throw new RuntimeException(ex); } } } /** * An object that wraps the data stream portion of an Upload in an * Enumeration of ByteRanges. </p> * * In order for the parsing to work, the proper number of bytes must be read * from each ByteRange returned by {@link #nextElement()} prior to the next * invocation of {@link #hasMoreElements()}. * * @author Nick * */ private static class ByteRangeParser implements Enumeration<ByteRange> { /*The dataStream portion of an Upload*/ private InputStream dataQueue; /*The Range of the next ByteRange. A null value means that the next Range has not *been loaded or that the end of the data section has been reached. 
         */
        private Range nextRange;

        /*Whether an attempt has been made to read the next Range KV pair*/
        private boolean rangeloaded;

        /* Single-byte ':' delimiter used when reading the "Range" key token */
        private byte[] COLON = {":".getBytes(Upload.CHARSET)[0]};

        /**
         * Constructs the Enumeration from the specified InputStream
         *
         * @param in The InputStream obtained from
         *            {@link Upload#getDataStream()}
         * @throws UnsupportedEncodingException
         */
        public ByteRangeParser(InputStream in) throws UnsupportedEncodingException {
            this.dataQueue = in;
            this.rangeloaded = false;
        }

        @Override
        public boolean hasMoreElements() {
            /*
             * If rangeloaded == false, attempt to read the next Range KV pair and set rangeloaded = true.
             * If rangeloaded == true and nextRange == null, there are no further ByteRanges.
             */
            try {
                if (rangeloaded) {
                    // Answer from the cached look-ahead without re-reading
                    return nextRange != null;
                }
                // A blank key marks the end of the data section
                String nextKey = Upload.readToken(dataQueue, COLON, 64).trim();
                if (StringUtils.isBlank(nextKey)) {
                    nextRange = null;
                } else if (!nextKey.equalsIgnoreCase(Upload.RANGE)) {
                    throw new RuntimeException("Invalid key. Expected: " + Upload.RANGE + "\tActual: " + nextKey);
                } else {
                    nextRange = Range.parse(Upload.readValue(dataQueue, 64).trim());
                }
                rangeloaded = true;
                return nextRange != null;
            } catch (Exception ex) {
                throw new RuntimeException(ex);
            }
        }

        @Override
        public ByteRange nextElement() {
            if (!hasMoreElements()) {
                throw new NoSuchElementException("No more ByteRanges");
            }
            this.rangeloaded = false; //Reset rangeloaded
            // The returned ByteRange shares dataQueue with this parser; per the
            // class javadoc, the caller must consume the Range's bytes before
            // the next hasMoreElements() call or parsing will desynchronize.
            return new ByteRange(nextRange, dataQueue);
        }
    }
}