gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * Copyright 2019 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.gradle.buildinit.plugins.internal;

import org.gradle.api.internal.DocumentationRegistry;
import org.gradle.buildinit.plugins.internal.model.Description;
import org.gradle.buildinit.plugins.internal.modifiers.BuildInitTestFramework;
import org.gradle.buildinit.plugins.internal.modifiers.Language;
import org.gradle.buildinit.plugins.internal.modifiers.ModularizationOption;

import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.Set;

/**
 * Base descriptor for generating JVM-language projects (Java, Groovy, Kotlin, Scala)
 * via {@code gradle init}. Subclasses supply the language-specific source and test
 * templates; this class handles build-script generation (plugins, repositories,
 * standard dependencies, test-framework wiring) for both single- and multi-project
 * layouts.
 */
public abstract class JvmProjectInitDescriptor extends LanguageLibraryProjectInitDescriptor {
    // Immutable description of the target language/component (language, plugin name,
    // supported test frameworks, ...).
    protected final Description description;
    // Resolves pinned library/plugin versions by symbolic key (e.g. "groovy", "junit").
    protected final TemplateLibraryVersionProvider libraryVersionProvider;
    // Used to embed links to the Gradle user manual in generated build scripts.
    private final DocumentationRegistry documentationRegistry;

    public JvmProjectInitDescriptor(Description description, TemplateLibraryVersionProvider libraryVersionProvider, DocumentationRegistry documentationRegistry) {
        this.description = description;
        this.libraryVersionProvider = libraryVersionProvider;
        this.documentationRegistry = documentationRegistry;
    }

    /** Returns {@code true} when the user chose a single-project (non-modularized) layout. */
    protected boolean isSingleProject(InitSettings settings) {
        return settings.getModularizationOption() == ModularizationOption.SINGLE_PROJECT;
    }

    /** Fully-qualified id of the generated application convention plugin, e.g. "org.example.java-application-conventions". */
    protected String applicationConventionPlugin(InitSettings settings) {
        return settings.getPackageName() + "." + getLanguage().getName() + "-application-conventions";
    }

    /** Fully-qualified id of the generated library convention plugin. */
    protected String libraryConventionPlugin(InitSettings settings) {
        return settings.getPackageName() + "." + getLanguage().getName() + "-library-conventions";
    }

    /** Fully-qualified id of the shared convention plugin applied by both library and application plugins. */
    private String commonConventionPlugin(InitSettings settings) {
        return settings.getPackageName() + "." + getLanguage().getName() + "-common-conventions";
    }

    /** Descriptor id of the form "&lt;language&gt;-&lt;componentType&gt;", e.g. "java-library". */
    @Override
    public String getId() {
        return getLanguage().getName() + "-" + getComponentType().toString();
    }

    @Override
    public Language getLanguage() {
        return description.getLanguage();
    }

    @Override
    public boolean supportsPackage() {
        return true;
    }

    @Override
    public BuildInitTestFramework getDefaultTestFramework() {
        return description.getDefaultTestFramework();
    }

    @Override
    public Set<BuildInitTestFramework> getTestFrameworks() {
        return description.getSupportedTestFrameworks();
    }

    /**
     * Link to the matching Gradle sample, e.g. "building_java_libraries" or
     * "building_java_applications_multi_project" for multi-project layouts.
     */
    @Override
    public Optional<String> getFurtherReading(InitSettings settings) {
        String multi = isSingleProject(settings) ? "" : "_multi_project";
        return Optional.of(documentationRegistry.getSampleFor("building_" + getLanguage().getName() + "_" + getComponentType().pluralName() + multi));
    }

    /**
     * Writes the per-project build script. Only the single-project layout is handled
     * here; in the multi-project case configuration lives in convention plugins
     * (see {@link #generateConventionPluginBuildScript}).
     */
    @Override
    public void generateProjectBuildScript(String projectName, InitSettings settings, BuildScriptBuilder buildScriptBuilder) {
        if (isSingleProject(settings)) {
            addMavenCentral(buildScriptBuilder);
            String languagePlugin = description.getPluginName();
            if (languagePlugin != null) {
                // Plugin version is only emitted when the description declares a version property
                // (e.g. third-party language plugins); core plugins pass null and get no version.
                String pluginVersionProperty = description.getPluginVersionProperty();
                String pluginVersion = pluginVersionProperty == null ? null : libraryVersionProvider.getVersion(pluginVersionProperty);
                buildScriptBuilder.plugin("Apply the " + languagePlugin + " Plugin to add support for " + getLanguage() + ".", languagePlugin, pluginVersion);
            }
            buildScriptBuilder.fileComment("This generated file contains a sample " + getLanguage() + " " + getComponentType() + " project to get you started.");
            buildScriptBuilder.fileComment("For more details take a look at the 'Building Java & JVM projects' chapter in the Gradle")
                .fileComment("User Manual available at " + documentationRegistry.getDocumentationFor("building_java_projects"));
            addStandardDependencies(buildScriptBuilder, false);
            if (settings.isUseTestSuites()) {
                // Spock test framework requires that we also have the Groovy plugin applied
                if (getLanguage() != Language.GROOVY && settings.getTestFramework() == BuildInitTestFramework.SPOCK) {
                    buildScriptBuilder.plugin("Apply the groovy plugin to also add support for Groovy (needed for Spock)", "groovy");
                }
                configureDefaultTestSuite(buildScriptBuilder, settings.getTestFramework(), libraryVersionProvider);
            } else {
                addTestFramework(settings.getTestFramework(), buildScriptBuilder);
            }
        }
    }

    /**
     * Writes the build script of one convention plugin in the multi-project layout.
     * The "common" plugin carries repositories, dependencies and test setup; the
     * "library"/"application" plugins apply the common plugin plus their ecosystem plugin.
     */
    @Override
    public void generateConventionPluginBuildScript(String conventionPluginName, InitSettings settings, BuildScriptBuilder buildScriptBuilder) {
        if ("common".equals(conventionPluginName)) {
            addMavenCentral(buildScriptBuilder);
            // Descriptions without a language plugin (plain Java) fall back to the "java" plugin.
            String languagePlugin = description.getPluginName() == null ? "java" : description.getPluginName();
            buildScriptBuilder.plugin("Apply the " + languagePlugin + " Plugin to add support for " + getLanguage() + ".", languagePlugin);
            addStandardDependencies(buildScriptBuilder, true);
            addDependencyConstraints(buildScriptBuilder);
            if (settings.isUseTestSuites()) {
                configureDefaultTestSuite(buildScriptBuilder, settings.getTestFramework(), libraryVersionProvider);
            } else {
                addTestFramework(settings.getTestFramework(), buildScriptBuilder);
            }
        } else {
            buildScriptBuilder.plugin("Apply the common convention plugin for shared build configuration between library and application projects.", commonConventionPlugin(settings));
            if ("library".equals(conventionPluginName)) {
                applyLibraryPlugin(buildScriptBuilder);
            }
            if ("application".equals(conventionPluginName)) {
                applyApplicationPlugin(buildScriptBuilder);
            }
        }
    }

    /**
     * Generates main/test/integrationTest sources for every subproject from the
     * template names collected by the subclass hooks. Templates are only written
     * when the subproject has no existing sources.
     */
    @Override
    public void generateSources(InitSettings settings, TemplateFactory templateFactory) {
        for (String subproject : settings.getSubprojects()) {
            List<String> sourceTemplates = new ArrayList<>();
            List<String> testSourceTemplates = new ArrayList<>();
            // NOTE(review): nothing visible here ever populates this list — integration-test
            // templates presumably come from a subclass override elsewhere, or this is vestigial.
            List<String> integrationTestSourceTemplates = new ArrayList<>();
            sourceTemplates(subproject, settings, templateFactory, sourceTemplates);
            testSourceTemplates(subproject, settings, templateFactory, testSourceTemplates);
            List<TemplateOperation> templateOps = new ArrayList<>(sourceTemplates.size() + testSourceTemplates.size() + integrationTestSourceTemplates.size());
            sourceTemplates.stream().map(t -> templateFactory.fromSourceTemplate(templatePath(t), "main", subproject, templateLanguage(t))).forEach(templateOps::add);
            testSourceTemplates.stream().map(t -> templateFactory.fromSourceTemplate(templatePath(t), "test", subproject, templateLanguage(t))).forEach(templateOps::add);
            integrationTestSourceTemplates.stream().map(t -> templateFactory.fromSourceTemplate(templatePath(t), "integrationTest", subproject, templateLanguage(t))).forEach(templateOps::add);
            templateFactory.whenNoSourcesAvailable(subproject, templateOps).generate();
        }
    }

    /** Builds the template resource path: "&lt;language&gt;&lt;componentType&gt;/&lt;base&gt;.&lt;ext&gt;.template". */
    private String templatePath(String baseFileName) {
        return getLanguage().getName() + getComponentType().toString() + "/" + baseFileName + "." + templateLanguage(baseFileName).getExtension() + ".template";
    }

    /** Templates under a "groovy/" prefix are Groovy regardless of the project language (e.g. Spock specs). */
    private Language templateLanguage(String baseFileName) {
        if (baseFileName.startsWith("groovy/")) {
            return Language.GROOVY;
        }
        return getLanguage();
    }

    /** Subclass hook: add the main-source template base names for the given subproject. */
    protected abstract void sourceTemplates(String subproject, InitSettings settings, TemplateFactory templateFactory, List<String> templates);

    /** Subclass hook: add the test-source template base names for the given subproject. */
    protected abstract void testSourceTemplates(String subproject, InitSettings settings, TemplateFactory templateFactory, List<String> templates);

    protected void applyApplicationPlugin(BuildScriptBuilder buildScriptBuilder) {
        buildScriptBuilder.plugin(
            "Apply the application plugin to add support for building a CLI application in Java.",
            "application");
    }

    protected void applyLibraryPlugin(BuildScriptBuilder buildScriptBuilder) {
        buildScriptBuilder.plugin(
            "Apply the java-library plugin for API and implementation separation.",
            "java-library");
    }

    private void addMavenCentral(BuildScriptBuilder buildScriptBuilder) {
        buildScriptBuilder.repositories().mavenCentral("Use Maven Central for resolving dependencies.");
    }

    /**
     * Adds the language runtime dependency for Groovy/Kotlin/Scala projects (Java needs none).
     * When {@code constraintsDefined} is true the version is omitted from the coordinates —
     * it is supplied via constraints in {@link #addDependencyConstraints} instead.
     */
    private void addStandardDependencies(BuildScriptBuilder buildScriptBuilder, boolean constraintsDefined) {
        if (getLanguage() == Language.GROOVY) {
            String groovyVersion = libraryVersionProvider.getVersion("groovy");
            String groovyAllCoordinates = constraintsDefined ? "org.codehaus.groovy:groovy-all" : "org.codehaus.groovy:groovy-all:" + groovyVersion;
            buildScriptBuilder.implementationDependency("Use the latest Groovy version for building this library", groovyAllCoordinates);
        }
        if (getLanguage() == Language.KOTLIN) {
            // Kotlin versions come from the kotlin-bom platform rather than explicit versions.
            buildScriptBuilder.dependencies().platformDependency("implementation", "Align versions of all Kotlin components", "org.jetbrains.kotlin:kotlin-bom");
            buildScriptBuilder.implementationDependency("Use the Kotlin JDK 8 standard library.", "org.jetbrains.kotlin:kotlin-stdlib-jdk8");
        }
        if (getLanguage() == Language.SCALA) {
            String scalaVersion = libraryVersionProvider.getVersion("scala");
            String scalaLibraryVersion = libraryVersionProvider.getVersion("scala-library");
            String scalaCoordinates = constraintsDefined ? "org.scala-lang:scala-library" : "org.scala-lang:scala-library:" + scalaLibraryVersion;
            buildScriptBuilder.implementationDependency("Use Scala " + scalaVersion + " in our library project", scalaCoordinates);
        }
    }

    /**
     * Emits dependency version constraints for the common convention plugin, pairing
     * with the version-less coordinates produced by
     * {@code addStandardDependencies(builder, true)}.
     */
    private void addDependencyConstraints(BuildScriptBuilder buildScriptBuilder) {
        String commonsTextVersion = libraryVersionProvider.getVersion("commons-text");
        buildScriptBuilder.implementationDependencyConstraint("Define dependency versions as constraints", "org.apache.commons:commons-text:" + commonsTextVersion);
        if (getLanguage() == Language.GROOVY) {
            buildScriptBuilder.implementationDependencyConstraint(null, "org.codehaus.groovy:groovy-all:" + libraryVersionProvider.getVersion("groovy"));
        }
        if (getLanguage() == Language.KOTLIN) {
            buildScriptBuilder.dependencies().platformDependency("implementation", "Align versions of all Kotlin components", "org.jetbrains.kotlin:kotlin-bom");
            buildScriptBuilder.implementationDependencyConstraint(null, "org.jetbrains.kotlin:kotlin-stdlib-jdk8");
        }
        if (getLanguage() == Language.SCALA) {
            String scalaLibraryVersion = libraryVersionProvider.getVersion("scala-library");
            buildScriptBuilder.implementationDependencyConstraint(null, "org.scala-lang:scala-library:" + scalaLibraryVersion);
        }
    }

    /**
     * Wires the chosen test framework into the build script: test dependencies plus
     * the {@code test} task configuration it needs (useJUnitPlatform/useTestNG).
     * The default branch falls back to plain JUnit 4.
     */
    private void addTestFramework(BuildInitTestFramework testFramework, BuildScriptBuilder buildScriptBuilder) {
        switch (testFramework) {
            case SPOCK:
                // Spock specs are Groovy sources, so non-Groovy projects also need the groovy plugin.
                if (getLanguage() != Language.GROOVY) {
                    buildScriptBuilder
                        .plugin("Apply the groovy plugin to also add support for Groovy (needed for Spock)", "groovy")
                        .testImplementationDependency("Use the latest Groovy version for Spock testing", "org.codehaus.groovy:groovy:" + libraryVersionProvider.getVersion("groovy"));
                }
                buildScriptBuilder.testImplementationDependency("Use the awesome Spock testing and specification framework even with Java",
                    "org.spockframework:spock-core:" + libraryVersionProvider.getVersion("spock"),
                    "junit:junit:" + libraryVersionProvider.getVersion("junit"));
                buildScriptBuilder.taskMethodInvocation(
                    "Use JUnit Platform for unit tests.",
                    "test", "Test", "useJUnitPlatform");
                break;
            case TESTNG:
                buildScriptBuilder
                    .testImplementationDependency(
                        "Use TestNG framework, also requires calling test.useTestNG() below",
                        "org.testng:testng:" + libraryVersionProvider.getVersion("testng"))
                    .taskMethodInvocation(
                        "Use TestNG for unit tests.",
                        "test", "Test", "useTestNG");
                break;
            case JUNIT_JUPITER:
                buildScriptBuilder.testImplementationDependency(
                    "Use JUnit Jupiter for testing.",
                    "org.junit.jupiter:junit-jupiter:" + libraryVersionProvider.getVersion("junit-jupiter"));
                buildScriptBuilder.taskMethodInvocation(
                    "Use JUnit Platform for unit tests.",
                    "test", "Test", "useJUnitPlatform");
                break;
            case SCALATEST:
                // Scalatest artifacts are suffixed with the Scala binary version (scalatest_2.13 etc.).
                String scalaVersion = libraryVersionProvider.getVersion("scala");
                String scalaTestVersion = libraryVersionProvider.getVersion("scalatest");
                String scalaTestPlusJunitVersion = libraryVersionProvider.getVersion("scalatestplus-junit");
                String junitVersion = libraryVersionProvider.getVersion("junit");
                String scalaXmlVersion = libraryVersionProvider.getVersion("scala-xml");
                buildScriptBuilder.testImplementationDependency("Use Scalatest for testing our library",
                        "junit:junit:" + junitVersion,
                        "org.scalatest:scalatest_" + scalaVersion + ":" + scalaTestVersion,
                        "org.scalatestplus:junit-4-13_" + scalaVersion + ":" + scalaTestPlusJunitVersion)
                    .testRuntimeOnlyDependency("Need scala-xml at test runtime",
                        "org.scala-lang.modules:scala-xml_" + scalaVersion + ":" + scalaXmlVersion);
                break;
            case KOTLINTEST:
                // Versions intentionally omitted — aligned via the kotlin-bom platform.
                buildScriptBuilder.testImplementationDependency("Use the Kotlin test library.", "org.jetbrains.kotlin:kotlin-test");
                buildScriptBuilder.testImplementationDependency("Use the Kotlin JUnit integration.", "org.jetbrains.kotlin:kotlin-test-junit");
                break;
            default:
                buildScriptBuilder.testImplementationDependency("Use JUnit test framework.", "junit:junit:" + libraryVersionProvider.getVersion("junit"));
                break;
        }
    }
}
package com.orientechnologies.orient.core.storage.impl.local.paginated;

import java.io.File;
import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

import com.orientechnologies.orient.core.command.OCommandOutputListener;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.db.tool.ODatabaseCompare;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.intent.OIntentMassiveInsert;
import com.orientechnologies.orient.core.metadata.schema.OClass;
import com.orientechnologies.orient.core.metadata.schema.OSchema;
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.storage.OStorage;

/**
 * TestNG test: concurrently writes records into a "base" plocal database, snapshots
 * the storage files (including the write-ahead log) into a "test" database WITHOUT a
 * clean close, re-opens the copy so it restores from the WAL, appends more records to
 * both databases, and asserts the two databases compare equal.
 *
 * @author Andrey Lomakin
 * @since 18.06.13
 */
@Test
public class LocalPaginatedStorageRestoreFromWALAndAddAdditionalRecords {
  // Database restored from the copied WAL ("test"); created lazily in the test method.
  private ODatabaseDocumentTx testDocumentTx;
  // Database written to directly ("base"); the reference for comparison.
  private ODatabaseDocumentTx baseDocumentTx;
  private File buildDir;
  // NOTE(review): never shut down — worker threads may outlive the test JVM teardown.
  private ExecutorService executorService = Executors.newCachedThreadPool();

  @BeforeClass
  public void beforeClass() {
    OGlobalConfiguration.MVRBTREE_RID_BINARY_THRESHOLD.setValue(-1);
    // Compression disabled, presumably so the copied storage files are byte-comparable.
    OGlobalConfiguration.STORAGE_COMPRESSION_METHOD.setValue("nothing");

    String buildDirectory = System.getProperty("buildDirectory", ".");
    buildDirectory += "/localPaginatedStorageRestoreFromWALAndAddAdditionalRecords";

    buildDir = new File(buildDirectory);
    // NOTE(review): File.delete() fails silently on a non-empty directory, and the
    // boolean results of delete()/mkdir() are ignored — stale state may survive.
    if (buildDir.exists())
      buildDir.delete();

    buildDir.mkdir();
  }

  @AfterClass
  public void afterClass() {
    buildDir.delete();
  }

  @BeforeMethod
  public void beforeMethod() {
    // Recreate the base database from scratch before every test method.
    baseDocumentTx = new ODatabaseDocumentTx("plocal:" + buildDir.getAbsolutePath() + "/baseLocalPaginatedStorageRestoreFromWALAndAddAdditionalRecords");
    if (baseDocumentTx.exists()) {
      baseDocumentTx.open("admin", "admin");
      baseDocumentTx.drop();
    }

    baseDocumentTx.create();

    createSchema(baseDocumentTx);
  }

  @AfterMethod
  public void afterMethod() {
    testDocumentTx.open("admin", "admin");
    testDocumentTx.drop();

    baseDocumentTx.open("admin", "admin");
    baseDocumentTx.drop();
  }

  public void testRestoreAndAddNewItems() throws Exception {
    List<Future<Void>> futures = new ArrayList<Future<Void>>();

    baseDocumentTx.declareIntent(new OIntentMassiveInsert());

    // Random per-task seeds are printed so a failing run can be reproduced.
    Random random = new Random();
    long[] seeds = new long[5];
    for (int i = 0; i < 5; i++) {
      seeds[i] = random.nextLong();
      System.out.println("Seed [" + i + "] = " + seeds[i]);
    }

    for (long seed : seeds)
      futures.add(executorService.submit(new DataPropagationTask(seed)));

    for (Future<Void> future : futures)
      future.get();

    futures.clear();

    // Grace period before snapshotting the storage files — presumably to let
    // background flushes settle; TODO confirm whether this is load-bearing.
    Thread.sleep(1500);
    // Copy base storage (incl. WAL) while it is still open, simulating a crash.
    copyDataFromTestWithoutClose();
    OStorage storage = baseDocumentTx.getStorage();
    baseDocumentTx.close();
    storage.close();

    // Opening the copy triggers restore from the copied WAL.
    testDocumentTx = new ODatabaseDocumentTx("plocal:" + buildDir.getAbsolutePath() + "/testLocalPaginatedStorageRestoreFromWALAndAddAdditionalRecords");
    testDocumentTx.open("admin", "admin");
    testDocumentTx.close();

    // Apply one more identical batch of changes to BOTH databases (DataPropagationTask
    // mirrors every save/delete into testDB once testDocumentTx is set).
    long dataAddSeed = random.nextLong();
    System.out.println("Data add seed = " + dataAddSeed);
    for (int i = 0; i < 1; i++)
      futures.add(executorService.submit(new DataPropagationTask(dataAddSeed)));

    for (Future<Void> future : futures)
      future.get();

    ODatabaseCompare databaseCompare = new ODatabaseCompare(testDocumentTx.getURL(), baseDocumentTx.getURL(), "admin", "admin",
        new OCommandOutputListener() {
          @Override
          public void onMessage(String text) {
            System.out.println(text);
          }
        });

    Assert.assertTrue(databaseCompare.compare());
  }

  /**
   * Copies the base storage directory to the "test" location while the storage is
   * still open (no clean shutdown), renaming the WAL master record (.wmr) and WAL
   * segment (.0.wal) files to match the target database name.
   */
  private void copyDataFromTestWithoutClose() throws Exception {
    final Path testStoragePath = Paths.get(baseDocumentTx.getURL().substring("plocal:".length()));
    Path buildPath = Paths.get(buildDir.toURI());
    final Path copyTo = buildPath.resolve("testLocalPaginatedStorageRestoreFromWALAndAddAdditionalRecords");

    Files.copy(testStoragePath, copyTo);

    Files.walkFileTree(testStoragePath, new SimpleFileVisitor<Path>() {
      @Override
      public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
        Path fileToCopy = copyTo.resolve(testStoragePath.relativize(file));
        if (fileToCopy.endsWith("baseLocalPaginatedStorageRestoreFromWALAndAddAdditionalRecords.wmr"))
          fileToCopy = fileToCopy.getParent().resolve("testLocalPaginatedStorageRestoreFromWALAndAddAdditionalRecords.wmr");
        else if (fileToCopy.endsWith("baseLocalPaginatedStorageRestoreFromWALAndAddAdditionalRecords.0.wal"))
          fileToCopy = fileToCopy.getParent().resolve("testLocalPaginatedStorageRestoreFromWALAndAddAdditionalRecords.0.wal");

        Files.copy(file, fileToCopy);

        return FileVisitResult.CONTINUE;
      }
    });
  }

  /** Creates the TestOne/TestTwo schema classes used by the propagation tasks. */
  private void createSchema(ODatabaseDocumentTx databaseDocumentTx) {
    ODatabaseRecordThreadLocal.INSTANCE.set(databaseDocumentTx);

    OSchema schema = databaseDocumentTx.getMetadata().getSchema();
    OClass testOneClass = schema.createClass("TestOne");

    testOneClass.createProperty("intProp", OType.INTEGER);
    testOneClass.createProperty("stringProp", OType.STRING);
    testOneClass.createProperty("stringSet", OType.EMBEDDEDSET, OType.STRING);
    testOneClass.createProperty("linkMap", OType.LINKMAP, OType.STRING);

    OClass testTwoClass = schema.createClass("TestTwo");

    testTwoClass.createProperty("stringList", OType.EMBEDDEDLIST, OType.STRING);
  }

  /**
   * Seeded workload: inserts/links/deletes 10000 TestOne/TestTwo documents into the
   * base database and, when the test database exists, mirrors every operation into it
   * so both end in the same state for the same seed.
   */
  public class DataPropagationTask implements Callable<Void> {
    private ODatabaseDocumentTx baseDB;
    private ODatabaseDocumentTx testDB;
    // Seed makes the random workload deterministic and reproducible.
    private long seed;

    public DataPropagationTask(long seed) {
      this.seed = seed;

      // Each task opens its own connections (OrientDB connections are thread-bound).
      baseDB = new ODatabaseDocumentTx(baseDocumentTx.getURL());
      baseDB.open("admin", "admin");

      if (testDocumentTx != null) {
        testDB = new ODatabaseDocumentTx(testDocumentTx.getURL());
        testDB.open("admin", "admin");
      }
    }

    @Override
    public Void call() throws Exception {
      Random random = new Random(seed);

      ODatabaseRecordThreadLocal.INSTANCE.set(baseDB);
      try {
        List<ORID> testTwoList = new ArrayList<ORID>();
        List<ORID> firstDocs = new ArrayList<ORID>();

        OClass classOne = baseDB.getMetadata().getSchema().getClass("TestOne");
        OClass classTwo = baseDB.getMetadata().getSchema().getClass("TestTwo");

        for (int i = 0; i < 10000; i++) {
          ODocument docOne = new ODocument(classOne);
          docOne.field("intProp", random.nextInt());

          byte[] stringData = new byte[256];
          random.nextBytes(stringData);
          // NOTE(review): platform-default charset; arbitrary bytes may not round-trip,
          // but both databases get the same String, so the comparison still holds.
          String stringProp = new String(stringData);

          docOne.field("stringProp", stringProp);

          Set<String> stringSet = new HashSet<String>();
          for (int n = 0; n < 5; n++) {
            stringSet.add("str" + random.nextInt());
          }
          docOne.field("stringSet", stringSet);

          saveDoc(docOne);

          firstDocs.add(docOne.getIdentity());

          if (random.nextBoolean()) {
            ODocument docTwo = new ODocument(classTwo);

            List<String> stringList = new ArrayList<String>();
            for (int n = 0; n < 5; n++) {
              stringList.add("strnd" + random.nextInt());
            }

            docTwo.field("stringList", stringList);

            saveDoc(docTwo);

            testTwoList.add(docTwo.getIdentity());
          }

          if (!testTwoList.isEmpty()) {
            // Re-save docOne with a linkMap over a random slice of existing TestTwo rids.
            int startIndex = random.nextInt(testTwoList.size());
            int endIndex = random.nextInt(testTwoList.size() - startIndex) + startIndex;

            Map<String, ORID> linkMap = new HashMap<String, ORID>();

            for (int n = startIndex; n < endIndex; n++) {
              ORID docTwoRid = testTwoList.get(n);
              linkMap.put(docTwoRid.toString(), docTwoRid);
            }

            docOne.field("linkMap", linkMap);
            saveDoc(docOne);
          }

          // ~20% chance to delete a previously inserted TestOne document.
          boolean deleteDoc = random.nextDouble() <= 0.2;
          if (deleteDoc) {
            ORID rid = firstDocs.remove(random.nextInt(firstDocs.size()));
            deleteDoc(rid);
          }
        }
      } finally {
        baseDB.close();
        if (testDB != null)
          testDB.close();
      }

      return null;
    }

    /**
     * Saves the document into the base DB and, if present, an identical copy into the
     * test DB, asserting both get the same record identity. The thread-local current
     * database is switched around each save and restored to baseDB afterwards.
     */
    private void saveDoc(ODocument document) {
      ODatabaseRecordThreadLocal.INSTANCE.set(baseDB);

      ODocument testDoc = new ODocument();
      document.copyTo(testDoc);
      document.save();

      if (testDB != null) {
        ODatabaseRecordThreadLocal.INSTANCE.set(testDB);
        testDoc.save();

        Assert.assertEquals(testDoc.getIdentity(), document.getIdentity());

        ODatabaseRecordThreadLocal.INSTANCE.set(baseDB);
      }
    }

    /** Deletes the rid from both databases, asserting it existed in testDB beforehand and is gone afterwards. */
    private void deleteDoc(ORID rid) {
      baseDB.delete(rid);

      if (testDB != null) {
        ODatabaseRecordThreadLocal.INSTANCE.set(testDB);

        Assert.assertNotNull(testDB.load(rid));
        testDB.delete(rid);
        Assert.assertNull(testDB.load(rid));

        ODatabaseRecordThreadLocal.INSTANCE.set(baseDB);
      }
    }
  }
}
// Copyright 2018 Tremolo Security, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.tremolosecurity.scalejs.ws; import static org.apache.directory.ldap.client.api.search.FilterBuilder.*; import java.io.IOException; import java.lang.reflect.Type; import java.net.URL; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Set; import java.util.regex.Matcher; import com.google.gson.Gson; import com.google.gson.reflect.TypeToken; import com.novell.ldap.LDAPAttribute; import com.novell.ldap.LDAPAttributeSet; import com.novell.ldap.LDAPEntry; import com.novell.ldap.LDAPException; import com.novell.ldap.LDAPSearchResult; import com.novell.ldap.LDAPSearchResults; import com.tremolosecurity.config.util.ConfigManager; import com.tremolosecurity.config.util.UrlHolder; import com.tremolosecurity.config.xml.ApplicationType; import com.tremolosecurity.config.xml.FilterConfigType; import com.tremolosecurity.config.xml.ParamType; import com.tremolosecurity.config.xml.ParamWithValueType; import com.tremolosecurity.config.xml.UrlType; import com.tremolosecurity.provisioning.service.util.TremoloUser; import com.tremolosecurity.provisioning.service.util.WFCall; import com.tremolosecurity.proxy.ProxySys; import com.tremolosecurity.proxy.auth.AuthInfo; import com.tremolosecurity.proxy.filter.HttpFilter; import com.tremolosecurity.proxy.filter.HttpFilterChain; import com.tremolosecurity.proxy.filter.HttpFilterConfig; import 
com.tremolosecurity.proxy.filter.HttpFilterRequest; import com.tremolosecurity.proxy.filter.HttpFilterResponse; import com.tremolosecurity.proxy.util.ProxyConstants; import com.tremolosecurity.saml.Attribute; import com.tremolosecurity.scalejs.cfg.ScaleConfig; import com.tremolosecurity.scalejs.data.ScaleError; import com.tremolosecurity.scalejs.sdk.UiDecisions; import com.tremolosecurity.scalejs.util.ScaleJSUtils; import com.tremolosecurity.scalejs.ws.ScaleMain; import com.tremolosecurity.server.GlobalEntries; import com.tremolosecurity.scalejs.operators.config.AttributeConfig; import com.tremolosecurity.scalejs.operators.config.OperatorsConfig; import com.tremolosecurity.scalejs.operators.data.OpsSearch; import com.tremolosecurity.scalejs.operators.data.OpsUpdate; import com.tremolosecurity.scalejs.operators.data.OpsUserData; import com.tremolosecurity.proxy.auth.AuthController; import org.apache.directory.ldap.client.api.search.FilterBuilder; import org.apache.logging.log4j.Logger; import net.sourceforge.myvd.types.Filter; import net.sourceforge.myvd.util.EntryUtil; /** * ScaleJSOperator * * */ public class ScaleJSOperator implements HttpFilter { static Logger logger = org.apache.logging.log4j.LogManager.getLogger(ScaleJSOperator.class.getName()); private OperatorsConfig config; private UrlType scaleJsUrl; private UiDecisions dec; private String scalejsAppName; private String scaleMainURL; private ScaleConfig scaleMainConfig; @Override public void doFilter(HttpFilterRequest request, HttpFilterResponse response, HttpFilterChain chain) throws Exception { Gson gson = new Gson(); request.getServletRequest().setAttribute("com.tremolosecurity.unison.proxy.noRedirectOnError", "com.tremolosecurity.unison.proxy.noRedirectOnError"); try { if (request.getRequestURI().endsWith("/ops/config")) { ScaleJSUtils.addCacheHeaders(response); response.setContentType("application/json"); response.getWriter().println(gson.toJson(this.config).trim()); } else if 
(request.getRequestURI().endsWith("/ops/search")) { runSearch(request, response, gson); } else if (request.getRequestURI().endsWith("/ops/user") && request.getMethod().equalsIgnoreCase("GET")) { lookupUser(request, response, gson); } else if (request.getRequestURI().endsWith("/ops/user") && request.getMethod().equalsIgnoreCase("POST")) { AuthInfo loggedIn = ((AuthController) request.getSession().getAttribute(ProxyConstants.AUTH_CTL)).getAuthInfo(); String json = new String((byte[]) request.getAttribute(ProxySys.MSG_BODY)); OpsUpdate updateInput = gson.fromJson(json,OpsUpdate.class); if (this.scaleMainConfig == null) { UrlHolder holder = GlobalEntries.getGlobalEntries().getConfigManager().findURL(this.scaleMainURL); for (HttpFilter filter : holder.getFilterChain()) { if (filter instanceof ScaleMain) { ScaleMain scaleMain = (ScaleMain) filter; this.scaleMainConfig = scaleMain.scaleConfig; } } } String dn = updateInput.getDn(); LDAPSearchResults res = GlobalEntries.getGlobalEntries().getConfigManager().getMyVD().search(dn, 0, "(objectClass=*)", new ArrayList<String>()); if (! 
res.hasMore()) { throw new Exception("Could not locate user '" + dn + "'"); } LDAPEntry entry = res.next(); AuthInfo userData = new AuthInfo(); userData.setUserDN(entry.getDN()); LDAPAttributeSet attrs = entry.getAttributeSet(); for (Object obj : attrs) { LDAPAttribute attr = (LDAPAttribute) obj; Attribute attrib = new Attribute(attr.getName()); String[] vals = attr.getStringValueArray(); for (String val : vals) { attrib.getValues().add(val); } userData.getAttribs().put(attrib.getName(), attrib); } ScaleError errors = new ScaleError(); Set<String> allowedAttrs = null; if (this.scaleMainConfig.getUiDecisions() != null) { allowedAttrs = this.scaleMainConfig.getUiDecisions().availableAttributes(userData, request.getServletRequest()); } HashMap<String,String> values = new HashMap<String,String>(); boolean ok = true; for (Attribute attr : updateInput.getAttributes()) { String attributeName = attr.getName(); if (allowedAttrs == null || allowedAttrs.contains(attributeName)) { String value = attr.getValues().get(0); if (this.scaleMainConfig.getAttributes().get(attributeName) == null) { errors.getErrors().add("Invalid attribute : '" + attributeName + "'"); ok = false; } else if (this.scaleMainConfig.getAttributes().get(attributeName).isReadOnly()) { errors.getErrors().add("Attribute is read only : '" + this.scaleMainConfig.getAttributes().get(attributeName).getDisplayName() + "'"); ok = false; } else if (this.scaleMainConfig.getAttributes().get(attributeName).isRequired() && value.length() == 0) { errors.getErrors().add("Attribute is required : '" + this.scaleMainConfig.getAttributes().get(attributeName).getDisplayName() + "'"); ok = false; } else if (this.scaleMainConfig.getAttributes().get(attributeName).getMinChars() > 0 && this.scaleMainConfig.getAttributes().get(attributeName).getMinChars() > value.length()) { errors.getErrors().add(this.scaleMainConfig.getAttributes().get(attributeName).getDisplayName() + " must have at least " + 
// NOTE(review): this excerpt begins inside an update-handling method whose
// declaration appears above this chunk; the lines below are its validation
// and workflow-submission tail.  Only comments have been added here.
// Completes the "must have at least N characters" error message begun above.
this.scaleMainConfig.getAttributes().get(attributeName).getMinChars() + " characters");
ok = false;
} else if (this.scaleMainConfig.getAttributes().get(attributeName).getMaxChars() > 0
        && this.scaleMainConfig.getAttributes().get(attributeName).getMaxChars() < value.length()) {
    // Max-length check, only enforced when maxChars is configured (> 0).
    errors.getErrors().add(this.scaleMainConfig.getAttributes().get(attributeName).getDisplayName()
            + " must have at most "
            + this.scaleMainConfig.getAttributes().get(attributeName).getMaxChars() + " characters");
    ok = false;
} else if (this.scaleMainConfig.getAttributes().get(attributeName).getPattern() != null) {
    // Regex check: a failed match (or any matcher error) marks the value invalid.
    try {
        Matcher m = this.scaleMainConfig.getAttributes().get(attributeName).getPattern().matcher(value);
        if (m == null || ! m.matches()) {
            ok = false;
        }
    } catch (Exception e) {
        // Deliberate best-effort: a broken pattern counts as a validation failure.
        ok = false;
    }
    if (!ok) {
        errors.getErrors().add("Attribute value not valid : '"
                + this.scaleMainConfig.getAttributes().get(attributeName).getDisplayName()
                + "' - " + this.scaleMainConfig.getAttributes().get(attributeName).getRegExFailedMsg());
    }
}
// Keep the value (valid or not) so the workflow call below can use it.
values.put(attributeName, value);
}
}
// Required-attribute pass: a required attribute must be present unless the
// UI-decision layer hides it from this operator (allowedAttrs).
for (String attrName : this.scaleMainConfig.getAttributes().keySet()) {
    if (this.scaleMainConfig.getAttributes().get(attrName).isRequired()
            && ! values.containsKey(attrName)
            && (allowedAttrs == null || allowedAttrs.contains(attrName) )) {
        errors.getErrors().add("Attribute is required : '"
                + this.scaleMainConfig.getAttributes().get(attrName).getDisplayName() + "'");
        ok = false;
    }
}
// Every update must carry a justification for the audit trail.
if (updateInput.getReason() == null || updateInput.getReason().trim().isEmpty()) {
    errors.getErrors().add("Reason For Updates Required");
    ok = false;
}
if (ok) {
    // Build and submit the provisioning workflow that applies the update.
    ConfigManager cfgMgr = GlobalEntries.getGlobalEntries().getConfigManager();
    WFCall wfCall = new WFCall();
    wfCall.setName(this.scaleMainConfig.getWorkflowName());
    wfCall.setReason(updateInput.getReason());
    wfCall.setUidAttributeName(this.scaleMainConfig.getUidAttributeName());
    // Requestor is the logged-in operator, not the user being edited.
    wfCall.setRequestor(loggedIn.getAttribs().get(this.scaleMainConfig.getUidAttributeName()).getValues().get(0));
    TremoloUser tu = new TremoloUser();
    tu.setUid(userData.getAttribs().get(this.scaleMainConfig.getUidAttributeName()).getValues().get(0));
    for (String name : values.keySet()) {
        tu.getAttributes().add(new Attribute(name,values.get(name)));
    }
    tu.getAttributes().add(new Attribute(this.scaleMainConfig.getUidAttributeName(),userData.getAttribs().get(this.scaleMainConfig.getUidAttributeName()).getValues().get(0)));
    wfCall.setUser(tu);
    try {
        com.tremolosecurity.provisioning.workflow.ExecuteWorkflow exec = new com.tremolosecurity.provisioning.workflow.ExecuteWorkflow();
        exec.execute(wfCall, GlobalEntries.getGlobalEntries().getConfigManager());
    } catch (Exception e) {
        // Workflow failure: log details server-side, return a generic error to the client.
        logger.error("Could not update user",e);
        response.setStatus(500);
        ScaleError error = new ScaleError();
        error.getErrors().add("Please contact your system administrator");
        ScaleJSUtils.addCacheHeaders(response);
        response.getWriter().print(gson.toJson(error).trim());
        response.getWriter().flush();
    }
} else {
    // Validation failed: return the accumulated error list as JSON.
    response.setStatus(500);
    ScaleJSUtils.addCacheHeaders(response);
    response.getWriter().print(gson.toJson(errors).trim());
    response.getWriter().flush();
}
}
} catch (Throwable t) {
    // Catch-all for the dispatcher above; never leak internals to the caller.
    logger.error("Could not execute request",t);
    response.setStatus(500);
    ScaleError error = new ScaleError();
    error.getErrors().add("Operation not supported");
    ScaleJSUtils.addCacheHeaders(response);
    response.getWriter().print(gson.toJson(error).trim());
    response.getWriter().flush();
}
}

/**
 * Looks up a single user by DN and writes an OpsUserData JSON document to the
 * response: visible attributes (filtered by UI decisions), role-attribute
 * values, and LDAP group memberships.
 *
 * @param request  proxied HTTP request; the "dn" parameter identifies the user
 * @param response receives the JSON payload
 * @param gson     serializer for the response body
 * @throws Exception if the user cannot be located or a lookup step fails
 */
private void lookupUser(HttpFilterRequest request, HttpFilterResponse response, Gson gson)
        throws Exception, LDAPException, IOException {
    // Lazily resolve the ScaleMain filter's config from the configured URL.
    if (this.scaleMainConfig == null) {
        UrlHolder holder = GlobalEntries.getGlobalEntries().getConfigManager().findURL(this.scaleMainURL);
        for (HttpFilter filter : holder.getFilterChain()) {
            if (filter instanceof ScaleMain) {
                ScaleMain scaleMain = (ScaleMain) filter;
                this.scaleMainConfig = scaleMain.scaleConfig;
            }
        }
    }
    String dn = request.getParameter("dn").getValues().get(0);
    // An upstream filter may constrain the search; default to match-anything.
    FilterBuilder baseFilter = (FilterBuilder) request.getAttribute("ops.search.filter");
    String filter = "(objectClass=*)";
    if (baseFilter != null) {
        filter = baseFilter.toString();
    }
    // Base-scope search (scope 0) on the DN itself.
    LDAPSearchResults res = GlobalEntries.getGlobalEntries().getConfigManager().getMyVD().search(dn, 0, filter, new ArrayList<String>());
    if (! res.hasMore()) {
        throw new Exception("Could not locate user '" + dn + "'");
    }
    LDAPEntry entry = res.next();
    AuthInfo userData = new AuthInfo();
    userData.setUserDN(entry.getDN());
    // Copy all LDAP attribute values into the AuthInfo attribute map.
    LDAPAttributeSet attrs = entry.getAttributeSet();
    for (Object obj : attrs) {
        LDAPAttribute attr = (LDAPAttribute) obj;
        Attribute attrib = new Attribute(attr.getName());
        String[] vals = attr.getStringValueArray();
        for (String val : vals) {
            attrib.getValues().add(val);
        }
        userData.getAttribs().put(attrib.getName(), attrib);
    }
    // Optional UI-decision hook limits which attributes this operator may see.
    Set<String> allowedAttrs = null;
    if (scaleMainConfig.getUiDecisions() != null) {
        allowedAttrs = this.scaleMainConfig.getUiDecisions().availableAttributes(userData, request.getServletRequest());
    }
    OpsUserData userToSend = new OpsUserData();
    userToSend.setDn(userData.getUserDN());
    for (String attrName : this.scaleMainConfig.getUserAttributeList()) {
        if (allowedAttrs == null || allowedAttrs.contains(attrName)) {
            Attribute attr = new Attribute(attrName);
            Attribute fromUser = userData.getAttribs().get(attrName);
            if (fromUser != null) {
                attr.getValues().addAll(fromUser.getValues());
                if (attrName.equalsIgnoreCase(this.scaleMainConfig.getUidAttributeName())) {
                    userToSend.setUid(fromUser.getValues().get(0));
                }
            }
            userToSend.getAttributes().add(attr);
        }
    }
    // Role attribute (if configured) seeds the user's group list.
    if (this.scaleMainConfig.getRoleAttribute() != null && ! this.scaleMainConfig.getRoleAttribute().isEmpty()) {
        Attribute fromUser = userData.getAttribs().get(this.scaleMainConfig.getRoleAttribute());
        Attribute attr = new Attribute(this.scaleMainConfig.getRoleAttribute());
        if (fromUser != null) {
            attr.getValues().addAll(fromUser.getValues());
            userToSend.getGroups().clear();
            userToSend.getGroups().addAll(fromUser.getValues());
        }
        userToSend.getAttributes().add(attr);
    }
    // Add LDAP group memberships: subtree search (scope 2) for groups whose
    // member attribute equals this DN.
    ArrayList<String> attrNames = new ArrayList<String>();
    attrNames.add("cn");
    attrNames.add(GlobalEntries.getGlobalEntries().getConfigManager().getCfg().getGroupMemberAttribute());
    res = GlobalEntries.getGlobalEntries().getConfigManager().getMyVD().search(GlobalEntries.getGlobalEntries().getConfigManager().getCfg().getLdapRoot(), 2, equal(GlobalEntries.getGlobalEntries().getConfigManager().getCfg().getGroupMemberAttribute(),dn).toString(), attrNames);
    // Re-check each candidate entry locally against the same filter.
    net.sourceforge.myvd.types.Filter ldapFiltertoCheck = new net.sourceforge.myvd.types.Filter(equal(GlobalEntries.getGlobalEntries().getConfigManager().getCfg().getGroupMemberAttribute(),dn).toString());
    while (res.hasMore()) {
        entry = res.next();
        if (ldapFiltertoCheck.getRoot().checkEntry(entry)) {
            LDAPAttribute la = entry.getAttribute("cn");
            if (la != null) {
                String val = la.getStringValue();
                if (! userToSend.getGroups().contains(val)) {
                    userToSend.getGroups().add(val);
                }
            }
        }
    }
    // Attribute metadata is trimmed to what the UI-decision layer allows.
    if (scaleMainConfig.getUiDecisions() != null) {
        Set<String> smAllowedAttrs = this.scaleMainConfig.getUiDecisions().availableAttributes(userData, request.getServletRequest());
        ScaleConfig local = new ScaleConfig(this.scaleMainConfig);
        if (smAllowedAttrs != null) {
            for (String attrName : this.scaleMainConfig.getAttributes().keySet()) {
                if (! smAllowedAttrs.contains(attrName)) {
                    local.getAttributes().remove(attrName);
                }
            }
        }
        userToSend.setMetaData(local.getAttributes());
        userToSend.setCanEditUser(this.scaleMainConfig.getUiDecisions().canEditUser(userData, request.getServletRequest()));
    } else {
        userToSend.setMetaData(scaleMainConfig.getAttributes());
        userToSend.setCanEditUser(scaleMainConfig.isCanEditUser());
    }
    ScaleJSUtils.addCacheHeaders(response);
    response.setContentType("application/json");
    response.getWriter().println(gson.toJson(userToSend).trim());
}

/**
 * Runs an operator-initiated search.  The request body is an OpsSearch JSON
 * document; picked attributes are AND-ed together (plus any upstream base
 * filter) and results are returned as a JSON list of attribute maps.
 */
private void runSearch(HttpFilterRequest request, HttpFilterResponse response, Gson gson)
        throws Exception, LDAPException, IOException {
    String json = new String((byte[]) request.getAttribute(ProxySys.MSG_BODY));
    OpsSearch opsSearch = gson.fromJson(json, OpsSearch.class);
    List<AttributeConfig> forSearch = opsSearch.getToSearch();
    List<FilterBuilder> filter = new ArrayList<FilterBuilder>();
    for (AttributeConfig attr : forSearch) {
        if (attr.isPicked()) {
            filter.add(equal(attr.getName(), attr.getValue()));
        }
    }
    FilterBuilder[] fb = new FilterBuilder[filter.size()];
    filter.toArray(fb);
    // Combine the picked-attribute filter with the optional upstream base filter.
    FilterBuilder baseFilter = (FilterBuilder) request.getAttribute("ops.search.filter");
    String filterString;
    if (baseFilter != null) {
        FilterBuilder localFilter = and(fb);
        filterString = and(localFilter,baseFilter).toString();
    } else {
        filterString = and(fb).toString();
    }
    // The client sends a label; map it to the configured search-base DN.
    String searchBase = this.config.getBaseLabelToDN().get(opsSearch.getBase());
    if (searchBase == null) {
        throw new Exception("Invalid search base");
    }
    List<HashMap<String, String>> resList = new ArrayList<HashMap<String, String>>();
    // Subtree search (scope 2) under the chosen base.
    LDAPSearchResults res = GlobalEntries.getGlobalEntries().getConfigManager().getMyVD().search(searchBase, 2, filterString, new ArrayList<String>());
    while (res.hasMore()) {
        HashMap<String, String> ret = new HashMap<String, String>();
        resList.add(ret);
        LDAPEntry entry = res.next();
        ret.put("dn", entry.getDN());
        // Missing result attributes are reported as empty strings.
        for (AttributeConfig attr : this.config.getResultsAttributes()) {
            if (entry.getAttribute(attr.getName()) != null) {
                String val = entry.getAttribute(attr.getName()).getStringValue();
                ret.put(attr.getName(), val);
            } else {
                ret.put(attr.getName(), "");
            }
        }
    }
    ScaleJSUtils.addCacheHeaders(response);
    response.setContentType("application/json");
    response.getWriter().println(gson.toJson(resList).trim());
}

// Response rewriting is not used by this filter.
@Override
public void filterResponseText(HttpFilterRequest request, HttpFilterResponse response, HttpFilterChain chain,
        StringBuffer data) throws Exception {
}

@Override
public void filterResponseBinary(HttpFilterRequest request, HttpFilterResponse response, HttpFilterChain chain,
        byte[] data, int length) throws Exception {
}

/**
 * Parses filter configuration: search bases, searchable/result attributes,
 * the ScaleJS Main URL and the optional UI-decision helper.
 * Each "name=value" style setting is split on its first '='.
 */
@Override
public void initFilter(HttpFilterConfig config) throws Exception {
    this.config = new OperatorsConfig();
    Attribute bases = config.getAttribute("bases");
    if (bases == null) {
        throw new Exception("bases not set");
    }
    for (String base : bases.getValues()) {
        String desc = base.substring(0, base.indexOf('='));
        String ldap = base.substring(base.indexOf('=') + 1);
        this.config.getBaseLabelToDN().put(desc, ldap);
        this.config.getSearchBases().add(desc);
    }
    Attribute attr = config.getAttribute("searchableAttributes");
    if (attr == null) {
        throw new Exception("searchableAttributes not found");
    }
    for (String searchable : attr.getValues()) {
        String name = searchable.substring(0, searchable.indexOf('='));
        String label = searchable.substring(searchable.indexOf('=') + 1);
        this.config.getSearchableAttributes().add(new AttributeConfig(name, label, ""));
    }
    attr = config.getAttribute("resultAttributes");
    if (attr == null) {
        throw new Exception("resultAttributes not found");
    }
    for (String resultAttr : attr.getValues()) {
        String name = resultAttr.substring(0, resultAttr.indexOf('='));
        String label = resultAttr.substring(resultAttr.indexOf('=') + 1);
        this.config.getResultsAttributes().add(new AttributeConfig(name, label, ""));
    }
    this.config.setScaleJsMainUri(this.loadAttributeValue("scaleMainURI", "Scale Main URI", config));
    this.config.setHomeUrl(this.loadAttributeValue("homeUrl", "Home URL", config));
    this.scalejsAppName = this.loadAttributeValue("scaleMainAppName", "Scale Main Application", config);
    // Resolve the ScaleJS Main application and URL from the server config.
    ApplicationType app = null;
    for (ApplicationType at : config.getConfigManager().getCfg().getApplications().getApplication()) {
        if (at.getName().equalsIgnoreCase(scalejsAppName)) {
            app = at;
        }
    }
    if (app == null) {
        throw new Exception(scalejsAppName + " does not exist");
    }
    for (UrlType url : app.getUrls().getUrl()) {
        if (url.getUri().equalsIgnoreCase(this.config.getScaleJsMainUri())) {
            this.scaleJsUrl = url;
        }
    }
    if (this.scaleJsUrl == null) {
        throw new Exception("Could not find url for ScaleJS Main");
    }
    this.scaleMainURL = "https://" + this.scaleJsUrl.getHost().get(0) + this.scaleJsUrl.getUri();
    // Pull UI-decision helper settings off the ScaleMain filter declaration.
    HashMap<String,Attribute> decCfg = new HashMap<String,Attribute>();
    for (FilterConfigType filter : this.scaleJsUrl.getFilterChain().getFilter()) {
        if (filter.getClazz().equalsIgnoreCase("com.tremolosecurity.scalejs.ws.ScaleMain")) {
            for (ParamWithValueType pt : filter.getParam()) {
                if (pt.getName().equalsIgnoreCase("uiHelperClassName")) {
                    this.dec = (UiDecisions) Class.forName(pt.getValue()).newInstance();
                } else if (pt.getName().equalsIgnoreCase("uihelper.params")) {
                    String v = pt.getValue();
                    String name = v.substring(0,v.indexOf('='));
                    String value = v.substring(v.indexOf('=') + 1);
                    Attribute param = decCfg.get(name);
                    if (param == null) {
                        param = new Attribute(name);
                        decCfg.put(name, param);
                    }
                    param.getValues().add(value);
                }
            }
        }
    }
    if (this.dec != null) {
        this.dec.init(decCfg);
    }
}

/**
 * Reads a required single-valued filter parameter.
 *
 * @throws Exception if the parameter is missing
 */
private String loadAttributeValue(String name,String label,HttpFilterConfig config) throws Exception {
    Attribute attr = config.getAttribute(name);
    if (attr == null) {
        throw new Exception(label + " not found");
    }
    String val = attr.getValues().get(0);
    logger.info(label + ": '" + val + "'");
    return val;
}

/**
 * Reads an optional single-valued filter parameter; returns null (with a
 * warning) when it is absent.
 */
private String loadOptionalAttributeValue(String name,String label,HttpFilterConfig config) throws Exception {
    Attribute attr = config.getAttribute(name);
    if (attr == null) {
        logger.warn(label + " not found");
        return null;
    }
    String val = attr.getValues().get(0);
    logger.info(label + ": '" + val + "'");
    return val;
}
}
/**
 * Copyright 2004-2005 Sun Microsystems, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.acre.pdmengine.core;

import org.acre.common.AcreStringUtil;
import org.acre.dao.DAOFactory;
import org.acre.dao.PDMXMLConstants;
import org.acre.dao.PatternRepository;
import org.acre.pdmengine.PatternEngineException;
import org.acre.pdmengine.model.PatternResult;
import org.acre.pdmengine.model.QueryResult;
import org.acre.pdmengine.model.RelationshipResult;
import org.acre.pdmengine.model.RoleResult;
import org.acre.pdmengine.model.impl.RelationshipResultImpl;
import org.acre.pdmengine.pqe.PQLEngineFacade;
import org.acre.pdmengine.pqe.PQLVariable;
import org.acre.pdmengine.pqe.PatternBaseCommand;
import org.acre.pdmengine.pqe.PatternCommandFactory;
import org.acre.pdmengine.util.PMTypeUtil;
import org.acre.pdmengine.util.PatternEngineUtil;
import org.acre.pdm.*;

import javax.xml.bind.JAXBException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;

/**
 * Executes the relationships declared in a PDM (pattern definition model):
 * preprocesses role references, expands pattern-typed roles, evaluates each
 * relationship through the PQL engine, and discovers implicit relationships
 * between pattern roles.
 *
 * User: rajmohan@sun.com
 * Date: Nov 7, 2004
 * Time: 3:17:14 PM
 */
public class RelationshipExecutor {

    private PQLEngineFacade pqlEngineFacade;

    public RelationshipExecutor(PQLEngineFacade pqlEngineFacade) {
        this.pqlEngineFacade = pqlEngineFacade;
    }

    /**
     * Preprocess and execute all relationships specified in the PDM, and
     * accumulate the results in the given PatternResult.
     *
     * @param pdm           pattern definition whose relationships are evaluated
     * @param patternResult receives the evaluated relationship results
     */
    public void executeRelationships(PDMType pdm, PatternResult patternResult) {
        List relationshipResults = new ArrayList();
        RelationshipsType relationships = pdm.getRelationships();
        if (relationships == null) {
            return;
        }

        String patternName = pdm.getName();
        boolean anyJoinType = false;

        // Pass 1: preprocess all relationships.  Pattern-typed roles are
        // expanded in place (via the ListIterator) into one relationship per
        // role pair; an expansion into more than one relationship switches
        // the whole PDM to ANY-join semantics.
        ListIterator iter = relationships.getRelationship().listIterator();
        while (iter.hasNext()) {
            RelationshipType relationship = (RelationshipType) iter.next();

            // Resolve any "thisPattern.role" reference down to "role".
            simplifyRoleReferences(patternName, relationship);

            List newRelationships = preProcessRelationship(pdm, relationship);
            if (newRelationships.size() > 0) {
                // Replace the current relationship with the newly minted ones.
                iter.remove();
                Iterator newRelItr = newRelationships.iterator();
                while (newRelItr.hasNext()) {
                    iter.add(newRelItr.next());
                }
                if (newRelationships.size() > 1) {
                    anyJoinType = true;
                }
            }
        }

        // Pass 2: execute all (possibly expanded) relationships.
        iter = relationships.getRelationship().listIterator();
        while (iter.hasNext()) {
            RelationshipType relationship = (RelationshipType) iter.next();

            RoleResult fromRoleResult = patternResult.getRoleReference(relationship.getFromRole());
            RoleResult toRoleResult = patternResult.getRoleReference(relationship.getToRole());

            RelationshipResultImpl relationshipResult =
                    (RelationshipResultImpl) executeRelationship(patternResult, relationship,
                            fromRoleResult, toRoleResult);

            // Temporary hack - convert all expanded relationships to the ANY
            // join type.  Ultimately this requires a change in the Pattern
            // Schema to support ALL or ANY relationship types.
            if (anyJoinType) {
                relationshipResult.setJoinType(RelationshipResultImpl.JOIN_TYPE_ANY);
            }
            relationshipResults.add(relationshipResult);
        }

        patternResult.setRelationships(relationshipResults);
    }

    // Rule 1: translate RoleReference "thisPattern.role" to plain "role".
    private void simplifyRoleReferences(String patternName, RelationshipType relationship) {
        PatternEngineUtil.RoleReference rolereference;

        rolereference = PatternEngineUtil.getRoleReference(relationship.getFromRole());
        if (patternName.equalsIgnoreCase(rolereference.pattern)) {
            relationship.setFromRole(rolereference.role);
        }

        rolereference = PatternEngineUtil.getRoleReference(relationship.getToRole());
        if (patternName.equalsIgnoreCase(rolereference.pattern)) {
            relationship.setToRole(rolereference.role);
        }
    }

    /**
     * Expands a relationship whose from/to role is a PDM-typed role into the
     * cross product of the referenced pattern's roles.  Returns an empty list
     * when neither side references a pattern (no expansion needed).
     */
    private List preProcessRelationship(PDMType pdm, RelationshipType relationship) {
        List newRelationships = new ArrayList();
        PatternRepository patternRepository = DAOFactory.getPatternRepository();

        String[] fromRoles = null, toRoles = null;

        RoleType fromRoleType = PMTypeUtil.getRoleType(pdm, relationship.getFromRole());
        if ((fromRoleType != null)
                && PDMXMLConstants.ROLE_TYPE_PDM.equalsIgnoreCase(fromRoleType.getType())) {
            String referedPattern = PMTypeUtil.getReferedPatternName(fromRoleType);
            fromRoles = PMTypeUtil.getPatternRoleNames(referedPattern);
            // Expand each role name to "role.patternRole".
            for (int i = 0; i < fromRoles.length; i++) {
                fromRoles[i] = relationship.getFromRole() + "." + fromRoles[i];
            }
        }

        RoleType toRoleType = PMTypeUtil.getRoleType(pdm, relationship.getToRole());
        if ((toRoleType != null)
                && PDMXMLConstants.ROLE_TYPE_PDM.equalsIgnoreCase(toRoleType.getType())) {
            String referedPattern = PMTypeUtil.getReferedPatternName(toRoleType);
            toRoles = PMTypeUtil.getPatternRoleNames(referedPattern);
            for (int i = 0; i < toRoles.length; i++) {
                toRoles[i] = relationship.getToRole() + "." + toRoles[i];
            }
        }

        // No pattern references on either side: nothing to expand.
        if (AcreStringUtil.isEmpty(fromRoles) && AcreStringUtil.isEmpty(toRoles)) {
            return newRelationships;
        }

        if (AcreStringUtil.isEmpty(fromRoles)) {
            fromRoles = new String[]{relationship.getFromRole()};
        }
        if (AcreStringUtil.isEmpty(toRoles)) {
            toRoles = new String[]{relationship.getToRole()};
        }

        // Cross product: one cloned relationship per (fromRole, toRole) pair.
        for (int i = 0; i < fromRoles.length; i++) {
            for (int j = 0; j < toRoles.length; j++) {
                RelationshipType newRelationship = patternRepository.cloneRelationship(relationship);
                newRelationship.setFromRole(fromRoles[i]);
                newRelationship.setToRole(toRoles[j]);
                newRelationships.add(newRelationship);
            }
        }
        return newRelationships;
    }

    /**
     * Evaluate a relationship type - calls, creates, uses, etc.
     *
     * @param parentPatternResult relationship's parent PatternResult (may be null)
     * @param relationship        relationship to be evaluated
     * @param fromRole            relationship's left role operand
     * @param toRole              relationship's right role operand
     * @return RelationshipResult - result of evaluation
     */
    public RelationshipResult executeRelationship(PatternResult parentPatternResult,
                                                  RelationshipType relationship,
                                                  RoleResult fromRole,
                                                  RoleResult toRole) {
        RelationshipResultImpl relationshipResult =
                new RelationshipResultImpl(relationship, parentPatternResult);
        relationshipResult.setFromRole(fromRole);
        relationshipResult.setToRole(toRole);

        PatternCommandFactory factory = PatternCommandFactory.getInstance();
        PatternBaseCommand relationalOperator =
                factory.getPDMRelationalOperator(relationship.getType());

        String pdmName = "transient";
        if (parentPatternResult != null) {
            pdmName = parentPatternResult.getName();
        }
        String resultVariable = pdmName + "_" + relationship.getName();

        String operandFromVariable = fromRole.getVariableName();
        QueryResult fromQR = (QueryResult) fromRole.getRoleResult();
        PQLVariable fromEntity = new PQLVariable(operandFromVariable, fromQR.getArtifactType());

        String operandToVariable = toRole.getVariableName();
        // BUG FIX: the right-hand operand must come from toRole; the original
        // cast fromRole.getRoleResult() here, making the to-entity's artifact
        // type always equal to the from-entity's.
        QueryResult toQR = (QueryResult) toRole.getRoleResult();
        PQLVariable toEntity = new PQLVariable(operandToVariable, toQR.getArtifactType());

        PQLVariable resultEntity =
                relationalOperator.execute(pqlEngineFacade, resultVariable, fromEntity, toEntity, true);
        relationshipResult.setResult(resultEntity.getPqlResultMap());
        return relationshipResult;
    }

    /**
     * Discover relationships between every (source role, target role) pair of
     * two patterns.  Returns null when either pattern has no roles.
     */
    // List<RelationshipResult>
    public List discoverRelationships(PatternResult sourcePattern, PatternResult targetPattern,
                                      String relationshipType[]) {
        if (sourcePattern.getRoles().isEmpty() || targetPattern.getRoles().isEmpty()) {
            return null;
        }

        Iterator sourcePDMRoles = sourcePattern.getRoles().iterator();
        List relationshipResultList = new ArrayList();
        while (sourcePDMRoles.hasNext()) {
            RoleResult sourceRole = (RoleResult) sourcePDMRoles.next();
            Iterator targetPDMRoles = targetPattern.getRoles().iterator();
            while (targetPDMRoles.hasNext()) {
                RoleResult targetRole = (RoleResult) targetPDMRoles.next();
                // Never relate a role to itself.
                if (sourceRole == targetRole) {
                    continue;
                }
                List result = discoverRelationship(sourcePattern.getName(), sourceRole,
                        targetPattern.getName(), targetRole, relationshipType);
                relationshipResultList.addAll(result);
            }
        }
        return relationshipResultList;
    }

    /**
     * Evaluates each candidate relationship operator between one pair of roles
     * and keeps only non-empty results.
     */
    // List<RelationshipResult>
    private List discoverRelationship(String sourcePDMName, RoleResult sourceRole,
                                      String targetPDMName, RoleResult targetRole,
                                      String relOpTypes[]) {
        RelationshipType relationshipType;
        List resultList = new ArrayList();
        for (int idx = 0; idx < relOpTypes.length; idx++) {
            ObjectFactory objectFactory = new ObjectFactory();
            try {
                relationshipType = objectFactory.createRelationshipType();
                relationshipType.setName(relOpTypes[idx]);
                relationshipType.setType(relOpTypes[idx]);
                relationshipType.setFromRole(sourcePDMName + "." + sourceRole.getName());
                relationshipType.setToRole(targetPDMName + "." + targetRole.getName());
            } catch (JAXBException e) {
                throw new PatternEngineException("Error creating RelationshipType : " + e.getMessage(), e);
            }

            RelationshipResultImpl result =
                    (RelationshipResultImpl) executeRelationship(null, relationshipType,
                            sourceRole, targetRole);
            if (result != null) {
                if (!result.isEmpty()) {
                    resultList.add(result);
                }
            }
        }
        return resultList;
    }

    /**
     * Discover relationships among Roles in the given PDM.
     *
     * @param pattern          pattern whose roles are compared pairwise
     * @param relationshipType candidate relationship operator names
     * @return list of non-empty RelationshipResults, or null if the pattern has no roles
     */
    // List<RelationshipResult>
    public List discoverRelationships(PatternResult pattern, String relationshipType[]) {
        if (pattern.getRoles().isEmpty()) {
            return null;
        }

        Iterator pdmRoles = pattern.getRoles().iterator();
        List relationshipResultList = new ArrayList();
        while (pdmRoles.hasNext()) {
            RoleResult sourceRole = (RoleResult) pdmRoles.next();
            Iterator targetPDMRoles = pattern.getRoles().iterator();
            while (targetPDMRoles.hasNext()) {
                RoleResult targetRole = (RoleResult) targetPDMRoles.next();
                if (sourceRole != targetRole) {
                    List result = discoverRelationship(pattern.getName(), sourceRole,
                            pattern.getName(), targetRole, relationshipType);
                    relationshipResultList.addAll(result);
                }
            }
        }
        return relationshipResultList;
    }
}
package com.intel.tsrytkon.myfirstgame;

import android.app.Activity;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.util.AttributeSet;
import android.util.Log;
import android.view.MotionEvent;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

import java.util.ArrayList;

/**
 * Created by tsrytkon on 10/25/15.
 *
 * Game surface: owns the spaceship, droids and shots, renders frames on the
 * surface, and translates touch input into ship movement and shooting.
 */
public class GameSurfaceView extends SurfaceView implements SurfaceHolder.Callback, GameView {

    /** Axis-aligned game object with a position and size in pixels. */
    public class Obj {
        public int m_x;
        public int m_y;
        public int m_w;
        public int m_h;

        public Obj(int x, int y, int w, int h) {
            m_x = x;
            m_y = y;
            m_w = w;
            m_h = h;
        }

        /**
         * Returns true when obj's top-left corner lies strictly inside this
         * object's bounding box.
         */
        public boolean hits(Obj obj) {
            if (m_x < obj.m_x && obj.m_x < m_x + m_w
                    && m_y < obj.m_y && obj.m_y < m_y + m_h) {
                System.out.println("Hit!!!");
                return true;
            }
            return false;
        }
    }

    /** A projectile fired by the spaceship, drawn as a small yellow circle. */
    public class Shot extends Obj {
        public Shot(int x, int y, int w, int h) {
            super(x, y, w, h);
        }

        public void draw(Canvas canvas) {
            Paint shotPaint = new Paint();
            shotPaint.setColor(Color.YELLOW);
            canvas.drawCircle(m_x, m_y, 5, shotPaint);
        }
    }

    /** An enemy that patrols in a rectangular-ish loop driven by tickCount. */
    public class Droid extends Obj {
        private int tickCount;

        public Droid(int x, int y, int w, int h) {
            super(x, y, w, h);
        }

        public void draw(Canvas canvas) {
            canvas.drawBitmap(droid, m_x, m_y, null);
        }

        /**
         * Advances the patrol: right, down, left, then up, 10 ticks per leg.
         * NOTE(review): the upward leg moves 1 px/tick while the others move
         * 3 px/tick, so the loop drifts downward over time — confirm whether
         * that asymmetry is intentional.
         */
        public void move() {
            if (tickCount < 10) {
                m_x += 3;
            } else if (tickCount < 20) {
                m_y += 3;
            } else if (tickCount < 30) {
                m_x -= 3;
            } else if (tickCount < 40) {
                m_y -= 1;
            } else {
                tickCount = 0;
            }
            tickCount++;
        }
    }

    /** The player's ship; horizontally eased toward the last touch position. */
    public class Spaceship extends Obj {
        public Spaceship(int x, int y, int w, int h) {
            super(x, y, w, h);
        }

        public void draw(Canvas canvas) {
            // m_x is the ship's center, so offset by half the width.
            canvas.drawBitmap(spaceship, m_x - m_w / 2, m_y, null);
        }

        /** Moves 1/5 of the remaining distance toward target_x each frame. */
        public void move(int target_x) {
            m_x += (target_x - m_x) / 5;
        }
    }

    public SoundPoolPlayer sounds;
    private Bitmap droid;
    private Bitmap spaceship;
    private Bitmap background;
    private MainThread m_thread = null;
    private SurfaceHolder m_holder;
    private static final String TAG = MainThread.class.getSimpleName();
    public Spaceship ship = null;
    public int touch_x, touch_y_start = 0;
    public ArrayList<Shot> shots = new ArrayList<>();
    public ArrayList<Droid> droids = new ArrayList<>();

    public GameSurfaceView(Context context) {
        super(context);
        init(context);
    }

    public GameSurfaceView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(context);
    }

    public GameSurfaceView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        init(context);
    }

    /** Shared constructor body: registers the surface callback and game thread. */
    public void init(Context context) {
        System.out.println("Initializing Game!");
        SurfaceHolder surfaceHolder = getHolder();
        surfaceHolder.addCallback(this);
        m_thread = new MainThread(this);
        sounds = new SoundPoolPlayer(context);
    }

    public int droid_width() {
        return droid.getWidth();
    }

    public int droid_height() {
        // BUG FIX: previously returned droid.getWidth(); the droid spawn code
        // below uses getWidth()/getHeight() correctly, so this accessor was a
        // copy-paste error.
        return droid.getHeight();
    }

    /**
     * Loads and scales graphics, places the droids and the ship, and starts
     * the game loop once the surface exists.
     */
    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        System.out.println("surfaceCreated");
        m_holder = holder;
        // Set the droid initial position
        // Load and scale the graphics
        droid = BitmapFactory.decodeResource(getResources(), R.drawable.ic_launcher);
        spaceship = BitmapFactory.decodeResource(getResources(), R.drawable.spaceship);
        Bitmap tmpBmp = BitmapFactory.decodeResource(getResources(), R.drawable.space_1);
        // Scale the background so its height matches the view height.
        float scale = (float) tmpBmp.getHeight() / (float) getHeight();
        int newWidth = Math.round(tmpBmp.getWidth() / scale);
        int newHeight = Math.round(tmpBmp.getHeight() / scale);
        background = Bitmap.createScaledBitmap(tmpBmp, newWidth, newHeight, true);

        // Starting positions: a row of 10 droids near the top of the screen.
        int droid_y = getHeight() / 12;
        int droid_x = getWidth() / 12;
        for (int i = 0; i < 10; i++) {
            droids.add(new Droid((i + 1) * droid_x, droid_y, droid.getWidth(), droid.getHeight()));
        }
        // Ship starts bottom-center.
        int ship_x = getWidth() / 2;
        int ship_y = getHeight() - getHeight() / 12 - spaceship.getWidth();
        ship = new Spaceship(ship_x, ship_y, spaceship.getWidth(), spaceship.getHeight());
        touch_x = getWidth() / 2 - spaceship.getWidth();
        m_thread.setRunning(true);
        m_thread.start();
    }

    @Override
    public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int i2, int i3) {
        System.out.println("surfaceChanged");
    }

    /** Stops the game loop and releases audio when the surface goes away. */
    @Override
    public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
        System.out.println("surfaceDestroyed");
        m_thread.setRunning(false);
        sounds.release();
        try {
            m_thread.join();
        } catch (InterruptedException e) {
            System.out.println("Could not close MainThread!");
            System.out.println(e);
        }
    }

    /** Renders one frame: background, droids, shots, then the ship. */
    public void draw() {
        Canvas canvas = m_holder.lockCanvas(null);
        if (canvas != null) {
            canvas.drawColor(Color.BLACK);
            canvas.drawBitmap(background, 0, 0, null);
            for (int j = 0; j < droids.size(); j++) {
                droids.get(j).draw(canvas);
            }
            for (int i = 0; i < shots.size(); i++) {
                shots.get(i).draw(canvas);
            }
            ship.draw(canvas);
            m_holder.unlockCanvasAndPost(canvas);
        }
    }

    /** Renders the victory screen. */
    public void drawFinish() {
        Canvas canvas = m_holder.lockCanvas(null);
        if (canvas != null) {
            canvas.drawColor(Color.BLACK);
            canvas.drawBitmap(background, 0, 0, null);
            Paint textPaint = new Paint();
            textPaint.setColor(Color.YELLOW);
            canvas.drawText("You won!", getWidth() / 2, getHeight() / 2, textPaint);
            ship.draw(canvas);
            m_holder.unlockCanvasAndPost(canvas);
        }
    }

    /**
     * Touch handling: a downward swipe of more than 60 px fires a shot; the
     * latest X position becomes the ship's movement target.
     */
    public boolean onTouchEvent(MotionEvent event) {
        if (event.getAction() == MotionEvent.ACTION_DOWN) {
            touch_y_start = (int) event.getY();
        } else if (event.getAction() == MotionEvent.ACTION_UP) {
            int touch_y_end = (int) event.getY();
            System.out.println("Y diff " + (touch_y_end - touch_y_start));
            if ((touch_y_end - touch_y_start) > 60) {
                shots.add(new Shot(ship.m_x, ship.m_y, spaceship.getWidth(), spaceship.getHeight()));
                sounds.playShortResource(R.raw.blaster_solo);
            }
        }
        touch_x = (int) event.getX();
        return true;
    }
}
/*
 * Copyright (C) 2017-2019 Dremio Corporation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.dremio.exec.catalog;

import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;

import org.apache.arrow.vector.types.pojo.Field;

import com.dremio.common.exceptions.UserException;
import com.dremio.common.expression.CompleteType;
import com.dremio.connector.metadata.AttributeValue;
import com.dremio.exec.dotfile.View;
import com.dremio.exec.physical.base.WriterOptions;
import com.dremio.exec.planner.logical.CreateTableEntry;
import com.dremio.exec.record.BatchSchema;
import com.dremio.exec.store.ColumnExtendedProperty;
import com.dremio.exec.store.DatasetRetrievalOptions;
import com.dremio.exec.store.PartitionNotFoundException;
import com.dremio.exec.store.StoragePlugin;
import com.dremio.exec.store.dfs.IcebergTableProps;
import com.dremio.service.catalog.Schema;
import com.dremio.service.catalog.SearchQuery;
import com.dremio.service.catalog.Table;
import com.dremio.service.catalog.TableSchema;
import com.dremio.service.namespace.NamespaceAttribute;
import com.dremio.service.namespace.NamespaceException;
import com.dremio.service.namespace.NamespaceKey;
import com.dremio.service.namespace.NamespaceService;
import com.dremio.service.namespace.SourceState;
import com.dremio.service.namespace.dataset.proto.DatasetConfig;
import com.dremio.service.namespace.source.proto.SourceConfig;
import com.dremio.service.users.SystemUser;
import com.google.common.base.Function;

/**
 * Catalog decorator that handles source access checks.
 *
 * Reads against "invisible" (internal) sources return null/empty results;
 * mutations against them fail with "Unknown source"; everything else is
 * delegated unchanged to the wrapped Catalog.
 */
class SourceAccessChecker implements Catalog {

    private final MetadataRequestOptions options;
    private final Catalog delegate;

    private SourceAccessChecker(MetadataRequestOptions options, Catalog delegate) {
        this.options = options;
        this.delegate = delegate;
    }

    // Internal names are prefixed with "__" or "$".
    private static boolean isInternal(String name) {
        return name.startsWith("__") || name.startsWith("$");
    }

    // A key is invisible when its root is internal, except the "__home" and
    // "$scratch" pseudo-sources, which remain visible.
    private boolean isInvisible(NamespaceKey key) {
        final String root = key.getRoot();
        return isInternal(root) && !"__home".equalsIgnoreCase(root) && !"$scratch".equalsIgnoreCase(root);
    }

    // Mutating operations reject invisible keys as if the source did not exist.
    private void throwIfInvisible(NamespaceKey key) {
        if (isInvisible(key)) {
            throw UserException.validationError()
                .message("Unknown source %s", key.getRoot())
                .buildSilently();
        }
    }

    // Read path: hides invisible keys (and tables resolved to invisible
    // paths) by returning null instead of the delegate's result.
    private DremioTable getIfVisible(NamespaceKey key, Supplier<DremioTable> tableSupplier) {
        if (key != null && isInvisible(key)) {
            return null;
        }

        final DremioTable table = tableSupplier.get();
        return table == null || isInvisible(table.getPath()) ? null : table;
    }

    @Override
    public void validateSelection() {
        delegate.validateSelection();
    }

    @Override
    public DremioTable getTableNoResolve(NamespaceKey key) {
        return getIfVisible(key, () -> delegate.getTableNoResolve(key));
    }

    @Override
    public DremioTable getTableNoColumnCount(NamespaceKey key) {
        return getIfVisible(key, () -> delegate.getTableNoColumnCount(key));
    }

    @Override
    public void addOrUpdateDataset(NamespaceKey key, DatasetConfig dataset) throws NamespaceException {
        delegate.addOrUpdateDataset(key, dataset);
    }

    @Override
    public DremioTable getTable(String datasetId) {
        // No key available for a dataset-id lookup; visibility is checked on
        // the resolved table's path only.
        return getIfVisible(null, () -> delegate.getTable(datasetId));
    }

    @Override
    public DremioTable getTable(NamespaceKey key) {
        return getIfVisible(key, () -> delegate.getTable(key));
    }

    @Override
    public DremioTable getTableForQuery(NamespaceKey key) {
        return getIfVisible(key, () -> delegate.getTableForQuery(key));
    }

    @Override
    public Iterable<DremioTable> getAllRequestedTables() {
        return delegate.getAllRequestedTables();
    }

    @Override
    public NamespaceKey resolveSingle(NamespaceKey key) {
        return delegate.resolveSingle(key);
    }

    @Override
    public boolean containerExists(NamespaceKey path) {
        // Invisible containers are reported as nonexistent.
        if (isInvisible(path)) {
            return false;
        }

        return delegate.containerExists(path);
    }

    @Override
    public NamespaceKey resolveToDefault(NamespaceKey key) {
        return delegate.resolveToDefault(key);
    }

    @Override
    public MetadataStatsCollector getMetadataStatsCollector() {
        return delegate.getMetadataStatsCollector();
    }

    @Override
    public void createEmptyTable(NamespaceKey key, BatchSchema batchSchema, WriterOptions writerOptions) {
        delegate.createEmptyTable(key, batchSchema, writerOptions);
    }

    @Override
    public CreateTableEntry createNewTable(
        NamespaceKey key,
        IcebergTableProps icebergTableProps,
        WriterOptions writerOptions,
        Map<String, Object> storageOptions) {
        throwIfInvisible(key);
        return delegate.createNewTable(key, icebergTableProps, writerOptions, storageOptions);
    }

    @Override
    public CreateTableEntry createNewTable(
        NamespaceKey key,
        IcebergTableProps icebergTableProps,
        WriterOptions writerOptions,
        Map<String, Object> storageOptions,
        boolean isResultsTable) {
        throwIfInvisible(key);
        return delegate.createNewTable(key, icebergTableProps, writerOptions, storageOptions, isResultsTable);
    }

    @Override
    public void createView(NamespaceKey key, View view, NamespaceAttribute... attributes) throws IOException {
        throwIfInvisible(key);
        delegate.createView(key, view, attributes);
    }

    @Override
    public void updateView(NamespaceKey key, View view, NamespaceAttribute... attributes) throws IOException {
        throwIfInvisible(key);
        delegate.updateView(key, view, attributes);
    }

    @Override
    public void dropView(NamespaceKey key) throws IOException {
        throwIfInvisible(key);
        delegate.dropView(key);
    }

    @Override
    public void dropTable(NamespaceKey key) {
        throwIfInvisible(key);
        delegate.dropTable(key);
    }

    @Override
    public void forgetTable(NamespaceKey key) {
        throwIfInvisible(key);
        delegate.forgetTable(key);
    }

    @Override
    public void truncateTable(NamespaceKey key) {
        throwIfInvisible(key);
        delegate.truncateTable(key);
    }

    @Override
    public void addColumns(NamespaceKey table, List<Field> colsToAdd) {
        throwIfInvisible(table);
        delegate.addColumns(table, colsToAdd);
    }

    @Override
    public void dropColumn(NamespaceKey table, String columnToDrop) {
        throwIfInvisible(table);
        delegate.dropColumn(table, columnToDrop);
    }

    @Override
    public void changeColumn(NamespaceKey table, String columnToChange, Field fieldFromSqlColDeclaration) {
        throwIfInvisible(table);
        delegate.changeColumn(table, columnToChange, fieldFromSqlColDeclaration);
    }

    @Override
    public void createDataset(NamespaceKey key, Function<DatasetConfig, DatasetConfig> datasetMutator) {
        throwIfInvisible(key);
        delegate.createDataset(key, datasetMutator);
    }

    @Override
    public UpdateStatus refreshDataset(NamespaceKey key, DatasetRetrievalOptions retrievalOptions) {
        throwIfInvisible(key);
        return delegate.refreshDataset(key, retrievalOptions);
    }

    @Override
    public SourceState refreshSourceStatus(NamespaceKey key) throws Exception {
        throwIfInvisible(key);
        return delegate.refreshSourceStatus(key);
    }

    @Override
    public Iterable<String> getSubPartitions(
        NamespaceKey key,
        List<String> partitionColumns,
        List<String> partitionValues
    ) throws PartitionNotFoundException {
        throwIfInvisible(key);
        return delegate.getSubPartitions(key, partitionColumns, partitionValues);
    }

    @Override
    public boolean createOrUpdateDataset(
        NamespaceService userNamespaceService,
        NamespaceKey source,
        NamespaceKey datasetPath,
        DatasetConfig datasetConfig,
        NamespaceAttribute... attributes
    ) throws NamespaceException {
        throwIfInvisible(source);
        return delegate.createOrUpdateDataset(userNamespaceService, source, datasetPath, datasetConfig, attributes);
    }

    @Override
    public void updateDatasetSchema(NamespaceKey datasetKey, BatchSchema newSchema) {
        throwIfInvisible(datasetKey);
        delegate.updateDatasetSchema(datasetKey, newSchema);
    }

    @Override
    public void updateDatasetField(NamespaceKey datasetKey, String originField, CompleteType fieldSchema) {
        throwIfInvisible(datasetKey);
        delegate.updateDatasetField(datasetKey, originField, fieldSchema);
    }

    @Override
    public <T extends StoragePlugin> T getSource(String name) {
        return delegate.getSource(name);
    }

    @Override
    public void createSource(SourceConfig config, NamespaceAttribute... attributes) {
        delegate.createSource(config, attributes);
    }

    @Override
    public void updateSource(SourceConfig config, NamespaceAttribute... attributes) {
        delegate.updateSource(config, attributes);
    }

    @Override
    public void deleteSource(SourceConfig config) {
        delegate.deleteSource(config);
    }

    // Listing path: an invisible (non-root) path yields an empty list rather
    // than an error.
    private <T> Iterable<T> checkAndGetList(NamespaceKey path, Supplier<Iterable<T>> iterableSupplier) {
        if (path.size() != 0 && isInvisible(path)) {
            return Collections.emptyList();
        }

        return iterableSupplier.get();
    }

    @Override
    public Iterable<String> listSchemas(NamespaceKey path) {
        return checkAndGetList(path, () -> delegate.listSchemas(path));
    }

    @Override
    public Iterable<Table> listDatasets(NamespaceKey path) {
        return checkAndGetList(path, () -> delegate.listDatasets(path));
    }

    @Override
    public Collection<org.apache.calcite.schema.Function> getFunctions(NamespaceKey path) {
        return (Collection<org.apache.calcite.schema.Function>) checkAndGetList(path, () -> delegate.getFunctions(path));
    }

    @Override
    public NamespaceKey getDefaultSchema() {
        return delegate.getDefaultSchema();
    }

    // Each resolveCatalog variant re-wraps the resolved delegate so access
    // checks survive catalog re-resolution.
    @Override
    public Catalog resolveCatalog(boolean checkValidity) {
        return secureIfNeeded(options.cloneWith(options.getSchemaConfig().getUserName(), options.getSchemaConfig().getDefaultSchema(), checkValidity),
            delegate.resolveCatalog(checkValidity));
    }

    @Override
    public Catalog resolveCatalog(String username) {
        return secureIfNeeded(options.cloneWith(username, options.getSchemaConfig().getDefaultSchema(), options.checkValidity()),
            delegate.resolveCatalog(username));
    }

    @Override
    public Catalog resolveCatalog(String username, NamespaceKey newDefaultSchema) {
        return secureIfNeeded(options.cloneWith(username, newDefaultSchema, options.checkValidity()),
            delegate.resolveCatalog(username, newDefaultSchema));
    }

    @Override
    public Catalog resolveCatalog(String username, NamespaceKey newDefaultSchema, boolean checkValidity) {
        return secureIfNeeded(options.cloneWith(username, newDefaultSchema, checkValidity),
            delegate.resolveCatalog(username, newDefaultSchema, checkValidity));
    }

    // NOTE(review): method continues past this excerpt.
    @Override
    public Catalog resolveCatalog(NamespaceKey newDefaultSchema) {
        return
secureIfNeeded(options.cloneWith(options.getSchemaConfig().getUserName(), newDefaultSchema, options.checkValidity()), delegate.resolveCatalog(newDefaultSchema)); } /** * Decorates the given catalog to check source access, if enabled by the options. * * @param options options * @param delegate delegate catalog * @return decorated catalog, if needed */ public static Catalog secureIfNeeded(MetadataRequestOptions options, Catalog delegate) { return options.getSchemaConfig().exposeInternalSources() || SystemUser.isSystemUserName(options.getSchemaConfig().getUserName()) ? delegate : new SourceAccessChecker(options, delegate); } @Override public boolean alterDataset(final NamespaceKey key, final Map<String, AttributeValue> attributes) { throwIfInvisible(key); return delegate.alterDataset(key, attributes); } @Override public boolean alterColumnOption(final NamespaceKey key, String columnToChange, final String attributeName, final AttributeValue attributeValue) { throwIfInvisible(key); return delegate.alterColumnOption(key, columnToChange, attributeName, attributeValue); } @Override public Iterator<com.dremio.service.catalog.Catalog> listCatalogs(SearchQuery searchQuery) { return delegate.listCatalogs(searchQuery); } @Override public Iterator<Schema> listSchemata(SearchQuery searchQuery) { return delegate.listSchemata(searchQuery); } @Override public Iterator<Table> listTables(SearchQuery searchQuery) { return delegate.listTables(searchQuery); } @Override public Iterator<com.dremio.service.catalog.View> listViews(SearchQuery searchQuery) { return delegate.listViews(searchQuery); } @Override public Iterator<TableSchema> listTableSchemata(SearchQuery searchQuery) { return delegate.listTableSchemata(searchQuery); } @Override public Map<String, List<ColumnExtendedProperty>> getColumnExtendedProperties(DremioTable table) { return delegate.getColumnExtendedProperties(table); } @Override public Catalog visit(java.util.function.Function<Catalog, Catalog> catalogRewrite) { Catalog 
newDelegate = delegate.visit(catalogRewrite); return catalogRewrite.apply(new SourceAccessChecker(options, newDelegate)); } }
package com.thexfactor117.lsc.capabilities.cap; import javax.annotation.Nullable; import com.thexfactor117.lsc.LootSlashConquer; import com.thexfactor117.lsc.capabilities.api.ILSCPlayer; import com.thexfactor117.lsc.capabilities.implementation.LSCPlayerCapability; import com.thexfactor117.lsc.network.client.PacketUpdatePlayerInformation; import com.thexfactor117.lsc.network.client.PacketUpdatePlayerStats; import com.thexfactor117.lsc.util.CapabilityUtil; import com.thexfactor117.lsc.util.PlayerUtil; import com.thexfactor117.lsc.util.misc.Reference; import com.thexfactor117.lsc.util.misc.SimpleCapabilityProvider; import net.minecraft.entity.Entity; import net.minecraft.entity.EntityLivingBase; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.entity.player.EntityPlayerMP; import net.minecraft.nbt.NBTBase; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.util.EnumFacing; import net.minecraft.util.ResourceLocation; import net.minecraftforge.common.capabilities.Capability; import net.minecraftforge.common.capabilities.CapabilityInject; import net.minecraftforge.common.capabilities.CapabilityManager; import net.minecraftforge.common.capabilities.ICapabilityProvider; import net.minecraftforge.event.AttachCapabilitiesEvent; import net.minecraftforge.event.entity.player.PlayerEvent; import net.minecraftforge.fml.common.Mod; import net.minecraftforge.fml.common.eventhandler.SubscribeEvent; import net.minecraftforge.fml.common.gameevent.PlayerEvent.PlayerChangedDimensionEvent; /** * * @author TheXFactor117 * */ public class CapabilityLSCPlayer { @CapabilityInject(ILSCPlayer.class) public static final Capability<ILSCPlayer> PLAYER_CAP = null; public static final EnumFacing DEFAULT_FACING = null; public static final ResourceLocation ID = new ResourceLocation(Reference.MODID, "LSCPlayerCap"); public static void register() { CapabilityManager.INSTANCE.register(ILSCPlayer.class, new Capability.IStorage<ILSCPlayer>() { @Override public NBTBase 
writeNBT(Capability<ILSCPlayer> capability, ILSCPlayer instance, EnumFacing side) { NBTTagCompound nbt = new NBTTagCompound(); // basic info nbt.setInteger("PlayerClass", instance.getPlayerClass()); nbt.setInteger("PlayerLevel", instance.getPlayerLevel()); nbt.setInteger("PlayerExperience", instance.getPlayerExperience()); nbt.setInteger("PlayerSkillPoints", instance.getSkillPoints()); // modifiers nbt.setDouble("PhysicalPower", instance.getPhysicalPower()); nbt.setDouble("RangedPower", instance.getRangedPower()); nbt.setDouble("MagicalPower", instance.getMagicalPower()); nbt.setInteger("PhysicalResistance", instance.getPhysicalResistance()); nbt.setInteger("MagicalResistance", instance.getMagicalResistance()); nbt.setInteger("FireResistance", instance.getFireResistance()); nbt.setInteger("FrostResistance", instance.getFrostResistance()); nbt.setInteger("LightningResistance", instance.getLightningResistance()); nbt.setInteger("PoisonResistance", instance.getPoisonResistance()); nbt.setInteger("MaxMana", instance.getMaxMana()); nbt.setInteger("Mana", instance.getMana()); nbt.setInteger("ManaPerSecond", instance.getManaPerSecond()); nbt.setInteger("HealthPerSecond", instance.getHealthPerSecond()); nbt.setDouble("CriticalChance", instance.getCriticalChance()); nbt.setDouble("CriticalDamage", instance.getCriticalDamage()); nbt.setDouble("CooldownReduction", instance.getCooldownReduction()); nbt.setInteger("UpdateTicks", instance.getUpdateTicks()); nbt.setInteger("RegenTicks", instance.getRegenTicks()); // stats nbt.setInteger("StrengthStat", instance.getStrengthStat()); nbt.setInteger("AgilityStat", instance.getAgilityStat()); nbt.setInteger("DexterityStat", instance.getDexterityStat()); nbt.setInteger("IntelligenceStat", instance.getIntelligenceStat()); nbt.setInteger("WisdomStat", instance.getWisdomStat()); nbt.setInteger("FortitudeStat", instance.getFortitudeStat()); nbt.setInteger("StrengthBonusStat", instance.getBonusStrengthStat()); 
nbt.setInteger("AgilityBonusStat", instance.getBonusAgilityStat()); nbt.setInteger("DexterityBonusStat", instance.getBonusDexterityStat()); nbt.setInteger("IntelligenceBonusStat", instance.getBonusIntelligenceStat()); nbt.setInteger("WisdomBonusStat", instance.getBonusWisdomStat()); nbt.setInteger("FortitudeBonusStat", instance.getBonusFortitudeStat()); return nbt; } @Override public void readNBT(Capability<ILSCPlayer> capability, ILSCPlayer instance, EnumFacing side, NBTBase nbt) { NBTTagCompound compound = (NBTTagCompound) nbt; // basic info instance.setPlayerClass(compound.getInteger("PlayerClass")); instance.setPlayerLevel(compound.getInteger("PlayerLevel")); instance.setPlayerExperience(compound.getInteger("PlayerExperience")); instance.setSkillPoints(compound.getInteger("PlayerSkillPoints")); // modifiers instance.setPhysicalPower(compound.getDouble("PhysicalPower")); instance.setRangedPower(compound.getDouble("RangedPower")); instance.setMagicalPower(compound.getDouble("MagicalPower")); instance.setPhysicalResistance(compound.getInteger("PhysicalResistance")); instance.setMagicalResistance(compound.getInteger("MagicalResistance")); instance.setFireResistance(compound.getInteger("FireResistance")); instance.setFrostResistance(compound.getInteger("FrostResistance")); instance.setLightningResistance(compound.getInteger("LightningResistance")); instance.setPoisonResistance(compound.getInteger("PoisonResistance")); instance.setMaxMana(compound.getInteger("MaxMana")); instance.setMana(compound.getInteger("Mana")); instance.setManaPerSecond(compound.getInteger("ManaPerSecond")); instance.setHealthPerSecond(compound.getInteger("HealthPerSecond")); instance.setCriticalChance(compound.getDouble("CriticalChance")); instance.setCriticalDamage(compound.getDouble("CriticalDamage")); instance.setCooldownReduction(compound.getDouble("CooldownReduction")); instance.setUpdateTicks(compound.getInteger("UpdateTicks")); instance.setUpdateTicks(compound.getInteger("RegenTicks")); 
// stats instance.setStrengthStat(compound.getInteger("StrengthStat")); instance.setAgilityStat(compound.getInteger("AgilityStat")); instance.setDexterityStat(compound.getInteger("DexterityStat")); instance.setIntelligenceStat(compound.getInteger("IntelligenceStat")); instance.setWisdomStat(compound.getInteger("WisdomStat")); instance.setFortitudeStat(compound.getInteger("FortitudeStat")); instance.setBonusStrengthStat(compound.getInteger("StrengthBonusStat")); instance.setBonusAgilityStat(compound.getInteger("AgilityBonusStat")); instance.setBonusDexterityStat(compound.getInteger("DexterityBonusStat")); instance.setBonusIntelligenceStat(compound.getInteger("IntelligenceBonusStat")); instance.setBonusWisdomStat(compound.getInteger("WisdomBonusStat")); instance.setBonusFortitudeStat(compound.getInteger("FortitudeBonusStat")); } }, () -> new LSCPlayerCapability(null)); } @Nullable public static ILSCPlayer getPlayerCapability(EntityLivingBase entity) { return CapabilityUtil.getCapability(entity, PLAYER_CAP, DEFAULT_FACING); } public static ICapabilityProvider createProvider(ILSCPlayer playercap) { return new SimpleCapabilityProvider<>(PLAYER_CAP, DEFAULT_FACING, playercap); } @Mod.EventBusSubscriber public static class EventHandler { @SubscribeEvent public static void attachCapabilities(AttachCapabilitiesEvent<Entity> event) { if (event.getObject() instanceof EntityPlayer) { final LSCPlayerCapability playercap = new LSCPlayerCapability((EntityPlayer) event.getObject()); event.addCapability(ID, createProvider(playercap)); } } @SubscribeEvent public static void playerClone(PlayerEvent.Clone event) { ILSCPlayer oldCap = getPlayerCapability(event.getOriginal()); ILSCPlayer newCap = getPlayerCapability(event.getEntityLiving()); if (newCap != null && oldCap != null) { // basic info newCap.setPlayerClass(oldCap.getPlayerClass()); newCap.setPlayerLevel(oldCap.getPlayerLevel()); newCap.setPlayerExperience(oldCap.getPlayerExperience()); 
newCap.setSkillPoints(oldCap.getSkillPoints()); // modifiers newCap.setPhysicalPower(oldCap.getPhysicalPower()); newCap.setRangedPower(oldCap.getRangedPower()); newCap.setMagicalPower(oldCap.getMagicalPower()); newCap.setPhysicalResistance(oldCap.getPhysicalResistance()); newCap.setMagicalResistance(oldCap.getMagicalResistance()); newCap.setFireResistance(oldCap.getFireResistance()); newCap.setFrostResistance(oldCap.getFrostResistance()); newCap.setLightningResistance(oldCap.getLightningResistance()); newCap.setPoisonResistance(oldCap.getPoisonResistance()); newCap.setMaxMana(oldCap.getMaxMana()); newCap.setMana(oldCap.getMana()); newCap.setManaPerSecond(oldCap.getManaPerSecond()); newCap.setHealthPerSecond(oldCap.getHealthPerSecond()); newCap.setCriticalChance(oldCap.getCriticalChance()); newCap.setCriticalDamage(oldCap.getCriticalDamage()); newCap.setCooldownReduction(oldCap.getCooldownReduction()); newCap.setUpdateTicks(oldCap.getUpdateTicks()); newCap.setRegenTicks(oldCap.getRegenTicks()); // stats newCap.setStrengthStat(oldCap.getStrengthStat()); newCap.setAgilityStat(oldCap.getAgilityStat()); newCap.setDexterityStat(oldCap.getDexterityStat()); newCap.setIntelligenceStat(oldCap.getIntelligenceStat()); newCap.setWisdomStat(oldCap.getWisdomStat()); newCap.setFortitudeStat(oldCap.getFortitudeStat()); newCap.setBonusStrengthStat(oldCap.getBonusStrengthStat()); newCap.setBonusAgilityStat(oldCap.getBonusAgilityStat()); newCap.setBonusDexterityStat(oldCap.getBonusDexterityStat()); newCap.setBonusIntelligenceStat(oldCap.getBonusIntelligenceStat()); newCap.setBonusWisdomStat(oldCap.getBonusWisdomStat()); newCap.setBonusFortitudeStat(oldCap.getBonusFortitudeStat()); } } @SubscribeEvent public static void onPlayerChangeDimension(PlayerChangedDimensionEvent event) { EntityPlayer player = event.player; LSCPlayerCapability playercap = (LSCPlayerCapability) player.getCapability(CapabilityLSCPlayer.PLAYER_CAP, null); if (playercap != null) { 
LootSlashConquer.network.sendTo(new PacketUpdatePlayerInformation(playercap), (EntityPlayerMP) player); } } @SubscribeEvent public static void onPlayerRespawn(net.minecraftforge.fml.common.gameevent.PlayerEvent.PlayerRespawnEvent event) { LSCPlayerCapability playercap = (LSCPlayerCapability) event.player.getCapability(CapabilityLSCPlayer.PLAYER_CAP, null); if (playercap != null) { playercap.setMana(playercap.getMaxMana()); LootSlashConquer.network.sendTo(new PacketUpdatePlayerInformation(playercap), (EntityPlayerMP) event.player); LootSlashConquer.network.sendTo(new PacketUpdatePlayerStats(playercap), (EntityPlayerMP) event.player); PlayerUtil.updateAllStats(event.player); event.player.setHealth(event.player.getMaxHealth()); } } } }
/**
 * Copyright (C) 2006 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.inject;

import static com.google.inject.Asserts.assertContains;
import static java.lang.annotation.RetentionPolicy.RUNTIME;

import com.google.common.collect.Iterables;
import com.google.inject.matcher.Matchers;
import com.google.inject.spi.ConvertedConstantBinding;
import com.google.inject.spi.TypeConverter;
import com.google.inject.spi.TypeConverterBinding;

import junit.framework.AssertionFailedError;
import junit.framework.TestCase;

import java.lang.annotation.Retention;
import java.util.Date;

/**
 * Tests Guice's constant type conversion: built-in String-to-primitive/enum/Class
 * conversion for constant bindings, plus custom {@code TypeConverter}s registered
 * via {@code convertToTypes}, including their error paths.
 *
 * @author crazybob@google.com (Bob Lee)
 */
public class TypeConversionTest extends TestCase {

  // Binding annotations used to distinguish the constants under test.
  @Retention(RUNTIME) @BindingAnnotation @interface NumericValue {}
  @Retention(RUNTIME) @BindingAnnotation @interface BooleanValue {}
  @Retention(RUNTIME) @BindingAnnotation @interface EnumValue {}
  @Retention(RUNTIME) @BindingAnnotation @interface ClassName {}

  /** Injection target covering every built-in conversion target type (boxed and primitive). */
  public static class Foo {
    @Inject @BooleanValue Boolean booleanField;
    @Inject @BooleanValue boolean primitiveBooleanField;
    @Inject @NumericValue Byte byteField;
    @Inject @NumericValue byte primitiveByteField;
    @Inject @NumericValue Short shortField;
    @Inject @NumericValue short primitiveShortField;
    @Inject @NumericValue Integer integerField;
    @Inject @NumericValue int primitiveIntField;
    @Inject @NumericValue Long longField;
    @Inject @NumericValue long primitiveLongField;
    @Inject @NumericValue Float floatField;
    @Inject @NumericValue float primitiveFloatField;
    @Inject @NumericValue Double doubleField;
    @Inject @NumericValue double primitiveDoubleField;
    @Inject @EnumValue Bar enumField;
    @Inject @ClassName Class<?> classField;
  }

  public enum Bar {
    TEE, BAZ, BOB
  }

  public void testOneConstantInjection() throws CreationException {
    Injector injector = Guice.createInjector(new AbstractModule() {
      protected void configure() {
        bindConstant().annotatedWith(NumericValue.class).to("5");
        bind(Simple.class);
      }
    });

    Simple simple = injector.getInstance(Simple.class);
    assertEquals(5, simple.i);
  }

  static class Simple {
    @Inject @NumericValue int i;
  }

  /** The String constant "5" must convert to every numeric type, plus boolean/enum/Class. */
  public void testConstantInjection() throws CreationException {
    Injector injector = Guice.createInjector(new AbstractModule() {
      protected void configure() {
        bindConstant().annotatedWith(NumericValue.class).to("5");
        bindConstant().annotatedWith(BooleanValue.class).to("true");
        bindConstant().annotatedWith(EnumValue.class).to("TEE");
        bindConstant().annotatedWith(ClassName.class).to(Foo.class.getName());
      }
    });

    Foo foo = injector.getInstance(Foo.class);

    checkNumbers(
      foo.integerField,
      foo.primitiveIntField,
      foo.longField,
      foo.primitiveLongField,
      foo.byteField,
      foo.primitiveByteField,
      foo.shortField,
      foo.primitiveShortField,
      foo.floatField,
      foo.primitiveFloatField,
      foo.doubleField,
      foo.primitiveDoubleField
    );

    assertEquals(Bar.TEE, foo.enumField);
    assertEquals(Foo.class, foo.classField);
  }

  /** Same as above, but conversion must also work under requireExplicitBindings(). */
  public void testConstantInjectionWithExplicitBindingsRequired() throws CreationException {
    Injector injector = Guice.createInjector(new AbstractModule() {
      protected void configure() {
        binder().requireExplicitBindings();
        bind(Foo.class);
        bindConstant().annotatedWith(NumericValue.class).to("5");
        bindConstant().annotatedWith(BooleanValue.class).to("true");
        bindConstant().annotatedWith(EnumValue.class).to("TEE");
        bindConstant().annotatedWith(ClassName.class).to(Foo.class.getName());
      }
    });

    Foo foo = injector.getInstance(Foo.class);

    checkNumbers(
      foo.integerField,
      foo.primitiveIntField,
      foo.longField,
      foo.primitiveLongField,
      foo.byteField,
      foo.primitiveByteField,
      foo.shortField,
      foo.primitiveShortField,
      foo.floatField,
      foo.primitiveFloatField,
      foo.doubleField,
      foo.primitiveDoubleField
    );

    assertEquals(Bar.TEE, foo.enumField);
    assertEquals(Foo.class, foo.classField);
  }

  /** Asserts every converted number has int value 5. */
  void checkNumbers(Number... ns) {
    for (Number n : ns) {
      assertEquals(5, n.intValue());
    }
  }

  /** A non-numeric constant must surface a ConfigurationException on lookup, not at injector creation. */
  public void testInvalidInteger() throws CreationException {
    Injector injector = Guice.createInjector(new AbstractModule() {
      protected void configure() {
        bindConstant().annotatedWith(NumericValue.class).to("invalid");
      }
    });

    try {
      injector.getInstance(InvalidInteger.class);
      fail();
    } catch (ConfigurationException expected) {
      assertContains(expected.getMessage(), "Error converting 'invalid'");
      assertContains(expected.getMessage(), "bound at " + getClass().getName());
      assertContains(expected.getMessage(), "to java.lang.Integer");
    }
  }

  public static class InvalidInteger {
    @Inject @NumericValue Integer integerField;
  }

  public void testInvalidCharacter() throws CreationException {
    Injector injector = Guice.createInjector(new AbstractModule() {
      protected void configure() {
        bindConstant().annotatedWith(NumericValue.class).to("invalid");
      }
    });

    try {
      injector.getInstance(InvalidCharacter.class);
      fail();
    } catch (ConfigurationException expected) {
      assertContains(expected.getMessage(), "Error converting 'invalid'");
      assertContains(expected.getMessage(), "bound at " + getClass().getName());
      assertContains(expected.getMessage(), "to java.lang.Character");
    }
  }

  public static class InvalidCharacter {
    @Inject @NumericValue char foo;
  }

  public void testInvalidEnum() throws CreationException {
    Injector injector = Guice.createInjector(new AbstractModule() {
      protected void configure() {
        bindConstant().annotatedWith(NumericValue.class).to("invalid");
      }
    });

    try {
      injector.getInstance(InvalidEnum.class);
      fail();
    } catch (ConfigurationException expected) {
      assertContains(expected.getMessage(), "Error converting 'invalid'");
      assertContains(expected.getMessage(), "bound at " + getClass().getName());
      assertContains(expected.getMessage(), "to " + Bar.class.getName());
    }
  }

  public static class InvalidEnum {
    @Inject @NumericValue Bar foo;
  }

  /** toInstance bindings of String are eligible for constant conversion, same as bindConstant. */
  public void testToInstanceIsTreatedLikeConstant() throws CreationException {
    Injector injector = Guice.createInjector(new AbstractModule() {
      protected void configure() {
        bind(String.class).toInstance("5");
        bind(LongHolder.class);
      }
    });

    assertEquals(5L, (long) injector.getInstance(LongHolder.class).foo);
  }

  static class LongHolder {
    @Inject Long foo;
  }

  /** A registered custom converter is used, and its binding is exposed via the SPI. */
  public void testCustomTypeConversion() throws CreationException {
    final Date result = new Date();

    Injector injector = Guice.createInjector(new AbstractModule() {
      protected void configure() {
        convertToTypes(Matchers.only(TypeLiteral.get(Date.class)), mockTypeConverter(result));
        bindConstant().annotatedWith(NumericValue.class).to("Today");
        bind(DateHolder.class);
      }
    });

    assertSame(result, injector.getInstance(DateHolder.class).date);

    Binding<Date> binding = injector.getBinding(Key.get(Date.class, NumericValue.class));
    assertTrue(binding instanceof ConvertedConstantBinding<?>);

    TypeConverterBinding converterBinding = ((ConvertedConstantBinding<?>) binding).getTypeConverterBinding();
    assertEquals("CustomConverter", converterBinding.getTypeConverter().toString());

    assertTrue(injector.getTypeConverterBindings().contains(converterBinding));
  }

  /** A converter that throws must produce a CreationException preserving the cause. */
  public void testInvalidCustomValue() throws CreationException {
    Module module = new AbstractModule() {
      protected void configure() {
        convertToTypes(Matchers.only(TypeLiteral.get(Date.class)), failingTypeConverter());
        bindConstant().annotatedWith(NumericValue.class).to("invalid");
        bind(DateHolder.class);
      }
    };

    try {
      Guice.createInjector(module);
      fail();
    } catch (CreationException expected) {
      Throwable cause = Iterables.getOnlyElement(expected.getErrorMessages()).getCause();
      assertTrue(cause instanceof UnsupportedOperationException);
      // The expected message pins the full diagnostic, including source locations
      // ("TypeConversionTest.java:" prefixes — line numbers intentionally omitted).
      assertContains(expected.getMessage(), "1) Error converting 'invalid' (bound at ", getClass().getName(),
          ".configure(TypeConversionTest.java:",
          "to java.util.Date",
          "using BrokenConverter which matches only(java.util.Date) ",
          "(bound at " + getClass().getName(), ".configure(TypeConversionTest.java:",
          "Reason: java.lang.UnsupportedOperationException: Cannot convert",
          "at " + DateHolder.class.getName() + ".date(TypeConversionTest.java:");
    }
  }

  /** A converter returning null is a creation-time error. */
  public void testNullCustomValue() {
    Module module = new AbstractModule() {
      protected void configure() {
        convertToTypes(Matchers.only(TypeLiteral.get(Date.class)), mockTypeConverter(null));
        bindConstant().annotatedWith(NumericValue.class).to("foo");
        bind(DateHolder.class);
      }
    };

    try {
      Guice.createInjector(module);
      fail();
    } catch (CreationException expected) {
      assertContains(expected.getMessage(), "1) Received null converting 'foo' (bound at ", getClass().getName(),
          ".configure(TypeConversionTest.java:",
          "to java.util.Date",
          "using CustomConverter which matches only(java.util.Date) ",
          "(bound at " + getClass().getName(), ".configure(TypeConversionTest.java:",
          "at " + DateHolder.class.getName() + ".date(TypeConversionTest.java:");
    }
  }

  /** A converter returning a value of the wrong type is a creation-time error. */
  public void testCustomValueTypeMismatch() {
    Module module = new AbstractModule() {
      protected void configure() {
        convertToTypes(Matchers.only(TypeLiteral.get(Date.class)), mockTypeConverter(-1));
        bindConstant().annotatedWith(NumericValue.class).to("foo");
        bind(DateHolder.class);
      }
    };

    try {
      Guice.createInjector(module);
      fail();
    } catch (CreationException expected) {
      assertContains(expected.getMessage(), "1) Type mismatch converting 'foo' (bound at ", getClass().getName(),
          ".configure(TypeConversionTest.java:",
          "to java.util.Date",
          "using CustomConverter which matches only(java.util.Date) ",
          "(bound at " + getClass().getName(), ".configure(TypeConversionTest.java:",
          "Converter returned -1.",
          "at " + DateHolder.class.getName() + ".date(TypeConversionTest.java:");
    }
  }

  /** Converted constants are cached: the converter runs once, later lookups reuse the instance. */
  public void testStringIsConvertedOnlyOnce() {
    final TypeConverter converter = new TypeConverter() {
      boolean converted = false;

      public Object convert(String value, TypeLiteral<?> toType) {
        if (converted) {
          throw new AssertionFailedError("converted multiple times!");
        }
        converted = true;
        return new Date();
      }
    };

    Injector injector = Guice.createInjector(new AbstractModule() {
      protected void configure() {
        convertToTypes(Matchers.only(TypeLiteral.get(Date.class)), converter);
        bindConstant().annotatedWith(NumericValue.class).to("unused");
      }
    });

    Date first = injector.getInstance(Key.get(Date.class, NumericValue.class));
    Date second = injector.getInstance(Key.get(Date.class, NumericValue.class));
    assertSame(first, second);
  }

  /** Two converters matching the same target type is ambiguous and must fail at creation. */
  public void testAmbiguousTypeConversion() {
    Module module = new AbstractModule() {
      protected void configure() {
        convertToTypes(Matchers.only(TypeLiteral.get(Date.class)), mockTypeConverter(new Date()));
        convertToTypes(Matchers.only(TypeLiteral.get(Date.class)), mockTypeConverter(new Date()));
        bindConstant().annotatedWith(NumericValue.class).to("foo");
        bind(DateHolder.class);
      }
    };

    try {
      Guice.createInjector(module);
      fail();
    } catch (CreationException expected) {
      assertContains(expected.getMessage(), "1) Multiple converters can convert 'foo' (bound at ", getClass().getName(),
          ".configure(TypeConversionTest.java:",
          "to java.util.Date:",
          "CustomConverter which matches only(java.util.Date)",
          "and",
          "CustomConverter which matches only(java.util.Date)",
          "Please adjust your type converter configuration to avoid overlapping matches.",
          "at " + DateHolder.class.getName() + ".date(TypeConversionTest.java:");
    }
  }

  /** Returns a stub converter that always yields {@code result}; toString() is "CustomConverter". */
  TypeConverter mockTypeConverter(final Object result) {
    return new TypeConverter() {
      public Object convert(String value, TypeLiteral<?> toType) {
        return result;
      }

      @Override public String toString() {
        return "CustomConverter";
      }
    };
  }

  /** Returns a stub converter that always throws; toString() is "BrokenConverter". */
  private TypeConverter failingTypeConverter() {
    return new TypeConverter() {
      public Object convert(String value, TypeLiteral<?> toType) {
        throw new UnsupportedOperationException("Cannot convert");
      }

      @Override public String toString() {
        return "BrokenConverter";
      }
    };
  }

  static class DateHolder {
    @Inject @NumericValue Date date;
  }

  /** Conversion applies only to annotated constants — a bare String never converts to Integer. */
  public void testCannotConvertUnannotatedBindings() {
    Injector injector = Guice.createInjector(new AbstractModule() {
      protected void configure() {
        bind(String.class).toInstance("55");
      }
    });

    try {
      injector.getInstance(Integer.class);
      fail("Converted an unannotated String to an Integer");
    } catch (ConfigurationException expected) {
      Asserts.assertContains(expected.getMessage(),
          "Could not find a suitable constructor in java.lang.Integer.");
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.cache; import static org.apache.geode.distributed.ConfigurationProperties.LOCATORS; import static org.apache.geode.distributed.ConfigurationProperties.MCAST_PORT; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Properties; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.apache.geode.cache.query.SelectResults; import org.apache.geode.cache.util.CacheListenerAdapter; import org.apache.geode.cache.util.TransactionListenerAdapter; import org.apache.geode.distributed.DistributedMember; import org.apache.geode.distributed.DistributedSystem; import org.apache.geode.internal.cache.CachePerfStats; import org.apache.geode.internal.cache.GemFireCacheImpl; import org.apache.geode.internal.cache.LocalRegion; import org.apache.geode.internal.cache.tier.sockets.ClientProxyMembershipID; /** * Unit test for basic DataPolicy.EMPTY feature. 
NOTE: these tests use a loner DistributedSystem
 * and local scope regions.
 *
 * @since GemFire 5.0
 */
public class ProxyJUnitTest {

  private DistributedSystem ds;
  private Cache c;

  @Before
  public void setUp() throws Exception {
    // mcast-port 0 and empty locators produce a loner (non-distributed) system
    Properties p = new Properties();
    p.setProperty(MCAST_PORT, "0");
    p.setProperty(LOCATORS, "");
    this.ds = DistributedSystem.connect(p);
    this.c = CacheFactory.create(this.ds);
  }

  @After
  public void tearDown() throws Exception {
    System.clearProperty(LocalRegion.EXPIRY_MS_PROPERTY);
    if (this.c != null) {
      this.c.close();
      this.c = null;
    }
    if (this.ds != null) {
      this.ds.disconnect();
      this.ds = null;
    }
  }

  private CachePerfStats getStats() {
    return ((GemFireCacheImpl) this.c).getCachePerfStats();
  }

  /**
   * last event a cache listener saw
   */
  private CacheEvent clLastEvent;

  /**
   * number of cache listener invocations
   */
  private int clInvokeCount;

  /**
   * true if cache listener close called
   */
  private boolean clClosed;

  /**
   * last event a cache writer saw
   */
  private CacheEvent cwLastEvent;

  /**
   * number of cache writer invocations
   */
  private int cwInvokeCount;

  /**
   * true if cache writer close called
   */
  private boolean cwClosed;

  /**
   * last getEvents() a transaction listener saw
   */
  private List tlLastEvents;

  /**
   * number of transaction listener invocations
   */
  private int tlInvokeCount;

  /**
   * true if transaction listener close called
   */
  private boolean tlClosed;

  /**
   * Clears all the callback state this test has received.
   */
  private void clearCallbackState() {
    this.clLastEvent = null;
    this.clInvokeCount = 0;
    this.clClosed = false;
    this.cwLastEvent = null;
    this.cwInvokeCount = 0;
    this.cwClosed = false;
    this.tlLastEvents = null;
    this.tlInvokeCount = 0;
    this.tlClosed = false;
  }

  /**
   * Used to check to see if CacheEvent was what was expected
   */
  private abstract class ExpectedCacheEvent implements CacheEvent {
    public Region r;
    public Operation op;
    public Object cbArg;
    public boolean queued;

    // Asserts that the received event matches every expected attribute.
    public void check(CacheEvent other) {
      if (getRegion() != other.getRegion()) {
        fail("wrong region. Expected " + getRegion() + " but found " + other.getRegion());
      }
      assertEquals(getOperation(), other.getOperation());
      assertEquals(getCallbackArgument(), other.getCallbackArgument());
      assertEquals(isOriginRemote(), other.isOriginRemote());
      assertEquals(getDistributedMember(), other.getDistributedMember());
      assertEquals(isExpiration(), other.getOperation().isExpiration());
      assertEquals(isDistributed(), other.getOperation().isDistributed());
    }

    @Override
    public Region getRegion() {
      return this.r;
    }

    @Override
    public Operation getOperation() {
      return this.op;
    }

    @Override
    public Object getCallbackArgument() {
      return this.cbArg;
    }

    @Override
    public boolean isCallbackArgumentAvailable() {
      return true;
    }

    @Override
    public boolean isOriginRemote() {
      return false;
    }

    @Override
    public DistributedMember getDistributedMember() {
      return c.getDistributedSystem().getDistributedMember();
    }

    public boolean isExpiration() {
      return this.op.isExpiration();
    }

    public boolean isDistributed() {
      return this.op.isDistributed();
    }
  }

  /**
   * Used to check to see if EntryEvent was what was expected
   */
  private class ExpectedEntryEvent extends ExpectedCacheEvent implements EntryEvent {
    public void check(EntryEvent other) {
      super.check(other);
      assertEquals(getKey(), other.getKey());
      assertEquals(getOldValue(), other.getOldValue());
      assertEquals(getNewValue(), other.getNewValue());
      assertEquals(isLocalLoad(), other.getOperation().isLocalLoad());
      assertEquals(isNetLoad(), other.getOperation().isNetLoad());
      assertEquals(isLoad(), other.getOperation().isLoad());
      assertEquals(isNetSearch(), other.getOperation().isNetSearch());
      assertEquals(getTransactionId(), other.getTransactionId());
    }

    public Object key;

    @Override
    public Object getKey() {
      return this.key;
    }

    @Override
    public Object getOldValue() {
      return null;
    }

    @Override
    public boolean isOldValueAvailable() {
      return true;
    }

    public Object newValue;

    @Override
    public Object getNewValue() {
      return this.newValue;
    }

    public boolean isLocalLoad() {
      return getOperation().isLocalLoad();
    }

    public boolean isNetLoad() {
      return getOperation().isNetLoad();
    }

    public boolean isLoad() {
      return getOperation().isLoad();
    }

    public boolean isNetSearch() {
      return getOperation().isNetSearch();
    }

    public TransactionId txId;

    @Override
    public TransactionId getTransactionId() {
      return this.txId;
    }

    public boolean isBridgeEvent() {
      return hasClientOrigin();
    }

    @Override
    public boolean hasClientOrigin() {
      return false;
    }

    public ClientProxyMembershipID getContext() {
      // TODO Auto-generated method stub
      return null;
    }

    @Override
    public SerializedCacheValue getSerializedOldValue() {
      return null;
    }

    @Override
    public SerializedCacheValue getSerializedNewValue() {
      return null;
    }
  }

  /**
   * Used to check to see if RegionEvent was what was expected
   */
  private class ExpectedRegionEvent extends ExpectedCacheEvent implements RegionEvent {
    public void check(RegionEvent other) {
      super.check(other);
      assertEquals(isReinitializing(), other.isReinitializing());
    }

    @Override
    public boolean isReinitializing() {
      return false;
    }
  }

  private void checkCWClosed() {
    assertEquals(true, this.cwClosed);
  }

  private void checkCLClosed() {
    assertEquals(true, this.clClosed);
  }

  private void checkTLClosed() {
    assertEquals(true, this.tlClosed);
  }

  private void checkNoCW() {
    assertEquals(0, this.cwInvokeCount);
  }

  private void checkNoCL() {
    assertEquals(0, this.clInvokeCount);
  }

  private void checkNoTL() {
    assertEquals(0, this.tlInvokeCount);
  }

  // Asserts exactly one transaction-listener commit with one matching event,
  // then resets all callback state as a side effect.
  private void checkTL(ExpectedCacheEvent expected) {
    assertEquals(1, this.tlInvokeCount);
    assertEquals(1, this.tlLastEvents.size());
    {
      Object old_CA = expected.cbArg;
      // expected.cbArg = null;
      try {
        expected.check((CacheEvent) this.tlLastEvents.get(0));
      } finally {
        expected.cbArg = old_CA;
      }
    }
    checkNoCW();
    // checkNoCL();
    clearCallbackState();
  }

  private void checkCW(ExpectedCacheEvent expected) {
    assertEquals(1, this.cwInvokeCount);
    expected.check(this.cwLastEvent);
  }

  private void checkCL(ExpectedCacheEvent expected) {
    checkCL(expected, true);
  }

  // Asserts exactly one cache-listener invocation with a matching event;
  // optionally resets callback state afterwards.
  private void checkCL(ExpectedCacheEvent expected, boolean clearCallbackState) {
    assertEquals(1, this.clInvokeCount);
    expected.check(this.clLastEvent);
    if (clearCallbackState) {
      clearCallbackState();
    }
  }

  /**
   * Installs a CacheListener, CacheWriter, and TransactionListener that record
   * the last event seen and the invocation counts into this test's fields.
   */
  private void setCallbacks(AttributesFactory af) {
    CacheListener cl1 = new CacheListener() {
      @Override
      public void afterUpdate(EntryEvent e) {
        clLastEvent = e;
        clInvokeCount++;
      }

      @Override
      public void afterCreate(EntryEvent e) {
        clLastEvent = e;
        clInvokeCount++;
      }

      @Override
      public void afterInvalidate(EntryEvent e) {
        clLastEvent = e;
        clInvokeCount++;
      }

      @Override
      public void afterDestroy(EntryEvent e) {
        clLastEvent = e;
        clInvokeCount++;
      }

      @Override
      public void afterRegionInvalidate(RegionEvent e) {
        clLastEvent = e;
        clInvokeCount++;
      }

      @Override
      public void afterRegionDestroy(RegionEvent e) {
        clLastEvent = e;
        clInvokeCount++;
      }

      @Override
      public void afterRegionClear(RegionEvent e) {
        clLastEvent = e;
        clInvokeCount++;
      }

      @Override
      public void afterRegionCreate(RegionEvent e) {
        clLastEvent = e;
        clInvokeCount++;
      }

      @Override
      public void afterRegionLive(RegionEvent e) {
        clLastEvent = e;
        clInvokeCount++;
      }

      @Override
      public void close() {
        clClosed = true;
      }
    };
    CacheWriter cw = new CacheWriter() {
      @Override
      public void beforeUpdate(EntryEvent e) throws CacheWriterException {
        cwLastEvent = e;
        cwInvokeCount++;
      }

      @Override
      public void beforeCreate(EntryEvent e) throws CacheWriterException {
        cwLastEvent = e;
        cwInvokeCount++;
      }

      @Override
      public void beforeDestroy(EntryEvent e) throws CacheWriterException {
        cwLastEvent = e;
        cwInvokeCount++;
      }

      @Override
      public void beforeRegionDestroy(RegionEvent e) throws CacheWriterException {
        cwLastEvent = e;
        cwInvokeCount++;
      }

      @Override
      public void beforeRegionClear(RegionEvent e) throws CacheWriterException {
        cwLastEvent = e;
        cwInvokeCount++;
      }

      @Override
      public void close() {
        cwClosed = true;
      }
    };
    af.addCacheListener(cl1);
    af.setCacheWriter(cw);
    {
      TransactionListener tl = new TransactionListenerAdapter() {
        @Override
        public void afterCommit(TransactionEvent e) {
          tlLastEvents = e.getEvents();
          tlInvokeCount++;
        }

        @Override
        public void close() {
          tlClosed = true;
        };
      };
      CacheTransactionManager ctm = this.c.getCacheTransactionManager();
      ctm.addListener(tl);
    }
  }

  /**
   * Confirms region (non-map) methods
   */
  @Test
  public void testRegionMethods() throws Exception {
    Object cbArg = new Object();
    AttributesFactory af = new AttributesFactory();
    af.setDataPolicy(DataPolicy.EMPTY);
    setCallbacks(af);
    clearCallbackState();
    ExpectedRegionEvent expre = new ExpectedRegionEvent();

    assertEquals(0, getStats().getRegions());
    Region r = this.c.createRegion("r", af.create());
    assertEquals(1, getStats().getRegions());
    expre.r = r;
    expre.op = Operation.REGION_CREATE;
    expre.cbArg = null;
    checkNoCW();
    checkCL(expre);
    assertEquals("r", r.getName());
    assertEquals("/r", r.getFullPath());
    assertEquals(null, r.getParentRegion());
    assertEquals(DataPolicy.EMPTY, r.getAttributes().getDataPolicy());
    r.getAttributesMutator();
    try {
      r.getStatistics();
      fail("expected StatisticsDisabledException");
    } catch (StatisticsDisabledException expected) {
      // because they were not enabled in the region attributes
    }

    r.invalidateRegion();
    expre.op = Operation.REGION_INVALIDATE;
    expre.cbArg = null;
    checkNoCW();
    checkCL(expre);

    r.invalidateRegion(cbArg);
    expre.cbArg = cbArg;
    checkNoCW();
    checkCL(expre);

    r.localInvalidateRegion();
    expre.op = Operation.REGION_LOCAL_INVALIDATE;
    expre.cbArg = null;
    checkNoCW();
    checkCL(expre);

    r.localInvalidateRegion(cbArg);
    expre.cbArg = cbArg;
    checkNoCW();
    checkCL(expre);

    r.destroyRegion();
    assertEquals(true, r.isDestroyed());
    assertEquals(0, getStats().getRegions());
    expre.op = Operation.REGION_DESTROY;
    expre.cbArg = null;
    checkCW(expre);
    checkCL(expre);

    r = this.c.createRegion("r", af.create());
    expre.r = r;
    expre.op = Operation.REGION_CREATE;
    expre.cbArg = null;
    checkNoCW();
    checkCL(expre);

    r.destroyRegion(cbArg);
    assertEquals(0, getStats().getRegions());
    assertEquals(true, r.isDestroyed());
    expre.op = Operation.REGION_DESTROY;
    expre.cbArg = cbArg;
    checkCW(expre);
    checkCL(expre);

    r = this.c.createRegion("r", af.create());
    expre.r = r;
    expre.op = Operation.REGION_CREATE;
    expre.cbArg = null;
    checkNoCW();
    checkCL(expre);

    // local destroy bypasses the cache writer but closes both callbacks
    r.localDestroyRegion();
    assertEquals(0, getStats().getRegions());
    assertEquals(true, r.isDestroyed());
    expre.op = Operation.REGION_LOCAL_DESTROY;
    expre.cbArg = null;
    checkNoCW();
    checkCWClosed();
    checkCLClosed();
    checkCL(expre);

    r = this.c.createRegion("r", af.create());
    expre.r = r;
    expre.op = Operation.REGION_CREATE;
    expre.cbArg = null;
    checkNoCW();
    checkCL(expre);

    r.localDestroyRegion(cbArg);
    assertEquals(0, getStats().getRegions());
    assertEquals(true, r.isDestroyed());
    expre.op = Operation.REGION_LOCAL_DESTROY;
    expre.cbArg = cbArg;
    checkNoCW();
    checkCWClosed();
    checkCLClosed();
    checkCL(expre);

    r = this.c.createRegion("r", af.create());
    expre.r = r;
    expre.op = Operation.REGION_CREATE;
    expre.cbArg = null;
    checkNoCW();
    checkCL(expre);

    r.close();
    assertEquals(0, getStats().getRegions());
    assertEquals(true, r.isDestroyed());
    expre.op = Operation.REGION_CLOSE;
    expre.cbArg = null;
    checkNoCW();
    checkCWClosed();
    checkCLClosed();
    checkCL(expre);

    r = this.c.createRegion("r", af.create());
    assertEquals(1, getStats().getRegions());
    expre.r = r;
    expre.op = Operation.REGION_CREATE;
    expre.cbArg = null;
    checkNoCW();
    checkCL(expre);

    try {
      r.saveSnapshot(System.out);
      fail("expected UnsupportedOperationException");
    } catch (UnsupportedOperationException expected) {
    }
    try {
      r.loadSnapshot(System.in);
      fail("expected UnsupportedOperationException");
    } catch (UnsupportedOperationException expected) {
    }

    {
      Region sr = r.createSubregion("sr", af.create());
      assertEquals(2, getStats().getRegions());
      expre.r = sr;
      expre.op = Operation.REGION_CREATE;
      expre.cbArg = null;
      checkNoCW();
      checkCL(expre);
      assertEquals("sr", sr.getName());
      assertEquals("/r/sr", sr.getFullPath());
      assertEquals(r, sr.getParentRegion());
      assertEquals(sr, r.getSubregion("sr"));
      assertEquals(Collections.singleton(sr), r.subregions(false));
      sr.close();
      assertEquals(1, getStats().getRegions());
      expre.op = Operation.REGION_CLOSE;
      expre.cbArg = null;
      checkNoCW();
      checkCWClosed();
      checkCLClosed();
      checkCL(expre);
      assertEquals(true, sr.isDestroyed());
      assertEquals(null, r.getSubregion("sr"));
      assertEquals(Collections.EMPTY_SET, r.subregions(false));
    }

    ExpectedEntryEvent expee = new ExpectedEntryEvent();
    expee.r = r;
    expee.key = "key";
    long creates = getStats().getCreates();
    long destroys = getStats().getDestroys();
    long invalidates = getStats().getInvalidates();
    long gets = getStats().getGets();
    long misses = getStats().getMisses();

    r.put("key", "value", cbArg);
    expee.op = Operation.CREATE;
    creates++;
    assertEquals(creates, getStats().getCreates());
    expee.cbArg = cbArg;
    expee.newValue = "value";
    checkCW(expee);
    checkCL(expee);

    // note on a non-proxy region create after put fails with EntryExistsException
    r.create("key", "value", cbArg);
    creates++;
    assertEquals(creates, getStats().getCreates());
    expee.op = Operation.CREATE;
    expee.cbArg = cbArg;
    expee.newValue = "value";
    checkCW(expee);
    checkCL(expee);

    // a proxy stores nothing locally, so entry lookups miss
    assertEquals(null, r.getEntry("key"));
    assertEquals(null, r.get("key", cbArg));
    gets++;
    assertEquals(gets, getStats().getGets());
    misses++;
    assertEquals(misses, getStats().getMisses());
    checkNoCW();
    checkNoCL();

    r.invalidate("key");
    invalidates++;
    assertEquals(invalidates, getStats().getInvalidates());
    expee.op = Operation.INVALIDATE;
    expee.cbArg = null;
    expee.newValue = null;
    checkNoCW();
    checkCL(expee);

    r.invalidate("key", cbArg);
    invalidates++;
    assertEquals(invalidates, getStats().getInvalidates());
    expee.op = Operation.INVALIDATE;
    expee.cbArg = cbArg;
    expee.newValue = null;
    checkNoCW();
    checkCL(expee);

    try {
      r.localInvalidate("key");
      fail("expected EntryNotFoundException");
    } catch (EntryNotFoundException expected) {
    }
    try {
      r.localInvalidate("key", cbArg);
      fail("expected EntryNotFoundException");
    } catch (EntryNotFoundException expected) {
    }
    assertEquals(invalidates, getStats().getInvalidates());
    checkNoCW();
    checkNoCL();

    r.destroy("key");
    destroys++;
    assertEquals(destroys, getStats().getDestroys());
    expee.op = Operation.DESTROY;
    expee.cbArg = null;
    expee.newValue = null;
    checkCW(expee);
    checkCL(expee);

    r.destroy("key", cbArg);
    destroys++;
    assertEquals(destroys, getStats().getDestroys());
    expee.op = Operation.DESTROY;
    expee.cbArg = cbArg;
    expee.newValue = null;
    checkCW(expee);
    checkCL(expee);

    try {
      r.localDestroy("key");
      fail("expected EntryNotFoundException");
    } catch (EntryNotFoundException expected) {
    }
    try {
      r.localDestroy("key", cbArg);
      fail("expected EntryNotFoundException");
    } catch (EntryNotFoundException expected) {
    }
    assertEquals(destroys, getStats().getDestroys());
    checkNoCW();
    checkNoCL();

    assertEquals(Collections.EMPTY_SET, r.keySet());
    assertEquals(Collections.EMPTY_SET, r.entrySet(true));
    assertEquals(this.c, r.getCache());
    r.setUserAttribute(cbArg);
    assertEquals(cbArg, r.getUserAttribute());
    checkNoCW();
    checkNoCL();

    r.put("key", "value", cbArg);
    creates++;
    assertEquals(creates, getStats().getCreates());
    expee.op = Operation.CREATE;
    expee.cbArg = cbArg;
    expee.newValue = "value";
    checkCW(expee);
    checkCL(expee);
    assertEquals(false, r.containsValueForKey("key"));
    assertEquals(false, r.existsValue("this = 'value'"));
    {
      SelectResults sr = r.query("this = 'value'");
      assertEquals(Collections.EMPTY_SET, sr.asSet());
    }
    assertEquals(null, r.selectValue("this = 'value'"));

    try {
      r.getRegionDistributedLock();
      fail("expected IllegalStateException");
    } catch (IllegalStateException expected) {
      // because we are not global
    }
    try {
      r.getDistributedLock("key");
      fail("expected IllegalStateException");
    } catch (IllegalStateException expected) {
      // because we are not global
    }
    try {
      r.becomeLockGrantor();
      fail("expected IllegalStateException");
    } catch (IllegalStateException expected) {
      // because we are not global
    }
    try {
      r.writeToDisk();
      fail("expected IllegalStateException");
    } catch (IllegalStateException expected) {
      // because we are not configured for disk
    }
    checkNoCW();
    checkNoCL();

    // check to see if a local loader works
    {
      CacheLoader cl = new CacheLoader() {
        @Override
        public Object load(LoaderHelper helper) throws CacheLoaderException {
          return "loadedValue";
        }

        @Override
        public void close() {}
      };
      r.getAttributesMutator().setCacheLoader(cl);
      r.get("key", cbArg);
      gets++;
      assertEquals(gets, getStats().getGets());
      misses++;
      assertEquals(misses, getStats().getMisses());
      expee.op = Operation.LOCAL_LOAD_CREATE;
      expee.newValue = "loadedValue";
      checkCW(expee);
      checkCL(expee);
      r.getAttributesMutator().setCacheLoader(null);
    }
  }

  /**
   * Confirms map methods
   */
  @Test
  public void testMapMethods() throws Exception {
    AttributesFactory af = new AttributesFactory();
    af.setDataPolicy(DataPolicy.EMPTY);
    setCallbacks(af);
    clearCallbackState();
    ExpectedRegionEvent expre = new ExpectedRegionEvent();
    Region r = this.c.createRegion("r", af.create());
    expre.r = r;
    expre.cbArg = null;
    expre.op = Operation.REGION_CREATE;
    checkNoCW();
    checkCL(expre);
    long creates = getStats().getCreates();
    long destroys = getStats().getDestroys();
    long gets = getStats().getGets();
    long misses = getStats().getMisses();
    ExpectedEntryEvent expee = new ExpectedEntryEvent();
    expee.r = r;
    expee.key = "key";
    expee.cbArg = null;

    assertEquals(null, r.put("key", "value"));
    creates++;
    assertEquals(creates, getStats().getCreates());
    expee.op = Operation.CREATE;
    expee.newValue = "value";
    checkCW(expee);
    checkCL(expee);

    {
      HashMap m = new HashMap();
      m.put("k1", "v1");
      m.put("k2", "v2");
      r.putAll(m);
      assertEquals(0, r.size());
      // @todo darrel: check events
      assertEquals(2, this.cwInvokeCount);
      assertEquals(2, this.clInvokeCount);
      clearCallbackState();
      creates += 2;
      assertEquals(creates, getStats().getCreates());
    }

    // nothing is stored locally in a proxy region
    assertEquals(false, r.containsKey("key"));
    assertEquals(false, r.containsValue("value"));
    assertEquals(Collections.EMPTY_SET, r.entrySet());
    assertEquals(true, r.isEmpty());
    assertEquals(Collections.EMPTY_SET, r.keySet());
    assertEquals(0, r.size());
    assertEquals(Collections.EMPTY_LIST, new ArrayList(r.values()));
    checkNoCW();
    checkNoCL();

    assertEquals(null, r.get("key"));
    gets++;
    assertEquals(gets, getStats().getGets());
    misses++;
    assertEquals(misses, getStats().getMisses());
    checkNoCW();
    checkNoCL();

    assertEquals(null, r.remove("key"));
    destroys++;
    assertEquals(destroys, getStats().getDestroys());
    expee.op = Operation.DESTROY;
    expee.key = "key";
    expee.newValue = null;
    checkCW(expee);
    checkCL(expee);

    r.localClear();
    expre.op = Operation.REGION_LOCAL_CLEAR;
    checkNoCW();
    checkCL(expre);

    r.clear();
    expre.op = Operation.REGION_CLEAR;
    checkCW(expre);
    checkCL(expre);
  }

  /**
   * Check region ops on a proxy region done from a tx.
   */
  @Test
  public void testAllMethodsWithTX() throws Exception {
    Object cbArg = new Object();
    AttributesFactory af = new AttributesFactory();
    af.setDataPolicy(DataPolicy.EMPTY);
    setCallbacks(af);
    clearCallbackState();
    CacheTransactionManager ctm = this.c.getCacheTransactionManager();
    ExpectedRegionEvent expre = new ExpectedRegionEvent();
    Region r = this.c.createRegion("r", af.create());
    expre.r = r;
    expre.cbArg = null;
    expre.op = Operation.REGION_CREATE;
    checkNoCW();
    checkNoTL();
    checkCL(expre);
    long creates = getStats().getCreates();
    long destroys = getStats().getDestroys();
    long invalidates = getStats().getInvalidates();
    ExpectedEntryEvent expee = new ExpectedEntryEvent();
    expee.r = r;
    expee.key = "key";

    ctm.begin();
    try {
      r.localInvalidate("key");
      fail("expected EntryNotFoundException");
    } catch (EntryNotFoundException expected) {
    }
    try {
      r.localDestroy("key");
      fail("expected EntryNotFoundException");
    } catch (EntryNotFoundException expected) {
    }
    ctm.rollback();

    // transactional put: writer fires at op time, listener/TL fire at commit
    ctm.begin();
    expee.txId = ctm.getTransactionId();
    r.put("key", "value", cbArg);
    expee.op = Operation.CREATE;
    expee.cbArg = cbArg;
    expee.newValue = "value";
    checkCW(expee);
    checkNoTL();
    checkNoCL();
    clearCallbackState();
    ctm.commit();
    checkCL(expee, false/* ClearCallbacks */);
    checkTL(expee);
    creates++;
    assertEquals(creates, getStats().getCreates());

    ctm.begin();
    expee.txId = ctm.getTransactionId();
    r.create("key", "value", cbArg);
    expee.op = Operation.CREATE;
    expee.cbArg = cbArg;
    expee.newValue = "value";
    checkCW(expee);
    checkNoTL();
    checkNoCL();
    clearCallbackState();
    ctm.commit();
    checkCL(expee, false/* ClearCallbacks */);
    checkTL(expee);
    creates++;
    assertEquals(creates, getStats().getCreates());

    ctm.begin();
    expee.txId = ctm.getTransactionId();
    r.invalidate("key", cbArg);
    expee.op = Operation.INVALIDATE;
    expee.cbArg = cbArg;
    expee.newValue = null;
    checkNoCW();
    checkNoTL();
    checkNoCL();
    clearCallbackState();
    ctm.commit();
    checkCL(expee, false/* ClearCallbacks */);
    invalidates++;
    assertEquals(invalidates, getStats().getInvalidates());
    checkTL(expee);

    ctm.begin();
    expee.txId = ctm.getTransactionId();
    r.destroy("key", cbArg);
    expee.op = Operation.DESTROY;
    expee.cbArg = cbArg;
    expee.newValue = null;
    checkCW(expee);
    checkNoTL();
    checkNoCL();
    clearCallbackState();
    ctm.commit();
    checkCL(expee, false/* ClearCallbacks */);
    destroys++;
    assertEquals(destroys, getStats().getDestroys());
    checkTL(expee);

    ctm.begin();
    expee.txId = ctm.getTransactionId();
    r.create("key", "value", cbArg);
    r.destroy("key", cbArg);
    clearCallbackState();
    ctm.commit();
    destroys++;
    assertEquals(destroys, getStats().getDestroys());
    expee.op = Operation.DESTROY;
    checkTL(expee);

    // the following confirms that bug 37903 is fixed
    ctm.begin();
    expee.txId = ctm.getTransactionId();
    r.invalidate("key");
    r.localInvalidate("key");
    r.localDestroy("key", cbArg);
    // note that the following would fail on a non-proxy with EntryNotFound
    // so it should also fail on a proxy
    try {
      // note if bug 37903 exists then the next line will throw an AssertionError
      r.destroy("key", cbArg);
      fail("expected EntryNotFoundException");
    } catch (EntryNotFoundException expected) {
    }
    clearCallbackState();
    ctm.commit();
    destroys++;
    assertEquals(destroys, getStats().getDestroys());
    expee.op = Operation.LOCAL_DESTROY;
    checkTL(expee);
  }

  /**
   * Make sure a proxy region can be lru and that it makes no difference since proxies are always
   * empty
   */
  @Test
  public void testLRU() throws Exception {
    AttributesFactory af = new AttributesFactory();
    af.setEvictionAttributes(EvictionAttributes.createLRUEntryAttributes(1));
    CacheListener cl1 = new CacheListenerAdapter() {
      @Override
      public void afterDestroy(EntryEvent e) {
        clInvokeCount++;
      }
    };
    af.addCacheListener(cl1);
    // now try it with a proxy region which should never do an eviction.
    {
      af.setDataPolicy(DataPolicy.EMPTY);
      try {
        af.create();
        fail("expected IllegalStateException");
      } catch (IllegalStateException expected) {
      }
    }
  }

  /**
   * Make sure a disk region and proxy play nice.
   */
  @Test
  public void testDiskProxy() throws Exception {
    AttributesFactory af = new AttributesFactory();
    af.setDataPolicy(DataPolicy.EMPTY);
    af.setEvictionAttributes(
        EvictionAttributes.createLRUEntryAttributes(1, EvictionAction.OVERFLOW_TO_DISK));
    try {
      af.create();
      fail("expected IllegalStateException");
    } catch (IllegalStateException expected) {
    }
  }

  /**
   * Make sure CacheStatistics work on proxy
   */
  @Test
  public void testCacheStatisticsOnProxy() throws Exception {
    AttributesFactory af = new AttributesFactory();
    af.setDataPolicy(DataPolicy.EMPTY);
    af.setStatisticsEnabled(true);
    Region r = this.c.createRegion("rEMPTY", af.create());
    CacheStatistics stats = r.getStatistics();
    long lastModifiedTime = stats.getLastModifiedTime();
    long lastAccessedTime = stats.getLastAccessedTime();
    waitForSystemTimeChange();
    r.put("k", "v");
    assertTrue(lastModifiedTime != stats.getLastModifiedTime());
    assertTrue(lastAccessedTime != stats.getLastAccessedTime());
    lastModifiedTime = stats.getLastModifiedTime();
    lastAccessedTime = stats.getLastAccessedTime();
    waitForSystemTimeChange();
    r.create("k", "v");
    assertTrue(lastModifiedTime != stats.getLastModifiedTime());
    assertTrue(lastAccessedTime != stats.getLastAccessedTime());
    lastModifiedTime = stats.getLastModifiedTime();
    lastAccessedTime = stats.getLastAccessedTime();
    long missCount = stats.getMissCount();
    long hitCount = stats.getHitCount();
    waitForSystemTimeChange();
    r.get("k");
    // the get misses (nothing local): modified time unchanged, accessed time
    // advanced, miss count incremented, hit count unchanged
    assertEquals(lastModifiedTime, stats.getLastModifiedTime());
    assertTrue(lastAccessedTime != stats.getLastAccessedTime());
    assertEquals(hitCount, stats.getHitCount());
    assertEquals(missCount + 1, stats.getMissCount());
  }

  /**
   * Waits (hot) until the system time changes.
   */
  private void waitForSystemTimeChange() {
    long start = System.currentTimeMillis();
    while (System.currentTimeMillis() == start);
  }
}
package jp.skypencil.kemuri;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.objectweb.asm.Opcodes.ACC_PUBLIC;
import static org.objectweb.asm.Opcodes.ACC_STATIC;
import static org.objectweb.asm.Opcodes.ALOAD;
import static org.objectweb.asm.Opcodes.ASTORE;
import static org.objectweb.asm.Opcodes.CHECKCAST;
import static org.objectweb.asm.Opcodes.DUP;
import static org.objectweb.asm.Opcodes.DUP2;
import static org.objectweb.asm.Opcodes.GETSTATIC;
import static org.objectweb.asm.Opcodes.GOTO;
import static org.objectweb.asm.Opcodes.IADD;
import static org.objectweb.asm.Opcodes.IFNE;
import static org.objectweb.asm.Opcodes.INEG;
import static org.objectweb.asm.Opcodes.INVOKEINTERFACE;
import static org.objectweb.asm.Opcodes.INVOKESPECIAL;
import static org.objectweb.asm.Opcodes.INVOKESTATIC;
import static org.objectweb.asm.Opcodes.INVOKEVIRTUAL;
import static org.objectweb.asm.Opcodes.IXOR;
import static org.objectweb.asm.Opcodes.NEW;
import static org.objectweb.asm.Opcodes.POP;
import static org.objectweb.asm.Opcodes.RETURN;
import static org.objectweb.asm.Opcodes.SWAP;
import static org.objectweb.asm.Opcodes.V1_5;

import java.io.File;
import java.io.IOException;
import java.io.Reader;
import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Deque;
import java.util.List;

import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.Label;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Type;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.collect.Lists;
import com.google.common.io.Files;

/**
 * Compiler for the Kemuri esoteric language.
 *
 * <p>Reads a Kemuri program from a {@link Reader} and emits (via ASM) a JVM class
 * whose {@code main} method executes the program. The program's character stack is
 * modeled with an {@link ArrayDeque}&lt;Character&gt;; each Kemuri command is
 * translated to a call to one of the generated static helper methods
 * ({@code xor}, {@code not}, {@code dup}, {@code rot}, {@code hello}, {@code print}),
 * each taking the working {@link Deque} as its only argument.
 */
public class Compiler {
    private final Logger logger = LoggerFactory.getLogger(Compiler.class);

    /** Single-argument type list for the generated helpers: (Deque) -> void. */
    private static final Type[] DEQUE_TYPE = new Type[] { Type.getType(Deque.class) };

    /** Empty argument list, used for descriptors of no-arg calls such as Deque.pop(). */
    private static final Type[] EMPTY_TYPE = new Type[0];

    /**
     * The characters of "Hello, world!" in REVERSE order: they are pushed onto a LIFO
     * stack by the generated {@code hello} helper, so reversing here makes {@code print}
     * emit them front-to-back.
     */
    private static final List<Character> HELLO_WORLD = Lists.reverse(Arrays.asList(
            'H', 'e', 'l', 'l', 'o', ',', ' ', 'w', 'o', 'r', 'l', 'd', '!'));

    /**
     * Local-variable slots to push back, in order, for the 'rot' command
     * (slots 1..3 hold the three popped values; pushing 1, 3, 2 rotates them).
     */
    private static final int[] ROT_MAPPING = {1, 3, 2};

    /**
     * Translates every command character read from {@code reader} into bytecode
     * appended to {@code mv}. Reads until EOF.
     */
    private void compile(MethodVisitor mv, Reader reader, String innerFullClassName) throws IOException {
        int command;
        while ((command = reader.read()) != -1) {
            exec(mv, command, innerFullClassName);
        }
    }

    /**
     * Compiles a Kemuri program into a class file.
     *
     * @param reader source of the Kemuri program (read to EOF)
     * @param classFullName fully-qualified name of the class to generate (dots allowed)
     * @return the generated class file as a byte array
     * @throws IOException if reading the program fails
     */
    public byte[] compile(Reader reader, String classFullName) throws IOException {
        checkNotNull(reader);
        checkNotNull(classFullName);
        // JVM internal names use '/' as the package separator.
        String innerFullClassName = classFullName.replaceAll("\\.", "/");

        ClassWriter cw = new ClassWriter(0);
        cw.visit(V1_5, ACC_PUBLIC, innerFullClassName, null, "java/lang/Object", null);
        createConstructor(cw, innerFullClassName);
        createDup(cw, innerFullClassName);
        createPrint(cw, innerFullClassName);
        createHello(cw, innerFullClassName);
        createNot(cw, innerFullClassName);
        createRot(cw, innerFullClassName);
        createXor(cw, innerFullClassName);
        createMain(cw, reader, innerFullClassName);
        cw.visitEnd();
        return cw.toByteArray();
    }

    /**
     * Compiles a Kemuri program and writes the resulting {@code <className>.class}
     * file into {@code directory}.
     *
     * @throws IllegalArgumentException if {@code directory} is not a directory
     */
    public void compileTo(Reader reader, String className, File directory) throws IOException {
        checkNotNull(reader);
        checkNotNull(className);
        checkNotNull(directory);
        checkArgument(directory.isDirectory());

        File classFile = new File(directory, className.concat(".class"));
        byte[] binary = compile(reader, className);
        Files.write(binary, classFile);
    }

    /** Generates the default public no-arg constructor (delegates to Object's). */
    private void createConstructor(ClassWriter cw, String innerFullClassName) {
        MethodVisitor mv = cw.visitMethod(ACC_PUBLIC, "<init>", "()V", null, null);
        mv.visitVarInsn(ALOAD, 0); // push `this` onto the operand stack
        mv.visitMethodInsn(INVOKESPECIAL, Type.getInternalName(Object.class), "<init>", "()V"); // super()
        mv.visitInsn(RETURN);
        // visitMaxs belongs after the instructions per the MethodVisitor contract.
        mv.visitMaxs(1, 1);
        mv.visitEnd();
    }

    /** Generates {@code static void dup(Deque)}: pops the top value and pushes it twice. */
    private void createDup(ClassWriter cw, String innerFullClassName) {
        MethodVisitor mv = cw.visitMethod(
                ACC_STATIC, "dup",
                Type.getMethodDescriptor(Type.VOID_TYPE, DEQUE_TYPE), null, null);
        mv.visitVarInsn(ALOAD, 0);
        mv.visitInsn(DUP);
        mv.visitMethodInsn(INVOKEINTERFACE, Type.getInternalName(Deque.class), "pop",
                Type.getMethodDescriptor(Type.getType(Object.class), EMPTY_TYPE));
        mv.visitInsn(DUP2); // duplicate [deque, value] so we can push twice
        mv.visitMethodInsn(INVOKEINTERFACE, Type.getInternalName(Deque.class), "push",
                Type.getMethodDescriptor(Type.VOID_TYPE, new Type[]{ Type.getType(Object.class) }));
        mv.visitMethodInsn(INVOKEINTERFACE, Type.getInternalName(Deque.class), "push",
                Type.getMethodDescriptor(Type.VOID_TYPE, new Type[]{ Type.getType(Object.class) }));
        mv.visitInsn(RETURN);
        mv.visitMaxs(4, 1);
        mv.visitEnd();
    }

    /** Generates {@code static void hello(Deque)}: pushes "Hello, world!" (reversed) onto the stack. */
    private void createHello(ClassWriter cw, String innerFullClassName) {
        MethodVisitor mv = cw.visitMethod(
                ACC_STATIC, "hello",
                Type.getMethodDescriptor(Type.VOID_TYPE, DEQUE_TYPE), null, null);
        mv.visitVarInsn(ALOAD, 0);
        for (char c : HELLO_WORLD) {
            mv.visitInsn(DUP);
            mv.visitLdcInsn(c);
            mv.visitMethodInsn(INVOKESTATIC, Type.getInternalName(Character.class), "valueOf",
                    Type.getMethodDescriptor(Type.getType(Character.class), new Type[]{ Type.getType(char.class) }));
            mv.visitMethodInsn(INVOKEINTERFACE, Type.getInternalName(Deque.class), "push",
                    Type.getMethodDescriptor(Type.VOID_TYPE, new Type[] { Type.getType(Object.class) }));
        }
        mv.visitInsn(RETURN);
        mv.visitMaxs(3, 1);
        mv.visitEnd();
    }

    /**
     * Generates {@code public static void main(String[])}: allocates the working
     * ArrayDeque, keeps its reference on the operand stack while the translated
     * program runs, then discards it and returns.
     */
    private void createMain(ClassWriter cw, Reader reader, String innerFullClassName) throws IOException {
        MethodVisitor mv = cw.visitMethod(
                ACC_PUBLIC | ACC_STATIC, "main",
                Type.getMethodDescriptor(Type.VOID_TYPE, new Type[]{Type.getObjectType("[Ljava/lang/String;")}),
                null, null);
        mv.visitTypeInsn(NEW, Type.getInternalName(ArrayDeque.class));
        mv.visitInsn(DUP);
        mv.visitMethodInsn(INVOKESPECIAL, Type.getInternalName(ArrayDeque.class), "<init>", "()V");
        compile(mv, reader, innerFullClassName);
        mv.visitInsn(POP); // drop the deque reference left on the stack
        mv.visitInsn(RETURN);
        mv.visitMaxs(2, 1);
        mv.visitEnd();
    }

    /**
     * Generates {@code static void not(Deque)}: pops a char c and pushes 255 - c
     * (bitwise NOT within the 8-bit range Kemuri operates on).
     */
    private void createNot(ClassWriter cw, String innerFullClassName) {
        MethodVisitor mv = cw.visitMethod(
                ACC_STATIC, "not",
                Type.getMethodDescriptor(Type.VOID_TYPE, DEQUE_TYPE), null, null);
        mv.visitVarInsn(ALOAD, 0);
        mv.visitInsn(DUP);
        mv.visitMethodInsn(INVOKEINTERFACE, Type.getInternalName(Deque.class), "pop",
                Type.getMethodDescriptor(Type.getType(Object.class), EMPTY_TYPE));
        mv.visitTypeInsn(CHECKCAST, Type.getInternalName(Character.class));
        mv.visitMethodInsn(INVOKEVIRTUAL, Type.getInternalName(Character.class), "charValue",
                Type.getMethodDescriptor(Type.getType(char.class), EMPTY_TYPE));
        mv.visitInsn(INEG);      // -c
        mv.visitLdcInsn(255);
        mv.visitInsn(IADD);      // 255 - c
        mv.visitMethodInsn(INVOKESTATIC, Type.getInternalName(Character.class), "valueOf",
                Type.getMethodDescriptor(Type.getType(Character.class), new Type[]{ Type.getType(char.class) }));
        mv.visitMethodInsn(INVOKEINTERFACE, Type.getInternalName(Deque.class), "push",
                Type.getMethodDescriptor(Type.VOID_TYPE, new Type[] { Type.getType(Object.class) }));
        mv.visitInsn(RETURN);
        mv.visitMaxs(3, 1);
        mv.visitEnd();
    }

    /** Generates {@code static void print(Deque)}: pops and prints chars until the deque is empty. */
    private void createPrint(ClassWriter cw, String innerFullClassName) {
        MethodVisitor mv = cw.visitMethod(
                ACC_STATIC, "print",
                Type.getMethodDescriptor(Type.VOID_TYPE, DEQUE_TYPE), null, null);
        Label loopStart = new Label();
        Label loopEnd = new Label();
        mv.visitLabel(loopStart);
        mv.visitVarInsn(ALOAD, 0);
        mv.visitMethodInsn(INVOKEINTERFACE, Type.getInternalName(Deque.class), "isEmpty",
                Type.getMethodDescriptor(Type.getType(boolean.class), EMPTY_TYPE));
        mv.visitJumpInsn(IFNE, loopEnd); // exit loop when empty
        mv.visitFieldInsn(GETSTATIC, "java/lang/System", "out", "Ljava/io/PrintStream;");
        mv.visitVarInsn(ALOAD, 0);
        mv.visitMethodInsn(INVOKEINTERFACE, Type.getInternalName(Deque.class), "pop",
                Type.getMethodDescriptor(Type.getType(Object.class), EMPTY_TYPE));
        mv.visitTypeInsn(CHECKCAST, Type.getInternalName(Character.class));
        mv.visitMethodInsn(INVOKEVIRTUAL, Type.getInternalName(Character.class), "charValue",
                Type.getMethodDescriptor(Type.getType(char.class), EMPTY_TYPE));
        mv.visitMethodInsn(INVOKEVIRTUAL, "java/io/PrintStream", "print", "(C)V");
        mv.visitJumpInsn(GOTO, loopStart);
        mv.visitLabel(loopEnd);
        mv.visitInsn(RETURN);
        mv.visitMaxs(2, 1);
        mv.visitEnd();
    }

    /**
     * Generates {@code static void rot(Deque)}: pops the top three values into
     * locals 1..3 and pushes them back in the order given by {@link #ROT_MAPPING}.
     */
    private void createRot(ClassWriter cw, String innerFullClassName) {
        MethodVisitor mv = cw.visitMethod(
                ACC_STATIC, "rot",
                Type.getMethodDescriptor(Type.VOID_TYPE, DEQUE_TYPE), null, null);
        mv.visitVarInsn(ALOAD, 0);
        for (int i = 0; i < ROT_MAPPING.length; ++i) {
            mv.visitInsn(DUP);
            mv.visitMethodInsn(INVOKEINTERFACE, Type.getInternalName(Deque.class), "pop",
                    Type.getMethodDescriptor(Type.getType(Object.class), EMPTY_TYPE));
            mv.visitVarInsn(ASTORE, i + 1);
        }
        for (int i = 0; i < ROT_MAPPING.length; ++i) {
            mv.visitInsn(DUP);
            mv.visitVarInsn(ALOAD, ROT_MAPPING[i]);
            mv.visitMethodInsn(INVOKEINTERFACE, Type.getInternalName(Deque.class), "push",
                    Type.getMethodDescriptor(Type.VOID_TYPE, new Type[]{ Type.getType(Object.class) }));
        }
        mv.visitInsn(POP);
        mv.visitInsn(RETURN);
        mv.visitMaxs(3, 4);
        mv.visitEnd();
    }

    /** Generates {@code static void xor(Deque)}: pops two chars and pushes their XOR. */
    private void createXor(ClassWriter cw, String innerFullClassName) {
        MethodVisitor mv = cw.visitMethod(
                ACC_STATIC, "xor",
                Type.getMethodDescriptor(Type.VOID_TYPE, DEQUE_TYPE), null, null);
        mv.visitVarInsn(ALOAD, 0);
        mv.visitInsn(DUP);
        mv.visitInsn(DUP);
        mv.visitMethodInsn(INVOKEINTERFACE, Type.getInternalName(Deque.class), "pop",
                Type.getMethodDescriptor(Type.getType(Object.class), EMPTY_TYPE));
        mv.visitTypeInsn(CHECKCAST, Type.getInternalName(Character.class));
        mv.visitMethodInsn(INVOKEVIRTUAL, Type.getInternalName(Character.class), "charValue",
                Type.getMethodDescriptor(Type.getType(char.class), EMPTY_TYPE));
        mv.visitInsn(SWAP); // bring the deque ref back on top for the second pop
        mv.visitMethodInsn(INVOKEINTERFACE, Type.getInternalName(Deque.class), "pop",
                Type.getMethodDescriptor(Type.getType(Object.class), EMPTY_TYPE));
        mv.visitTypeInsn(CHECKCAST, Type.getInternalName(Character.class));
        mv.visitMethodInsn(INVOKEVIRTUAL, Type.getInternalName(Character.class), "charValue",
                Type.getMethodDescriptor(Type.getType(char.class), EMPTY_TYPE));
        mv.visitInsn(IXOR);
        mv.visitMethodInsn(INVOKESTATIC, Type.getInternalName(Character.class), "valueOf",
                Type.getMethodDescriptor(Type.getType(Character.class), new Type[]{ Type.getType(char.class) }));
        mv.visitMethodInsn(INVOKEINTERFACE, Type.getInternalName(Deque.class), "push",
                Type.getMethodDescriptor(Type.VOID_TYPE, new Type[] { Type.getType(Object.class) }));
        mv.visitInsn(RETURN);
        mv.visitMaxs(3, 1);
        mv.visitEnd();
    }

    /**
     * Emits the bytecode for a single Kemuri command: duplicates the deque reference
     * (kept on the stack by {@code main}) and invokes the matching static helper.
     * Unknown commands are logged and ignored (the duplicated reference is popped).
     */
    private void exec(MethodVisitor mv, int command, String innerFullClassName) {
        mv.visitInsn(DUP);
        switch (command) {
            case '^':
                mv.visitMethodInsn(INVOKESTATIC, innerFullClassName, "xor",
                        Type.getMethodDescriptor(Type.VOID_TYPE, DEQUE_TYPE));
                break;
            case '~':
                mv.visitMethodInsn(INVOKESTATIC, innerFullClassName, "not",
                        Type.getMethodDescriptor(Type.VOID_TYPE, DEQUE_TYPE));
                break;
            case '"':
                mv.visitMethodInsn(INVOKESTATIC, innerFullClassName, "dup",
                        Type.getMethodDescriptor(Type.VOID_TYPE, DEQUE_TYPE));
                break;
            case '\'':
                mv.visitMethodInsn(INVOKESTATIC, innerFullClassName, "rot",
                        Type.getMethodDescriptor(Type.VOID_TYPE, DEQUE_TYPE));
                break;
            case '`':
                mv.visitMethodInsn(INVOKESTATIC, innerFullClassName, "hello",
                        Type.getMethodDescriptor(Type.VOID_TYPE, DEQUE_TYPE));
                break;
            case '|':
                mv.visitMethodInsn(INVOKESTATIC, innerFullClassName, "print",
                        Type.getMethodDescriptor(Type.VOID_TYPE, DEQUE_TYPE));
                break;
            default:
                logger.warn("unknown command: {}", Character.toString((char) command));
                mv.visitInsn(POP);
        }
    }
}
/*
 * Copyright (c) 2005-2013, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.wso2.carbon.event.builder.core.internal.util;

import org.apache.axis2.engine.AxisConfiguration;
import org.wso2.carbon.context.PrivilegedCarbonContext;
import org.wso2.carbon.databridge.commons.Attribute;
import org.wso2.carbon.databridge.commons.AttributeType;
import org.wso2.carbon.databridge.commons.StreamDefinition;
import org.wso2.carbon.databridge.commons.utils.DataBridgeCommonsUtils;
import org.wso2.carbon.event.builder.core.config.EventBuilderConfiguration;
import org.wso2.carbon.event.builder.core.exception.EventBuilderConfigurationException;
import org.wso2.carbon.event.builder.core.internal.config.InputMappingAttribute;
import org.wso2.carbon.event.builder.core.internal.config.InputStreamConfiguration;
import org.wso2.carbon.event.builder.core.internal.type.AbstractInputMapping;
import org.wso2.carbon.event.builder.core.internal.type.wso2event.Wso2EventInputMapping;
import org.wso2.carbon.event.input.adaptor.core.message.config.InputEventAdaptorMessageConfiguration;
import org.wso2.carbon.utils.multitenancy.MultitenantUtils;

import java.io.File;
import java.util.ArrayList;
import java.util.List;

/**
 * Static helpers shared by the event-builder core: attribute conversion and ordering,
 * stream-id construction, and tenant-aware config-file path generation.
 */
public class EventBuilderUtil {

    /**
     * Converts a string value to the Java object matching the given databridge type.
     * STRING (and any unrecognized type) returns the value unchanged.
     */
    public static Object getConvertedAttributeObject(String value, AttributeType type) {
        switch (type) {
            case INT:
                return Integer.valueOf(value);
            case LONG:
                return Long.valueOf(value);
            case DOUBLE:
                return Double.valueOf(value);
            case FLOAT:
                return Float.valueOf(value);
            case BOOL:
                return Boolean.valueOf(value);
            case STRING:
            default:
                return value;
        }
    }

    /**
     * Builds the exported stream id ("name:version") from the configuration.
     * Falls back to the default stream version when none is set.
     *
     * @return the stream id, or {@code null} if the configuration has no usable stream name
     */
    public static String getExportedStreamIdFrom(
            EventBuilderConfiguration eventBuilderConfiguration) {
        String streamId = null;
        if (eventBuilderConfiguration != null
                && eventBuilderConfiguration.getToStreamName() != null
                && !eventBuilderConfiguration.getToStreamName().isEmpty()) {
            streamId = eventBuilderConfiguration.getToStreamName()
                    + EventBuilderConstants.STREAM_NAME_VER_DELIMITER
                    + ((eventBuilderConfiguration.getToStreamVersion() != null
                        && !eventBuilderConfiguration.getToStreamVersion().isEmpty())
                            ? eventBuilderConfiguration.getToStreamVersion()
                            : EventBuilderConstants.DEFAULT_STREAM_VERSION);
        }
        return streamId;
    }

    /** Returns true if the (non-null) attribute name carries the meta-data prefix. */
    public static boolean isMetaAttribute(String attributeName) {
        return attributeName != null
                && attributeName.startsWith(EventBuilderConstants.META_DATA_PREFIX);
    }

    /** Returns true if the (non-null) attribute name carries the correlation-data prefix. */
    public static boolean isCorrelationAttribute(String attributeName) {
        return attributeName != null
                && attributeName.startsWith(EventBuilderConstants.CORRELATION_DATA_PREFIX);
    }

    /**
     * Returns the mapping's attributes as an {@link Attribute} array ordered
     * meta, correlation, then payload (see {@link #sortInputMappingAttributes}).
     */
    public static Attribute[] getOrderedAttributeArray(AbstractInputMapping inputMapping) {
        List<InputMappingAttribute> orderedInputMappingAttributes =
                EventBuilderUtil.sortInputMappingAttributes(inputMapping.getInputMappingAttributes());
        int currentCount = 0;
        int totalAttributeCount = orderedInputMappingAttributes.size();
        Attribute[] attributeArray = new Attribute[totalAttributeCount];
        for (InputMappingAttribute inputMappingAttribute : orderedInputMappingAttributes) {
            attributeArray[currentCount++] = new Attribute(
                    inputMappingAttribute.getToElementKey(),
                    inputMappingAttribute.getToElementType());
        }
        return attributeArray;
    }

    /**
     * Stably reorders mapping attributes into meta, correlation, payload buckets
     * (original relative order preserved within each bucket).
     */
    public static List<InputMappingAttribute> sortInputMappingAttributes(
            List<InputMappingAttribute> inputMappingAttributes) {
        List<InputMappingAttribute> metaAttributes = new ArrayList<InputMappingAttribute>();
        List<InputMappingAttribute> correlationAttributes = new ArrayList<InputMappingAttribute>();
        List<InputMappingAttribute> payloadAttributes = new ArrayList<InputMappingAttribute>();
        for (InputMappingAttribute inputMappingAttribute : inputMappingAttributes) {
            if (inputMappingAttribute.getToElementKey().startsWith(EventBuilderConstants.META_DATA_PREFIX)) {
                metaAttributes.add(inputMappingAttribute);
            } else if (inputMappingAttribute.getToElementKey().startsWith(EventBuilderConstants.CORRELATION_DATA_PREFIX)) {
                correlationAttributes.add(inputMappingAttribute);
            } else {
                payloadAttributes.add(inputMappingAttribute);
            }
        }
        List<InputMappingAttribute> orderedInputMappingAttributes = new ArrayList<InputMappingAttribute>();
        orderedInputMappingAttributes.addAll(metaAttributes);
        orderedInputMappingAttributes.addAll(correlationAttributes);
        orderedInputMappingAttributes.addAll(payloadAttributes);
        return orderedInputMappingAttributes;
    }

    /**
     * Generates the tenant-specific config-file path for the configuration's builder name.
     * Delegates to {@link #generateFilePath(String)}.
     */
    public static String generateFilePath(EventBuilderConfiguration eventBuilderConfiguration)
            throws EventBuilderConfigurationException {
        return generateFilePath(eventBuilderConfiguration.getEventBuilderName());
    }

    /**
     * Generates the config-file path for the given builder name under the current
     * tenant's Axis2 repository.
     */
    public static String generateFilePath(String eventBuilderName)
            throws EventBuilderConfigurationException {
        int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId();
        String repositoryPath = MultitenantUtils.getAxis2RepositoryPath(tenantId);
        return generateFilePath(eventBuilderName, repositoryPath);
    }

    /**
     * Generates the config-file path using the repository of the supplied Axis2
     * configuration rather than the thread-local tenant repository.
     */
    public static String generateFilePath(EventBuilderConfiguration eventBuilderConfiguration,
                                          AxisConfiguration axisConfiguration)
            throws EventBuilderConfigurationException {
        String eventBuilderName = eventBuilderConfiguration.getEventBuilderName();
        return generateFilePath(eventBuilderName, axisConfiguration.getRepository().getPath());
    }

    /**
     * Ensures {@code repositoryPath}/{@code EB_CONFIG_DIRECTORY} exists (creating the
     * directories if needed) and returns the absolute path of the builder's config file.
     *
     * @throws EventBuilderConfigurationException if a required directory cannot be created
     */
    private static String generateFilePath(String eventBuilderName, String repositoryPath)
            throws EventBuilderConfigurationException {
        File repoDir = new File(repositoryPath);
        if (!repoDir.exists()) {
            // Bug fix: previously this threw when mkdir() SUCCEEDED (missing '!'),
            // the inverse of the subDir check below.
            if (!repoDir.mkdir()) {
                throw new EventBuilderConfigurationException("Cannot create directory to add tenant specific event builder :" + eventBuilderName);
            }
        }
        File subDir = new File(repoDir.getAbsolutePath() + File.separator + EventBuilderConstants.EB_CONFIG_DIRECTORY);
        if (!subDir.exists()) {
            if (!subDir.mkdir()) {
                throw new EventBuilderConfigurationException("Cannot create directory " + EventBuilderConstants.EB_CONFIG_DIRECTORY + " to add tenant specific event builder :" + eventBuilderName);
            }
        }
        return subDir.getAbsolutePath() + File.separator + eventBuilderName
                + EventBuilderConstants.EB_CONFIG_FILE_EXTENSION_WITH_DOT;
    }

    /**
     * Returns an array of {@link Attribute} elements derived from the stream definition.
     * Meta and correlation attribute names are prefixed with their data-type prefix
     * (e.g. meta_, correlation_); payload attribute names are left as-is.
     *
     * @param streamDefinition the stream definition to be used to extract attributes
     * @return the array of attributes in the passed in stream with attribute names that contain prefixes
     */
    public static Attribute[] streamDefinitionToAttributeArray(StreamDefinition streamDefinition) {
        int size = 0;
        if (streamDefinition.getMetaData() != null) {
            size += streamDefinition.getMetaData().size();
        }
        if (streamDefinition.getCorrelationData() != null) {
            size += streamDefinition.getCorrelationData().size();
        }
        if (streamDefinition.getPayloadData() != null) {
            size += streamDefinition.getPayloadData().size();
        }
        Attribute[] attributes = new Attribute[size];
        int index = 0;
        if (streamDefinition.getMetaData() != null) {
            for (Attribute attribute : streamDefinition.getMetaData()) {
                attributes[index] = new Attribute(
                        EventBuilderConstants.META_DATA_PREFIX + attribute.getName(),
                        attribute.getType());
                index++;
            }
        }
        if (streamDefinition.getCorrelationData() != null) {
            for (Attribute attribute : streamDefinition.getCorrelationData()) {
                attributes[index] = new Attribute(
                        EventBuilderConstants.CORRELATION_DATA_PREFIX + attribute.getName(),
                        attribute.getType());
                index++;
            }
        }
        if (streamDefinition.getPayloadData() != null) {
            for (Attribute attribute : streamDefinition.getPayloadData()) {
                attributes[index] = new Attribute(attribute.getName(), attribute.getType());
                index++;
            }
        }
        return attributes;
    }

    /**
     * Creates a default WSO2-event builder configuration for the given stream id
     * ("name:version") and transport adaptor, with custom mapping disabled.
     */
    public static EventBuilderConfiguration createDefaultEventBuilder(String streamId,
                                                                      String transportAdaptorName) {
        String toStreamName = DataBridgeCommonsUtils.getStreamNameFromStreamId(streamId);
        String toStreamVersion = DataBridgeCommonsUtils.getStreamVersionFromStreamId(streamId);
        EventBuilderConfiguration eventBuilderConfiguration = new EventBuilderConfiguration();
        // ':' is not filesystem-safe, so the generated builder name replaces it.
        eventBuilderConfiguration.setEventBuilderName(
                streamId.replaceAll(":", "_") + EventBuilderConstants.DEFAULT_EVENT_BUILDER_POSTFIX);
        Wso2EventInputMapping wso2EventInputMapping = new Wso2EventInputMapping();
        wso2EventInputMapping.setCustomMappingEnabled(false);
        eventBuilderConfiguration.setInputMapping(wso2EventInputMapping);
        InputStreamConfiguration inputStreamConfiguration = new InputStreamConfiguration();
        InputEventAdaptorMessageConfiguration inputEventAdaptorMessageConfiguration =
                new InputEventAdaptorMessageConfiguration();
        inputEventAdaptorMessageConfiguration.addInputMessageProperty(
                EventBuilderConstants.ADAPTOR_MESSAGE_STREAM_NAME, toStreamName);
        inputEventAdaptorMessageConfiguration.addInputMessageProperty(
                EventBuilderConstants.ADAPTOR_MESSAGE_STREAM_VERSION, toStreamVersion);
        inputStreamConfiguration.setInputEventAdaptorMessageConfiguration(inputEventAdaptorMessageConfiguration);
        inputStreamConfiguration.setInputEventAdaptorName(transportAdaptorName);
        inputStreamConfiguration.setInputEventAdaptorType(EventBuilderConstants.ADAPTOR_TYPE_WSO2EVENT);
        eventBuilderConfiguration.setInputStreamConfiguration(inputStreamConfiguration);
        eventBuilderConfiguration.setToStreamName(toStreamName);
        eventBuilderConfiguration.setToStreamVersion(toStreamVersion);
        return eventBuilderConfiguration;
    }

    /**
     * Returns the position of a given attribute in the stream.
     * Complexity : O(#attributes of stream)
     *
     * @param attributeName    attribute name. Should be in the prefixed format
     * @param streamDefinition the stream definition to search in
     * @return the position of the attribute in stream if found, or -1 if no matching attribute is found.
     */
    public static int getAttributePosition(String attributeName, StreamDefinition streamDefinition) {
        if (streamDefinition != null) {
            int metaAttributeSize = 0;
            int correlationAttributeSize = 0;
            List<Attribute> metaData = streamDefinition.getMetaData();
            List<Attribute> correlationData = streamDefinition.getCorrelationData();
            List<Attribute> payloadData = streamDefinition.getPayloadData();
            if (metaData != null) {
                metaAttributeSize = metaData.size();
            }
            if (correlationData != null) {
                correlationAttributeSize = correlationData.size();
            }
            if (attributeName.startsWith(EventBuilderConstants.META_DATA_PREFIX)) {
                if (metaData != null) {
                    for (int i = 0; i < metaAttributeSize; i++) {
                        if (metaData.get(i).getName().equals(attributeName.substring(EventBuilderConstants.META_DATA_PREFIX.length()))) {
                            return i;
                        }
                    }
                }
            } else if (attributeName.startsWith(EventBuilderConstants.CORRELATION_DATA_PREFIX)) {
                if (correlationData != null) {
                    for (int i = 0; i < correlationAttributeSize; i++) {
                        if (correlationData.get(i).getName().equals(attributeName.substring(EventBuilderConstants.CORRELATION_DATA_PREFIX.length()))) {
                            // Positions are global across meta -> correlation -> payload.
                            return metaAttributeSize + i;
                        }
                    }
                }
            } else {
                if (payloadData != null) {
                    for (int i = 0; i < payloadData.size(); i++) {
                        if (payloadData.get(i).getName().equals(attributeName)) {
                            return metaAttributeSize + correlationAttributeSize + i;
                        }
                    }
                }
            }
        }
        return -1;
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.distributedlog;

import static com.google.common.base.Preconditions.checkArgument;

import com.google.common.annotations.VisibleForTesting;
import java.nio.ByteBuffer;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.codec.binary.Hex;

/**
 * DistributedLog Sequence Number (DLSN) is the system generated sequence number for log record.
 *
 * <p>DLSN is comprised with 3 components:
 * <ul>
 * <li>LogSegment Sequence Number: the sequence number of log segment that the record is written in
 * <li>Entry Id: the entry id of the entry that the record is batched in
 * <li>Slot Id: the slot id that the record is in the entry
 * </ul>
 *
 * @see LogRecordWithDLSN
 */
public class DLSN implements Comparable<DLSN> {

    public static final byte VERSION0 = (byte) 0;
    public static final byte VERSION1 = (byte) 1;

    // The initial DLSN that DL starts with
    public static final DLSN InitialDLSN = new DLSN(1, 0 , 0);
    // The non-inclusive lower bound DLSN
    public static final DLSN NonInclusiveLowerBound = new DLSN(1, 0 , -1);
    // Invalid DLSN
    public static final DLSN InvalidDLSN = new DLSN(0, -1, -1);

    static final byte CUR_VERSION = VERSION1;
    // NOTE: VERSION0_LEN deliberately mixes bit/byte units — it is the historical
    // (oversized) on-wire length of version-0 DLSNs and must stay unchanged for
    // wire compatibility.
    static final int VERSION0_LEN = Long.SIZE * 3 + Byte.SIZE;
    static final int VERSION1_LEN = Long.SIZE * 3 / Byte.SIZE + 1;

    private final long logSegmentSequenceNo;
    private final long entryId;
    private final long slotId;

    public DLSN(long logSegmentSequenceNo, long entryId, long slotId) {
        this.logSegmentSequenceNo = logSegmentSequenceNo;
        this.entryId = entryId;
        this.slotId = slotId;
    }

    /**
     * Return the sequence number of the log segment that the record is written to.
     *
     * @return sequence number of the log segment that the record is written to.
     */
    public long getLogSegmentSequenceNo() {
        return logSegmentSequenceNo;
    }

    /**
     * Return the entry id of the batch that the record is written to.
     *
     * @return entry id of the batch that the record is written to.
     */
    public long getEntryId() {
        return entryId;
    }

    /**
     * Return the slot id in the batch that the record is written to.
     *
     * @return slot id in the batch that the record is written to.
     */
    public long getSlotId() {
        return slotId;
    }

    /**
     * Orders DLSNs lexicographically by (logSegmentSequenceNo, entryId, slotId),
     * returning exactly -1, 0 or 1.
     */
    @Override
    public int compareTo(DLSN that) {
        if (this.logSegmentSequenceNo != that.logSegmentSequenceNo) {
            return this.logSegmentSequenceNo < that.logSegmentSequenceNo ? -1 : 1;
        }
        if (this.entryId != that.entryId) {
            return this.entryId < that.entryId ? -1 : 1;
        }
        if (this.slotId != that.slotId) {
            return this.slotId < that.slotId ? -1 : 1;
        }
        return 0;
    }

    /**
     * Serialize the DLSN into bytes with current version.
     *
     * @return the serialized bytes
     */
    public byte[] serializeBytes() {
        return serializeBytes(CUR_VERSION);
    }

    /**
     * Serialize the DLSN into bytes with given <code>version</code>.
     *
     * @param version version to serialize the DLSN
     * @return the serialized bytes
     */
    public byte[] serializeBytes(byte version) {
        checkArgument(version <= CUR_VERSION && version >= VERSION0);
        // Version 0 uses the oversized legacy length; the tail bytes stay zero.
        int length = (version == CUR_VERSION) ? VERSION1_LEN : VERSION0_LEN;
        byte[] data = new byte[length];
        ByteBuffer buffer = ByteBuffer.wrap(data);
        buffer.put(version);
        buffer.putLong(logSegmentSequenceNo);
        buffer.putLong(entryId);
        buffer.putLong(slotId);
        return data;
    }

    /**
     * Serialize the DLSN into base64 encoded string.
     *
     * @return serialized base64 string
     * @see #serializeBytes()
     */
    public String serialize() {
        return serialize(CUR_VERSION);
    }

    /**
     * Serialize the DLSN into base64 encoded string with given <code>version</code>.
     *
     * @param version version to serialize the DLSN
     * @return the serialized base64 string
     * @see #serializeBytes(byte)
     */
    public String serialize(byte version) {
        return Base64.encodeBase64String(serializeBytes(version));
    }

    /**
     * Deserialize the DLSN from base64 encoded string <code>dlsn</code>.
     *
     * @param dlsn base64 encoded string
     * @return dlsn
     */
    public static DLSN deserialize(String dlsn) {
        return deserializeBytes(Base64.decodeBase64(dlsn));
    }

    /**
     * Deserialize the DLSN from bytes array.
     *
     * @param data serialized bytes
     * @return dlsn
     * @throws IllegalArgumentException if the version byte or length is invalid
     */
    public static DLSN deserializeBytes(byte[] data) {
        ByteBuffer buffer = ByteBuffer.wrap(data);
        byte version = buffer.get();
        if (version == VERSION0) {
            if (data.length != VERSION0_LEN) {
                throw new IllegalArgumentException("Invalid version zero DLSN " + Hex.encodeHexString(data));
            }
        } else if (version == VERSION1) {
            if (data.length != VERSION1_LEN) {
                throw new IllegalArgumentException("Invalid version one DLSN " + Hex.encodeHexString(data));
            }
        } else {
            throw new IllegalArgumentException("Invalid DLSN : version = "
                + version + ", " + Hex.encodeHexString(data));
        }
        return new DLSN(buffer.getLong(), buffer.getLong(), buffer.getLong());
    }

    // Keep original version0 logic for testing.
    @VisibleForTesting
    static DLSN deserialize0(String dlsn) {
        byte[] data = Base64.decodeBase64(dlsn);
        ByteBuffer buffer = ByteBuffer.wrap(data);
        byte version = buffer.get();
        if (version != VERSION0 || data.length != VERSION0_LEN) {
            throw new IllegalArgumentException("Invalid DLSN " + dlsn);
        }
        return new DLSN(buffer.getLong(), buffer.getLong(), buffer.getLong());
    }

    @Override
    public String toString() {
        return "DLSN{"
            + "logSegmentSequenceNo=" + logSegmentSequenceNo
            + ", entryId=" + entryId
            + ", slotId=" + slotId
            + '}';
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof DLSN)) {
            return false;
        }
        DLSN other = (DLSN) o;
        return logSegmentSequenceNo == other.logSegmentSequenceNo
            && entryId == other.entryId
            && slotId == other.slotId;
    }

    @Override
    public int hashCode() {
        int result = (int) (logSegmentSequenceNo ^ (logSegmentSequenceNo >>> 32));
        result = 31 * result + (int) (entryId ^ (entryId >>> 32));
        result = 31 * result + (int) (slotId ^ (slotId >>> 32));
        return result;
    }

    /**
     * Positions to a DLSN greater than the current value.
     *
     * <p>This may not correspond to an actual LogRecord, its just used by the positioning logic
     * to position the reader.
     *
     * @return the next DLSN
     */
    public DLSN getNextDLSN() {
        return new DLSN(logSegmentSequenceNo, entryId, slotId + 1);
    }

    /**
     * Positions to next log segment than the current value.
     *
     * <p>this may not correspond to an actual LogRecord, its just used by the positioning logic
     * to position the reader.
     *
     * @return the next DLSN
     */
    public DLSN positionOnTheNextLedger() {
        return new DLSN(logSegmentSequenceNo + 1 , 0, 0);
    }
}
/**
 * Copyright (C) 2009-2012 the original author or authors.
 * See the notice.md file distributed with this work for additional
 * information regarding copyright ownership.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.fusesource.restygwt.client.basic;

import com.google.gwt.core.client.GWT;
import com.google.gwt.json.client.JSONParser;
import com.google.gwt.json.client.JSONValue;
import com.google.gwt.junit.client.GWTTestCase;

import org.fusesource.restygwt.client.*;
import org.fusesource.restygwt.rebind.RestServiceClassCreator;

import javax.ws.rs.FormParam;
import javax.ws.rs.POST;
import javax.ws.rs.Path;

import java.util.*;

/**
 * GWT integration test for {@code @FormParam} handling: each test POSTs
 * form-encoded arguments to the module's "echo" servlet and asserts on the
 * parameter map the server reports back in the {@link Echo} response.
 *
 * @author mkristian
 * @author Bogdan Mustiata &lt;bogdan.mustiata@gmail.com&gt;
 */
public class FormParamTestGwt extends GWTTestCase {

    // Generated REST proxy; created and pointed at the echo resource in gwtSetUp().
    private FormParamTestRestService service;

    @Override
    public String getModuleName() {
        return "org.fusesource.restygwt.EchoTestGwt";
    }

    /** Service interface exercising the various @FormParam parameter shapes under test. */
    @Path("/get")
    static interface FormParamTestRestService extends RestService {

        // Primitive form param.
        @POST
        void get(@FormParam(value = "id") int id, MethodCallback<Echo> callback);

        // Boxed form param — a null argument resolves to this overload (see testGetWithNull).
        @POST
        void get(@FormParam(value = "id") Integer id, MethodCallback<Echo> callback);

        // Two params in one request: a primitive plus a JSON-encoded DTO.
        @POST
        void twoParams(@FormParam(value = "id") int id, @FormParam(value = "dto") ExampleDto exampleDto,
                MethodCallback<Echo> callback);

        // List of DTOs as a single form param.
        @POST
        void listParams(@FormParam(value = "dtoList") List<ExampleDto> exampleDtoList, MethodCallback<Echo> callback);

        // List of plain strings as a single form param.
        @POST
        void listStringParams(@FormParam(value = "stringList") List<String> exampleStringList,
                MethodCallback<Echo> callback);

        /**
         * Method to check special handling of package "java.lang." in
         * {@link RestServiceClassCreator#toIteratedFormStringExpression}
         */
        @POST
        void listStringBuilderParams(@FormParam(value = "stringBuilderList") List<java.lang.StringBuilder> exampleStringBuilderList,
                MethodCallback<Echo> callback);

        // Array of DTOs as a single form param.
        @POST
        void arrayParams(@FormParam(value = "dtoArray") ExampleDto[] exampleDtoArray, MethodCallback<Echo> callback);

        // Enum form param — encoded by name (see testPostWithEnum).
        @POST
        void enumParam(@FormParam("param") FormParamTestEnum param, MethodCallback<Echo> callback);
    }

    enum FormParamTestEnum {
        VALUE
    }

    /**
     * Callback asserting the echoed request carried exactly one parameter,
     * "id", with the expected string value, then completes the async test.
     */
    class EchoMethodCallback implements MethodCallback<Echo> {

        private final String id;

        EchoMethodCallback(String id) {
            this.id = id;
        }

        @Override
        public void onSuccess(Method method, Echo response) {
            GWT.log("method was called: " + response.params.get("id"));
            assertEquals(response.params.get("id"), id);
            assertEquals(response.params.size(), 1);
            finishTest();
        }

        @Override
        public void onFailure(Method method, Throwable exception) {
            System.out.println("test failed");
            fail();
        }
    }

    @Override
    protected void gwtSetUp() throws Exception {
        super.gwtSetUp();
        // Create the generated proxy and point it at the echo servlet of this module.
        service = GWT.create(FormParamTestRestService.class);
        Resource resource = new Resource(GWT.getModuleBaseURL() + "echo");
        ((RestServiceProxy) service).setResource(resource);
        // Asynchronous test: allow up to 10s for finishTest() to be called.
        delayTestFinish(10000);
    }

    public void testGetWithInt() {
        service.get(123, new EchoMethodCallback("123"));
    }

    public void testGetWithInteger() {
        service.get(new Integer(2), new EchoMethodCallback("2"));
    }

    // A null boxed param must be omitted from the form post entirely.
    public void testGetWithNull() {
        service.get(null, new MethodCallback<Echo>() {
            @Override
            public void onFailure(Method method, Throwable exception) {
                fail();
            }

            @Override
            public void onSuccess(Method method, Echo response) {
                assertFalse(response.params.containsKey("id"));
                assertEquals(response.params.size(), 0);
                finishTest();
            }
        });
    }

    public interface ExampleDtoDecoder extends JsonEncoderDecoder<ExampleDto> {
    }

    public void testPostWithDto() {
        final ExampleDtoDecoder dtoDecoder = GWT.create(ExampleDtoDecoder.class);
        final ExampleDto dto = createDtoObject();
        service.twoParams(3, dto, new MethodCallback<Echo>() {
            @Override
            public void onFailure(Method method, Throwable exception) {
                fail();
            }

            @Override
            public void onSuccess(Method method, Echo response) {
                assertEquals(2, response.params.size());
                assertEquals("3", response.params.get("id"));
                // The "dto" param is echoed back as JSON; decode and compare to what was sent.
                JSONValue jsonDto = JSONParser.parseStrict(response.params.get("dto"));
                assertEquals(dto, dtoDecoder.decode(jsonDto));
                finishTest();
            }
        });
    }

    public void testPostWithDtoList() {
        final ObjectEncoderDecoder objectEncoderDecoder = new ObjectEncoderDecoder();
        final List<ExampleDto> dtoList = Collections.singletonList(createDtoObject());
        service.listParams(dtoList, new MethodCallback<Echo>() {
            @Override
            public void onFailure(Method method, Throwable exception) {
                fail();
            }

            @Override
            public void onSuccess(Method method, Echo response) {
                assertEquals(1, response.params.size());
                JSONValue jsonDto = JSONParser.parseStrict(response.params.get("dtoList"));
                final Object decoded_object = objectEncoderDecoder.decode(jsonDto);
                // The echoed value may decode to a collection of JSON strings or to a
                // single value; this branch handles both shapes.
                if (decoded_object instanceof Collection) {
                    final Collection<String> decoded_list = (Collection<String>) decoded_object;
                    final List decoded_elem_list = new ArrayList();
                    for (final String json_elem : decoded_list) {
                        decoded_elem_list.add(objectEncoderDecoder.decode(json_elem));
                    }
                    assertEquals(createDtoObjectAsList(), decoded_elem_list);
                } else {
                    assertEquals(createDtoObjectAsList(), Arrays.asList(decoded_object));
                }
                finishTest();
            }
        });
    }

    public void testPostWithStringList() {
        final ObjectEncoderDecoder objectEncoderDecoder = new ObjectEncoderDecoder();
        final List<String> stringList = Arrays.asList("test");
        service.listStringParams(stringList, new MethodCallback<Echo>() {
            @Override
            public void onFailure(Method method, Throwable exception) {
                fail();
            }

            @Override
            public void onSuccess(Method method, Echo response) {
                assertEquals(1, response.params.size());
                JSONValue jsonValue = AbstractJsonEncoderDecoder.JSON_VALUE.decode(response.params.get("stringList"));
                final Object decoded_object = objectEncoderDecoder.decode(jsonValue);
                // Same collection-or-single-value handling as testPostWithDtoList.
                if (decoded_object instanceof Collection) {
                    final Collection<String> decoded_list = (Collection<String>) decoded_object;
                    final List decoded_elem_list = new ArrayList();
                    for (final String json_elem : decoded_list) {
                        decoded_elem_list.add(objectEncoderDecoder.decode(json_elem));
                    }
                    assertEquals(stringList, decoded_elem_list);
                } else {
                    assertEquals(stringList, Arrays.asList(decoded_object));
                }
                finishTest();
            }
        });
    }

    /**
     * Simple check of List equality, ignores difference of literal {@code null} and String "null"
     * by comparing elements via {@link String#valueOf(Object)}.
     *
     * @param expected expected elements
     * @param actual   actual elements; must match expected in size and string form
     */
    private void assertListEquals(List<Object> expected, List<Object> actual) {
        assertEquals(expected.size(), actual.size());
        for (int i = 0, size = expected.size(); i < size; i++) {
            assertEquals(String.valueOf(expected.get(i)), String.valueOf(actual.get(i)));
        }
    }

    /**
     * Test to check special handling of package "java.lang." in
     * {@link RestServiceClassCreator#toIteratedFormStringExpression}
     *
     * @see FormParamTestRestService#listStringBuilderParams(List, MethodCallback)
     */
    public void testPostWithStringBuilderList() {
        final ObjectEncoderDecoder objectEncoderDecoder = new ObjectEncoderDecoder();
        final List stringBuilderList = Arrays.asList(new StringBuilder("Test StringBuilder"));
        service.listStringBuilderParams(stringBuilderList, new MethodCallback<Echo>() {
            @Override
            public void onFailure(Method method, Throwable exception) {
                fail();
            }

            @Override
            public void onSuccess(Method method, Echo response) {
                assertEquals(1, response.params.size());
                JSONValue jsonValue = AbstractJsonEncoderDecoder.JSON_VALUE.decode(response.params.get("stringBuilderList"));
                final Object decoded_object = objectEncoderDecoder.decode(jsonValue);
                // Compare via assertListEquals: StringBuilder has no value-based equals,
                // so elements are compared by their string form.
                if (decoded_object instanceof Collection) {
                    final Collection<String> decoded_list = (Collection<String>) decoded_object;
                    final List decoded_elem_list = new ArrayList();
                    for (final String json_elem : decoded_list) {
                        decoded_elem_list.add(objectEncoderDecoder.decode(json_elem));
                    }
                    assertListEquals(stringBuilderList, decoded_elem_list);
                } else {
                    assertListEquals(stringBuilderList, Arrays.asList(decoded_object));
                }
                finishTest();
            }
        });
    }

    public void testPostWithDtoArray() {
        final ObjectEncoderDecoder objectEncoderDecoder = new ObjectEncoderDecoder();
        final ExampleDto[] dtoList = new ExampleDto[] { createDtoObject() };
        service.arrayParams(dtoList, new MethodCallback<Echo>() {
            @Override
            public void onFailure(Method method, Throwable exception) {
                fail();
            }

            @Override
            public void onSuccess(Method method, Echo response) {
                assertEquals(1, response.params.size());
                JSONValue jsonDto = JSONParser.parseStrict(response.params.get("dtoArray"));
                assertEquals(createDtoObjectAsList(), objectEncoderDecoder.decode(jsonDto));
                finishTest();
            }
        });
    }

    // An enum param is transmitted by name.
    public void testPostWithEnum() {
        service.enumParam(FormParamTestEnum.VALUE, new MethodCallback<Echo>() {
            @Override
            public void onFailure(Method method, Throwable exception) {
                fail();
            }

            @Override
            public void onSuccess(Method method, Echo response) {
                assertEquals(1, response.params.size());
                assertEquals("VALUE", response.params.get("param"));
                finishTest();
            }
        });
    }

    // A null enum param, like a null Integer, must be omitted from the post.
    public void testPostWithNullEnum() {
        service.enumParam(null, new MethodCallback<Echo>() {
            @Override
            public void onFailure(Method method, Throwable exception) {
                fail();
            }

            @Override
            public void onSuccess(Method method, Echo response) {
                assertTrue(response.params.isEmpty());
                finishTest();
            }
        });
    }

    /**
     * Expected decoded form of {@link #createDtoObject()} after a JSON round trip:
     * string keys, and the DTO's unset complexMap fields present as null entries.
     */
    private List createDtoObjectAsList() {
        ArrayList result = new ArrayList();
        result.add(
            map("name", "dtoName",
                "complexMap1", map(
                    "1", "one",
                    "2", "two",
                    "3", "three"
                ),
                "complexMap2", null,
                "complexMap3", null,
                "complexMap4", null,
                "complexMap5", null,
                "complexMap7", null,
                "complexMap8", null,
                "complexMap9", null,
                "complexMap10", null,
                "complexMap11", null
            )
        );
        return result;
    }

    /**
     * Builds a HashMap from alternating key/value varargs pairs.
     * A trailing unpaired key would throw ArrayIndexOutOfBoundsException.
     */
    public HashMap map(Object... keyValues) {
        HashMap result = new HashMap();
        for (int i = 0; i < keyValues.length; i += 2) {
            result.put(keyValues[i], keyValues[i + 1]);
        }
        return result;
    }

    // Fixture: DTO with a name and one populated Integer->String map.
    private ExampleDto createDtoObject() {
        final ExampleDto dto = new ExampleDto();
        dto.name = "dtoName";
        dto.complexMap1 = new HashMap<Integer, String>();
        dto.complexMap1.put(1, "one");
        dto.complexMap1.put(2, "two");
        dto.complexMap1.put(3, "three");
        return dto;
    }
}
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.comprehend; import javax.annotation.Generated; import com.amazonaws.services.comprehend.model.*; /** * Abstract implementation of {@code AmazonComprehendAsync}. Convenient method forms pass through to the corresponding * overload that takes a request object and an {@code AsyncHandler}, which throws an * {@code UnsupportedOperationException}. */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class AbstractAmazonComprehendAsync extends AbstractAmazonComprehend implements AmazonComprehendAsync { protected AbstractAmazonComprehendAsync() { } @Override public java.util.concurrent.Future<BatchDetectDominantLanguageResult> batchDetectDominantLanguageAsync(BatchDetectDominantLanguageRequest request) { return batchDetectDominantLanguageAsync(request, null); } @Override public java.util.concurrent.Future<BatchDetectDominantLanguageResult> batchDetectDominantLanguageAsync(BatchDetectDominantLanguageRequest request, com.amazonaws.handlers.AsyncHandler<BatchDetectDominantLanguageRequest, BatchDetectDominantLanguageResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<BatchDetectEntitiesResult> batchDetectEntitiesAsync(BatchDetectEntitiesRequest request) { return batchDetectEntitiesAsync(request, null); } @Override public java.util.concurrent.Future<BatchDetectEntitiesResult> 
batchDetectEntitiesAsync(BatchDetectEntitiesRequest request, com.amazonaws.handlers.AsyncHandler<BatchDetectEntitiesRequest, BatchDetectEntitiesResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<BatchDetectKeyPhrasesResult> batchDetectKeyPhrasesAsync(BatchDetectKeyPhrasesRequest request) { return batchDetectKeyPhrasesAsync(request, null); } @Override public java.util.concurrent.Future<BatchDetectKeyPhrasesResult> batchDetectKeyPhrasesAsync(BatchDetectKeyPhrasesRequest request, com.amazonaws.handlers.AsyncHandler<BatchDetectKeyPhrasesRequest, BatchDetectKeyPhrasesResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<BatchDetectSentimentResult> batchDetectSentimentAsync(BatchDetectSentimentRequest request) { return batchDetectSentimentAsync(request, null); } @Override public java.util.concurrent.Future<BatchDetectSentimentResult> batchDetectSentimentAsync(BatchDetectSentimentRequest request, com.amazonaws.handlers.AsyncHandler<BatchDetectSentimentRequest, BatchDetectSentimentResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<BatchDetectSyntaxResult> batchDetectSyntaxAsync(BatchDetectSyntaxRequest request) { return batchDetectSyntaxAsync(request, null); } @Override public java.util.concurrent.Future<BatchDetectSyntaxResult> batchDetectSyntaxAsync(BatchDetectSyntaxRequest request, com.amazonaws.handlers.AsyncHandler<BatchDetectSyntaxRequest, BatchDetectSyntaxResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<ClassifyDocumentResult> classifyDocumentAsync(ClassifyDocumentRequest request) { return classifyDocumentAsync(request, null); } @Override public java.util.concurrent.Future<ClassifyDocumentResult> classifyDocumentAsync(ClassifyDocumentRequest request, 
com.amazonaws.handlers.AsyncHandler<ClassifyDocumentRequest, ClassifyDocumentResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<ContainsPiiEntitiesResult> containsPiiEntitiesAsync(ContainsPiiEntitiesRequest request) { return containsPiiEntitiesAsync(request, null); } @Override public java.util.concurrent.Future<ContainsPiiEntitiesResult> containsPiiEntitiesAsync(ContainsPiiEntitiesRequest request, com.amazonaws.handlers.AsyncHandler<ContainsPiiEntitiesRequest, ContainsPiiEntitiesResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<CreateDocumentClassifierResult> createDocumentClassifierAsync(CreateDocumentClassifierRequest request) { return createDocumentClassifierAsync(request, null); } @Override public java.util.concurrent.Future<CreateDocumentClassifierResult> createDocumentClassifierAsync(CreateDocumentClassifierRequest request, com.amazonaws.handlers.AsyncHandler<CreateDocumentClassifierRequest, CreateDocumentClassifierResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<CreateEndpointResult> createEndpointAsync(CreateEndpointRequest request) { return createEndpointAsync(request, null); } @Override public java.util.concurrent.Future<CreateEndpointResult> createEndpointAsync(CreateEndpointRequest request, com.amazonaws.handlers.AsyncHandler<CreateEndpointRequest, CreateEndpointResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<CreateEntityRecognizerResult> createEntityRecognizerAsync(CreateEntityRecognizerRequest request) { return createEntityRecognizerAsync(request, null); } @Override public java.util.concurrent.Future<CreateEntityRecognizerResult> createEntityRecognizerAsync(CreateEntityRecognizerRequest request, 
com.amazonaws.handlers.AsyncHandler<CreateEntityRecognizerRequest, CreateEntityRecognizerResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DeleteDocumentClassifierResult> deleteDocumentClassifierAsync(DeleteDocumentClassifierRequest request) { return deleteDocumentClassifierAsync(request, null); } @Override public java.util.concurrent.Future<DeleteDocumentClassifierResult> deleteDocumentClassifierAsync(DeleteDocumentClassifierRequest request, com.amazonaws.handlers.AsyncHandler<DeleteDocumentClassifierRequest, DeleteDocumentClassifierResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DeleteEndpointResult> deleteEndpointAsync(DeleteEndpointRequest request) { return deleteEndpointAsync(request, null); } @Override public java.util.concurrent.Future<DeleteEndpointResult> deleteEndpointAsync(DeleteEndpointRequest request, com.amazonaws.handlers.AsyncHandler<DeleteEndpointRequest, DeleteEndpointResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DeleteEntityRecognizerResult> deleteEntityRecognizerAsync(DeleteEntityRecognizerRequest request) { return deleteEntityRecognizerAsync(request, null); } @Override public java.util.concurrent.Future<DeleteEntityRecognizerResult> deleteEntityRecognizerAsync(DeleteEntityRecognizerRequest request, com.amazonaws.handlers.AsyncHandler<DeleteEntityRecognizerRequest, DeleteEntityRecognizerResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DeleteResourcePolicyResult> deleteResourcePolicyAsync(DeleteResourcePolicyRequest request) { return deleteResourcePolicyAsync(request, null); } @Override public java.util.concurrent.Future<DeleteResourcePolicyResult> deleteResourcePolicyAsync(DeleteResourcePolicyRequest request, 
com.amazonaws.handlers.AsyncHandler<DeleteResourcePolicyRequest, DeleteResourcePolicyResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DescribeDocumentClassificationJobResult> describeDocumentClassificationJobAsync( DescribeDocumentClassificationJobRequest request) { return describeDocumentClassificationJobAsync(request, null); } @Override public java.util.concurrent.Future<DescribeDocumentClassificationJobResult> describeDocumentClassificationJobAsync( DescribeDocumentClassificationJobRequest request, com.amazonaws.handlers.AsyncHandler<DescribeDocumentClassificationJobRequest, DescribeDocumentClassificationJobResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DescribeDocumentClassifierResult> describeDocumentClassifierAsync(DescribeDocumentClassifierRequest request) { return describeDocumentClassifierAsync(request, null); } @Override public java.util.concurrent.Future<DescribeDocumentClassifierResult> describeDocumentClassifierAsync(DescribeDocumentClassifierRequest request, com.amazonaws.handlers.AsyncHandler<DescribeDocumentClassifierRequest, DescribeDocumentClassifierResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DescribeDominantLanguageDetectionJobResult> describeDominantLanguageDetectionJobAsync( DescribeDominantLanguageDetectionJobRequest request) { return describeDominantLanguageDetectionJobAsync(request, null); } @Override public java.util.concurrent.Future<DescribeDominantLanguageDetectionJobResult> describeDominantLanguageDetectionJobAsync( DescribeDominantLanguageDetectionJobRequest request, com.amazonaws.handlers.AsyncHandler<DescribeDominantLanguageDetectionJobRequest, DescribeDominantLanguageDetectionJobResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public 
java.util.concurrent.Future<DescribeEndpointResult> describeEndpointAsync(DescribeEndpointRequest request) { return describeEndpointAsync(request, null); } @Override public java.util.concurrent.Future<DescribeEndpointResult> describeEndpointAsync(DescribeEndpointRequest request, com.amazonaws.handlers.AsyncHandler<DescribeEndpointRequest, DescribeEndpointResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DescribeEntitiesDetectionJobResult> describeEntitiesDetectionJobAsync(DescribeEntitiesDetectionJobRequest request) { return describeEntitiesDetectionJobAsync(request, null); } @Override public java.util.concurrent.Future<DescribeEntitiesDetectionJobResult> describeEntitiesDetectionJobAsync(DescribeEntitiesDetectionJobRequest request, com.amazonaws.handlers.AsyncHandler<DescribeEntitiesDetectionJobRequest, DescribeEntitiesDetectionJobResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DescribeEntityRecognizerResult> describeEntityRecognizerAsync(DescribeEntityRecognizerRequest request) { return describeEntityRecognizerAsync(request, null); } @Override public java.util.concurrent.Future<DescribeEntityRecognizerResult> describeEntityRecognizerAsync(DescribeEntityRecognizerRequest request, com.amazonaws.handlers.AsyncHandler<DescribeEntityRecognizerRequest, DescribeEntityRecognizerResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DescribeEventsDetectionJobResult> describeEventsDetectionJobAsync(DescribeEventsDetectionJobRequest request) { return describeEventsDetectionJobAsync(request, null); } @Override public java.util.concurrent.Future<DescribeEventsDetectionJobResult> describeEventsDetectionJobAsync(DescribeEventsDetectionJobRequest request, com.amazonaws.handlers.AsyncHandler<DescribeEventsDetectionJobRequest, DescribeEventsDetectionJobResult> 
asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DescribeKeyPhrasesDetectionJobResult> describeKeyPhrasesDetectionJobAsync(DescribeKeyPhrasesDetectionJobRequest request) { return describeKeyPhrasesDetectionJobAsync(request, null); } @Override public java.util.concurrent.Future<DescribeKeyPhrasesDetectionJobResult> describeKeyPhrasesDetectionJobAsync(DescribeKeyPhrasesDetectionJobRequest request, com.amazonaws.handlers.AsyncHandler<DescribeKeyPhrasesDetectionJobRequest, DescribeKeyPhrasesDetectionJobResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DescribePiiEntitiesDetectionJobResult> describePiiEntitiesDetectionJobAsync( DescribePiiEntitiesDetectionJobRequest request) { return describePiiEntitiesDetectionJobAsync(request, null); } @Override public java.util.concurrent.Future<DescribePiiEntitiesDetectionJobResult> describePiiEntitiesDetectionJobAsync( DescribePiiEntitiesDetectionJobRequest request, com.amazonaws.handlers.AsyncHandler<DescribePiiEntitiesDetectionJobRequest, DescribePiiEntitiesDetectionJobResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DescribeResourcePolicyResult> describeResourcePolicyAsync(DescribeResourcePolicyRequest request) { return describeResourcePolicyAsync(request, null); } @Override public java.util.concurrent.Future<DescribeResourcePolicyResult> describeResourcePolicyAsync(DescribeResourcePolicyRequest request, com.amazonaws.handlers.AsyncHandler<DescribeResourcePolicyRequest, DescribeResourcePolicyResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DescribeSentimentDetectionJobResult> describeSentimentDetectionJobAsync(DescribeSentimentDetectionJobRequest request) { return describeSentimentDetectionJobAsync(request, null); } @Override public 
java.util.concurrent.Future<DescribeSentimentDetectionJobResult> describeSentimentDetectionJobAsync(DescribeSentimentDetectionJobRequest request, com.amazonaws.handlers.AsyncHandler<DescribeSentimentDetectionJobRequest, DescribeSentimentDetectionJobResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DescribeTopicsDetectionJobResult> describeTopicsDetectionJobAsync(DescribeTopicsDetectionJobRequest request) { return describeTopicsDetectionJobAsync(request, null); } @Override public java.util.concurrent.Future<DescribeTopicsDetectionJobResult> describeTopicsDetectionJobAsync(DescribeTopicsDetectionJobRequest request, com.amazonaws.handlers.AsyncHandler<DescribeTopicsDetectionJobRequest, DescribeTopicsDetectionJobResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DetectDominantLanguageResult> detectDominantLanguageAsync(DetectDominantLanguageRequest request) { return detectDominantLanguageAsync(request, null); } @Override public java.util.concurrent.Future<DetectDominantLanguageResult> detectDominantLanguageAsync(DetectDominantLanguageRequest request, com.amazonaws.handlers.AsyncHandler<DetectDominantLanguageRequest, DetectDominantLanguageResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DetectEntitiesResult> detectEntitiesAsync(DetectEntitiesRequest request) { return detectEntitiesAsync(request, null); } @Override public java.util.concurrent.Future<DetectEntitiesResult> detectEntitiesAsync(DetectEntitiesRequest request, com.amazonaws.handlers.AsyncHandler<DetectEntitiesRequest, DetectEntitiesResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DetectKeyPhrasesResult> detectKeyPhrasesAsync(DetectKeyPhrasesRequest request) { return detectKeyPhrasesAsync(request, null); } @Override 
public java.util.concurrent.Future<DetectKeyPhrasesResult> detectKeyPhrasesAsync(DetectKeyPhrasesRequest request, com.amazonaws.handlers.AsyncHandler<DetectKeyPhrasesRequest, DetectKeyPhrasesResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DetectPiiEntitiesResult> detectPiiEntitiesAsync(DetectPiiEntitiesRequest request) { return detectPiiEntitiesAsync(request, null); } @Override public java.util.concurrent.Future<DetectPiiEntitiesResult> detectPiiEntitiesAsync(DetectPiiEntitiesRequest request, com.amazonaws.handlers.AsyncHandler<DetectPiiEntitiesRequest, DetectPiiEntitiesResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DetectSentimentResult> detectSentimentAsync(DetectSentimentRequest request) { return detectSentimentAsync(request, null); } @Override public java.util.concurrent.Future<DetectSentimentResult> detectSentimentAsync(DetectSentimentRequest request, com.amazonaws.handlers.AsyncHandler<DetectSentimentRequest, DetectSentimentResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<DetectSyntaxResult> detectSyntaxAsync(DetectSyntaxRequest request) { return detectSyntaxAsync(request, null); } @Override public java.util.concurrent.Future<DetectSyntaxResult> detectSyntaxAsync(DetectSyntaxRequest request, com.amazonaws.handlers.AsyncHandler<DetectSyntaxRequest, DetectSyntaxResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<ImportModelResult> importModelAsync(ImportModelRequest request) { return importModelAsync(request, null); } @Override public java.util.concurrent.Future<ImportModelResult> importModelAsync(ImportModelRequest request, com.amazonaws.handlers.AsyncHandler<ImportModelRequest, ImportModelResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); 
} @Override public java.util.concurrent.Future<ListDocumentClassificationJobsResult> listDocumentClassificationJobsAsync(ListDocumentClassificationJobsRequest request) { return listDocumentClassificationJobsAsync(request, null); } @Override public java.util.concurrent.Future<ListDocumentClassificationJobsResult> listDocumentClassificationJobsAsync(ListDocumentClassificationJobsRequest request, com.amazonaws.handlers.AsyncHandler<ListDocumentClassificationJobsRequest, ListDocumentClassificationJobsResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<ListDocumentClassifierSummariesResult> listDocumentClassifierSummariesAsync( ListDocumentClassifierSummariesRequest request) { return listDocumentClassifierSummariesAsync(request, null); } @Override public java.util.concurrent.Future<ListDocumentClassifierSummariesResult> listDocumentClassifierSummariesAsync( ListDocumentClassifierSummariesRequest request, com.amazonaws.handlers.AsyncHandler<ListDocumentClassifierSummariesRequest, ListDocumentClassifierSummariesResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<ListDocumentClassifiersResult> listDocumentClassifiersAsync(ListDocumentClassifiersRequest request) { return listDocumentClassifiersAsync(request, null); } @Override public java.util.concurrent.Future<ListDocumentClassifiersResult> listDocumentClassifiersAsync(ListDocumentClassifiersRequest request, com.amazonaws.handlers.AsyncHandler<ListDocumentClassifiersRequest, ListDocumentClassifiersResult> asyncHandler) { throw new java.lang.UnsupportedOperationException(); } @Override public java.util.concurrent.Future<ListDominantLanguageDetectionJobsResult> listDominantLanguageDetectionJobsAsync( ListDominantLanguageDetectionJobsRequest request) { return listDominantLanguageDetectionJobsAsync(request, null); } @Override public 
// NOTE(review): generated all-throwing async stub (AbstractAmazonComprehendAsync style).
// Every service operation appears as a pair: the single-argument overload delegates to the
// two-argument overload with a null AsyncHandler, and the two-argument overload throws
// UnsupportedOperationException so concrete subclasses override only the operations they
// actually support. The signature immediately below is the tail of a method whose
// modifiers precede this chunk.
java.util.concurrent.Future<ListDominantLanguageDetectionJobsResult> listDominantLanguageDetectionJobsAsync(
        ListDominantLanguageDetectionJobsRequest request,
        com.amazonaws.handlers.AsyncHandler<ListDominantLanguageDetectionJobsRequest, ListDominantLanguageDetectionJobsResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<ListEndpointsResult> listEndpointsAsync(ListEndpointsRequest request) {
    return listEndpointsAsync(request, null);
}

@Override
public java.util.concurrent.Future<ListEndpointsResult> listEndpointsAsync(ListEndpointsRequest request,
        com.amazonaws.handlers.AsyncHandler<ListEndpointsRequest, ListEndpointsResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<ListEntitiesDetectionJobsResult> listEntitiesDetectionJobsAsync(ListEntitiesDetectionJobsRequest request) {
    return listEntitiesDetectionJobsAsync(request, null);
}

@Override
public java.util.concurrent.Future<ListEntitiesDetectionJobsResult> listEntitiesDetectionJobsAsync(ListEntitiesDetectionJobsRequest request,
        com.amazonaws.handlers.AsyncHandler<ListEntitiesDetectionJobsRequest, ListEntitiesDetectionJobsResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<ListEntityRecognizerSummariesResult> listEntityRecognizerSummariesAsync(ListEntityRecognizerSummariesRequest request) {
    return listEntityRecognizerSummariesAsync(request, null);
}

@Override
public java.util.concurrent.Future<ListEntityRecognizerSummariesResult> listEntityRecognizerSummariesAsync(ListEntityRecognizerSummariesRequest request,
        com.amazonaws.handlers.AsyncHandler<ListEntityRecognizerSummariesRequest, ListEntityRecognizerSummariesResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<ListEntityRecognizersResult> listEntityRecognizersAsync(ListEntityRecognizersRequest request) {
    return listEntityRecognizersAsync(request, null);
}

@Override
public java.util.concurrent.Future<ListEntityRecognizersResult> listEntityRecognizersAsync(ListEntityRecognizersRequest request,
        com.amazonaws.handlers.AsyncHandler<ListEntityRecognizersRequest, ListEntityRecognizersResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<ListEventsDetectionJobsResult> listEventsDetectionJobsAsync(ListEventsDetectionJobsRequest request) {
    return listEventsDetectionJobsAsync(request, null);
}

@Override
public java.util.concurrent.Future<ListEventsDetectionJobsResult> listEventsDetectionJobsAsync(ListEventsDetectionJobsRequest request,
        com.amazonaws.handlers.AsyncHandler<ListEventsDetectionJobsRequest, ListEventsDetectionJobsResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<ListKeyPhrasesDetectionJobsResult> listKeyPhrasesDetectionJobsAsync(ListKeyPhrasesDetectionJobsRequest request) {
    return listKeyPhrasesDetectionJobsAsync(request, null);
}

@Override
public java.util.concurrent.Future<ListKeyPhrasesDetectionJobsResult> listKeyPhrasesDetectionJobsAsync(ListKeyPhrasesDetectionJobsRequest request,
        com.amazonaws.handlers.AsyncHandler<ListKeyPhrasesDetectionJobsRequest, ListKeyPhrasesDetectionJobsResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<ListPiiEntitiesDetectionJobsResult> listPiiEntitiesDetectionJobsAsync(ListPiiEntitiesDetectionJobsRequest request) {
    return listPiiEntitiesDetectionJobsAsync(request, null);
}

@Override
public java.util.concurrent.Future<ListPiiEntitiesDetectionJobsResult> listPiiEntitiesDetectionJobsAsync(ListPiiEntitiesDetectionJobsRequest request,
        com.amazonaws.handlers.AsyncHandler<ListPiiEntitiesDetectionJobsRequest, ListPiiEntitiesDetectionJobsResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<ListSentimentDetectionJobsResult> listSentimentDetectionJobsAsync(ListSentimentDetectionJobsRequest request) {
    return listSentimentDetectionJobsAsync(request, null);
}

@Override
public java.util.concurrent.Future<ListSentimentDetectionJobsResult> listSentimentDetectionJobsAsync(ListSentimentDetectionJobsRequest request,
        com.amazonaws.handlers.AsyncHandler<ListSentimentDetectionJobsRequest, ListSentimentDetectionJobsResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<ListTagsForResourceResult> listTagsForResourceAsync(ListTagsForResourceRequest request) {
    return listTagsForResourceAsync(request, null);
}

@Override
public java.util.concurrent.Future<ListTagsForResourceResult> listTagsForResourceAsync(ListTagsForResourceRequest request,
        com.amazonaws.handlers.AsyncHandler<ListTagsForResourceRequest, ListTagsForResourceResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<ListTopicsDetectionJobsResult> listTopicsDetectionJobsAsync(ListTopicsDetectionJobsRequest request) {
    return listTopicsDetectionJobsAsync(request, null);
}

@Override
public java.util.concurrent.Future<ListTopicsDetectionJobsResult> listTopicsDetectionJobsAsync(ListTopicsDetectionJobsRequest request,
        com.amazonaws.handlers.AsyncHandler<ListTopicsDetectionJobsRequest, ListTopicsDetectionJobsResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<PutResourcePolicyResult> putResourcePolicyAsync(PutResourcePolicyRequest request) {
    return putResourcePolicyAsync(request, null);
}

@Override
public java.util.concurrent.Future<PutResourcePolicyResult> putResourcePolicyAsync(PutResourcePolicyRequest request,
        com.amazonaws.handlers.AsyncHandler<PutResourcePolicyRequest, PutResourcePolicyResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<StartDocumentClassificationJobResult> startDocumentClassificationJobAsync(StartDocumentClassificationJobRequest request) {
    return startDocumentClassificationJobAsync(request, null);
}

@Override
public java.util.concurrent.Future<StartDocumentClassificationJobResult> startDocumentClassificationJobAsync(StartDocumentClassificationJobRequest request,
        com.amazonaws.handlers.AsyncHandler<StartDocumentClassificationJobRequest, StartDocumentClassificationJobResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<StartDominantLanguageDetectionJobResult> startDominantLanguageDetectionJobAsync(
        StartDominantLanguageDetectionJobRequest request) {
    return startDominantLanguageDetectionJobAsync(request, null);
}

@Override
public java.util.concurrent.Future<StartDominantLanguageDetectionJobResult> startDominantLanguageDetectionJobAsync(
        StartDominantLanguageDetectionJobRequest request,
        com.amazonaws.handlers.AsyncHandler<StartDominantLanguageDetectionJobRequest, StartDominantLanguageDetectionJobResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<StartEntitiesDetectionJobResult> startEntitiesDetectionJobAsync(StartEntitiesDetectionJobRequest request) {
    return startEntitiesDetectionJobAsync(request, null);
}

@Override
public java.util.concurrent.Future<StartEntitiesDetectionJobResult> startEntitiesDetectionJobAsync(StartEntitiesDetectionJobRequest request,
        com.amazonaws.handlers.AsyncHandler<StartEntitiesDetectionJobRequest, StartEntitiesDetectionJobResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<StartEventsDetectionJobResult> startEventsDetectionJobAsync(StartEventsDetectionJobRequest request) {
    return startEventsDetectionJobAsync(request, null);
}

@Override
public java.util.concurrent.Future<StartEventsDetectionJobResult> startEventsDetectionJobAsync(StartEventsDetectionJobRequest request,
        com.amazonaws.handlers.AsyncHandler<StartEventsDetectionJobRequest, StartEventsDetectionJobResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<StartKeyPhrasesDetectionJobResult> startKeyPhrasesDetectionJobAsync(StartKeyPhrasesDetectionJobRequest request) {
    return startKeyPhrasesDetectionJobAsync(request, null);
}

@Override
public java.util.concurrent.Future<StartKeyPhrasesDetectionJobResult> startKeyPhrasesDetectionJobAsync(StartKeyPhrasesDetectionJobRequest request,
        com.amazonaws.handlers.AsyncHandler<StartKeyPhrasesDetectionJobRequest, StartKeyPhrasesDetectionJobResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<StartPiiEntitiesDetectionJobResult> startPiiEntitiesDetectionJobAsync(StartPiiEntitiesDetectionJobRequest request) {
    return startPiiEntitiesDetectionJobAsync(request, null);
}

@Override
public java.util.concurrent.Future<StartPiiEntitiesDetectionJobResult> startPiiEntitiesDetectionJobAsync(StartPiiEntitiesDetectionJobRequest request,
        com.amazonaws.handlers.AsyncHandler<StartPiiEntitiesDetectionJobRequest, StartPiiEntitiesDetectionJobResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<StartSentimentDetectionJobResult> startSentimentDetectionJobAsync(StartSentimentDetectionJobRequest request) {
    return startSentimentDetectionJobAsync(request, null);
}

@Override
public java.util.concurrent.Future<StartSentimentDetectionJobResult> startSentimentDetectionJobAsync(StartSentimentDetectionJobRequest request,
        com.amazonaws.handlers.AsyncHandler<StartSentimentDetectionJobRequest, StartSentimentDetectionJobResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<StartTopicsDetectionJobResult> startTopicsDetectionJobAsync(StartTopicsDetectionJobRequest request) {
    return startTopicsDetectionJobAsync(request, null);
}

@Override
public java.util.concurrent.Future<StartTopicsDetectionJobResult> startTopicsDetectionJobAsync(StartTopicsDetectionJobRequest request,
        com.amazonaws.handlers.AsyncHandler<StartTopicsDetectionJobRequest, StartTopicsDetectionJobResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<StopDominantLanguageDetectionJobResult> stopDominantLanguageDetectionJobAsync(
        StopDominantLanguageDetectionJobRequest request) {
    return stopDominantLanguageDetectionJobAsync(request, null);
}

@Override
public java.util.concurrent.Future<StopDominantLanguageDetectionJobResult> stopDominantLanguageDetectionJobAsync(
        StopDominantLanguageDetectionJobRequest request,
        com.amazonaws.handlers.AsyncHandler<StopDominantLanguageDetectionJobRequest, StopDominantLanguageDetectionJobResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<StopEntitiesDetectionJobResult> stopEntitiesDetectionJobAsync(StopEntitiesDetectionJobRequest request) {
    return stopEntitiesDetectionJobAsync(request, null);
}

@Override
public java.util.concurrent.Future<StopEntitiesDetectionJobResult> stopEntitiesDetectionJobAsync(StopEntitiesDetectionJobRequest request,
        com.amazonaws.handlers.AsyncHandler<StopEntitiesDetectionJobRequest, StopEntitiesDetectionJobResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<StopEventsDetectionJobResult> stopEventsDetectionJobAsync(StopEventsDetectionJobRequest request) {
    return stopEventsDetectionJobAsync(request, null);
}

@Override
public java.util.concurrent.Future<StopEventsDetectionJobResult> stopEventsDetectionJobAsync(StopEventsDetectionJobRequest request,
        com.amazonaws.handlers.AsyncHandler<StopEventsDetectionJobRequest, StopEventsDetectionJobResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<StopKeyPhrasesDetectionJobResult> stopKeyPhrasesDetectionJobAsync(StopKeyPhrasesDetectionJobRequest request) {
    return stopKeyPhrasesDetectionJobAsync(request, null);
}

@Override
public java.util.concurrent.Future<StopKeyPhrasesDetectionJobResult> stopKeyPhrasesDetectionJobAsync(StopKeyPhrasesDetectionJobRequest request,
        com.amazonaws.handlers.AsyncHandler<StopKeyPhrasesDetectionJobRequest, StopKeyPhrasesDetectionJobResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<StopPiiEntitiesDetectionJobResult> stopPiiEntitiesDetectionJobAsync(StopPiiEntitiesDetectionJobRequest request) {
    return stopPiiEntitiesDetectionJobAsync(request, null);
}

@Override
public java.util.concurrent.Future<StopPiiEntitiesDetectionJobResult> stopPiiEntitiesDetectionJobAsync(StopPiiEntitiesDetectionJobRequest request,
        com.amazonaws.handlers.AsyncHandler<StopPiiEntitiesDetectionJobRequest, StopPiiEntitiesDetectionJobResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<StopSentimentDetectionJobResult> stopSentimentDetectionJobAsync(StopSentimentDetectionJobRequest request) {
    return stopSentimentDetectionJobAsync(request, null);
}

@Override
public java.util.concurrent.Future<StopSentimentDetectionJobResult> stopSentimentDetectionJobAsync(StopSentimentDetectionJobRequest request,
        com.amazonaws.handlers.AsyncHandler<StopSentimentDetectionJobRequest, StopSentimentDetectionJobResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<StopTrainingDocumentClassifierResult> stopTrainingDocumentClassifierAsync(StopTrainingDocumentClassifierRequest request) {
    return stopTrainingDocumentClassifierAsync(request, null);
}

@Override
public java.util.concurrent.Future<StopTrainingDocumentClassifierResult> stopTrainingDocumentClassifierAsync(StopTrainingDocumentClassifierRequest request,
        com.amazonaws.handlers.AsyncHandler<StopTrainingDocumentClassifierRequest, StopTrainingDocumentClassifierResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<StopTrainingEntityRecognizerResult> stopTrainingEntityRecognizerAsync(StopTrainingEntityRecognizerRequest request) {
    return stopTrainingEntityRecognizerAsync(request, null);
}

@Override
public java.util.concurrent.Future<StopTrainingEntityRecognizerResult> stopTrainingEntityRecognizerAsync(StopTrainingEntityRecognizerRequest request,
        com.amazonaws.handlers.AsyncHandler<StopTrainingEntityRecognizerRequest, StopTrainingEntityRecognizerResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<TagResourceResult> tagResourceAsync(TagResourceRequest request) {
    return tagResourceAsync(request, null);
}

@Override
public java.util.concurrent.Future<TagResourceResult> tagResourceAsync(TagResourceRequest request,
        com.amazonaws.handlers.AsyncHandler<TagResourceRequest, TagResourceResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<UntagResourceResult> untagResourceAsync(UntagResourceRequest request) {
    return untagResourceAsync(request, null);
}

@Override
public java.util.concurrent.Future<UntagResourceResult> untagResourceAsync(UntagResourceRequest request,
        com.amazonaws.handlers.AsyncHandler<UntagResourceRequest, UntagResourceResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<UpdateEndpointResult> updateEndpointAsync(UpdateEndpointRequest request) {
    return updateEndpointAsync(request, null);
}

@Override
public java.util.concurrent.Future<UpdateEndpointResult> updateEndpointAsync(UpdateEndpointRequest request,
        com.amazonaws.handlers.AsyncHandler<UpdateEndpointRequest, UpdateEndpointResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
}
/*
 * Copyright (C) 2009 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.inject.struts2;

import com.google.inject.AbstractModule;
import com.google.inject.Binder;
import com.google.inject.Injector;
import com.google.inject.internal.Annotations;
import com.opensymphony.xwork2.ActionInvocation;
import com.opensymphony.xwork2.ObjectFactory;
import com.opensymphony.xwork2.config.ConfigurationException;
import com.opensymphony.xwork2.config.entities.InterceptorConfig;
import com.opensymphony.xwork2.inject.Inject;
import com.opensymphony.xwork2.interceptor.Interceptor;
import java.lang.annotation.Annotation;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Logger;

/**
 * Cleaned-up version of Bob's GuiceObjectFactory. Now works properly with GS2 and fixes several
 * bugs.
 *
 * <p>Struts2 {@link ObjectFactory} that builds actions and interceptors through a Guice child
 * injector. The parent injector is expected to be installed statically by a
 * GuiceServletContextListener before Struts2 starts creating beans.
 *
 * @author dhanji@gmail.com
 * @author benmccann.com
 */
public class Struts2Factory extends ObjectFactory {

  private static final long serialVersionUID = 1L;
  private static final Logger logger = Logger.getLogger(Struts2Factory.class.getName());
  private static final String ERROR_NO_INJECTOR =
      "Cannot find a Guice injector. Are you sure you registered a GuiceServletContextListener "
          + "that uses the Struts2GuicePluginModule in your application's web.xml?";

  // Injected statically by the servlet-context bootstrap; acts as the parent of the
  // lazily-created child injector below.
  private static @com.google.inject.Inject Injector injector;

  // Interceptors discovered via buildInterceptor(); validated and injected when the
  // child injector is created.
  private final List<ProvidedInterceptor> interceptors = new ArrayList<>();
  // volatile: read outside synchronization in the double-checked lock in buildBean().
  private volatile Injector strutsInjector;

  @Override
  public boolean isNoArgConstructorRequired() {
    return false;
  }

  /**
   * Legacy XWork injection point; the XML-based module configuration is no longer supported,
   * so this always fails with a migration hint.
   */
  @Inject(value = "guice.module", required = false)
  void setModule(String moduleClassName) {
    throw new RuntimeException(
        "The struts2 plugin no longer supports"
            + " specifying a module via the 'guice.module' property in XML."
            + " Please install your module via a GuiceServletContextListener instead.");
  }

  // Classes seen before the child injector exists; each is bound exactly once at
  // injector-creation time so Guice can validate them up front.
  Set<Class<?>> boundClasses = new HashSet<>();

  /**
   * Resolves a class by name and, while the child injector does not exist yet, records it
   * for eager binding/validation during injector creation.
   */
  @Override
  public Class<?> getClassInstance(String name) throws ClassNotFoundException {
    Class<?> clazz = super.getClassInstance(name);
    synchronized (this) {
      if (strutsInjector == null) {
        // We can only bind each class once.
        if (!boundClasses.contains(clazz)) {
          try {
            // Calling these methods now helps us detect ClassNotFoundErrors
            // early.
            clazz.getDeclaredFields();
            clazz.getDeclaredMethods();

            boundClasses.add(clazz);
          } catch (Throwable t) {
            // Struts should still work even though some classes aren't in the
            // classpath. It appears we always get the exception here when
            // this is the case.
            return clazz;
          }
        }
      }
    }
    return clazz;
  }

  /**
   * Creates a bean instance via the Guice child injector, lazily building the injector on
   * first use (classic double-checked locking over the volatile {@link #strutsInjector}).
   */
  @Override
  @SuppressWarnings({"unchecked", "rawtypes"}) // Parent class uses raw type.
  public Object buildBean(Class clazz, Map<String, Object> extraContext) {
    if (strutsInjector == null) {
      synchronized (this) {
        if (strutsInjector == null) {
          createInjector();
        }
      }
    }
    return strutsInjector.getInstance(clazz);
  }

  // Builds the child injector from the statically-injected parent, binding every class seen
  // so far and validating/injecting all recorded interceptors. Must only be called while
  // holding the lock in buildBean().
  private void createInjector() {
    logger.info("Loading struts2 Guice support...");

    // Something is wrong, since this should be there if GuiceServletContextListener
    // was present.
    if (injector == null) {
      logger.severe(ERROR_NO_INJECTOR);
      throw new RuntimeException(ERROR_NO_INJECTOR);
    }
    this.strutsInjector =
        injector.createChildInjector(
            new AbstractModule() {
              @Override
              protected void configure() {

                // Tell the injector about all the action classes, etc., so it
                // can validate them at startup.
                for (Class<?> boundClass : boundClasses) {
                  // TODO: Set source from Struts XML.
                  bind(boundClass);
                }

                // Validate the interceptor class.
                for (ProvidedInterceptor interceptor : interceptors) {
                  interceptor.validate(binder());
                }
              }
            });

    // Inject interceptors.
    for (ProvidedInterceptor interceptor : interceptors) {
      interceptor.inject();
    }
    logger.info("Injector created successfully.");
  }

  /**
   * Wraps the interceptor in a {@link ProvidedInterceptor}: if the child injector already
   * exists the delegate is built immediately, otherwise it is deferred until
   * {@link #createInjector()} runs.
   */
  @Override
  @SuppressWarnings("unchecked")
  public Interceptor buildInterceptor(
      InterceptorConfig interceptorConfig, Map<String, String> interceptorRefParams)
      throws ConfigurationException {

    // Ensure the interceptor class is present.
    Class<? extends Interceptor> interceptorClass;
    try {
      interceptorClass =
          (Class<? extends Interceptor>) getClassInstance(interceptorConfig.getClassName());
    } catch (ClassNotFoundException e) {
      throw new RuntimeException(e);
    }

    ProvidedInterceptor providedInterceptor =
        new ProvidedInterceptor(interceptorConfig, interceptorRefParams, interceptorClass);
    interceptors.add(providedInterceptor);
    if (strutsInjector != null) {
      synchronized (this) {
        // Re-check under the lock so injection happens at most once and only after the
        // injector is fully published.
        if (strutsInjector != null) {
          providedInterceptor.inject();
        }
      }
    }
    return providedInterceptor;
  }

  // Exposes the superclass factory method to the inner class below (which cannot call
  // super.buildInterceptor directly).
  private Interceptor superBuildInterceptor(
      InterceptorConfig interceptorConfig, Map<String, String> interceptorRefParams)
      throws ConfigurationException {
    return super.buildInterceptor(interceptorConfig, interceptorRefParams);
  }

  /**
   * Lazy interceptor wrapper: holds the Struts configuration until the Guice injector exists,
   * then builds and delegates to the real interceptor.
   */
  private class ProvidedInterceptor implements Interceptor {

    private static final long serialVersionUID = 1L;

    private final InterceptorConfig config;
    private final Map<String, String> params;
    private final Class<? extends Interceptor> interceptorClass;
    // Set by inject(); null until the child injector has been created.
    private Interceptor delegate;

    ProvidedInterceptor(
        InterceptorConfig config,
        Map<String, String> params,
        Class<? extends Interceptor> interceptorClass) {
      this.config = config;
      this.params = params;
      this.interceptorClass = interceptorClass;
    }

    void validate(Binder binder) {
      // TODO: Set source from Struts XML.
      if (hasScope(interceptorClass)) {
        binder.addError(
            "Scoping interceptors is not currently supported."
                + " Please remove the scope annotation from "
                + interceptorClass.getName()
                + ".");
      }

      // Make sure it implements Interceptor.
      if (!Interceptor.class.isAssignableFrom(interceptorClass)) {
        binder.addError(
            interceptorClass.getName() + " must implement " + Interceptor.class.getName() + ".");
      }
    }

    // Builds the real interceptor through the superclass factory (which performs init()).
    void inject() {
      delegate = superBuildInterceptor(config, params);
    }

    @Override
    public void destroy() {
      if (null != delegate) {
        delegate.destroy();
      }
    }

    @Override
    public void init() {
      // The delegate is initialized by superBuildInterceptor(); Struts must never call
      // init() on this wrapper directly.
      throw new AssertionError();
    }

    @Override
    public String intercept(ActionInvocation invocation) throws Exception {
      return delegate.intercept(invocation);
    }
  }

  /** Returns true if the given class has a scope annotation. */
  private static boolean hasScope(Class<? extends Interceptor> interceptorClass) {
    for (Annotation annotation : interceptorClass.getAnnotations()) {
      if (Annotations.isScopeAnnotation(annotation.annotationType())) {
        return true;
      }
    }
    return false;
  }
}
/*
 * Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.internal.dynamicconfig;

import com.hazelcast.config.CacheDeserializedValues;
import com.hazelcast.config.CardinalityEstimatorConfig;
import com.hazelcast.config.DurableExecutorConfig;
import com.hazelcast.config.EntryListenerConfig;
import com.hazelcast.config.EvictionPolicy;
import com.hazelcast.config.ExecutorConfig;
import com.hazelcast.config.HotRestartConfig;
import com.hazelcast.config.InMemoryFormat;
import com.hazelcast.config.ItemListenerConfig;
import com.hazelcast.config.ListConfig;
import com.hazelcast.config.LockConfig;
import com.hazelcast.config.MapAttributeConfig;
import com.hazelcast.config.MapConfig;
import com.hazelcast.config.MapIndexConfig;
import com.hazelcast.config.MaxSizeConfig;
import com.hazelcast.config.MultiMapConfig;
import com.hazelcast.config.QueueConfig;
import com.hazelcast.config.QueueStoreConfig;
import com.hazelcast.config.ReplicatedMapConfig;
import com.hazelcast.config.RingbufferConfig;
import com.hazelcast.config.ScheduledExecutorConfig;
import com.hazelcast.config.SetConfig;
import com.hazelcast.core.EntryEvent;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.ItemEvent;
import com.hazelcast.core.ItemListener;
import com.hazelcast.core.RingbufferStore;
import com.hazelcast.core.RingbufferStoreFactory;
import com.hazelcast.map.listener.EntryAddedListener;
import com.hazelcast.test.HazelcastParallelClassRunner;
import com.hazelcast.test.HazelcastTestSupport;
import com.hazelcast.test.TestHazelcastInstanceFactory;
import com.hazelcast.test.annotation.ParallelTest;
import com.hazelcast.test.annotation.QuickTest;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

import static com.hazelcast.config.MultiMapConfig.ValueCollectionType.LIST;
import static org.junit.Assert.assertEquals;

// TODO: tests still missing for some dynamically-addable config types.
//
// Test pattern: a config is added via Config.add*Config() on one member (the "driver") and
// the test then asserts that the identical configuration object is visible on every member
// of the cluster.
@RunWith(HazelcastParallelClassRunner.class)
@Category({QuickTest.class, ParallelTest.class})
public class DynamicConfigTest extends HazelcastTestSupport {

    protected static final int INSTANCE_COUNT = 2;

    // Default structure name, randomized per test instance to avoid cross-test collisions.
    private String name = randomString();
    private TestHazelcastInstanceFactory factory;
    private HazelcastInstance[] members;
    // add***Config is invoked on driver instance
    private HazelcastInstance driver;

    @Before
    public void setup() {
        members = newInstances();
        driver = getDriver();
    }

    // Starts the cluster; overridable so subclasses can vary cluster construction.
    protected HazelcastInstance[] newInstances() {
        factory = createHazelcastInstanceFactory(INSTANCE_COUNT);
        HazelcastInstance[] instances = factory.newInstances();
        return instances;
    }

    // The member on which add*Config() is invoked; overridable (e.g. to use a lite member).
    protected HazelcastInstance getDriver() {
        return members[members.length - 1];
    }

    @Test
    public void testMultiMapConfig() {
        MultiMapConfig multiMapConfig = new MultiMapConfig(name);
        multiMapConfig.setBackupCount(4)
                .setAsyncBackupCount(2)
                .setStatisticsEnabled(true)
                .setBinary(true)
                .setValueCollectionType(LIST)
                .addEntryListenerConfig(
                        new EntryListenerConfig("com.hazelcast.Listener", true, false)
                );

        driver.getConfig().addMultiMapConfig(multiMapConfig);

        assertConfigurationsEqualsOnAllMembers(multiMapConfig);
    }

    @Test
    public void testMultiMapConfig_whenEntryListenerConfigHasImplementation() {
        MultiMapConfig multiMapConfig = new MultiMapConfig(name);
        multiMapConfig.setBackupCount(4)
                .setAsyncBackupCount(2)
                .setStatisticsEnabled(true)
                .setBinary(true)
                .setValueCollectionType(LIST)
                .addEntryListenerConfig(
                        // listener given as a serializable instance rather than a class name
                        new EntryListenerConfig(new SampleEntryListener(), true, false)
                );

        driver.getConfig().addMultiMapConfig(multiMapConfig);

        assertConfigurationsEqualsOnAllMembers(multiMapConfig);
    }

    @Test
    public void testCardinalityEstimatorConfig() {
        CardinalityEstimatorConfig config = new CardinalityEstimatorConfig(name, 4, 2);

        driver.getConfig().addCardinalityEstimatorConfig(config);

        assertConfigurationsEqualsOnAllMembers(config);
    }

    @Test
    public void testLockConfig() {
        LockConfig config = new LockConfig(name);
        config.setQuorumName(randomString());

        driver.getConfig().addLockConfig(config);

        assertConfigurationsEqualsOnAllMembers(config);
    }

    @Test
    public void testListConfig() {
        ListConfig config = getListConfig();

        driver.getConfig().addListConfig(config);

        assertConfigurationsEqualsOnAllMembers(config);
    }

    @Test
    public void testListConfig_withItemListenerConfig_byClassName() {
        ListConfig config = getListConfig();
        List<ItemListenerConfig> itemListenerConfigs = new ArrayList<ItemListenerConfig>();
        ItemListenerConfig listenerConfig = new ItemListenerConfig("com.hazelcast.ItemListener", true);
        itemListenerConfigs.add(listenerConfig);
        config.setItemListenerConfigs(itemListenerConfigs);

        driver.getConfig().addListConfig(config);

        assertConfigurationsEqualsOnAllMembers(config);
    }

    @Test
    public void testListConfig_withItemListenerConfig_byImplementation() {
        ListConfig config = getListConfig();
        List<ItemListenerConfig> itemListenerConfigs = new ArrayList<ItemListenerConfig>();
        ItemListenerConfig listenerConfig = new ItemListenerConfig(new SampleItemListener(), true);
        itemListenerConfigs.add(listenerConfig);
        config.setItemListenerConfigs(itemListenerConfigs);

        driver.getConfig().addListConfig(config);

        assertConfigurationsEqualsOnAllMembers(config);
    }

    @Test
    public void testExecutorConfig() {
        ExecutorConfig config = new ExecutorConfig(name, 7);
        config.setStatisticsEnabled(true);
        config.setQueueCapacity(13);

        driver.getConfig().addExecutorConfig(config);

        assertConfigurationsEqualsOnAllMembers(config);
    }

    @Test
    public void testDurableExecutorConfig() {
        DurableExecutorConfig config = new DurableExecutorConfig(name, 7, 3, 10);

        driver.getConfig().addDurableExecutorConfig(config);

        assertConfigurationsEqualsOnAllMembers(config);
    }

    @Test
    public void testScheduledExecutorConfig() {
        ScheduledExecutorConfig config = new ScheduledExecutorConfig(name, 2, 3, 10);

        driver.getConfig().addScheduledExecutorConfig(config);

        assertConfigurationsEqualsOnAllMembers(config);
    }

    @Test
    public void testRingbufferConfig() {
        RingbufferConfig config = getRingbufferConfig();

        driver.getConfig().addRingBufferConfig(config);

        assertConfigurationsEqualsOnAllMembers(config);
    }

    @Test
    public void testQueueConfig() {
        QueueConfig config = getQueueConfig();

        driver.getConfig().addQueueConfig(config);

        assertConfigurationsEqualsOnAllMembers(config);
    }

    @Test
    public void testQueueConfig_withListeners() {
        QueueConfig config = getQueueConfig_withListeners();

        driver.getConfig().addQueueConfig(config);

        assertConfigurationsEqualsOnAllMembers(config);
    }

    @Test
    public void testRingbufferConfig_whenConfiguredWithRingbufferStore_byClassName() {
        RingbufferConfig config = getRingbufferConfig();
        config.getRingbufferStoreConfig().setEnabled(true).setClassName("com.hazelcast.Foo");

        driver.getConfig().addRingBufferConfig(config);

        assertConfigurationsEqualsOnAllMembers(config);
    }

    @Test
    public void testRingbufferConfig_whenConfiguredWithRingbufferStore_byFactoryClassName() {
        RingbufferConfig config = getRingbufferConfig();
        config.getRingbufferStoreConfig().setEnabled(true).setFactoryClassName("com.hazelcast.FactoryFoo");

        driver.getConfig().addRingBufferConfig(config);

        assertConfigurationsEqualsOnAllMembers(config);
    }

    @Test
    public void testRingbufferConfig_whenConfiguredWithRingbufferStore_byStoreImplementation() {
        RingbufferConfig config = getRingbufferConfig();
        config.getRingbufferStoreConfig().setEnabled(true).setStoreImplementation(new SampleRingbufferStore());

        driver.getConfig().addRingBufferConfig(config);

        assertConfigurationsEqualsOnAllMembers(config);
    }

    @Test
    public void testRingbufferConfig_whenConfiguredWithRingbufferStore_byFactoryImplementation() {
        RingbufferConfig config = getRingbufferConfig();
        config.getRingbufferStoreConfig().setEnabled(true).setFactoryImplementation(new SampleRingbufferStoreFactory());

        driver.getConfig().addRingBufferConfig(config);

        assertConfigurationsEqualsOnAllMembers(config);
    }

    @Test
    public void testReplicatedMapConfig_withListenerByClassName() {
        ReplicatedMapConfig config = new ReplicatedMapConfig(name);
        config.setStatisticsEnabled(true);
        config.setMergePolicy("com.hazelcast.SomeMergePolicy");
        config.setInMemoryFormat(InMemoryFormat.NATIVE);
        config.setAsyncFillup(true);
        config.addEntryListenerConfig(new EntryListenerConfig(randomString(), true, false));

        driver.getConfig().addReplicatedMapConfig(config);

        assertConfigurationsEqualsOnAllMembers(config);
    }

    @Test
    public void testReplicatedMapConfig_withListenerByImplementation() {
        ReplicatedMapConfig config = new ReplicatedMapConfig(name);
        config.setStatisticsEnabled(true);
        config.setMergePolicy("com.hazelcast.SomeMergePolicy");
        config.setInMemoryFormat(InMemoryFormat.NATIVE);
        config.setAsyncFillup(true);
        config.addEntryListenerConfig(new EntryListenerConfig(new SampleEntryListener(), false, true));

        driver.getConfig().addReplicatedMapConfig(config);

        assertConfigurationsEqualsOnAllMembers(config);
    }

    @Test
    public void testSetConfig() {
        // local name intentionally shadows the field: this test uses its own random name
        String name = randomName();
        SetConfig setConfig = getSetConfig(name);

        driver.getConfig().addSetConfig(setConfig);

        assertConfigurationsEqualsOnAllMembers(setConfig);
    }

    // --- per-type assertion helpers: each looks the config up by name on every member and
    // --- asserts equality with the config that was added on the driver

    private void assertConfigurationsEqualsOnAllMembers(QueueConfig queueConfig) {
        String name = queueConfig.getName();
        for (HazelcastInstance instance : members) {
            QueueConfig registeredConfig = instance.getConfig().getQueueConfig(name);
            assertEquals(queueConfig, registeredConfig);
        }
    }

    private void assertConfigurationsEqualsOnAllMembers(LockConfig lockConfig) {
        String name = lockConfig.getName();
        for (HazelcastInstance instance : members) {
            LockConfig registeredConfig = instance.getConfig().getLockConfig(name);
            assertEquals(lockConfig, registeredConfig);
        }
    }

    private void assertConfigurationsEqualsOnAllMembers(CardinalityEstimatorConfig cardinalityEstimatorConfig) {
        String name = cardinalityEstimatorConfig.getName();
        for (HazelcastInstance instance : members) {
            CardinalityEstimatorConfig registeredConfig = instance.getConfig().getCardinalityEstimatorConfig(name);
            assertEquals(cardinalityEstimatorConfig, registeredConfig);
        }
    }

    private void assertConfigurationsEqualsOnAllMembers(MultiMapConfig multiMapConfig) {
        String name = multiMapConfig.getName();
        for (HazelcastInstance instance : members) {
            MultiMapConfig registeredConfig = instance.getConfig().getMultiMapConfig(name);
            assertEquals(multiMapConfig, registeredConfig);
        }
    }

    private void assertConfigurationsEqualsOnAllMembers(ExecutorConfig executorConfig) {
        String name = executorConfig.getName();
        for (HazelcastInstance instance : members) {
            ExecutorConfig registeredConfig = instance.getConfig().getExecutorConfig(name);
            assertEquals(executorConfig, registeredConfig);
        }
    }

    private void assertConfigurationsEqualsOnAllMembers(RingbufferConfig ringbufferConfig) {
        String name = ringbufferConfig.getName();
        for (HazelcastInstance instance : members) {
            RingbufferConfig registeredConfig = instance.getConfig().getRingbufferConfig(name);
            assertEquals(ringbufferConfig, registeredConfig);
        }
    }

    private void assertConfigurationsEqualsOnAllMembers(DurableExecutorConfig durableExecutorConfig) {
        String name = durableExecutorConfig.getName();
        for (HazelcastInstance instance : members) {
            DurableExecutorConfig registeredConfig = instance.getConfig().getDurableExecutorConfig(name);
            assertEquals(durableExecutorConfig, registeredConfig);
        }
    }

    private void assertConfigurationsEqualsOnAllMembers(ScheduledExecutorConfig scheduledExecutorConfig) {
        String name = scheduledExecutorConfig.getName();
        for (HazelcastInstance instance : members) {
            ScheduledExecutorConfig registeredConfig = instance.getConfig().getScheduledExecutorConfig(name);
            assertEquals(scheduledExecutorConfig, registeredConfig);
        }
    }

    private void assertConfigurationsEqualsOnAllMembers(SetConfig setConfig) {
        String name = setConfig.getName();
        for (HazelcastInstance instance : members) {
            SetConfig registeredConfig = instance.getConfig().getSetConfig(name);
            assertEquals(setConfig, registeredConfig);
        }
    }

    private void assertConfigurationsEqualsOnAllMembers(MapConfig mapConfig) {
        String name = mapConfig.getName();
        for (HazelcastInstance instance : members) {
            MapConfig registeredConfig = instance.getConfig().getMapConfig(name);
            assertEquals(mapConfig, registeredConfig);
        }
    }

    private void assertConfigurationsEqualsOnAllMembers(ReplicatedMapConfig replicatedMapConfig) {
        String name = replicatedMapConfig.getName();
        for (HazelcastInstance instance : members) {
            ReplicatedMapConfig registeredConfig = instance.getConfig().getReplicatedMapConfig(name);
            assertEquals(replicatedMapConfig, registeredConfig);
        }
    }

    private void assertConfigurationsEqualsOnAllMembers(ListConfig listConfig) {
        String name = listConfig.getName();
        for (HazelcastInstance instance : members) {
            ListConfig registeredConfig = instance.getConfig().getListConfig(name);
            assertEquals(listConfig, registeredConfig);
        }
    }

    // --- fixture factories

    private SetConfig getSetConfig(String name) {
        SetConfig setConfig = new SetConfig(name);
        setConfig.addItemListenerConfig(new ItemListenerConfig("foo.bar.Class", true));
        setConfig.setBackupCount(2);
        return setConfig;
    }

    // TODO: further MapConfig variants (listeners, stores) are still missing.
    @Test
    public void testMapConfig() {
        MapConfig config = getMapConfig();

        driver.getConfig().addMapConfig(config);

        assertConfigurationsEqualsOnAllMembers(config);
    }

    private MapConfig getMapConfig() {
        MapConfig config = new MapConfig(name);
        config.setAsyncBackupCount(3);
        config.setBackupCount(2);
        config.setCacheDeserializedValues(CacheDeserializedValues.INDEX_ONLY);
        config.setEvictionPolicy(EvictionPolicy.LRU);
        config.setHotRestartConfig(new HotRestartConfig().setEnabled(true).setFsync(true));
        config.setInMemoryFormat(InMemoryFormat.OBJECT);
        config.setMergePolicy("com.hazelcast.SomeMergePolicy");
        config.setMaxSizeConfig(new MaxSizeConfig(4096, MaxSizeConfig.MaxSizePolicy.PER_NODE));
        config.setMaxIdleSeconds(110);
        config.setQuorumName(randomString());
        config.addMapAttributeConfig(new MapAttributeConfig("attributeName", "com.attribute.extractor"));
        config.addMapIndexConfig(new MapIndexConfig("attr", true));
        return config;
    }

    private ListConfig getListConfig() {
        ListConfig config = new ListConfig(name);
        config.setStatisticsEnabled(true)
                .setMaxSize(99)
                .setBackupCount(4)
                .setAsyncBackupCount(2);
        return config;
    }

    private RingbufferConfig getRingbufferConfig() {
        RingbufferConfig config = new RingbufferConfig(name);
        config.setTimeToLiveSeconds(59);
        config.setInMemoryFormat(InMemoryFormat.OBJECT);
        config.setCapacity(33);
        config.setBackupCount(4);
        config.setAsyncBackupCount(2);
        return config;
    }

    public QueueConfig getQueueConfig() {
        String name = randomName();
        QueueConfig queueConfig = new QueueConfig(name);
        queueConfig.setBackupCount(2);
        queueConfig.setAsyncBackupCount(2);
        // no explicit max size - let's test encoding of the default value
        queueConfig.setEmptyQueueTtl(10);
        queueConfig.setQueueStoreConfig(new QueueStoreConfig().setClassName("foo.bar.ImplName").setEnabled(true));
        queueConfig.setStatisticsEnabled(false);
        queueConfig.setQuorumName("myQuorum");
        return queueConfig;
    }

    public QueueConfig getQueueConfig_withListeners() {
        String name = randomName();
        QueueConfig queueConfig = new QueueConfig(name);
        queueConfig.addItemListenerConfig(new ItemListenerConfig("foo.bar.SampleItemListener", true));
        queueConfig.addItemListenerConfig(new ItemListenerConfig(new SampleItemListener(), false));
        queueConfig.setBackupCount(2);
        queueConfig.setAsyncBackupCount(2);
        queueConfig.setMaxSize(1000);
        queueConfig.setEmptyQueueTtl(10);
        queueConfig.setQueueStoreConfig(new QueueStoreConfig().setClassName("foo.bar.ImplName").setEnabled(true));
        queueConfig.setStatisticsEnabled(false);
        queueConfig.setQuorumName("myQuorum");
        return queueConfig;
    }

    // --- sample listener/store implementations; Serializable so they can travel with the
    // --- config, with value-based equals/hashCode so config equality still holds remotely

    public static class SampleEntryListener implements EntryAddedListener, Serializable {

        @Override
        public void entryAdded(EntryEvent event) {
        }

        @Override
        public int hashCode() {
            return 31;
        }

        @Override
        public boolean equals(Object obj) {
            return obj instanceof SampleEntryListener;
        }
    }

    public static class SampleItemListener implements ItemListener, Serializable {

        @Override
        public void itemAdded(ItemEvent item) {
        }

        @Override
        public void itemRemoved(ItemEvent item) {
        }

        @Override
        public boolean equals(Object obj) {
            return (obj instanceof SampleItemListener);
        }

        @Override
        public int hashCode() {
            return 33;
        }
    }

    public static class SampleRingbufferStore implements RingbufferStore, Serializable {

        @Override
        public void store(long sequence, Object data) {
        }

        @Override
        public void storeAll(long firstItemSequence, Object[] items) {
        }

        @Override
        public Object load(long sequence) {
            return null;
        }

        @Override
        public long getLargestSequence() {
            return 0;
        }

        @Override
        public int hashCode() {
            return 33;
        }

        @Override
        public boolean equals(Object obj) {
            return (obj instanceof SampleRingbufferStore);
        }
    }

    public static class SampleRingbufferStoreFactory implements RingbufferStoreFactory, Serializable {

        @Override
        public RingbufferStore newRingbufferStore(String name, Properties properties) {
            return null;
        }

        @Override
        public boolean equals(Object obj) {
            return (obj instanceof SampleRingbufferStoreFactory);
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.spatial4j.core.shape.jts;

import com.spatial4j.core.context.SpatialContext;
import com.spatial4j.core.context.jts.JtsSpatialContext;
import com.spatial4j.core.exception.InvalidShapeException;
import com.spatial4j.core.shape.Circle;
import com.spatial4j.core.shape.Point;
import com.spatial4j.core.shape.Rectangle;
import com.spatial4j.core.shape.Shape;
import com.spatial4j.core.shape.SpatialRelation;
import com.spatial4j.core.shape.impl.BufferedLineString;
import com.spatial4j.core.shape.impl.PointImpl;
import com.spatial4j.core.shape.impl.Range;
import com.spatial4j.core.shape.impl.RectangleImpl;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.CoordinateSequence;
import com.vividsolutions.jts.geom.CoordinateSequenceFilter;
import com.vividsolutions.jts.geom.Envelope;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryCollection;
import com.vividsolutions.jts.geom.GeometryFilter;
import com.vividsolutions.jts.geom.IntersectionMatrix;
import com.vividsolutions.jts.geom.LineString;
import com.vividsolutions.jts.geom.Lineal;
import com.vividsolutions.jts.geom.LinearRing;
import com.vividsolutions.jts.geom.Polygon;
import com.vividsolutions.jts.geom.Puntal;
import com.vividsolutions.jts.geom.prep.PreparedGeometry;
import com.vividsolutions.jts.geom.prep.PreparedGeometryFactory;
import com.vividsolutions.jts.operation.union.UnaryUnionOp;
import com.vividsolutions.jts.operation.valid.IsValidOp;

import java.util.ArrayList;
import java.util.List;

/**
 * Wraps a JTS {@link Geometry} (i.e. may be a polygon or basically anything).
 * JTS does a great deal of the hard work, but there is work here in handling
 * dateline wrap.
 */
public class JtsGeometry implements Shape {
  /**
   * System property boolean that can disable auto validation in an assert.
   */
  public static final String SYSPROP_ASSERT_VALIDATE = "spatial4j.JtsGeometry.assertValidate";

  private final Geometry geom;//cannot be a direct instance of GeometryCollection as it doesn't support relate()
  private final boolean hasArea;
  private final Rectangle bbox;
  protected final JtsSpatialContext ctx;
  protected PreparedGeometry preparedGeometry; // lazily built by index(); null until then
  protected boolean validated = false; // caches a successful validate() so it runs at most once

  /**
   * Wraps {@code geom}, optionally unwrapping/re-cutting it across the dateline
   * (geo contexts only) and optionally unioning overlapping polygons.
   *
   * @param geom             the geometry to wrap; must not be a direct GeometryCollection
   * @param ctx              spatial context; {@code ctx.isGeo()} selects dateline handling
   * @param dateline180Check when true (and geo), unwrap geometries spanning the dateline
   * @param allowMultiOverlap when true, overlapping polygons are fixed by union
   */
  public JtsGeometry(Geometry geom, JtsSpatialContext ctx, boolean dateline180Check, boolean allowMultiOverlap) {
    this.ctx = ctx;
    //GeometryCollection isn't supported in relate()
    if (geom.getClass().equals(GeometryCollection.class))
      throw new IllegalArgumentException("JtsGeometry does not support GeometryCollection but does support its subclasses.");

    //NOTE: All this logic is fairly expensive. There are some short-circuit checks though.
    if (ctx.isGeo()) {
      //Unwraps the geometry across the dateline so it exceeds the standard geo bounds (-180 to +180).
      if (dateline180Check)
        unwrapDateline(geom);//potentially modifies geom
      //If given multiple overlapping polygons, fix it by union
      if (allowMultiOverlap)
        geom = unionGeometryCollection(geom);//returns same or new geom
      //Cuts an unwrapped geometry back into overlaid pages in the standard geo bounds.
      geom = cutUnwrappedGeomInto360(geom);//returns same or new geom
      assert geom.getEnvelopeInternal().getWidth() <= 360;
      assert !geom.getClass().equals(GeometryCollection.class) : "GeometryCollection unsupported";//double check

      //Compute bbox
      bbox = computeGeoBBox(geom);
    } else {//not geo
      //If given multiple overlapping polygons, fix it by union
      if (allowMultiOverlap)
        geom = unionGeometryCollection(geom);//returns same or new geom

      Envelope env = geom.getEnvelopeInternal();
      bbox = new RectangleImpl(env.getMinX(), env.getMaxX(), env.getMinY(), env.getMaxY(), ctx);
    }
    geom.getEnvelopeInternal();//ensure envelope is cached internally, which is lazy evaluated. Keeps this thread-safe.
    this.geom = geom;
    assert assertValidate();//kinda expensive but caches valid state

    this.hasArea = !((geom instanceof Lineal) || (geom instanceof Puntal));
  }

  /** called via assertion */
  private boolean assertValidate() {
    String assertValidate = System.getProperty(SYSPROP_ASSERT_VALIDATE);
    if (assertValidate == null || Boolean.parseBoolean(assertValidate))
      validate();
    return true;
  }

  /**
   * Validates the shape, throwing a descriptive error if it isn't valid. Note that this
   * is usually called automatically by default, but that can be disabled.
   *
   * @throws InvalidShapeException with descriptive error if the shape isn't valid
   */
  public void validate() throws InvalidShapeException {
    if (!validated) {
      IsValidOp isValidOp = new IsValidOp(geom);
      if (!isValidOp.isValid())
        throw new InvalidShapeException(isValidOp.getValidationError().toString());
      validated = true;
    }
  }

  /**
   * Adds an index to this class internally to compute spatial relations faster. In JTS this
   * is called a {@link com.vividsolutions.jts.geom.prep.PreparedGeometry}.  This
   * isn't done by default because it takes some time to do the optimization, and it uses more
   * memory.  Calling this method isn't thread-safe so be careful when this is done. If it was
   * already indexed then nothing happens.
   */
  public void index() {
    if (preparedGeometry == null)
      preparedGeometry = PreparedGeometryFactory.prepare(geom);
  }

  @Override
  public boolean isEmpty() {
    return geom.isEmpty();
  }

  /**
   * Given {@code geoms} which has already been checked for being in world
   * bounds, return the minimal longitude range of the bounding box.
   */
  protected Rectangle computeGeoBBox(Geometry geoms) {
    if (geoms.isEmpty())
      return new RectangleImpl(Double.NaN, Double.NaN, Double.NaN, Double.NaN, ctx);
    final Envelope env = geoms.getEnvelopeInternal();//for minY & maxY (simple)
    if (env.getWidth() > 180 && geoms.getNumGeometries() > 1) {
      // This is ShapeCollection's bbox algorithm
      // Grow a longitude range piece-by-piece from each sub-geometry's envelope
      // rather than trusting the (possibly wrapping) overall envelope.
      Range xRange = null;
      for (int i = 0; i < geoms.getNumGeometries(); i++ ) {
        Envelope envI = geoms.getGeometryN(i).getEnvelopeInternal();
        Range xRange2 = new Range.LongitudeRange(envI.getMinX(), envI.getMaxX());
        if (xRange == null) {
          xRange = xRange2;
        } else {
          xRange = xRange.expandTo(xRange2);
        }
        if (xRange == Range.LongitudeRange.WORLD_180E180W)
          break; // can't grow any bigger
      }
      return new RectangleImpl(xRange.getMin(), xRange.getMax(), env.getMinY(), env.getMaxY(), ctx);
    } else {
      return new RectangleImpl(env.getMinX(), env.getMaxX(), env.getMinY(), env.getMaxY(), ctx);
    }
  }

  @Override
  public JtsGeometry getBuffered(double distance, SpatialContext ctx) {
    //TODO doesn't work correctly across the dateline. The buffering needs to happen
    // when it's transiently unrolled, prior to being sliced.
    return this.ctx.makeShape(geom.buffer(distance), true, true);
  }

  @Override
  public boolean hasArea() {
    return hasArea;
  }

  @Override
  public double getArea(SpatialContext ctx) {
    double geomArea = geom.getArea();
    if (ctx == null || geomArea == 0)
      return geomArea;
    //Use the area proportional to how filled the bbox is.
    double bboxArea = getBoundingBox().getArea(null);//plain 2d area
    assert bboxArea >= geomArea;
    double filledRatio = geomArea / bboxArea;
    return getBoundingBox().getArea(ctx) * filledRatio;
    // (Future: if we know we use an equal-area projection then we don't need to
    //  estimate)
  }

  @Override
  public Rectangle getBoundingBox() {
    return bbox;
  }

  @Override
  public JtsPoint getCenter() {
    if (isEmpty()) //geom.getCentroid == null
      return new JtsPoint(ctx.getGeometryFactory().createPoint((Coordinate)null), ctx);
    return new JtsPoint(geom.getCentroid(), ctx);
  }

  /** Dispatches to a type-specific relate overload; unknown shapes are answered by transposing their relate to us. */
  @Override
  public SpatialRelation relate(Shape other) {
    if (other instanceof Point)
      return relate((Point)other);
    else if (other instanceof Rectangle)
      return relate((Rectangle) other);
    else if (other instanceof Circle)
      return relate((Circle) other);
    else if (other instanceof JtsGeometry)
      return relate((JtsGeometry) other);
    else if (other instanceof BufferedLineString)
      throw new UnsupportedOperationException("Can't use BufferedLineString with JtsGeometry");
    return other.relate(this).transpose();
  }

  /** Point relation: bbox short-circuit, then the point-optimized {@link #relate(Geometry)} path. */
  public SpatialRelation relate(Point pt) {
    if (!getBoundingBox().relate(pt).intersects())
      return SpatialRelation.DISJOINT;
    Geometry ptGeom;
    if (pt instanceof JtsPoint)
      ptGeom = ((JtsPoint)pt).getGeom();
    else
      ptGeom = ctx.getGeometryFactory().createPoint(new Coordinate(pt.getX(), pt.getY()));
    return relate(ptGeom);//is point-optimized
  }

  /** Rectangle relation: the bbox can decide WITHIN/DISJOINT immediately; otherwise converts to a geometry. */
  public SpatialRelation relate(Rectangle rectangle) {
    SpatialRelation bboxR = bbox.relate(rectangle);
    if (bboxR == SpatialRelation.WITHIN || bboxR == SpatialRelation.DISJOINT)
      return bboxR;
    // FYI, the right answer could still be DISJOINT or WITHIN, but we don't know yet.
    return relate(ctx.getGeometryFrom(rectangle));
  }

  /** Circle relation: tests each vertex against the circle, short-circuiting on a mixed inside/outside result. */
  public SpatialRelation relate(Circle circle) {
    SpatialRelation bboxR = bbox.relate(circle);
    if (bboxR == SpatialRelation.WITHIN || bboxR == SpatialRelation.DISJOINT)
      return bboxR;

    //Test each point to see how many of them are outside of the circle.
    //TODO consider instead using geom.apply(CoordinateSequenceFilter) -- maybe faster since avoids Coordinate[] allocation
    Coordinate[] coords = geom.getCoordinates();
    int outside = 0;
    int i = 0;
    for (Coordinate coord : coords) {
      i++;
      SpatialRelation sect = circle.relate(new PointImpl(coord.x, coord.y, ctx));
      if (sect == SpatialRelation.DISJOINT)
        outside++;
      if (i != outside && outside != 0)//short circuit: partially outside, partially inside
        return SpatialRelation.INTERSECTS;
    }
    if (i == outside) {//all outside; so the union is either disjoint or contains the circle
      return (relate(circle.getCenter()) == SpatialRelation.DISJOINT)
          ? SpatialRelation.DISJOINT : SpatialRelation.CONTAINS;
    }
    assert outside == 0;
    return SpatialRelation.WITHIN;
  }

  public SpatialRelation relate(JtsGeometry jtsGeometry) {
    //don't bother checking bbox since geom.relate() does this already
    return relate(jtsGeometry.geom);
  }

  /** Core geometry relation; uses the prepared geometry when {@link #index()} was called. */
  protected SpatialRelation relate(Geometry oGeom) {
    //see http://docs.geotools.org/latest/userguide/library/jts/dim9.html#preparedgeometry
    if (oGeom instanceof com.vividsolutions.jts.geom.Point) {
      //a point is either inside us (CONTAINS) or not (DISJOINT); nothing in between
      if (preparedGeometry != null)
        return preparedGeometry.disjoint(oGeom) ? SpatialRelation.DISJOINT : SpatialRelation.CONTAINS;
      return geom.disjoint(oGeom) ? SpatialRelation.DISJOINT : SpatialRelation.CONTAINS;
    }
    if (preparedGeometry == null)
      return intersectionMatrixToSpatialRelation(geom.relate(oGeom));
    else if (preparedGeometry.covers(oGeom))
      return SpatialRelation.CONTAINS;
    else if (preparedGeometry.coveredBy(oGeom))
      return SpatialRelation.WITHIN;
    else if (preparedGeometry.intersects(oGeom))
      return SpatialRelation.INTERSECTS;
    return SpatialRelation.DISJOINT;
  }

  public static SpatialRelation intersectionMatrixToSpatialRelation(IntersectionMatrix matrix) {
    //As indicated in SpatialRelation javadocs, Spatial4j CONTAINS & WITHIN are
    // OGC's COVERS & COVEREDBY
    if (matrix.isCovers())
      return SpatialRelation.CONTAINS;
    else if (matrix.isCoveredBy())
      return SpatialRelation.WITHIN;
    else if (matrix.isDisjoint())
      return SpatialRelation.DISJOINT;
    return SpatialRelation.INTERSECTS;
  }

  @Override
  public String toString() {
    return geom.toString();
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    JtsGeometry that = (JtsGeometry) o;
    return geom.equalsExact(that.geom);//fast equality for normalized geometries
  }

  @Override
  public int hashCode() {
    //FYI if geometry.equalsExact(that.geometry), then their envelopes are the same.
    return geom.getEnvelopeInternal().hashCode();
  }

  public Geometry getGeom() {
    return geom;
  }

  /**
   * If <code>geom</code> spans the dateline, then this modifies it to be a
   * valid JTS geometry that extends to the right of the standard -180 to +180
   * width such that some points are greater than +180 but some remain less.
   * Takes care to invoke {@link com.vividsolutions.jts.geom.Geometry#geometryChanged()}
   * if needed.
   *
   * @return The number of times the geometry spans the dateline.  >= 0
   */
  private static int unwrapDateline(Geometry geom) {
    if (geom.getEnvelopeInternal().getWidth() < 180)
      return 0;//can't possibly cross the dateline
    final int[] crossings = {0};//an array so that an inner class can modify it.
    geom.apply(new GeometryFilter() {
      @Override
      public void filter(Geometry geom) {
        int cross = 0;
        if (geom instanceof LineString) {//note: LinearRing extends LineString
          if (geom.getEnvelopeInternal().getWidth() < 180)
            return;//can't possibly cross the dateline
          cross = unwrapDateline((LineString) geom);
        } else if (geom instanceof Polygon) {
          if (geom.getEnvelopeInternal().getWidth() < 180)
            return;//can't possibly cross the dateline
          cross = unwrapDateline((Polygon) geom);
        } else
          return;
        crossings[0] = Math.max(crossings[0], cross);
      }
    });//geom.apply()

    return crossings[0];
  }

  /** See {@link #unwrapDateline(Geometry)}. */
  private static int unwrapDateline(Polygon poly) {
    LineString exteriorRing = poly.getExteriorRing();
    int cross = unwrapDateline(exteriorRing);
    if (cross > 0) {
      //TODO TEST THIS! Maybe bug if doesn't cross but is in another page?
      //shift each interior ring by pages of 360 until it falls inside the (unwrapped) exterior ring
      for (int i = 0; i < poly.getNumInteriorRing(); i++) {
        LineString innerLineString = poly.getInteriorRingN(i);
        unwrapDateline(innerLineString);
        for (int shiftCount = 0; ! exteriorRing.contains(innerLineString); shiftCount++) {
          if (shiftCount > cross)
            throw new IllegalArgumentException("The inner ring doesn't appear to be within the exterior: "
                + exteriorRing + " inner: " + innerLineString);
          shiftGeomByX(innerLineString, 360);
        }
      }
      poly.geometryChanged();
    }
    return cross;
  }

  /** See {@link #unwrapDateline(Geometry)}. */
  private static int unwrapDateline(LineString lineString) {
    CoordinateSequence cseq = lineString.getCoordinateSequence();
    int size = cseq.size();
    if (size <= 1)
      return 0;

    int shiftX = 0;//invariant: == shiftXPage*360
    int shiftXPage = 0;
    int shiftXPageMin = 0/* <= 0 */, shiftXPageMax = 0; /* >= 0 */
    double prevX = cseq.getX(0);
    for (int i = 1; i < size; i++) {
      double thisX_orig = cseq.getX(i);
      assert thisX_orig >= -180 && thisX_orig <= 180 : "X not in geo bounds";
      double thisX = thisX_orig + shiftX;
      if (prevX - thisX > 180) {//cross dateline from left to right
        thisX += 360;
        shiftX += 360;
        shiftXPage += 1;
        shiftXPageMax = Math.max(shiftXPageMax,shiftXPage);
      } else if (thisX - prevX > 180) {//cross dateline from right to left
        thisX -= 360;
        shiftX -= 360;
        shiftXPage -= 1;
        shiftXPageMin = Math.min(shiftXPageMin,shiftXPage);
      }
      if (shiftXPage != 0)
        cseq.setOrdinate(i, CoordinateSequence.X, thisX);
      prevX = thisX;
    }
    if (lineString instanceof LinearRing) {
      assert cseq.getCoordinate(0).equals(cseq.getCoordinate(size-1));
      assert shiftXPage == 0;//starts and ends at 0
    }
    assert shiftXPageMax >= 0 && shiftXPageMin <= 0;
    //Unfortunately we are shifting again; it'd be nice to be smarter and shift once
    shiftGeomByX(lineString, shiftXPageMin * -360);
    int crossings = shiftXPageMax - shiftXPageMin;
    if (crossings > 0)
      lineString.geometryChanged();
    return crossings;
  }

  /** Adds {@code xShift} to every X ordinate in place; no-op when the shift is zero. */
  private static void shiftGeomByX(Geometry geom, final int xShift) {
    if (xShift == 0)
      return;
    geom.apply(new CoordinateSequenceFilter() {
      @Override
      public void filter(CoordinateSequence seq, int i) {
        seq.setOrdinate(i, CoordinateSequence.X, seq.getX(i) + xShift );
      }

      @Override public boolean isDone() { return false; }

      @Override public boolean isGeometryChanged() { return true; }
    });
  }

  /** Unions a GeometryCollection into a single relate()-capable geometry; other geometries pass through. */
  private static Geometry unionGeometryCollection(Geometry geom) {
    if (geom instanceof GeometryCollection) {
      return geom.union();
    }
    return geom;
  }

  /**
   * This "pages" through standard geo boundaries offset by multiples of 360
   * longitudinally that intersect geom, and the intersecting results of a page
   * and the geom are shifted into the standard -180 to +180 and added to a new
   * geometry that is returned.
   */
  private static Geometry cutUnwrappedGeomInto360(Geometry geom) {
    Envelope geomEnv = geom.getEnvelopeInternal();
    if (geomEnv.getMinX() >= -180 && geomEnv.getMaxX() <= 180)
      return geom;
    assert geom.isValid() : "geom";

    //TODO opt: support geom's that start at negative pages --
    // ... will avoid need to previously shift in unwrapDateline(geom).
    List<Geometry> geomList = new ArrayList<Geometry>();
    //page 0 is the standard -180 to 180 range
    for (int page = 0; true; page++) {
      double minX = -180 + page * 360;
      if (geomEnv.getMaxX() <= minX)
        break;
      Geometry rect = geom.getFactory().toGeometry(new Envelope(minX, minX + 360, -90, 90));
      assert rect.isValid() : "rect";
      Geometry pageGeom = rect.intersection(geom);//JTS is doing some hard work
      assert pageGeom.isValid() : "pageGeom";

      shiftGeomByX(pageGeom, page * -360);
      geomList.add(pageGeom);
    }
    return UnaryUnionOp.union(geomList);
  }

//  private static Geometry removePolyHoles(Geometry geom) {
//    //TODO this does a deep copy of geom even if no changes needed; be smarter
//    GeometryTransformer gTrans = new GeometryTransformer() {
//      @Override
//      protected Geometry transformPolygon(Polygon geom, Geometry parent) {
//        if (geom.getNumInteriorRing() == 0)
//          return geom;
//        return factory.createPolygon((LinearRing) geom.getExteriorRing(),null);
//      }
//    };
//    return gTrans.transform(geom);
//  }
//
//  private static Geometry snapAndClean(Geometry geom) {
//    return new GeometrySnapper(geom).snapToSelf(GeometrySnapper.computeOverlaySnapTolerance(geom), true);
//  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.cassandra.cluster.mgt.mbean; import org.apache.cassandra.service.StorageServiceMBean; import org.apache.cassandra.thrift.InvalidRequestException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.carbon.cassandra.cluster.ClusterMBeanDataAccess; import org.wso2.carbon.cassandra.cluster.mgt.exception.ClusterDataAdminException; import org.wso2.carbon.cassandra.cluster.mgt.component.ClusterAdminComponentManager; import org.wso2.carbon.cassandra.cluster.mgt.registry.RegistryStore; import java.io.IOException; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.List; import java.util.Map; import java.util.concurrent.ExecutionException; public class ClusterStorageMBeanService { private static Log log = LogFactory.getLog(ClusterStorageMBeanService.class); private StorageServiceMBean storageServiceMBean; private static boolean isGossipEnable = true; private static boolean isIncrementalBackUpEnable = false; public ClusterStorageMBeanService() throws ClusterDataAdminException { createProxyConnection(); } private void createProxyConnection() throws ClusterDataAdminException { ClusterMBeanDataAccess 
clusterMBeanDataAccess = ClusterAdminComponentManager.getInstance().getClusterMBeanDataAccess(); try { storageServiceMBean = clusterMBeanDataAccess.locateStorageServiceMBean(); } catch (Exception e) { throw new ClusterDataAdminException("Unable to locate storage service MBean connection", e, log); } } /** * Decommission node * * @return boolean * @throws ClusterDataAdminException */ public boolean decommissionNode() throws ClusterDataAdminException { try { storageServiceMBean.decommission(); return true; } catch (InterruptedException e) { throw new ClusterDataAdminException("Cannot drain the node.Cause due to interrupted exception", e, log); } } /** * Drain node * * @return boolean * @throws ClusterDataAdminException */ public boolean drainNode() throws ClusterDataAdminException { try { storageServiceMBean.drain(); return true; } catch (IOException e) { throw new ClusterDataAdminException("Cannot drain the node.Cause due to IOException", e, log); } catch (InterruptedException e) { throw new ClusterDataAdminException("Cannot drain the node.Cause due to interrupted exception", e, log); } catch (ExecutionException e) { throw new ClusterDataAdminException("Cannot drain the node.Cause due to execution exception", e, log); } } /** * Join ring * * @return boolean * @throws ClusterDataAdminException */ public boolean joinRing() throws ClusterDataAdminException { try { storageServiceMBean.joinRing(); return true; } catch (IOException e) { throw new ClusterDataAdminException("Cannot join the ring.Cause due to IOException", e, log); } } /** * Clear snapshot * * @param tag snapshot name * @param keyspace keyspace name * @return boolean * @throws ClusterDataAdminException */ public boolean clearSnapShot(String tag, String... 
keyspace) throws ClusterDataAdminException { RegistryStore registryStore = new RegistryStore(); try { if (keyspace.length == 0 || keyspace[0] == null) { storageServiceMBean.clearSnapshot(tag); registryStore.clearNodeSnapshot(tag); } else { storageServiceMBean.clearSnapshot(tag, keyspace); registryStore.clearKeyspaceSnapshot(tag, keyspace[0]); } return true; } catch (IOException e) { throw new ClusterDataAdminException("Cannot clear snapshot.Cause due to IOException", e, log); } } /** * Stop RPC server */ public void shutDownNodeRPCServer() { storageServiceMBean.stopRPCServer(); } /** * Start node RPC server */ public void startNodeRPCServer() { storageServiceMBean.startRPCServer(); } /** * Get RPC server status * * @return boolean */ public boolean getRPCServerStatus() { return storageServiceMBean.isRPCServerRunning(); } /** * Stop gossip server */ public void stopGossipServer() { isGossipEnable = false; storageServiceMBean.stopGossiping(); } /** * Start gossip server */ public void startGossipServer() { isGossipEnable = true; storageServiceMBean.startGossiping(); } /** * Get gossip server status * * @return boolean */ public boolean isGossipEnable() { return isGossipEnable; } /** * Get node join status * * @return boolean */ public boolean isJoined() { return storageServiceMBean.isJoined(); } /** * Flush * * @param keyspace keyspace name * @param columnFamily column family name * @return boolean * @throws ClusterDataAdminException */ public boolean flush(String keyspace, String... 
columnFamily) throws ClusterDataAdminException { try { storageServiceMBean.forceTableFlush(keyspace, columnFamily); return true; } catch (IOException e) { throw new ClusterDataAdminException("Cannot flush the column family.Cause due to IOException", e, log); } catch (ExecutionException e) { throw new ClusterDataAdminException("Cannot flush the column family.Cause due to execution exception", e, log); } catch (InterruptedException e) { throw new ClusterDataAdminException("Cannot flush the column family.Cause due to interrupted exception", e, log); } } /** * Cleanup * * @param keyspace keyspace name * @param columnFamily column family name * @return boolean * @throws ClusterDataAdminException */ public boolean cleanUp(String keyspace, String... columnFamily) throws ClusterDataAdminException { try { storageServiceMBean.forceTableCleanup(keyspace, columnFamily); return true; } catch (IOException e) { throw new ClusterDataAdminException("Cannot cleanUp the column family.Cause due to IOException", e, log); } catch (ExecutionException e) { throw new ClusterDataAdminException("Cannot cleanUP the column family.Cause due to execution exception", e, log); } catch (InterruptedException e) { throw new ClusterDataAdminException("Cannot cleanUP the column family.Cause due to interrupted exception", e, log); } } /** * Repair * * @param keyspace keyspace name * @param columnFamily column family name * @return boolean * @throws ClusterDataAdminException */ public boolean repair(String keyspace, String... columnFamily) throws ClusterDataAdminException { try { storageServiceMBean.forceTableRepair(keyspace, false, false, columnFamily); return true; } catch (IOException e) { throw new ClusterDataAdminException("Cannot repair the column family.Cause due to interrupted exception", e, log); } } /** * Compact * * @param keyspace keyspace name * @param columnFamily column family name * @return boolean * @throws ClusterDataAdminException */ public boolean compact(String keyspace, String... 
columnFamily) throws ClusterDataAdminException { try { storageServiceMBean.forceTableCompaction(keyspace, columnFamily); return true; } catch (IOException e) { throw new ClusterDataAdminException("Cannot compact the column family.Cause due to IOException", e, log); } catch (ExecutionException e) { throw new ClusterDataAdminException("Cannot compact the column family.Cause due to execution exception", e, log); } catch (InterruptedException e) { throw new ClusterDataAdminException("Cannot compact the column family.Cause due to interrupted exception", e, log); } } /** * Move node to new token * * @param newToken new token * @return boolean * @throws ClusterDataAdminException */ public boolean moveNode(String newToken) throws ClusterDataAdminException { try { storageServiceMBean.move(newToken); return true; } catch (IOException e) { throw new ClusterDataAdminException("Cannot move the node.Cause due to IOException", e, log); } } /** * Take a snapshot of all the tables, optionally specifying only a specific column family. * * @param tag the name of the snapshot. * @param columnFamily the column family to snapshot or all on null * @param keyspaces the keyspaces to snapshot */ public boolean takeSnapShot(String tag, String columnFamily, String... 
keyspaces) throws ClusterDataAdminException {
    RegistryStore registryStore = new RegistryStore();
    try {
        if (columnFamily != null) {
            // A column-family-level snapshot is only well defined for a single keyspace.
            if (keyspaces.length != 1) {
                throw new IOException("When specifying the column family for a snapshot, you must specify one and only one keyspace");
            }
            storageServiceMBean.takeColumnFamilySnapshot(keyspaces[0], columnFamily, tag);
            registryStore.saveColumnFamilySnapshot(tag, keyspaces[0], columnFamily);
            return true;
        } else {
            if (keyspaces.length == 1 && keyspaces[0] != null) {
                // Keyspace-level snapshot: record it in the registry under the keyspace.
                storageServiceMBean.takeSnapshot(tag, keyspaces);
                registryStore.saveKeyspaceSnapshot(tag, keyspaces[0]);
            } else {
                // No (single) keyspace given: snapshot the whole node.
                storageServiceMBean.takeSnapshot(tag);
                registryStore.saveNodeSnapshot(tag);
            }
            return true;
        }
    } catch (IOException ex) {
        throw new ClusterDataAdminException("Unable to take the snapshot", ex, log);
    }
}

/**
 * Scrub (delegates to the storage service MBean).
 *
 * @param disableSnapshot flag passed through to the storage service; presumably suppresses the
 *                        pre-scrub snapshot — confirm against the Cassandra StorageServiceMBean docs
 * @param keyspace keyspace name
 * @param columnFamilies column families
 * @return boolean true on success
 * @throws ClusterDataAdminException if the underlying scrub fails
 */
public boolean scrub(boolean disableSnapshot, String keyspace, String... columnFamilies) throws ClusterDataAdminException {
    try {
        storageServiceMBean.scrub(disableSnapshot, keyspace, columnFamilies);
        return true;
    } catch (IOException e) {
        throw new ClusterDataAdminException("Cannot perform the scrub.Cause due to IOException", e, log);
    } catch (ExecutionException e) {
        throw new ClusterDataAdminException("Cannot scrub the column family.Cause due to execution exception", e, log);
    } catch (InterruptedException e) {
        // NOTE(review): the interrupt flag is not restored here (Thread.currentThread().interrupt()).
        throw new ClusterDataAdminException("Cannot scrub the column family.Cause due to interrupted exception", e, log);
    }
}

/**
 * Upgrade SS Tables (delegates to the storage service MBean).
 *
 * @param keyspace keyspace name
 * @param excludeCurrentVersion flag passed through to the storage service
 * @param columnFamilies column family name
 * @return boolean true on success
 * @throws ClusterDataAdminException if the underlying upgrade fails
 */
public boolean upgradeSSTables(String keyspace, boolean excludeCurrentVersion, String... columnFamilies) throws ClusterDataAdminException {
    try {
        storageServiceMBean.upgradeSSTables(keyspace, excludeCurrentVersion, columnFamilies);
        return true;
    } catch (IOException e) {
        throw new ClusterDataAdminException("Cannot perform the upgradeSSTables.Cause due to IOException", e, log);
    } catch (ExecutionException e) {
        throw new ClusterDataAdminException("Cannot upgradeSSTables for the column family.Cause due to execution exception", e, log);
    } catch (InterruptedException e) {
        // NOTE(review): interrupt flag not restored, same as scrub() above.
        throw new ClusterDataAdminException("Cannot upgradeSSTables for the column family.Cause due to interrupted exception", e, log);
    }
}

/**
 * Remove all the existing snapshots.
 */
/*public void clearSnapshot(String tag, String... keyspaces) throws IOException { storageServiceMBean.clearSnapshot(tag, keyspaces); }*/

/**
 * Set incremental backup status. Also caches the requested state locally so
 * {@link #isIncrementalBackUpEnable()} can answer without a JMX round trip.
 *
 * @param status state
 */
public void setIncrementalBackUpStatus(boolean status) {
    storageServiceMBean.setIncrementalBackupsEnabled(status);
    isIncrementalBackUpEnable = status;
}

/**
 * Get incremental backup status (the locally cached value, not a fresh JMX read).
 *
 * @return boolean
 */
public boolean isIncrementalBackUpEnable() {
    return isIncrementalBackUpEnable;
}

/*public void forceTableRepair(String tableName, boolean isSequential, String... columnFamilies) throws IOException { storageServiceMBean.forceTableRepair(tableName, isSequential, columnFamilies); }

public void forceTableRepairPrimaryRange(String tableName, boolean isSequential, String... columnFamilies) throws IOException { storageServiceMBean.forceTableRepairPrimaryRange(tableName, isSequential, columnFamilies); }

public void forceTableRepairRange(String beginToken, String endToken, String tableName, boolean isSequential, String... columnFamilies) throws IOException { storageServiceMBean.forceTableRepairRange(beginToken, endToken, tableName, isSequential, columnFamilies); }*/

/**
 * Get token to endpoint map.
 *
 * @return map Map&lt;String, String&gt;
 */
public Map<String, String> getTokenToEndpointMap() {
    return storageServiceMBean.getTokenToEndpointMap();
}

/**
 * Get live nodes.
 *
 * @return list List&lt;String&gt;
 */
public List<String> getLiveNodes() {
    return storageServiceMBean.getLiveNodes();
}

/**
 * Get joining nodes.
 *
 * @return list List&lt;String&gt;
 */
public List<String> getJoiningNodes() {
    return storageServiceMBean.getJoiningNodes();
}

/**
 * Get leaving nodes.
 *
 * @return list List&lt;String&gt;
 */
public List<String> getLeavingNodes() {
    return storageServiceMBean.getLeavingNodes();
}

/**
 * Get moving nodes.
 *
 * @return list List&lt;String&gt;
 */
public List<String> getMovingNodes() {
    return storageServiceMBean.getMovingNodes();
}

/**
 * Get unreachable nodes.
 *
 * @return list List&lt;String&gt;
 */
public List<String> getUnreachableNodes() {
    return storageServiceMBean.getUnreachableNodes();
}

/**
 * Get load map.
 *
 * @return map Map&lt;String, String&gt;
 */
public Map<String, String> getLoadMap() {
    return storageServiceMBean.getLoadMap();
}

/**
 * Get ownership per node address.
 *
 * @return map Map&lt;InetAddress, Float&gt;
 */
public Map<InetAddress, Float> getOwnership() {
    return storageServiceMBean.getOwnership();
}

/**
 * Get effective ownership for a keyspace.
 *
 * @param keyspace keyspace name
 * @return map Map&lt;InetAddress, Float&gt;
 */
public Map<InetAddress, Float> effectiveOwnership(String keyspace) {
    return storageServiceMBean.effectiveOwnership(keyspace);
}

/**
 * Load new SS tables.
 *
 * @param ksName keyspace name
 * @param cfName column family name
 */
public void loadNewSSTables(String ksName, String cfName) {
    storageServiceMBean.loadNewSSTables(ksName, cfName);
}

/**
 * Rebuild index.
 *
 * @param ksName keyspace name
 * @param cfName column family name
 * @param idxNames indexes
 */
public void rebuildIndex(String ksName, String cfName, String... idxNames) {
    storageServiceMBean.rebuildSecondaryIndex(ksName, cfName, idxNames);
}

/**
 * Set stream throughput (Mb per second).
 *
 * @param value value
 */
public void setStreamThroughput(int value) {
    storageServiceMBean.setStreamThroughputMbPerSec(value);
}

/**
 * Get schema version.
 *
 * @return String
 */
public String getSchemaVersion() {
    return storageServiceMBean.getSchemaVersion();
}

/**
 * Describe ring JMX.
 *
 * @param keyspaceName keyspace name
 * @return list List&lt;String&gt;
 * @throws IOException if the JMX call fails
 */
public List<String> describeRing(String keyspaceName) throws IOException {
    return storageServiceMBean.describeRingJMX(keyspaceName);
}

/**
 * Get release version.
 *
 * @return String
 */
public String getReleaseVersion() {
    return storageServiceMBean.getReleaseVersion();
}

/**
 * Rebuild from another data center.
 *
 * @param sourceDc source data center
 */
public void rebuild(String sourceDc) {
    storageServiceMBean.rebuild(sourceDc);
}

/**
 * Sample key range.
 *
 * @return list List&lt;String&gt;
 */
public List<String> sampleKeyRange() {
    return storageServiceMBean.sampleKeyRange();
}

/**
 * Reset local schema.
 *
 * @return boolean true on success
 * @throws ClusterDataAdminException if the reset fails
 */
public boolean resetLocalSchema() throws ClusterDataAdminException {
    try {
        storageServiceMBean.resetLocalSchema();
        return true;
    } catch (Exception e) {
        throw new ClusterDataAdminException("Can't reset the local schema", e, log);
    }
}

/**
 * Get is initialized.
 *
 * @return boolean
 */
public boolean isInitialized() {
    return storageServiceMBean.isInitialized();
}

/**
 * Set compaction throughput (Mb per second).
 *
 * @param value value
 */
public void setCompactionThroughput(int value) {
    storageServiceMBean.setCompactionThroughputMbPerSec(value);
}

/**
 * Get compaction throughput.
 *
 * @return int
 */
public int getCompactionThroughput() {
    return storageServiceMBean.getCompactionThroughputMbPerSec();
}

/**
 * Get exception count.
 *
 * @return int
 */
public int getExceptionCount() {
    return storageServiceMBean.getExceptionCount();
}

/**
 * Get keyspaces.
 *
 * @return list List&lt;String&gt;
 */
public List<String> getKeyspaces() {
    return storageServiceMBean.getKeyspaces();
}

/*public void truncate(String tableName, String cfName) { try { storageServiceMBean.truncate(tableName, cfName); } catch (UnavailableException e) { throw new RuntimeException("Error while executing truncate", e); } catch (TimeoutException e) { throw new RuntimeException("Error while executing truncate", e); } catch (IOException e) { throw new RuntimeException("Error while executing truncate", e); } }*/

/**
 * Remove token.
 *
 * @param token token name
 */
public void removeNode(String token) {
    storageServiceMBean.removeNode(token);
}

/**
 * Get token removal state.
 *
 * @return String
 */
public String getRemovalStatus() {
    return storageServiceMBean.getRemovalStatus();
}

/**
 * Force remove completion.
 */
public void forceRemoveCompletion() {
    storageServiceMBean.forceRemoveCompletion();
}

/**
 * Get natural endpoints for a key.
 *
 * @param keyspace keyspace name
 * @param cf column family name
 * @param key key
 * @return list List&lt;InetAddress&gt;
 */
public List<InetAddress> getEndpoints(String keyspace, String cf, String key) {
    return storageServiceMBean.getNaturalEndpoints(keyspace, cf, key);
}

/**
 * Get operation mode.
 *
 * @return String
 */
public String getOperationMode() {
    return storageServiceMBean.getOperationMode();
}

/**
 * Get the endpoint (address) of the local node by reverse-looking up its host ID
 * in the cluster host-ID map.
 *
 * @return String the endpoint whose host ID matches the local host ID
 * @throws ClusterDataAdminException if the local host ID is not present in the map
 */
public String getEndpoint() throws ClusterDataAdminException {
    String hostId = storageServiceMBean.getLocalHostId();
    Map<String, String> map = storageServiceMBean.getHostIdMap();
    for (Map.Entry<String, String> item : map.entrySet()) {
        if (item.getValue().equals(hostId)) {
            return item.getKey();
        }
    }
    throw new ClusterDataAdminException("Host ID not found.", log);
}

/**
 * Get the local node's tokens as a comma separated string.
 *
 * @return String
 * @throws ClusterDataAdminException on unknown host
 */
public String getToken() throws ClusterDataAdminException {
    try {
        return getListAsCommaSeparatedString(storageServiceMBean.getTokens(getEndpoint()));
    } catch (UnknownHostException e) {
        throw new ClusterDataAdminException("Unknown Host Exception Occurred.", e, log);
    }
}

/**
 *
Get load string
 *
 * @return String
 */
public String getLoadString() {
    return storageServiceMBean.getLoadString();
}

/**
 * Get current generation number.
 *
 * @return int
 */
public int getCurrentGenerationNumber() {
    return storageServiceMBean.getCurrentGenerationNumber();
}

/**
 * Joins the given list into a single comma separated string with no trailing separator.
 *
 * Fix: the previous implementation appended "," after every item and then called
 * {@code string.substring(0, string.length() - 1)}, which threw
 * StringIndexOutOfBoundsException for an empty list (substring(0, -1)). An empty
 * list now yields the empty string; non-empty lists produce exactly the same
 * result as before.
 *
 * @param list values to join (may be empty)
 * @return the items joined with ","
 */
private String getListAsCommaSeparatedString(List<String> list) {
    return String.join(",", list);
}
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.simpleworkflow.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;

/**
 * <p>
 * Provides the details of the <code>WorkflowExecutionSignaled</code> event.
 * </p>
 *
 * NOTE: generated code (aws-java-sdk-code-generator); do not hand-edit logic — it must stay
 * consistent with the paired WorkflowExecutionSignaledEventAttributesMarshaller.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/swf-2012-01-25/WorkflowExecutionSignaledEventAttributes"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class WorkflowExecutionSignaledEventAttributes implements Serializable, Cloneable, StructuredPojo {

    /**
     * <p>
     * The name of the signal received. The decider can use the signal name and inputs to determine how to process the
     * signal.
     * </p>
     */
    private String signalName;
    /**
     * <p>
     * The inputs provided with the signal. The decider can use the signal name and inputs to determine how to process
     * the signal.
     * </p>
     */
    private String input;
    /**
     * <p>
     * The workflow execution that sent the signal. This is set only if the signal was sent by another workflow
     * execution.
     * </p>
     */
    private WorkflowExecution externalWorkflowExecution;
    /**
     * <p>
     * The ID of the <code>SignalExternalWorkflowExecutionInitiated</code> event corresponding to the
     * <code>SignalExternalWorkflow</code> decision to signal this workflow execution. The source event with this ID can
     * be found in the history of the source workflow execution. This information can be useful for diagnosing problems
     * by tracing back the chain of events leading up to this event. This field is set only if the signal was initiated
     * by another workflow execution.
     * </p>
     */
    private Long externalInitiatedEventId;

    /**
     * <p>
     * The name of the signal received. The decider can use the signal name and inputs to determine how to process the
     * signal.
     * </p>
     *
     * @param signalName
     *        The name of the signal received. The decider can use the signal name and inputs to determine how to
     *        process the signal.
     */
    public void setSignalName(String signalName) {
        this.signalName = signalName;
    }

    /**
     * <p>
     * The name of the signal received. The decider can use the signal name and inputs to determine how to process the
     * signal.
     * </p>
     *
     * @return The name of the signal received. The decider can use the signal name and inputs to determine how to
     *         process the signal.
     */
    public String getSignalName() {
        return this.signalName;
    }

    /**
     * <p>
     * The name of the signal received. The decider can use the signal name and inputs to determine how to process the
     * signal.
     * </p>
     *
     * @param signalName
     *        The name of the signal received. The decider can use the signal name and inputs to determine how to
     *        process the signal.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public WorkflowExecutionSignaledEventAttributes withSignalName(String signalName) {
        setSignalName(signalName);
        return this;
    }

    /**
     * <p>
     * The inputs provided with the signal. The decider can use the signal name and inputs to determine how to process
     * the signal.
     * </p>
     *
     * @param input
     *        The inputs provided with the signal. The decider can use the signal name and inputs to determine how to
     *        process the signal.
     */
    public void setInput(String input) {
        this.input = input;
    }

    /**
     * <p>
     * The inputs provided with the signal. The decider can use the signal name and inputs to determine how to process
     * the signal.
     * </p>
     *
     * @return The inputs provided with the signal. The decider can use the signal name and inputs to determine how to
     *         process the signal.
     */
    public String getInput() {
        return this.input;
    }

    /**
     * <p>
     * The inputs provided with the signal. The decider can use the signal name and inputs to determine how to process
     * the signal.
     * </p>
     *
     * @param input
     *        The inputs provided with the signal. The decider can use the signal name and inputs to determine how to
     *        process the signal.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public WorkflowExecutionSignaledEventAttributes withInput(String input) {
        setInput(input);
        return this;
    }

    /**
     * <p>
     * The workflow execution that sent the signal. This is set only if the signal was sent by another workflow
     * execution.
     * </p>
     *
     * @param externalWorkflowExecution
     *        The workflow execution that sent the signal. This is set only if the signal was sent by another workflow
     *        execution.
     */
    public void setExternalWorkflowExecution(WorkflowExecution externalWorkflowExecution) {
        this.externalWorkflowExecution = externalWorkflowExecution;
    }

    /**
     * <p>
     * The workflow execution that sent the signal. This is set only if the signal was sent by another workflow
     * execution.
     * </p>
     *
     * @return The workflow execution that sent the signal. This is set only if the signal was sent by another workflow
     *         execution.
     */
    public WorkflowExecution getExternalWorkflowExecution() {
        return this.externalWorkflowExecution;
    }

    /**
     * <p>
     * The workflow execution that sent the signal. This is set only if the signal was sent by another workflow
     * execution.
     * </p>
     *
     * @param externalWorkflowExecution
     *        The workflow execution that sent the signal. This is set only if the signal was sent by another workflow
     *        execution.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public WorkflowExecutionSignaledEventAttributes withExternalWorkflowExecution(WorkflowExecution externalWorkflowExecution) {
        setExternalWorkflowExecution(externalWorkflowExecution);
        return this;
    }

    /**
     * <p>
     * The ID of the <code>SignalExternalWorkflowExecutionInitiated</code> event corresponding to the
     * <code>SignalExternalWorkflow</code> decision to signal this workflow execution. The source event with this ID can
     * be found in the history of the source workflow execution. This information can be useful for diagnosing problems
     * by tracing back the chain of events leading up to this event. This field is set only if the signal was initiated
     * by another workflow execution.
     * </p>
     *
     * @param externalInitiatedEventId
     *        The ID of the <code>SignalExternalWorkflowExecutionInitiated</code> event corresponding to the
     *        <code>SignalExternalWorkflow</code> decision to signal this workflow execution. The source event with this
     *        ID can be found in the history of the source workflow execution. This information can be useful for
     *        diagnosing problems by tracing back the chain of events leading up to this event. This field is set only
     *        if the signal was initiated by another workflow execution.
     */
    public void setExternalInitiatedEventId(Long externalInitiatedEventId) {
        this.externalInitiatedEventId = externalInitiatedEventId;
    }

    /**
     * <p>
     * The ID of the <code>SignalExternalWorkflowExecutionInitiated</code> event corresponding to the
     * <code>SignalExternalWorkflow</code> decision to signal this workflow execution. The source event with this ID can
     * be found in the history of the source workflow execution. This information can be useful for diagnosing problems
     * by tracing back the chain of events leading up to this event. This field is set only if the signal was initiated
     * by another workflow execution.
     * </p>
     *
     * @return The ID of the <code>SignalExternalWorkflowExecutionInitiated</code> event corresponding to the
     *         <code>SignalExternalWorkflow</code> decision to signal this workflow execution. The source event with
     *         this ID can be found in the history of the source workflow execution. This information can be useful for
     *         diagnosing problems by tracing back the chain of events leading up to this event. This field is set only
     *         if the signal was initiated by another workflow execution.
     */
    public Long getExternalInitiatedEventId() {
        return this.externalInitiatedEventId;
    }

    /**
     * <p>
     * The ID of the <code>SignalExternalWorkflowExecutionInitiated</code> event corresponding to the
     * <code>SignalExternalWorkflow</code> decision to signal this workflow execution. The source event with this ID can
     * be found in the history of the source workflow execution. This information can be useful for diagnosing problems
     * by tracing back the chain of events leading up to this event. This field is set only if the signal was initiated
     * by another workflow execution.
     * </p>
     *
     * @param externalInitiatedEventId
     *        The ID of the <code>SignalExternalWorkflowExecutionInitiated</code> event corresponding to the
     *        <code>SignalExternalWorkflow</code> decision to signal this workflow execution. The source event with this
     *        ID can be found in the history of the source workflow execution. This information can be useful for
     *        diagnosing problems by tracing back the chain of events leading up to this event. This field is set only
     *        if the signal was initiated by another workflow execution.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public WorkflowExecutionSignaledEventAttributes withExternalInitiatedEventId(Long externalInitiatedEventId) {
        setExternalInitiatedEventId(externalInitiatedEventId);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getSignalName() != null)
            sb.append("SignalName: ").append(getSignalName()).append(",");
        if (getInput() != null)
            sb.append("Input: ").append(getInput()).append(",");
        if (getExternalWorkflowExecution() != null)
            sb.append("ExternalWorkflowExecution: ").append(getExternalWorkflowExecution()).append(",");
        if (getExternalInitiatedEventId() != null)
            sb.append("ExternalInitiatedEventId: ").append(getExternalInitiatedEventId());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof WorkflowExecutionSignaledEventAttributes == false)
            return false;
        WorkflowExecutionSignaledEventAttributes other = (WorkflowExecutionSignaledEventAttributes) obj;
        if (other.getSignalName() == null ^ this.getSignalName() == null)
            return false;
        if (other.getSignalName() != null && other.getSignalName().equals(this.getSignalName()) == false)
            return false;
        if (other.getInput() == null ^ this.getInput() == null)
            return false;
        if (other.getInput() != null && other.getInput().equals(this.getInput()) == false)
            return false;
        if (other.getExternalWorkflowExecution() == null ^ this.getExternalWorkflowExecution() == null)
            return false;
        if (other.getExternalWorkflowExecution() != null && other.getExternalWorkflowExecution().equals(this.getExternalWorkflowExecution()) == false)
            return false;
        if (other.getExternalInitiatedEventId() == null ^ this.getExternalInitiatedEventId() == null)
            return false;
        if (other.getExternalInitiatedEventId() != null && other.getExternalInitiatedEventId().equals(this.getExternalInitiatedEventId()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getSignalName() == null) ? 0 : getSignalName().hashCode());
        hashCode = prime * hashCode + ((getInput() == null) ? 0 : getInput().hashCode());
        hashCode = prime * hashCode + ((getExternalWorkflowExecution() == null) ? 0 : getExternalWorkflowExecution().hashCode());
        hashCode = prime * hashCode + ((getExternalInitiatedEventId() == null) ? 0 : getExternalInitiatedEventId().hashCode());
        return hashCode;
    }

    @Override
    public WorkflowExecutionSignaledEventAttributes clone() {
        try {
            return (WorkflowExecutionSignaledEventAttributes) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.simpleworkflow.model.transform.WorkflowExecutionSignaledEventAttributesMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.engine;

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexCommit;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.search.SearcherFactory;
import org.apache.lucene.search.SearcherManager;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.ReleasableLock;
import org.elasticsearch.index.translog.Translog;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.function.Function;
import java.util.function.LongConsumer;

/**
 * ShadowEngine is a specialized engine that only allows read-only operations
 * on the underlying Lucene index. An {@code IndexReader} is opened instead of
 * an {@code IndexWriter}. All methods that would usually perform write
 * operations are no-ops, this means:
 *
 * - No operations are written to or read from the translog
 * - Create, Index, and Delete do nothing
 * - Flush does not fsync any files, or make any on-disk changes
 *
 * In order for new segments to become visible, the ShadowEngine may perform
 * stage1 of the traditional recovery process (copying segment files) from a
 * regular primary (which uses {@link org.elasticsearch.index.engine.InternalEngine})
 *
 * Notice that since this Engine does not deal with the translog, any
 * {@link #get(Get, Function, LongConsumer)} request goes directly to the searcher,
 * meaning it is non-realtime.
 */
public class ShadowEngine extends Engine {

    /** how long to wait for an index to exist */
    public static final String NONEXISTENT_INDEX_RETRY_WAIT = "index.shadow.wait_for_initial_commit";
    public static final TimeValue DEFAULT_NONEXISTENT_INDEX_RETRY_WAIT = TimeValue.timeValueSeconds(5);

    private volatile SearcherManager searcherManager;

    private volatile SegmentInfos lastCommittedSegmentInfos;

    public ShadowEngine(EngineConfig engineConfig) {
        super(engineConfig);
        if (engineConfig.getRefreshListeners() != null) {
            throw new IllegalArgumentException("ShadowEngine doesn't support RefreshListeners");
        }
        SearcherFactory searcherFactory = new EngineSearcherFactory(engineConfig);
        // how long to wait (in ms) for a primary to write an initial commit before giving up
        final long nonexistentRetryTime = engineConfig.getIndexSettings().getSettings()
            .getAsTime(NONEXISTENT_INDEX_RETRY_WAIT, DEFAULT_NONEXISTENT_INDEX_RETRY_WAIT)
            .getMillis();
        try {
            DirectoryReader reader = null;
            // hold a store reference for the lifetime of the engine; released in closeNoLock()
            // (or immediately below if construction fails)
            store.incRef();
            boolean success = false;
            try {
                if (Lucene.waitForIndex(store.directory(), nonexistentRetryTime)) {
                    reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(store.directory()), shardId);
                    this.searcherManager = new SearcherManager(reader, searcherFactory);
                    this.lastCommittedSegmentInfos = readLastCommittedSegmentInfos(searcherManager, store);
                    success = true;
                } else {
                    throw new IllegalStateException("failed to open a shadow engine after" + nonexistentRetryTime + "ms, " + "directory is not an index");
                }
            } catch (Exception e) {
                logger.warn("failed to create new reader", e);
                throw e;
            } finally {
                // on failure undo both the reader open and the store reference taken above
                if (success == false) {
                    IOUtils.closeWhileHandlingException(reader);
                    store.decRef();
                }
            }
        } catch (IOException ex) {
            throw new EngineCreationFailureException(shardId, "failed to open index reader", ex);
        }
        logger.trace("created new ShadowEngine");
    }

    @Override
    public IndexResult index(Index index) {
        throw new UnsupportedOperationException(shardId + " index operation not allowed on shadow engine");
    }

    @Override
    public DeleteResult delete(Delete delete) {
        throw new UnsupportedOperationException(shardId + " delete operation not allowed on shadow engine");
    }

    @Override
    public SyncedFlushResult syncFlush(String syncId, CommitId expectedCommitId) {
        throw new UnsupportedOperationException(shardId + " sync commit operation not allowed on shadow engine");
    }

    @Override
    public CommitId flush() throws EngineException {
        return flush(false, false);
    }

    @Override
    public CommitId flush(boolean force, boolean waitIfOngoing) throws EngineException {
        logger.trace("skipping FLUSH on shadow engine");
        // reread the last committed segment infos
        refresh("flush");
        /*
         * we have to inc-ref the store here since if the engine is closed by a tragic event
         * we don't acquire the write lock and wait until we have exclusive access. This might also
         * dec the store reference which can essentially close the store and unless we can inc the reference
         * we can't use it.
         */
        store.incRef();
        try (ReleasableLock lock = readLock.acquire()) {
            // reread the last committed segment infos
            lastCommittedSegmentInfos = readLastCommittedSegmentInfos(searcherManager, store);
        } catch (Exception e) {
            if (isClosed.get() == false) {
                logger.warn("failed to read latest segment infos on flush", e);
                if (Lucene.isCorruptionException(e)) {
                    throw new FlushFailedEngineException(shardId, e);
                }
            }
        } finally {
            store.decRef();
        }
        return new CommitId(lastCommittedSegmentInfos.getId());
    }

    @Override
    public void forceMerge(boolean flush, int maxNumSegments, boolean onlyExpungeDeletes, boolean upgrade, boolean upgradeOnlyAncientSegments) throws EngineException {
        // no-op
        logger.trace("skipping FORCE-MERGE on shadow engine");
    }

    @Override
    public GetResult get(Get get, Function<String, Searcher> searcherFacotry, LongConsumer onRefresh) throws EngineException {
        // NOTE(review): parameter name "searcherFacotry" is a typo for "searcherFactory";
        // kept as-is since renaming is cosmetic.
        // There is no translog, so we can get it directly from the searcher
        // Since we never refresh we just drop the onRefresh parameter on the floor
        return getFromSearcher(get, searcherFacotry);
    }

    @Override
    public Translog getTranslog() {
        throw new UnsupportedOperationException("shadow engines don't have translogs");
    }

    @Override
    public List<Segment> segments(boolean verbose) {
        try (ReleasableLock lock = readLock.acquire()) {
            Segment[] segmentsArr = getSegmentInfo(lastCommittedSegmentInfos, verbose);
            for (int i = 0; i < segmentsArr.length; i++) {
                // hard code all segments as committed, because they are in
                // order for the shadow replica to see them
                segmentsArr[i].committed = true;
            }
            return Arrays.asList(segmentsArr);
        }
    }

    @Override
    public void refresh(String source) throws EngineException {
        // we obtain a read lock here, since we don't want a flush to happen while we are refreshing
        // since it flushes the index as well (though, in terms of concurrency, we are allowed to do it)
        try (ReleasableLock lock = readLock.acquire()) {
            ensureOpen();
            searcherManager.maybeRefreshBlocking();
        } catch (AlreadyClosedException e) {
            // propagate close as-is; only other failures fail the engine below
            throw e;
        } catch (Exception e) {
            try {
                failEngine("refresh failed", e);
            } catch (Exception inner) {
                e.addSuppressed(inner);
            }
            throw new RefreshFailedEngineException(shardId, e);
        }
    }

    @Override
    public IndexCommit acquireIndexCommit(boolean flushFirst) throws EngineException {
        throw new UnsupportedOperationException("Can not take snapshot from a shadow engine");
    }

    @Override
    protected SearcherManager getSearcherManager() {
        return searcherManager;
    }

    @Override
    protected void closeNoLock(String reason) {
        if (isClosed.compareAndSet(false, true)) {
            try {
                logger.debug("shadow replica close searcher manager refCount: {}", store.refCount());
                IOUtils.close(searcherManager);
            } catch (Exception e) {
                logger.warn("shadow replica failed to close searcher manager", e);
            } finally {
                // release the store reference taken in the constructor
                store.decRef();
            }
        }
    }

    @Override
    protected SegmentInfos getLastCommittedSegmentInfos() {
        return lastCommittedSegmentInfos;
    }

    @Override
    public long getIndexBufferRAMBytesUsed() {
        // No IndexWriter nor version map
        throw new UnsupportedOperationException("ShadowEngine has no IndexWriter");
    }

    @Override
    public void writeIndexingBuffer() {
        // No indexing buffer
        throw new UnsupportedOperationException("ShadowEngine has no IndexWriter");
    }

    @Override
    public void activateThrottling() {
        throw new UnsupportedOperationException("ShadowEngine has no IndexWriter");
    }

    @Override
    public void deactivateThrottling() {
        throw new UnsupportedOperationException("ShadowEngine has no IndexWriter");
    }

    @Override
    public boolean isThrottled() {
        return false;
    }

    @Override
    public long getIndexThrottleTimeInMillis() {
        return 0L;
    }

    @Override
    public Engine recoverFromTranslog() throws IOException {
        throw new UnsupportedOperationException("can't recover on a shadow engine");
    }
}
/*******************************************************************************
 * Copyright Duke Comprehensive Cancer Center and SemanticBits
 *
 * Distributed under the OSI-approved BSD 3-Clause License.
 * See http://ncip.github.com/c3pr/LICENSE.txt for details.
 ******************************************************************************/
package edu.duke.cabig.c3pr.domain.repository.impl;

import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
import java.util.List;

import javax.persistence.Transient;

import org.apache.log4j.Logger;
import org.springframework.context.MessageSource;
import org.springframework.transaction.annotation.Transactional;

import edu.duke.cabig.c3pr.constants.APIName;
import edu.duke.cabig.c3pr.constants.EpochType;
import edu.duke.cabig.c3pr.constants.RandomizationType;
import edu.duke.cabig.c3pr.constants.RegistrationDataEntryStatus;
import edu.duke.cabig.c3pr.constants.RegistrationWorkFlowStatus;
import edu.duke.cabig.c3pr.constants.ScheduledEpochDataEntryStatus;
import edu.duke.cabig.c3pr.constants.ScheduledEpochWorkFlowStatus;
import edu.duke.cabig.c3pr.constants.ServiceName;
import edu.duke.cabig.c3pr.constants.WorkFlowStatusType;
import edu.duke.cabig.c3pr.dao.EpochDao;
import edu.duke.cabig.c3pr.dao.ParticipantDao;
import edu.duke.cabig.c3pr.dao.ReasonDao;
import edu.duke.cabig.c3pr.dao.PersonUserDao;
import edu.duke.cabig.c3pr.dao.StratumGroupDao;
import edu.duke.cabig.c3pr.dao.StudySubjectDao;
import edu.duke.cabig.c3pr.domain.Arm;
import edu.duke.cabig.c3pr.domain.BookRandomization;
import edu.duke.cabig.c3pr.domain.BookRandomizationEntry;
import edu.duke.cabig.c3pr.domain.EligibilityCriteria;
import edu.duke.cabig.c3pr.domain.EndPoint;
import edu.duke.cabig.c3pr.domain.Epoch;
import edu.duke.cabig.c3pr.domain.Identifier;
import edu.duke.cabig.c3pr.domain.OffEpochReason;
import edu.duke.cabig.c3pr.domain.Participant;
import edu.duke.cabig.c3pr.domain.PersonUser;
import edu.duke.cabig.c3pr.domain.ScheduledEpoch;
import edu.duke.cabig.c3pr.domain.Study;
import edu.duke.cabig.c3pr.domain.StudySubject;
import edu.duke.cabig.c3pr.domain.StudySubjectConsentVersion;
import edu.duke.cabig.c3pr.domain.StudySubjectDemographics;
import edu.duke.cabig.c3pr.domain.SubjectEligibilityAnswer;
import edu.duke.cabig.c3pr.domain.SystemAssignedIdentifier;
import edu.duke.cabig.c3pr.domain.factory.StudySubjectFactory;
import edu.duke.cabig.c3pr.domain.repository.StudySubjectRepository;
import edu.duke.cabig.c3pr.exception.C3PRBaseException;
import edu.duke.cabig.c3pr.exception.C3PRBaseRuntimeException;
import edu.duke.cabig.c3pr.exception.C3PRCodedException;
import edu.duke.cabig.c3pr.exception.C3PRExceptionHelper;
import edu.duke.cabig.c3pr.service.StudySubjectService;
import edu.duke.cabig.c3pr.utils.IdentifierGenerator;
import edu.duke.cabig.c3pr.utils.StudyTargetAccrualNotificationEmail;
import gov.nih.nci.cabig.ctms.domain.AbstractMutableDomainObject;

/**
 * Repository for the full {@link StudySubject} registration life cycle: import,
 * enrollment, transfer, reservation, screening failure, off-epoch/off-study
 * transitions, re-consent and eligibility waivers.
 *
 * <p>All public methods run inside a Spring-managed transaction (class-level
 * {@code @Transactional}). Persistence goes through the injected DAOs; coded
 * error conditions are raised via {@link C3PRExceptionHelper} using message
 * codes resolved from {@code c3prErrorMessages}.
 */
@Transactional
public class StudySubjectRepositoryImpl implements StudySubjectRepository {

    private StudySubjectDao studySubjectDao;

    private ParticipantDao participantDao;

    private EpochDao epochDao;

    private StratumGroupDao stratumGroupDao;

    private C3PRExceptionHelper exceptionHelper;

    private MessageSource c3prErrorMessages;

    private StudySubjectFactory studySubjectFactory;

    private StudySubjectService studySubjectService;

    private StudyTargetAccrualNotificationEmail notificationEmailer;

    private IdentifierGenerator identifierGenerator;

    private ReasonDao reasonDao;

    private PersonUserDao personUserDao;

    private Logger log = Logger.getLogger(StudySubjectRepositoryImpl.class.getName());

    /**
     * Stores the C3D identifier on the persistent subject matching the given
     * subject's grid id.
     */
    public void assignC3DIdentifier(StudySubject studySubject, String c3dIdentifierValue) {
        log.debug("loading study subject by grid id : " + studySubject.getGridId());
        StudySubject loadedSubject = studySubjectDao.getByGridId(studySubject.getGridId());
        log.debug("loaded study subject with database id : " + loadedSubject.getId());
        log.debug("assigning c3d identifier value: " + c3dIdentifierValue);
        loadedSubject.setC3DIdentifier(c3dIdentifierValue);
        log.debug("assigned c3d identifier: " + loadedSubject.getC3DIdentifier());
        studySubjectDao.save(loadedSubject);
    }

    /**
     * Stores the Medidata identifier on the persistent subject matching the
     * given subject's grid id.
     */
    public void assignMedidataIdentifier(StudySubject studySubject, String medidataIdentifierValue) {
        log.debug("loading study subject by grid id : " + studySubject.getGridId());
        StudySubject loadedSubject = studySubjectDao.getByGridId(studySubject.getGridId());
        log.debug("loaded study subject with database id : " + loadedSubject.getId());
        log.debug("assigning medidata identifier value: " + medidataIdentifierValue);
        loadedSubject.setMedidataIdentifier(medidataIdentifierValue);
        // FIX: previously logged getC3DIdentifier() (copy-paste from
        // assignC3DIdentifier), which reported the wrong identifier.
        log.debug("assigned medidata identifier: " + loadedSubject.getMedidataIdentifier());
        studySubjectDao.save(loadedSubject);
    }

    /** Stores the coordinating-center identifier on the persistent subject. */
    public void assignCoOrdinatingCenterIdentifier(StudySubject studySubject, String identifierValue) {
        StudySubject loadedSubject = studySubjectDao.getByGridId(studySubject.getGridId());
        loadedSubject.setCoOrdinatingCenterIdentifier(identifierValue);
        studySubjectDao.save(loadedSubject);
    }

    /**
     * Returns true when a RESERVING epoch has an accrual ceiling and the count
     * of subjects scheduled on it has reached that ceiling. Non-reserving
     * epochs always report false.
     */
    public boolean isEpochAccrualCeilingReached(int epochId) {
        Epoch epoch = epochDao.getById(epochId);
        if (epoch.getType() == EpochType.RESERVING) {
            ScheduledEpoch scheduledEpoch = new ScheduledEpoch(true);
            scheduledEpoch.setEpoch(epoch);
            List<StudySubject> list = studySubjectDao.searchByScheduledEpoch(scheduledEpoch);
            if (epoch.getAccrualCeiling() != null && list.size() >= epoch.getAccrualCeiling().intValue()) {
                return true;
            }
        }
        return false;
    }

    /**
     * Saves an imported (deserialized) StudySubject after validation:
     * duplicate checks, participant persistence, required-arm check, data-entry
     * completeness, identifier generation and workflow status assignment.
     *
     * @param deserialedStudySubject the subject as received from the import source
     * @return the persisted StudySubject
     * @throws C3PRCodedException on duplicates, invalid subject details,
     *         missing required arm, or incomplete data entry
     */
    @Transactional(readOnly = false)
    public StudySubject importStudySubject(StudySubject deserialedStudySubject) throws C3PRCodedException {
        StudySubject studySubject = studySubjectFactory.buildStudySubject(deserialedStudySubject);
        if (studySubjectDao.getByIdentifiers(studySubject.getIdentifiers()).size() > 0) {
            throw exceptionHelper.getException(
                    getCode("C3PR.EXCEPTION.REGISTRATION.MULTIPLE_STUDYSUBJECTS_FOUND.CODE"));
        }
        if (studySubject.getStudySubjectDemographics().getMasterSubject().getId() != null) {
            // Known participant: reject if already registered on this study.
            List<StudySubject> registrations = studySubjectDao.searchBySubjectAndStudyIdentifiers(
                    studySubject.getStudySubjectDemographics().getMasterSubject().getPrimaryIdentifier(),
                    studySubject.getStudySite().getStudy().getCoordinatingCenterAssignedIdentifier());
            if (registrations.size() > 0) {
                throw this.exceptionHelper
                        .getException(getCode("C3PR.EXCEPTION.REGISTRATION.STUDYSUBJECTS_ALREADY_EXISTS.CODE"));
            }
        }
        else {
            // New participant: persist only if the details validate.
            if (studySubject.getStudySubjectDemographics().getMasterSubject().validateParticipant()) {
                participantDao.save(studySubject.getStudySubjectDemographics().getMasterSubject());
            }
            else {
                throw this.exceptionHelper
                        .getException(getCode("C3PR.EXCEPTION.REGISTRATION.SUBJECTS_INVALID_DETAILS.CODE"));
            }
        }
        if (studySubject.getScheduledEpoch().getEpoch().getRequiresArm()) {
            ScheduledEpoch scheduledTreatmentEpoch = studySubject.getScheduledEpoch();
            if (scheduledTreatmentEpoch.getScheduledArm() == null
                    || scheduledTreatmentEpoch.getScheduledArm().getArm() == null
                    || scheduledTreatmentEpoch.getScheduledArm().getArm().getId() == null) {
                throw this.exceptionHelper
                        .getException(getCode("C3PR.EXCEPTION.REGISTRATION.IMPORT.REQUIRED.ARM.NOTFOUND.CODE"));
            }
        }
        studySubject.setRegDataEntryStatus(studySubject.evaluateRegistrationDataEntryStatus());
        studySubject.getScheduledEpoch().setScEpochDataEntryStatus(
                studySubject.evaluateScheduledEpochDataEntryStatus((List) new ArrayList<Error>()));
        if (studySubject.getRegDataEntryStatus() == RegistrationDataEntryStatus.INCOMPLETE) {
            throw this.exceptionHelper
                    .getException(getCode("C3PR.EXCEPTION.REGISTRATION.DATA_ENTRY_INCOMPLETE.CODE"));
        }
        if (studySubject.getScheduledEpoch().getScEpochDataEntryStatus() == ScheduledEpochDataEntryStatus.INCOMPLETE) {
            throw this.exceptionHelper
                    .getException(getCode("C3PR.EXCEPTION.REGISTRATION.SCHEDULEDEPOCH.DATA_ENTRY_INCOMPLETE.CODE"));
        }
        // Generate a C3PR system identifier unless one was supplied.
        boolean hasC3PRAssignedIdentifier = false;
        for (SystemAssignedIdentifier systemAssignedIdentifier : studySubject.getSystemAssignedIdentifiers()) {
            if (systemAssignedIdentifier.getSystemName().equals("C3PR")) {
                hasC3PRAssignedIdentifier = true;
                break;
            }
        }
        if (!hasC3PRAssignedIdentifier) {
            studySubject.addIdentifier(identifierGenerator.generateSystemAssignedIdentifier(studySubject));
        }
        studySubject.getScheduledEpoch().setScEpochWorkflowStatus(ScheduledEpochWorkFlowStatus.ON_EPOCH);
        if (studySubject.getScheduledEpoch().isReserving()) {
            studySubject.setRegWorkflowStatus(RegistrationWorkFlowStatus.RESERVED);
        }
        else if (studySubject.getScheduledEpoch().getEpoch().isEnrolling()) {
            studySubject.setRegWorkflowStatus(RegistrationWorkFlowStatus.ON_STUDY);
        }
        else {
            studySubject.setRegWorkflowStatus(RegistrationWorkFlowStatus.PENDING_ON_STUDY);
        }
        // Make sure there is at least one primary identifier; prefer the
        // coordinating-center identifier, then the C3PR-assigned one.
        boolean hasPrimaryIdentifier = false;
        for (Identifier identifier : studySubject.getIdentifiers()) {
            if (identifier.getPrimaryIndicator()) {
                hasPrimaryIdentifier = true;
                break;
            }
        }
        if (!hasPrimaryIdentifier) {
            Identifier identifier = studySubject.getCoOrdinatingCenterIdentifier();
            if (identifier != null) {
                identifier.setPrimaryIndicator(true);
            }
            else {
                identifier = studySubject.getC3PRAssignedIdentifier();
                if (identifier != null) {
                    identifier.setPrimaryIndicator(true);
                }
            }
        }
        studySubjectDao.save(studySubject);
        log.debug("Registration saved with grid ID" + studySubject.getGridId());
        return studySubject;
    }

    /**
     * Returns the next arm from the randomization book for an unstratified
     * study and advances the book position.
     *
     * <p>NOTE(review): the book-exhausted check happens outside the
     * synchronized block; concurrent callers could still race on the position
     * read. Preserved as-is — confirm intended locking with the team.
     *
     * @throws C3PRBaseException (coded) when the randomization book is exhausted
     */
    @Transient
    public Arm getNextArmForUnstratifiedStudy(StudySubject studySubject) throws C3PRBaseException {
        Arm arm = null;
        if ((studySubject.getScheduledEpoch()).getEpoch().hasBookRandomizationEntry()) {
            Iterator<BookRandomizationEntry> iter = ((BookRandomization) (studySubject.getScheduledEpoch())
                    .getEpoch().getRandomization()).getBookRandomizationEntry().iterator();
            BookRandomizationEntry breTemp;
            while (iter.hasNext()) {
                breTemp = iter.next();
                if (breTemp.getPosition().equals(
                        studySubject.getScheduledEpoch().getEpoch().getCurrentBookRandomizationEntryPosition())) {
                    synchronized (this) {
                        (studySubject.getScheduledEpoch().getEpoch())
                                .setCurrentBookRandomizationEntryPosition(breTemp.getPosition() + 1);
                        arm = breTemp.getArm();
                        break;
                    }
                }
            }
        }
        if (arm == null) {
            throw this.exceptionHelper.getException(
                    getCode("C3PR.EXCEPTION.REGISTRATION.NO.ARM.AVAILABLE.BOOK.EXHAUSTED.CODE"));
        }
        return arm;
    }

    public void setStudySubjectDao(StudySubjectDao studySubjectDao) {
        this.studySubjectDao = studySubjectDao;
    }

    public void setEpochDao(EpochDao epochDao) {
        this.epochDao = epochDao;
    }

    public void setStratumGroupDao(StratumGroupDao stratumGroupDao) {
        this.stratumGroupDao = stratumGroupDao;
    }

    public void setExceptionHelper(C3PRExceptionHelper exceptionHelper) {
        this.exceptionHelper = exceptionHelper;
    }

    public void setC3prErrorMessages(MessageSource errorMessages) {
        c3prErrorMessages = errorMessages;
    }

    /** Resolves an error-message key to its numeric exception code. */
    private int getCode(String errortypeString) {
        return Integer.parseInt(this.c3prErrorMessages.getMessage(errortypeString, null, null));
    }

    /** Merges the subject into the persistence context. */
    public StudySubject save(StudySubject studySubject) {
        return studySubjectDao.merge(studySubject);
    }

    public void setStudySubjectFactory(StudySubjectFactory studySubjectFactory) {
        this.studySubjectFactory = studySubjectFactory;
    }

    public void setParticipantDao(ParticipantDao participantDao) {
        this.participantDao = participantDao;
    }

    /**
     * Finds existing registrations matching the example subject's primary
     * participant identifier and study coordinating-center identifier.
     */
    public List<StudySubject> findRegistrations(StudySubject exampleStudySubject) {
        return studySubjectDao.searchBySubjectAndStudyIdentifiers(
                exampleStudySubject.getStudySubjectDemographics().getMasterSubject().getPrimaryIdentifier(),
                exampleStudySubject.getStudySite().getStudy().getCoordinatingCenterAssignedIdentifier());
    }

    /** Looks up the unique subject by identifier and enrolls it. */
    public StudySubject enroll(Identifier studySubjectIdentifier) throws C3PRCodedException {
        StudySubject studySubject = getUniqueStudySubject(studySubjectIdentifier);
        return enroll(studySubject);
    }

    /**
     * Sends the CCTS broadcast message; a coded failure only marks the subject
     * MESSAGE_SEND_FAILED, any other failure is rethrown.
     */
    private void broadcastMessage(StudySubject studySubjectAfterSave) {
        try {
            studySubjectService.broadcastMessage(studySubjectAfterSave);
        }
        catch (C3PRCodedException e) {
            log.error(e.getMessage());
            studySubjectAfterSave.setCctsWorkflowStatus(WorkFlowStatusType.MESSAGE_SEND_FAILED);
        }
        catch (Exception e) {
            // TODO throw a C3PRCodedUncheckedException
            log.error(e.getMessage());
            throw new RuntimeException(e);
        }
    }

    /** Sends the study accrual notification email. */
    private void sendStudyAccrualNotification(StudySubject studySubjectAfterSave) {
        try {
            this.notificationEmailer.sendEmail(studySubjectAfterSave);
        }
        catch (Exception e) {
            // TODO throw a C3PRCodedUncheckedException
            log.error(e.getMessage());
            throw new RuntimeException(e);
        }
    }

    /**
     * Enrolls the subject: duplicate check, enrollment (local or multisite),
     * stratum-group/epoch bookkeeping, demographics snapshot if stale, then
     * merge, accrual notification and CCTS broadcast.
     */
    public StudySubject enroll(StudySubject studySubject) throws C3PRCodedException {
        List<StudySubject> studySubjects = findRegistrations(studySubject);
        studySubjectDao.initialize(studySubject);
        if (studySubjects.size() > 1) {
            throw this.exceptionHelper
                    .getRuntimeException(getCode("C3PR.EXCEPTION.REGISTRATION.MULTIPLE_STUDYSUBJECTS_FOUND.CODE"));
        }
        this.continueEnrollment(studySubject);
        this.saveStratumGroup(studySubject);
        this.updateEpoch(studySubject);
        // Refresh the demographics snapshot if the current one is stale.
        if (!studySubject.getStudySubjectDemographics().getValid()) {
            takeSnapshotAndValidateStudySubjectDemographics(studySubject);
        }
        studySubject = studySubjectDao.merge(studySubject);
        sendStudyAccrualNotification(studySubject);
        broadcastMessage(studySubject);
        return studySubject;
    }

    /**
     * Performs the enrollment itself. When this site is neither hosted mode
     * nor the coordinating center, the enrollment is broadcast to the
     * coordinating center and the returned arm/identifier applied locally;
     * otherwise local enrollment runs. Pending child registrations are
     * enrolled recursively.
     */
    public void continueEnrollment(StudySubject studySubject) throws C3PRCodedException {
        if (studySubject.getScheduledEpoch().getScEpochWorkflowStatus() != ScheduledEpochWorkFlowStatus.ON_EPOCH) {
            studySubject.prepareForEnrollment();
        }
        if (!studySubject.getStudySite().getHostedMode()
                && !studySubject.getStudySite().getIsCoordinatingCenter()
                && !studySubject.getStudySite().getStudy()
                        .isCoOrdinatingCenter(studySubjectService.getLocalNCIInstituteCode())) {
            List<AbstractMutableDomainObject> domainObjects = new ArrayList<AbstractMutableDomainObject>();
            domainObjects.add(studySubject);
            EndPoint endPoint = handleCoordinatingCenterBroadcast(studySubject.getStudySite().getStudy(),
                    APIName.ENROLL_SUBJECT, domainObjects);
            if (endPoint.getStatus() != WorkFlowStatusType.MESSAGE_SEND_CONFIRMED) {
                throw this.exceptionHelper.getMultisiteException(endPoint.getLastAttemptError());
            }
            StudySubject multisiteReturnedStudySubject = (StudySubject) ((List) endPoint.getReturnValue()).get(0);
            studySubjectDao.initialize(multisiteReturnedStudySubject);
            studySubject.doMutiSiteEnrollment(multisiteReturnedStudySubject.getScheduledEpoch(),
                    multisiteReturnedStudySubject.getCoOrdinatingCenterIdentifier());
        }
        else {
            if (studySubject.getRegWorkflowStatus() != RegistrationWorkFlowStatus.ON_STUDY) {
                studySubject.addIdentifier(identifierGenerator.generateOrganizationAssignedIdentifier(studySubject));
            }
            studySubject.doLocalEnrollment();
        }
        for (StudySubject childStudySubject : studySubject.getChildStudySubjects()) {
            if (childStudySubject.getRegWorkflowStatus() == RegistrationWorkFlowStatus.PENDING_ON_STUDY
                    && childStudySubject.getScheduledEpoch().getScEpochWorkflowStatus() != ScheduledEpochWorkFlowStatus.PENDING_ON_EPOCH) {
                continueEnrollment(childStudySubject);
            }
        }
    }

    /** Registers the subject after a duplicate check and snapshot refresh. */
    public StudySubject register(StudySubject studySubject) {
        List<StudySubject> studySubjects = findRegistrations(studySubject);
        if (studySubjects.size() > 1) {
            throw this.exceptionHelper
                    .getRuntimeException(getCode("C3PR.EXCEPTION.REGISTRATION.MULTIPLE_STUDYSUBJECTS_FOUND.CODE"));
        }
        // Refresh the demographics snapshot if the current one is stale.
        if (!studySubject.getStudySubjectDemographics().getValid()) {
            takeSnapshotAndValidateStudySubjectDemographics(studySubject);
        }
        studySubject.register();
        return save(studySubject);
    }

    /** Looks up the unique subject by identifier and registers it. */
    public StudySubject register(Identifier studySubjectIdentifier) {
        StudySubject studySubject = getUniqueStudySubject(studySubjectIdentifier);
        return register(studySubject);
    }

    /**
     * Transfers the subject (looked up by identifier) to its next epoch,
     * locally or via coordinating-center broadcast.
     */
    public StudySubject transferSubject(Identifier studySubjectIdentifier) {
        StudySubject studySubject = getUniqueStudySubject(studySubjectIdentifier);
        if (studySubject.getScheduledEpoch().getScEpochWorkflowStatus() != ScheduledEpochWorkFlowStatus.ON_EPOCH) {
            studySubject.prepareForTransfer();
        }
        if (!studySubject.getStudySite().getHostedMode()
                && !studySubject.getStudySite().getIsCoordinatingCenter()
                && !studySubject.getStudySite().getStudy()
                        .isCoOrdinatingCenter(studySubjectService.getLocalNCIInstituteCode())) {
            List<AbstractMutableDomainObject> domainObjects = new ArrayList<AbstractMutableDomainObject>();
            domainObjects.add(studySubject);
            EndPoint endPoint = handleCoordinatingCenterBroadcast(studySubject.getStudySite().getStudy(),
                    APIName.CHANGE_EPOCH, domainObjects);
            if (endPoint.getStatus() != WorkFlowStatusType.MESSAGE_SEND_CONFIRMED) {
                throw this.exceptionHelper.getMultisiteException(endPoint.getLastAttemptError());
            }
            StudySubject multisiteReturnedStudySubject = (StudySubject) ((List) endPoint.getReturnValue()).get(0);
            studySubjectDao.initialize(multisiteReturnedStudySubject);
            studySubject.doMutiSiteTransfer(multisiteReturnedStudySubject.getScheduledEpoch());
        }
        else {
            studySubject.doLocalTransfer();
        }
        this.saveStratumGroup(studySubject);
        this.updateEpoch(studySubject);
        return save(studySubject);
    }

    /**
     * Transfers the given subject to its next epoch after a duplicate check,
     * locally or via coordinating-center broadcast.
     */
    public StudySubject transferSubject(StudySubject studySubject) {
        List<StudySubject> studySubjects = findRegistrations(studySubject);
        if (studySubjects.size() > 1) {
            throw this.exceptionHelper
                    .getRuntimeException(getCode("C3PR.EXCEPTION.REGISTRATION.MULTIPLE_STUDYSUBJECTS_FOUND.CODE"));
        }
        if (studySubject.getScheduledEpoch().getScEpochWorkflowStatus() != ScheduledEpochWorkFlowStatus.ON_EPOCH) {
            studySubject.prepareForTransfer();
        }
        if (!studySubject.getStudySite().getHostedMode()
                && !studySubject.getStudySite().getIsCoordinatingCenter()
                && !studySubject.getStudySite().getStudy()
                        .isCoOrdinatingCenter(studySubjectService.getLocalNCIInstituteCode())) {
            List<AbstractMutableDomainObject> domainObjects = new ArrayList<AbstractMutableDomainObject>();
            domainObjects.add(studySubject);
            EndPoint endPoint = handleCoordinatingCenterBroadcast(studySubject.getStudySite().getStudy(),
                    APIName.CHANGE_EPOCH, domainObjects);
            if (endPoint.getStatus() != WorkFlowStatusType.MESSAGE_SEND_CONFIRMED) {
                throw this.exceptionHelper.getMultisiteException(endPoint.getLastAttemptError());
            }
            StudySubject multisiteReturnedStudySubject = (StudySubject) ((List) endPoint.getReturnValue()).get(0);
            studySubjectDao.initialize(multisiteReturnedStudySubject);
            studySubject.doMutiSiteTransfer(multisiteReturnedStudySubject.getScheduledEpoch());
        }
        else {
            studySubject.doLocalTransfer();
        }
        this.saveStratumGroup(studySubject);
        this.updateEpoch(studySubject);
        return save(studySubject);
    }

    /**
     * Creates a brand-new registration; fails if ANY matching registration
     * already exists (size &gt; 0, unlike enroll/register which allow one).
     */
    public StudySubject create(StudySubject studySubject) {
        List<StudySubject> studySubjects = findRegistrations(studySubject);
        if (studySubjects.size() > 0) {
            throw this.exceptionHelper
                    .getRuntimeException(getCode("C3PR.EXCEPTION.REGISTRATION.MULTIPLE_STUDYSUBJECTS_FOUND.CODE"));
        }
        if (!studySubject.hasC3PRSystemIdentifier()) {
            studySubject.addIdentifier(identifierGenerator.generateSystemAssignedIdentifier(studySubject));
        }
        return save(studySubject);
    }

    /**
     * Reserves the subject after a duplicate check. NOTE(review): unlike the
     * other workflow methods this does not persist — the caller is expected to
     * save; preserved as-is.
     */
    public StudySubject reserve(StudySubject studySubject) {
        List<StudySubject> studySubjects = findRegistrations(studySubject);
        if (studySubjects.size() > 1) {
            throw this.exceptionHelper
                    .getRuntimeException(getCode("C3PR.EXCEPTION.REGISTRATION.MULTIPLE_STUDYSUBJECTS_FOUND.CODE"));
        }
        studySubject.reserve();
        // Take a snapshot if not already taken (i.e. not valid) and set the
        // demographics record to valid.
        if (!studySubject.getStudySubjectDemographics().getValid()) {
            takeSnapshotAndValidateStudySubjectDemographics(studySubject);
        }
        return studySubject;
    }

    /** Looks up the unique subject by identifier and reserves it. */
    public StudySubject reserve(Identifier studySubjectIdentifier) {
        StudySubject studySubject = getUniqueStudySubject(studySubjectIdentifier);
        return reserve(studySubject);
    }

    /**
     * Resolves exactly one subject for the identifier.
     *
     * @throws RuntimeException (coded) when none or more than one match
     */
    public StudySubject getUniqueStudySubject(Identifier studySubjectIdentifier) {
        List<Identifier> studySubjectIdentifiers = new ArrayList<Identifier>();
        studySubjectIdentifiers.add(studySubjectIdentifier);
        List<StudySubject> studySubjects = studySubjectDao.getByIdentifiers(studySubjectIdentifiers);
        if (studySubjects.size() == 0) {
            throw this.exceptionHelper
                    .getRuntimeException(getCode("C3PR.EXCEPTION.REGISTRATION.NOT_FOUND_GIVEN_IDENTIFIERS.CODE"));
        }
        else if (studySubjects.size() > 1) {
            throw this.exceptionHelper
                    .getRuntimeException(getCode("C3PR.EXCEPTION.REGISTRATION.MULTIPLE_STUDYSUBJECTS_FOUND.CODE"));
        }
        return studySubjects.get(0);
    }

    /**
     * Broadcasts a registration API call to the study's first coordinating
     * center. Endpoint error collections are initialized and the endpoints
     * evicted from the session before the (potentially long) remote call.
     */
    public EndPoint handleCoordinatingCenterBroadcast(Study study, APIName multisiteAPIName, List domainObjects) {
        for (EndPoint endPoint : study.getStudyCoordinatingCenters().get(0).getEndpoints()) {
            endPoint.getErrors().size();
            studySubjectDao.evict(endPoint);
        }
        return studySubjectService.handleMultiSiteBroadcast(study.getStudyCoordinatingCenters().get(0),
                ServiceName.REGISTRATION, multisiteAPIName, domainObjects);
    }

    public void setStudySubjectService(StudySubjectService studySubjectService) {
        this.studySubjectService = studySubjectService;
    }

    public StudyTargetAccrualNotificationEmail getNotificationEmailer() {
        return notificationEmailer;
    }

    public void setNotificationEmailer(StudyTargetAccrualNotificationEmail notificationEmailer) {
        this.notificationEmailer = notificationEmailer;
    }

    /**
     * Merges the stratum group for book-randomized, stratified epochs of the
     * subject and of each child subject.
     */
    public void saveStratumGroup(StudySubject studySubject) {
        if (studySubject.getScheduledEpoch().getEpoch().getRandomizedIndicator()
                && studySubject.getScheduledEpoch().getEpoch().getStratificationIndicator()
                && studySubject.getStudySite().getStudy().getRandomizationType() == RandomizationType.BOOK) {
            try {
                stratumGroupDao.merge(studySubject.getScheduledEpoch().getStratumGroup());
            }
            catch (C3PRBaseException e) {
                // FIX: was e.printStackTrace(); route through the class logger
                // (cause is still lost in the rethrow — the
                // C3PRBaseRuntimeException(String) signature is preserved).
                log.error(e.getMessage(), e);
                throw new C3PRBaseRuntimeException(e.getMessage());
            }
        }
        for (StudySubject childStudySubject : studySubject.getChildStudySubjects()) {
            if (childStudySubject.getScheduledEpoch().getEpoch().getRandomizedIndicator()
                    && childStudySubject.getScheduledEpoch().getEpoch().getStratificationIndicator()
                    && childStudySubject.getStudySite().getStudy().getRandomizationType() == RandomizationType.BOOK) {
                try {
                    stratumGroupDao.merge(childStudySubject.getScheduledEpoch().getStratumGroup());
                }
                catch (C3PRBaseException e) {
                    log.error(e.getMessage(), e);
                    throw new C3PRBaseRuntimeException(e.getMessage());
                }
            }
        }
    }

    /**
     * Merges the epoch for book-randomized, unstratified epochs (persists the
     * advanced book position).
     */
    public void updateEpoch(StudySubject studySubject) {
        if (studySubject.getScheduledEpoch().getEpoch().getRandomizedIndicator()
                && !studySubject.getScheduledEpoch().getEpoch().getStratificationIndicator()
                && studySubject.getStudySite().getStudy().getRandomizationType() == RandomizationType.BOOK) {
            this.epochDao.merge(studySubject.getScheduledEpoch().getEpoch());
        }
    }

    public void setIdentifierGenerator(IdentifierGenerator identifierGenerator) {
        this.identifierGenerator = identifierGenerator;
    }

    public IdentifierGenerator getIdentifierGenerator() {
        return identifierGenerator;
    }

    /** Marks the registration INVALID and merges it. */
    public StudySubject invalidateRegistration(StudySubject studySubject) {
        studySubject.setRegWorkflowStatus(RegistrationWorkFlowStatus.INVALID);
        return studySubjectDao.merge(studySubject);
    }

    /**
     * Retires the current demographics record and replaces it with a fresh,
     * valid snapshot taken from the master subject.
     */
    public void takeSnapshotAndValidateStudySubjectDemographics(StudySubject studySubject) {
        // Set current studySubjectDemographics retiredIndicator to true.
        studySubject.getStudySubjectDemographics().setRetiredIndicatorAsTrue();
        Participant masterSubject = studySubject.getStudySubjectDemographics().getMasterSubject();
        StudySubjectDemographics snapShot = masterSubject.createStudySubjectDemographics();
        snapShot.setValid(true);
        studySubject.setStudySubjectDemographics(snapShot);
    }

    /**
     * Grants an eligibility waiver on the given criteria, recording the local
     * staff member who allowed it.
     *
     * @throws C3PRBaseRuntimeException when the personnel identifier does not
     *         resolve to a local PersonUser
     */
    public StudySubject allowEligibilityWaiver(Identifier studySubjectIdentifier,
            List<EligibilityCriteria> eligibilityCrieteria, String waivedByPersonnelAssignedIdentifier) {
        StudySubject studySubject = getUniqueStudySubject(studySubjectIdentifier);
        PersonUser waivedBy = personUserDao.getByAssignedIdentifierFromLocal(waivedByPersonnelAssignedIdentifier);
        if (waivedBy == null) {
            throw new C3PRBaseRuntimeException("Cannot allow waiver. Null or unassociated study personnel.");
        }
        studySubject.allowEligibilityWaiver(eligibilityCrieteria, waivedBy);
        return studySubjectDao.merge(studySubject);
    }

    /** Applies eligibility waivers to the given answers and merges. */
    public StudySubject waiveEligibility(Identifier studySubjectIdentifier,
            List<SubjectEligibilityAnswer> subjectEligibilityAnswers) {
        StudySubject studySubject = getUniqueStudySubject(studySubjectIdentifier);
        studySubject.waiveEligibility(subjectEligibilityAnswers);
        return studySubjectDao.merge(studySubject);
    }

    /**
     * Re-attaches each reason to its persistent instance, by id when present,
     * otherwise by code. Shared by the off-epoch/off-study/screening/
     * discontinue flows (previously four identical inline loops).
     */
    private void resolvePersistedReasons(List<OffEpochReason> offEpochReasons) {
        for (OffEpochReason offEpochReason : offEpochReasons) {
            if (offEpochReason.getReason().getId() != null) {
                offEpochReason.setReason(reasonDao.getById(offEpochReason.getReason().getId()));
            }
            else {
                offEpochReason.setReason(reasonDao.getReasonByCode(offEpochReason.getReason().getCode()));
            }
        }
    }

    /** Fails screening for the subject with the given reasons and date. */
    public StudySubject failScreening(Identifier studySubjectIdentifier,
            List<OffEpochReason> offScreeningReasons, Date failScreeningDate) {
        StudySubject studySubject = getUniqueStudySubject(studySubjectIdentifier);
        resolvePersistedReasons(offScreeningReasons);
        studySubject.failScreening(offScreeningReasons, failScreeningDate);
        return studySubjectDao.merge(studySubject);
    }

    /** Takes the subject off study with the given reasons and date. */
    public StudySubject takeSubjectOffStudy(Identifier studySubjectIdentifier,
            List<OffEpochReason> offStudyReasons, Date offStudyDate) {
        StudySubject studySubject = getUniqueStudySubject(studySubjectIdentifier);
        resolvePersistedReasons(offStudyReasons);
        studySubject.takeSubjectOffStudy(offStudyReasons, offStudyDate);
        return studySubjectDao.merge(studySubject);
    }

    /** Takes the subject off its current epoch with the given reasons/date. */
    public StudySubject takeSubjectOffCurrentEpoch(Identifier studySubjectIdentifier,
            List<OffEpochReason> offEpochReasons, Date offEpochDate) {
        StudySubject studySubject = getUniqueStudySubject(studySubjectIdentifier);
        resolvePersistedReasons(offEpochReasons);
        studySubject.takeSubjectOffCurrentEpoch(offEpochReasons, offEpochDate);
        return studySubjectDao.merge(studySubject);
    }

    public void setReasonDao(ReasonDao reasonDao) {
        this.reasonDao = reasonDao;
    }

    public void setPersonUserDao(PersonUserDao personUserDao) {
        this.personUserDao = personUserDao;
    }

    /** Re-consents the subject against the named study version and merges. */
    public StudySubject reConsent(String studyVersionName,
            List<StudySubjectConsentVersion> studySubjectConsentVersionsHolder, Identifier studySubjectIdentifier) {
        StudySubject studySubject = getUniqueStudySubject(studySubjectIdentifier);
        studySubject.reConsent(studyVersionName, studySubjectConsentVersionsHolder);
        return studySubjectDao.merge(studySubject);
    }

    /** Discontinues enrollment with the given reasons and date, then merges. */
    public StudySubject discontinueEnrollment(Identifier studySubjectIdentifier,
            List<OffEpochReason> discontinueEpochReasons, Date discontinueEpochDate) {
        StudySubject studySubject = getUniqueStudySubject(studySubjectIdentifier);
        resolvePersistedReasons(discontinueEpochReasons);
        studySubject.discontinueEnrollment(discontinueEpochReasons, discontinueEpochDate);
        return studySubjectDao.merge(studySubject);
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

package org.elasticsearch.xpack.stack;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.core.ClientHelper;
import org.elasticsearch.xpack.core.template.IndexTemplateConfig;
import org.elasticsearch.xpack.core.template.IndexTemplateRegistry;
import org.elasticsearch.xpack.core.template.LifecyclePolicyConfig;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;

/**
 * Registry that installs the built-in "stack" index assets — component
 * templates, composable index templates, and ILM policies for the logs-*-*,
 * metrics-*-* and synthetics-*-* data streams. Installation can be switched
 * off at runtime via the dynamic {@code stack.templates.enabled} setting.
 */
public class StackTemplateRegistry extends IndexTemplateRegistry {
    private static final Logger logger = LogManager.getLogger(StackTemplateRegistry.class);

    // The stack template registry version. This number must be incremented when we make changes
    // to built-in templates.
    public static final int REGISTRY_VERSION = 1;

    // Variable substituted into each template JSON so installed assets carry the registry version.
    public static final String TEMPLATE_VERSION_VARIABLE = "xpack.stack.template.version";
    // Dynamic node-scoped kill switch for installing/reinstalling the stack templates.
    public static final Setting<Boolean> STACK_TEMPLATES_ENABLED = Setting.boolSetting(
        "stack.templates.enabled",
        true,
        Setting.Property.NodeScope,
        Setting.Property.Dynamic
    );

    private final ClusterService clusterService;
    // volatile: written by the settings-update consumer thread, read by the registry machinery.
    private volatile boolean stackTemplateEnabled;

    // General mappings conventions for any data that ends up in a data stream
    public static final String DATA_STREAMS_MAPPINGS_COMPONENT_TEMPLATE_NAME = "data-streams-mappings";

    public static final IndexTemplateConfig DATA_STREAMS_MAPPINGS_COMPONENT_TEMPLATE = new IndexTemplateConfig(
        DATA_STREAMS_MAPPINGS_COMPONENT_TEMPLATE_NAME,
        "/data-streams-mappings.json",
        REGISTRY_VERSION,
        TEMPLATE_VERSION_VARIABLE
    );

    //////////////////////////////////////////////////////////
    // Logs components (for matching logs-*-* indices)
    //////////////////////////////////////////////////////////
    public static final String LOGS_MAPPINGS_COMPONENT_TEMPLATE_NAME = "logs-mappings";
    public static final String LOGS_SETTINGS_COMPONENT_TEMPLATE_NAME = "logs-settings";
    public static final String LOGS_ILM_POLICY_NAME = "logs";
    public static final String LOGS_INDEX_TEMPLATE_NAME = "logs";

    public static final IndexTemplateConfig LOGS_MAPPINGS_COMPONENT_TEMPLATE = new IndexTemplateConfig(
        LOGS_MAPPINGS_COMPONENT_TEMPLATE_NAME,
        "/logs-mappings.json",
        REGISTRY_VERSION,
        TEMPLATE_VERSION_VARIABLE
    );

    public static final IndexTemplateConfig LOGS_SETTINGS_COMPONENT_TEMPLATE = new IndexTemplateConfig(
        LOGS_SETTINGS_COMPONENT_TEMPLATE_NAME,
        "/logs-settings.json",
        REGISTRY_VERSION,
        TEMPLATE_VERSION_VARIABLE
    );

    public static final LifecyclePolicyConfig LOGS_ILM_POLICY = new LifecyclePolicyConfig(LOGS_ILM_POLICY_NAME, "/logs-policy.json");

    public static final IndexTemplateConfig LOGS_INDEX_TEMPLATE = new IndexTemplateConfig(
        LOGS_INDEX_TEMPLATE_NAME,
        "/logs-template.json",
        REGISTRY_VERSION,
        TEMPLATE_VERSION_VARIABLE
    );

    //////////////////////////////////////////////////////////
    // Metrics components (for matching metric-*-* indices)
    //////////////////////////////////////////////////////////
    public static final String METRICS_MAPPINGS_COMPONENT_TEMPLATE_NAME = "metrics-mappings";
    public static final String METRICS_SETTINGS_COMPONENT_TEMPLATE_NAME = "metrics-settings";
    public static final String METRICS_ILM_POLICY_NAME = "metrics";
    public static final String METRICS_INDEX_TEMPLATE_NAME = "metrics";

    public static final IndexTemplateConfig METRICS_MAPPINGS_COMPONENT_TEMPLATE = new IndexTemplateConfig(
        METRICS_MAPPINGS_COMPONENT_TEMPLATE_NAME,
        "/metrics-mappings.json",
        REGISTRY_VERSION,
        TEMPLATE_VERSION_VARIABLE
    );

    public static final IndexTemplateConfig METRICS_SETTINGS_COMPONENT_TEMPLATE = new IndexTemplateConfig(
        METRICS_SETTINGS_COMPONENT_TEMPLATE_NAME,
        "/metrics-settings.json",
        REGISTRY_VERSION,
        TEMPLATE_VERSION_VARIABLE
    );

    public static final LifecyclePolicyConfig METRICS_ILM_POLICY = new LifecyclePolicyConfig(
        METRICS_ILM_POLICY_NAME,
        "/metrics-policy.json"
    );

    public static final IndexTemplateConfig METRICS_INDEX_TEMPLATE = new IndexTemplateConfig(
        METRICS_INDEX_TEMPLATE_NAME,
        "/metrics-template.json",
        REGISTRY_VERSION,
        TEMPLATE_VERSION_VARIABLE
    );

    //////////////////////////////////////////////////////////
    // Synthetics components (for matching synthetics-*-* indices)
    //////////////////////////////////////////////////////////
    public static final String SYNTHETICS_MAPPINGS_COMPONENT_TEMPLATE_NAME = "synthetics-mappings";
    public static final String SYNTHETICS_SETTINGS_COMPONENT_TEMPLATE_NAME = "synthetics-settings";
    public static final String SYNTHETICS_ILM_POLICY_NAME = "synthetics";
    public static final String SYNTHETICS_INDEX_TEMPLATE_NAME = "synthetics";

    public static final IndexTemplateConfig SYNTHETICS_MAPPINGS_COMPONENT_TEMPLATE = new IndexTemplateConfig(
        SYNTHETICS_MAPPINGS_COMPONENT_TEMPLATE_NAME,
        "/synthetics-mappings.json",
        REGISTRY_VERSION,
        TEMPLATE_VERSION_VARIABLE
    );

    public static final IndexTemplateConfig SYNTHETICS_SETTINGS_COMPONENT_TEMPLATE = new IndexTemplateConfig(
        SYNTHETICS_SETTINGS_COMPONENT_TEMPLATE_NAME,
        "/synthetics-settings.json",
        REGISTRY_VERSION,
        TEMPLATE_VERSION_VARIABLE
    );

    public static final LifecyclePolicyConfig SYNTHETICS_ILM_POLICY = new LifecyclePolicyConfig(
        SYNTHETICS_ILM_POLICY_NAME,
        "/synthetics-policy.json"
    );

    public static final IndexTemplateConfig SYNTHETICS_INDEX_TEMPLATE = new IndexTemplateConfig(
        SYNTHETICS_INDEX_TEMPLATE_NAME,
        "/synthetics-template.json",
        REGISTRY_VERSION,
        TEMPLATE_VERSION_VARIABLE
    );

    /**
     * Creates the registry; the enabled flag is seeded from the node settings
     * and kept current via {@link #initialize()}.
     */
    public StackTemplateRegistry(
        Settings nodeSettings,
        ClusterService clusterService,
        ThreadPool threadPool,
        Client client,
        NamedXContentRegistry xContentRegistry
    ) {
        super(nodeSettings, clusterService, threadPool, client, xContentRegistry);
        this.clusterService = clusterService;
        this.stackTemplateEnabled = STACK_TEMPLATES_ENABLED.get(nodeSettings);
    }

    /** Registers the dynamic-setting listener in addition to base initialization. */
    @Override
    public void initialize() {
        super.initialize();
        clusterService.getClusterSettings().addSettingsUpdateConsumer(STACK_TEMPLATES_ENABLED, this::updateEnabledSetting);
    }

    // Settings-update consumer: flips the flag and, when disabling, logs which
    // templates will no longer be (re)installed.
    private void updateEnabledSetting(boolean newValue) {
        if (newValue) {
            this.stackTemplateEnabled = true;
        } else {
            logger.info(
                "stack composable templates [{}] and component templates [{}] will not be installed or reinstalled",
                getComposableTemplateConfigs().stream().map(IndexTemplateConfig::getTemplateName).collect(Collectors.joining(",")),
                getComponentTemplateConfigs().stream().map(IndexTemplateConfig::getTemplateName).collect(Collectors.joining(","))
            );
            this.stackTemplateEnabled = false;
        }
    }

    /** ILM policies to install; empty when the stack templates are disabled. */
    @Override
    protected List<LifecyclePolicyConfig> getPolicyConfigs() {
        if (stackTemplateEnabled) {
            return Arrays.asList(LOGS_ILM_POLICY, METRICS_ILM_POLICY, SYNTHETICS_ILM_POLICY);
        } else {
            return Collections.emptyList();
        }
    }

    /** Component templates to install; empty when the stack templates are disabled. */
    @Override
    protected List<IndexTemplateConfig> getComponentTemplateConfigs() {
        if (stackTemplateEnabled) {
            return Arrays.asList(
                DATA_STREAMS_MAPPINGS_COMPONENT_TEMPLATE,
                LOGS_MAPPINGS_COMPONENT_TEMPLATE,
                LOGS_SETTINGS_COMPONENT_TEMPLATE,
                METRICS_MAPPINGS_COMPONENT_TEMPLATE,
                METRICS_SETTINGS_COMPONENT_TEMPLATE,
                SYNTHETICS_MAPPINGS_COMPONENT_TEMPLATE,
                SYNTHETICS_SETTINGS_COMPONENT_TEMPLATE
            );
        } else {
            return Collections.emptyList();
        }
    }

    /** Composable index templates to install; empty when disabled. */
    @Override
    protected List<IndexTemplateConfig> getComposableTemplateConfigs() {
        if (stackTemplateEnabled) {
            return Arrays.asList(LOGS_INDEX_TEMPLATE, METRICS_INDEX_TEMPLATE, SYNTHETICS_INDEX_TEMPLATE);
        } else {
            return Collections.emptyList();
        }
    }

    /** Origin string used for client calls made on behalf of this registry. */
    @Override
    protected String getOrigin() {
        return ClientHelper.STACK_ORIGIN;
    }

    @Override
    protected boolean requiresMasterNode() {
        // Stack templates use the composable index template and component APIs,
        // these APIs aren't supported in 7.7 and earlier and in mixed cluster
        // environments this can cause a lot of ActionNotFoundTransportException
        // errors in the logs during rolling upgrades. If these templates
        // are only installed via elected master node then the APIs are always
        // there and the ActionNotFoundTransportException errors are then prevented.
        return true;
    }
}
package com.bs.interpreter;

import java.util.ArrayList;
import java.util.List;

import com.bs.interpreter.stack.BsStack;
import com.bs.interpreter.stack.Stack;
import com.bs.lang.Bs;
import com.bs.lang.BsConst;
import com.bs.lang.BsObject;
import com.bs.lang.builtin.BsBlock;
import com.bs.lang.builtin.BsChar;
import com.bs.lang.builtin.BsNumber;
import com.bs.lang.builtin.BsString;
import com.bs.lang.builtin.BsSymbol;
import com.bs.lang.message.BsCodeData;
import com.bs.parser.tree.ArgumentsNode;
import com.bs.parser.tree.AssignNode;
import com.bs.parser.tree.BlockNode;
import com.bs.parser.tree.CallNode;
import com.bs.parser.tree.CharacterNode;
import com.bs.parser.tree.ExpressionNode;
import com.bs.parser.tree.ExpressionsNode;
import com.bs.parser.tree.IdentifierNode;
import com.bs.parser.tree.IdentifierNode.State;
import com.bs.parser.tree.ListNode;
import com.bs.parser.tree.MessageNode;
import com.bs.parser.tree.MessagesNode;
import com.bs.parser.tree.Node;
import com.bs.parser.tree.NumberNode;
import com.bs.parser.tree.StatementsNode;
import com.bs.parser.tree.StringNode;
import com.bs.parser.tree.SymbolNode;

/**
 * Tree-walking interpreter for the Bs AST. Each {@code interpretXxx} method
 * evaluates one node kind against the current {@link Stack}; dispatch happens
 * through {@link Node#visit(Interpreter)} via {@link #interpret(Node)}.
 */
public class BsInterpreter implements Interpreter {

    // Scope/stack the interpreter resolves and stores variables in.
    private Stack stack;
    // Most recently visited node; exposed via lastNode() (e.g. for error reporting).
    private Node lastNode;

    public BsInterpreter(Stack stack) {
        this.stack = stack;
    }

    public BsInterpreter() {
        this(BsStack.getDefault());
    }

    /** Creates an interpreter sharing the given interpreter's stack. */
    public BsInterpreter(BsInterpreter interpreter) {
        this(interpreter.stack);
    }

    @Override
    public Object interpretNumber(NumberNode numberNode) {
        return BsNumber.clone(numberNode.number());
    }

    @Override
    public Object interpretVariable(IdentifierNode node) {
        // LOAD -> resolve the value from the stack; otherwise yield the raw name
        // (the STORE side is handled by interpretAssign).
        return node.state() == State.LOAD ? stack.lookup(node.variable()) : node.variable();
    }

    @Override
    public Object interpretString(StringNode stringNode) {
        return BsString.clone(stringNode.string());
    }

    @Override
    public Object interpretExpression(ExpressionNode node) {
        return interpret(node.left());
    }

    @Override
    public Object interpretCall(CallNode node) {
        /*
         * A null left-hand side means the call has no explicit receiver.
         */
        BsObject receiver = (BsObject) interpret(node.left());
        if (receiver != null && receiver.isBreakingContext()) {
            // Receiver evaluation itself aborted; annotate and propagate.
            Bs.updateError(receiver, node);
            return receiver;
        }
        if (receiver == null) {
            // Receiver-less calls are dispatched against the local object.
            receiver = (BsObject) stack.local();
        }
        Interpreter dispatcher = new BsCallInterpreter(this, receiver);
        return dispatcher.interpret(node.messages());
    }

    @Override
    public Object interpretExpressions(ExpressionsNode node) {
        List<BsObject> results = new ArrayList<BsObject>();
        for (Node child : node.childrens()) {
            BsObject result = (BsObject) interpret(child);
            if (result.isBreakingContext()) {
                // Error/return/break short-circuits the whole expression list.
                return result;
            }
            results.add(result);
        }
        return results;
    }

    @Override
    public Object interpretMessage(MessageNode messageNode) {
        throw new UnsupportedOperationException("Handled in overriden method.");
    }

    @Override
    public Object interpretMessages(MessagesNode messagesNode) {
        throw new UnsupportedOperationException("Handled in overriden method.");
    }

    @Override
    public Object interpretStatements(StatementsNode node) {
        BsObject outcome = null;
        for (Node statement : node.childrens()) {
            outcome = (BsObject) interpret(statement);
            // Errors and explicit returns both abort the statement sequence.
            if (outcome.isError() || outcome.isReturning()) {
                return outcome;
            }
        }
        // Value of the final statement (null for an empty sequence).
        return outcome;
    }

    @Override
    public Object interpretAssign(AssignNode node) {
        String name = (String) interpret(node.identifier());
        BsObject value = (BsObject) interpret(node.expression());
        if (value.isError()) {
            Bs.updateError(value, node);
            return value;
        }
        stack.enter(name, value);
        return value;
    }

    @SuppressWarnings("unchecked")
    @Override
    public Object interpretBlock(BlockNode node) {
        List<String> parameters = (List<String>) interpret(node.arguments());
        if (parameters == null) {
            parameters = new ArrayList<String>();
        }
        BsCodeData data = new BsCodeData(parameters, node.statements());
        // Capture the defining scope so the block behaves as a closure.
        data.stack = stack.clone();
        if (node.isVariable()) {
            // Variadic block: arity of -1 marks "any number of arguments".
            data.arity = -1;
        }
        return BsBlock.create(data);
    }

    @Override
    public Object interpretArguments(ArgumentsNode argumentsNode) {
        List<String> names = new ArrayList<String>();
        for (Node child : argumentsNode.childrens()) {
            names.add((String) interpret(child));
        }
        return names;
    }

    /**
     * Central dispatch: records the node as lastNode and lets it visit back
     * into this interpreter. Returns null for a null node.
     */
    @Override
    public Object interpret(Node node) {
        if (node == null) {
            return null;
        }
        lastNode = node;
        return node.visit(this);
    }

    @Override
    public Object interpretList(ListNode node) {
        Object elements = interpret(node.expressions());
        // interpretExpressions yields a BsObject only on error/break; lists come back as List.
        if (elements instanceof BsObject && ((BsObject) elements).isError()) {
            Bs.updateError((BsObject) elements, node);
            return elements;
        }
        if (elements == null) {
            elements = new ArrayList<BsObject>();
        }
        return BsObject.value(BsConst.List, elements);
    }

    @Override
    public Object interpretSymbol(SymbolNode node) {
        return BsSymbol.get(node.string());
    }

    @Override
    public Object interpretCharacter(CharacterNode node) {
        return BsChar.clone((Character) node.value());
    }

    /** The most recently dispatched node, or null if nothing was interpreted yet. */
    public Node lastNode() {
        return lastNode;
    }
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.iottwinmaker.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;

/**
 * <p>
 * An object that specifies how to interpolate data in a list.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/iottwinmaker-2021-11-29/InterpolationParameters"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class InterpolationParameters implements Serializable, Cloneable, StructuredPojo {

    /** The interpolation type (see {@link InterpolationType} for allowed values). */
    private String interpolationType;

    /** The interpolation time interval in seconds. */
    private Long intervalInSeconds;

    /**
     * Sets the interpolation type.
     *
     * @param interpolationType
     *        The interpolation type.
     * @see InterpolationType
     */
    public void setInterpolationType(String interpolationType) {
        this.interpolationType = interpolationType;
    }

    /**
     * Returns the interpolation type.
     *
     * @return The interpolation type.
     * @see InterpolationType
     */
    public String getInterpolationType() {
        return this.interpolationType;
    }

    /**
     * Sets the interpolation type and returns this object for chaining.
     *
     * @param interpolationType
     *        The interpolation type.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see InterpolationType
     */
    public InterpolationParameters withInterpolationType(String interpolationType) {
        setInterpolationType(interpolationType);
        return this;
    }

    /**
     * Enum overload of {@link #withInterpolationType(String)}; stores the enum's string form.
     *
     * @param interpolationType
     *        The interpolation type.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see InterpolationType
     */
    public InterpolationParameters withInterpolationType(InterpolationType interpolationType) {
        return withInterpolationType(interpolationType.toString());
    }

    /**
     * Sets the interpolation time interval in seconds.
     *
     * @param intervalInSeconds
     *        The interpolation time interval in seconds.
     */
    public void setIntervalInSeconds(Long intervalInSeconds) {
        this.intervalInSeconds = intervalInSeconds;
    }

    /**
     * Returns the interpolation time interval in seconds.
     *
     * @return The interpolation time interval in seconds.
     */
    public Long getIntervalInSeconds() {
        return this.intervalInSeconds;
    }

    /**
     * Sets the interpolation time interval in seconds and returns this object for chaining.
     *
     * @param intervalInSeconds
     *        The interpolation time interval in seconds.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public InterpolationParameters withIntervalInSeconds(Long intervalInSeconds) {
        setIntervalInSeconds(intervalInSeconds);
        return this;
    }

    // Null-safe equality used by equals(); equivalent to java.util.Objects.equals.
    private static boolean fieldsEqual(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder buffer = new StringBuilder("{");
        if (interpolationType != null) {
            buffer.append("InterpolationType: ").append(interpolationType).append(",");
        }
        if (intervalInSeconds != null) {
            buffer.append("IntervalInSeconds: ").append(intervalInSeconds);
        }
        return buffer.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof InterpolationParameters)) {
            return false;
        }
        InterpolationParameters other = (InterpolationParameters) obj;
        return fieldsEqual(other.getInterpolationType(), getInterpolationType())
                && fieldsEqual(other.getIntervalInSeconds(), getIntervalInSeconds());
    }

    @Override
    public int hashCode() {
        // Same 31-based accumulation as the generated form (null contributes 0).
        int result = 1;
        result = 31 * result + (interpolationType == null ? 0 : interpolationType.hashCode());
        result = 31 * result + (intervalInSeconds == null ? 0 : intervalInSeconds.hashCode());
        return result;
    }

    @Override
    public InterpolationParameters clone() {
        try {
            return (InterpolationParameters) super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen: the class implements Cloneable.
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.iottwinmaker.model.transform.InterpolationParametersMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
/*
 * Copyright (c) 2005-2011, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.wso2.carbon.apimgt.impl.utils;

import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.apimgt.api.APIManagementException;
import org.wso2.carbon.apimgt.api.APIManagerDatabaseException;
import org.wso2.carbon.apimgt.impl.APIConstants;
import org.wso2.carbon.apimgt.impl.APIManagerConfiguration;
import org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder;

import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import javax.sql.DataSource;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.Charset;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

/**
 * Static JDBC helper for the API Manager database: lazily resolves the JNDI
 * data source configured in api-manager.xml and provides connection and
 * resource-cleanup utilities.
 */
public final class APIMgtDBUtil {

    private static final Log log = LogFactory.getLog(APIMgtDBUtil.class);

    // Lazily initialized in initialize(); volatile for double-checked locking.
    private static volatile DataSource dataSource = null;

    // NOTE(review): appears unused within this class; kept for binary/source compatibility.
    private static final String DB_CHECK_SQL = "SELECT * FROM AM_SUBSCRIBER";
    private static final String DATA_SOURCE_NAME = "DataSourceName";

    /** Utility class; not meant to be instantiated. */
    private APIMgtDBUtil() {
    }

    /**
     * Initializes the data source by looking up the JNDI name configured under
     * {@code DataSourceName} in api-manager.xml. Safe to call multiple times;
     * only the first successful call performs the lookup.
     *
     * @throws APIManagerDatabaseException if the JNDI lookup of the configured data source fails
     */
    public static void initialize() throws APIManagerDatabaseException {
        if (dataSource != null) {
            return;
        }
        synchronized (APIMgtDBUtil.class) {
            // Double-checked locking: dataSource is volatile, so this is safe.
            if (dataSource == null) {
                if (log.isDebugEnabled()) {
                    log.debug("Initializing data source");
                }
                APIManagerConfiguration config = ServiceReferenceHolder.getInstance().
                        getAPIManagerConfigurationService().getAPIManagerConfiguration();
                String dataSourceName = config.getFirstProperty(DATA_SOURCE_NAME);
                if (dataSourceName != null) {
                    try {
                        Context ctx = new InitialContext();
                        dataSource = (DataSource) ctx.lookup(dataSourceName);
                    } catch (NamingException e) {
                        throw new APIManagerDatabaseException("Error while looking up the data " +
                                "source: " + dataSourceName, e);
                    }
                } else {
                    log.error(DATA_SOURCE_NAME + " not defined in api-manager.xml.");
                }
            }
        }
    }

    /**
     * Utility method to get a new database connection.
     *
     * @return Connection
     * @throws java.sql.SQLException if the data source is not configured or a connection cannot be obtained
     */
    public static Connection getConnection() throws SQLException {
        if (dataSource != null) {
            return dataSource.getConnection();
        }
        throw new SQLException("Data source is not configured properly.");
    }

    /**
     * Utility method to close the connection streams.
     * Resources are closed in reverse order of acquisition (ResultSet, then
     * Statement, then Connection), as recommended by JDBC: closing the
     * connection first can invalidate the statement/result set and, with
     * pooled connections, mask resource leaks.
     *
     * @param preparedStatement PreparedStatement
     * @param connection        Connection
     * @param resultSet         ResultSet
     */
    public static void closeAllConnections(PreparedStatement preparedStatement, Connection connection,
                                           ResultSet resultSet) {
        closeResultSet(resultSet);
        closeStatement(preparedStatement);
        closeConnection(connection);
    }

    /**
     * Close Connection, logging (not propagating) any failure.
     *
     * @param dbConnection Connection
     */
    private static void closeConnection(Connection dbConnection) {
        if (dbConnection != null) {
            try {
                dbConnection.close();
            } catch (SQLException e) {
                log.warn("Database error. Could not close database connection. Continuing with " +
                        "others. - " + e.getMessage(), e);
            }
        }
    }

    /**
     * Close ResultSet, logging (not propagating) any failure.
     *
     * @param resultSet ResultSet
     */
    private static void closeResultSet(ResultSet resultSet) {
        if (resultSet != null) {
            try {
                resultSet.close();
            } catch (SQLException e) {
                log.warn("Database error. Could not close ResultSet  - " + e.getMessage(), e);
            }
        }
    }

    /**
     * Close PreparedStatement, logging (not propagating) any failure.
     *
     * @param preparedStatement PreparedStatement
     */
    public static void closeStatement(PreparedStatement preparedStatement) {
        if (preparedStatement != null) {
            try {
                preparedStatement.close();
            } catch (SQLException e) {
                log.warn("Database error. Could not close PreparedStatement. Continuing with" +
                        " others. - " + e.getMessage(), e);
            }
        }
    }

    /**
     * Function converts IS to String. Used for handling blobs.
     *
     * @param is - The Input Stream
     * @return - The inputStream as a String, or null if conversion fails (error is logged)
     */
    public static String getStringFromInputStream(InputStream is) {
        String str = null;
        try {
            str = IOUtils.toString(is, "UTF-8");
        } catch (IOException e) {
            log.error("Error occurred while converting input stream to string.", e);
        }
        return str;
    }

    /**
     * Function converts IS to byte[]. Used for handling inputstreams.
     *
     * @param is - The Input Stream
     * @return - The inputStream as a byte array, or null if conversion fails (error is logged)
     */
    public static byte[] getBytesFromInputStream(InputStream is) {
        byte[] byteArray = null;
        try {
            byteArray = IOUtils.toByteArray(is);
        } catch (IOException e) {
            log.error("Error occurred while converting input stream to byte array.", e);
        }
        return byteArray;
    }

    /**
     * Set autocommit state of the connection. Failures are logged, not propagated.
     *
     * @param dbConnection Connection
     * @param autoCommit   autoCommitState
     */
    public static void setAutoCommit(Connection dbConnection, boolean autoCommit) {
        if (dbConnection != null) {
            try {
                dbConnection.setAutoCommit(autoCommit);
            } catch (SQLException e) {
                log.error("Could not set auto commit back to initial state", e);
            }
        }
    }

    /**
     * Handle connection rollback logic. Always rethrows the original exception
     * so that it can be handled centrally by the caller (previously the
     * original exception was silently swallowed when the connection was null,
     * contradicting this contract).
     *
     * @param connection Connection
     * @param error      Error message to be logged
     * @param e          Original SQLException
     * @throws SQLException the original exception {@code e}, always
     */
    public static void rollbackConnection(Connection connection, String error, SQLException e) throws SQLException {
        if (connection != null) {
            try {
                connection.rollback();
            } catch (SQLException rollbackException) {
                // Rollback itself failed; log it and still surface the original error.
                log.error(error, rollbackException);
            }
        }
        // Rethrow original exception so that it can be handled in the common catch clause of the calling method
        throw e;
    }
}
/*
 * Copyright 2014 http://Bither.net
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package net.bither.viewsystem.froms;

import net.bither.BitherSetting;
import net.bither.bitherj.crypto.PasswordSeed;
import net.bither.bitherj.crypto.SecureCharSequence;
import net.bither.bitherj.delegate.IPasswordGetter;
import net.bither.bitherj.delegate.IPasswordGetterDelegate;
import net.bither.bitherj.utils.Utils;
import net.bither.fonts.AwesomeIcon;
import net.bither.languages.MessageKey;
import net.bither.model.Check;
import net.bither.preference.UserPreference;
import net.bither.utils.CheckUtil;
import net.bither.utils.LocaliserUtils;
import net.bither.utils.PasswordStrengthUtil;
import net.bither.viewsystem.TextBoxes;
import net.bither.viewsystem.base.Labels;
import net.bither.viewsystem.base.Panels;
import net.bither.viewsystem.dialogs.DialogConfirmTask;
import net.bither.viewsystem.dialogs.MessageDialog;
import net.bither.viewsystem.listener.ICheckPasswordListener;
import net.bither.viewsystem.listener.IDialogPasswordListener;
import net.miginfocom.swing.MigLayout;

import javax.swing.*;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.util.ArrayList;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;

/**
 * Wizard panel that prompts the user for a wallet password.
 *
 * Two modes are visible in this code:
 * - verify mode: a PasswordSeed exists (or a ICheckPasswordListener is set),
 *   only one field is shown and the entered password is checked against it;
 * - set mode: no PasswordSeed, a confirmation field and a strength bar are
 *   shown and the two entries must match.
 *
 * The result is delivered asynchronously through IDialogPasswordListener
 * (null when the panel is dismissed without a valid password).
 */
public class PasswordPanel extends WizardPanel {

    private JLabel verificationStatusLabel;
    private JPasswordField newPassword;          // primary password field
    private JPasswordField repeatNewPassword;    // confirmation field (set mode only)
    private JLabel labPassword;
    private JLabel labConfirmPassword;
    private PasswordSeed passwordSeed;           // non-null => a password already exists (verify mode)
    private IDialogPasswordListener listener;    // receives the final password (or null)
    private ICheckPasswordListener checkPasswordListener; // optional external verifier
    private boolean passwordEntered = false;     // true once a password was accepted
    private boolean checkPre = true;
    private ExecutorService executor;            // runs the async password check
    private boolean etPasswordConfirmIsVisible = false;
    private JProgressBar pb;                     // strength meter
    private JLabel labStrength;
    private JPanel progressPanel;
    private JPanel newPasswordPanel;

    public PasswordPanel(IDialogPasswordListener dialogPasswordListener) {
        super(MessageKey.SHOW_CHANGE_PASSWORD_WIZARD, AwesomeIcon.LOCK);
        updateTitle(LocaliserUtils.getString("import_private_key_qr_code_password"));
        this.listener = dialogPasswordListener;
        passwordSeed = getPasswordSeed();
        progressPanel = getProgressPanel();
        newPasswordPanel = getNewPasswordPanel();
        setOkAction(new AbstractAction() {
            @Override
            public void actionPerformed(ActionEvent e) {
                onOK();
            }
        });
    }

    // Current stored password seed, if any (defines verify vs. set mode).
    private PasswordSeed getPasswordSeed() {
        return PasswordSeed.getPasswordSeed();
    }

    /** Lays out the strength bar and password fields; wires listeners. */
    @Override
    public void initialiseContent(JPanel panel) {
        panel.setLayout(new MigLayout(
                Panels.migXYLayout(),
                "[]", // Column constraints
                "[][]" // Row constraints
        ));
        panel.add(progressPanel, "align center,shrink,wrap");
        panel.add(newPasswordPanel, "wrap");
        // A stored seed means we only verify, so hide the confirmation row.
        if (PasswordSeed.hasPasswordSeed()) {
            labConfirmPassword.setVisible(false);
            repeatNewPassword.setVisible(false);
        }
        passwordCheck.setCheckListener(passwordCheckListener);
        newPassword.addKeyListener(passwordWatcher);
        repeatNewPassword.addKeyListener(passwordWatcher);
        configureCheckPre();
        showCheckPre();
    }

    @Override
    public void showPanel() {
        super.showPanel();
        newPassword.requestFocus();
    }

    /** Builds the panel containing the strength progress bar and its label. */
    private JPanel getProgressPanel() {
        JPanel pbPanel = Panels.newPanel();
        pbPanel.setLayout(new MigLayout(
                Panels.migXYLayout(),
                "[]", // Column constraints
                "[][]" // Row constraints
        ));
        pb = new JProgressBar();
        // Custom painter so the bar is a flat rectangle in the strength color.
        Painter p = new Painter() {
            @Override
            public void paint(Graphics2D g, Object object, int width, int height) {
                JProgressBar bar = (JProgressBar) object;
                g.setColor(bar.getForeground());
                g.fillRect(0, 1, width - 2, height - 2);
            }
        };
        // install custom painter on the bar (Nimbus look-and-feel override)
        UIDefaults properties = new UIDefaults();
        properties.put("ProgressBar[Enabled].foregroundPainter", p);
        pb.setBorderPainted(false);
        pb.putClientProperty("Nimbus.Overrides", properties);
        pb.setStringPainted(false);
        pb.setMaximum(5);
        pb.setVisible(false);
        labStrength = Labels.newValueLabel("");
        pbPanel.add(pb, "align center,shrink");
        pbPanel.add(labStrength, "align center,shrink");
        return pbPanel;
    }

    /** Builds the panel with the password / confirmation fields and labels. */
    private JPanel getNewPasswordPanel() {
        JPanel panel = Panels.newPanel(
                new MigLayout(
                        Panels.migXLayout(), // Layout
                        "[][][][]", // Columns (require 4 columns for alignment with EnterPasswordView)
                        "[][][]" // Rows
                ));
        newPassword = TextBoxes.newPassword();
        newPassword.setName(MessageKey.ENTER_NEW_PASSWORD.getKey());
        repeatNewPassword = TextBoxes.newPassword();
        repeatNewPassword.setName(MessageKey.RETYPE_NEW_PASSWORD.getKey());
        // Bind a document listener to allow instant update of UI to matched passwords
        newPassword.getDocument().addDocumentListener(
                new DocumentListener() {
                    @Override
                    public void insertUpdate(DocumentEvent e) {
                        updateModel();
                    }

                    @Override
                    public void removeUpdate(DocumentEvent e) {
                        updateModel();
                    }

                    @Override
                    public void changedUpdate(DocumentEvent e) {
                        updateModel();
                    }

                    // Recomputes and displays password strength (set mode only).
                    private void updateModel() {
                        if (!repeatNewPassword.isVisible()) {
                            return;
                        }
                        if (!pb.isVisible()) {
                            pb.setVisible(true);
                        }
                        SecureCharSequence secureCharSequence = new SecureCharSequence(newPassword.getPassword());
                        PasswordStrengthUtil.PasswordStrength strength = PasswordStrengthUtil.checkPassword
                                (secureCharSequence);
                        pb.setValue(strength.getValue() + 1);
                        pb.setForeground(strength.getColor());
                        labStrength.setText(strength.getName());
                        // Wipe the copied password chars as soon as possible.
                        secureCharSequence.wipe();
                    }
                }
        );
        // Bind a document listener to allow instant update of UI to matched passwords
        repeatNewPassword.getDocument().addDocumentListener(
                new DocumentListener() {
                    @Override
                    public void insertUpdate(DocumentEvent e) {
                        updateModel();
                    }

                    @Override
                    public void removeUpdate(DocumentEvent e) {
                        updateModel();
                    }

                    @Override
                    public void changedUpdate(DocumentEvent e) {
                        updateModel();
                    }

                    // Intentionally empty: confirmation edits do not affect the strength UI.
                    private void updateModel() {

                    }
                });
        verificationStatusLabel = Labels.newVerificationStatus(".credentials", true);
        verificationStatusLabel.setVisible(false);
        labPassword = Labels.newEnterPassword();
        panel.add(labPassword);
        panel.add(newPassword, " wrap");
        labConfirmPassword = Labels.newRetypeNewPassword();
        panel.add(labConfirmPassword);
        panel.add(repeatNewPassword, "wrap");
        panel.add(verificationStatusLabel, "span 4,grow,push");
        return panel;
    }

    /**
     * Key listener that (a) snapshots the field contents before each keystroke,
     * (b) reverts the field if the new content fails Utils.validPassword, and
     * (c) re-evaluates the OK button state. Snapshots are wiped after use.
     */
    private KeyListener passwordWatcher = new KeyListener() {
        private SecureCharSequence password;
        private SecureCharSequence passwordConfirm;

        @Override
        public void keyTyped(KeyEvent keyEvent) {
        }

        @Override
        public void keyPressed(KeyEvent keyEvent) {
            // Wipe the previous snapshots before taking new ones.
            if (password != null) {
                password.wipe();
            }
            if (passwordConfirm != null) {
                passwordConfirm.wipe();
            }
            password = new SecureCharSequence(newPassword.getPassword());
            passwordConfirm = new SecureCharSequence(repeatNewPassword.getPassword());
        }

        @Override
        public void keyReleased(KeyEvent keyEvent) {
            SecureCharSequence p = new SecureCharSequence(newPassword.getPassword());
            if (p.length() > 0) {
                // Invalid characters: restore the pre-keystroke snapshot.
                if (!Utils.validPassword(p)) {
                    newPassword.setText(password.toString());
                }
            }
            p.wipe();
            if (repeatNewPassword.isVisible()) {
                SecureCharSequence pc = new SecureCharSequence(repeatNewPassword.getPassword());
                if (pc.length() > 0) {
                    if (!Utils.validPassword(pc)) {
                        repeatNewPassword.setText(passwordConfirm.toString());
                    }
                }
                pc.wipe();
            }
            checkValid();
            if (password != null) {
                password.wipe();
            }
            if (passwordConfirm != null) {
                passwordConfirm.wipe();
            }
        }
    };

    /**
     * OK handler: validates match (set mode) and, if enabled in preferences,
     * password strength; rejects Weak, asks confirmation for Normal, then
     * proceeds to confirmPassword().
     */
    private void onOK() {
        SecureCharSequence password = new SecureCharSequence(newPassword.getPassword());
        SecureCharSequence passwordConfirm = new SecureCharSequence(repeatNewPassword.getPassword());
        if (password == null || password.length() == 0) {
            return;
        }
        // Set mode: both entries must match.
        if (passwordSeed == null && !password.equals(passwordConfirm) && checkPre) {
            password.wipe();
            passwordConfirm.wipe();
            new MessageDialog((LocaliserUtils.getString
                    ("add_address_generate_address_password_not_same"))).showMsg();
            repeatNewPassword.requestFocus();
            return;
        }
        PasswordStrengthUtil.PasswordStrength strength = PasswordStrengthUtil.checkPassword
                (password);
        password.wipe();
        passwordConfirm.wipe();
        if (UserPreference.getInstance().getCheckPasswordStrength() && repeatNewPassword.isVisible()) {
            if (strength == PasswordStrengthUtil.PasswordStrength.Weak) {
                // Weak passwords are rejected outright.
                String msg = Utils.format(LocaliserUtils.getString("password_strength_error"),
                        strength.getName());
                new MessageDialog(msg).showMsg();
                return;
            } else if (strength == PasswordStrengthUtil.PasswordStrength.Normal) {
                // Normal strength: warn and let the user decide.
                String msg = Utils.format(LocaliserUtils.getString("password_strength_warning"),
                        strength.getName());
                DialogConfirmTask dialogConfirmTask = new DialogConfirmTask(msg, new Runnable() {
                    @Override
                    public void run() {
                        SwingUtilities.invokeLater(new Runnable() {
                            @Override
                            public void run() {
                                confirmPassword();
                            }
                        });
                    }
                });
                dialogConfirmTask.pack();
                dialogConfirmTask.setVisible(true);
                return;
            }
        }
        confirmPassword();
    }

    /**
     * Either runs the async password check (verify mode / external verifier)
     * or accepts the password immediately and closes the panel.
     */
    private void confirmPassword() {
        if ((passwordSeed != null && checkPre) || checkPasswordListener != null) {
            ArrayList<Check> checks = new ArrayList<Check>();
            checks.add(passwordCheck);
            executor = CheckUtil.runChecks(checks, 1);
        } else {
            passwordEntered = true;
            closePanel();
        }
    }

    // Shows/hides the confirmation row according to checkPre and passwordSeed.
    private void configureCheckPre() {
        if (checkPre) {
            if (passwordSeed != null) {
                etPasswordConfirmIsVisible = false;
                repeatNewPassword.setVisible(false);
                labConfirmPassword.setVisible(false);
            } else {
                etPasswordConfirmIsVisible = true;
                repeatNewPassword.setVisible(true);
                labConfirmPassword.setVisible(true);
            }
        } else {
            etPasswordConfirmIsVisible = false;
            repeatNewPassword.setVisible(false);
            labConfirmPassword.setVisible(false);
        }
    }

    // Enables OK only when field lengths are within the configured bounds.
    private void checkValid() {
        setOkEnabled(false);
        int passwordLength = newPassword.getPassword().length;
        if (passwordLength >= BitherSetting.PASSWORD_LENGTH_MIN && passwordLength <=
                BitherSetting.PASSWORD_LENGTH_MAX) {
            if (etPasswordConfirmIsVisible) {
                int passwordConfirmLength = repeatNewPassword.getPassword().length;
                if (passwordConfirmLength >= BitherSetting.PASSWORD_LENGTH_MIN && passwordConfirmLength <=
                        BitherSetting.PASSWORD_LENGTH_MAX) {
                    setOkEnabled(true);
                } else {
                    setOkEnabled(false);
                }
            } else {
                setOkEnabled(true);
            }
        }
    }

    // Placeholder for a "wrong password" shake animation; intentionally empty.
    private void shake() {

    }

    public void setCheckPre(boolean check) {
        checkPre = check;
        configureCheckPre();
        showCheckPre();
    }

    public void setCheckPasswordListener(ICheckPasswordListener checkPasswordListener) {
        this.checkPasswordListener = checkPasswordListener;
    }

    // Updates the panel title to match the current (set vs. verify) mode.
    public void showCheckPre() {
        if (checkPre) {
            if (etPasswordConfirmIsVisible) {
                updateTitle(LocaliserUtils.getString("add_address_generate_address_password_set_label"));
            } else {
                updateTitle(LocaliserUtils.getString("add_address_generate_address_password_label"));
            }
        }
    }

    /** Reacts to the async password check finishing: close on success, reset and warn on failure. */
    private Check.CheckListener passwordCheckListener = new Check.CheckListener() {
        @Override
        public void onCheckBegin(Check check) {
            // pb.setVisible(true);
        }

        @Override
        public void onCheckEnd(Check check, boolean success) {
            if (executor != null) {
                executor.shutdown();
                executor = null;
            }
            if (success) {
                passwordEntered = true;
                closePanel();
            } else {
                newPassword.setText("");
                checkValid();
                new MessageDialog(LocaliserUtils.getString("password_wrong")).showMsg();
                shake();
            }
        }
    };

    public void setTitle(String title) {
        updateTitle(title);
    }

    /**
     * The actual password verification: delegates to the external listener if
     * present, otherwise to the stored PasswordSeed; trivially true otherwise.
     * The temporary password copy is wiped after the check.
     */
    private Check passwordCheck = new Check("", new Check.ICheckAction() {
        @Override
        public boolean check() {
            SecureCharSequence password = new SecureCharSequence(newPassword.getPassword());
            if (checkPasswordListener != null) {
                boolean result = checkPasswordListener.checkPassword(password);
                password.wipe();
                return result;
            } else if (passwordSeed != null) {
                boolean result = passwordSeed.checkPassword(password);
                password.wipe();
                return result;
            } else {
                return true;
            }
        }
    });

    /**
     * Closes the panel on the EDT and notifies the listener with the entered
     * password (or null when none was accepted); clears the fields afterwards.
     */
    @Override
    public void closePanel() {
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                PasswordPanel.super.closePanel();
                if (listener != null) {
                    if (passwordEntered) {
                        listener.onPasswordEntered(new SecureCharSequence(newPassword.getPassword()));
                        newPassword.setText("");
                        repeatNewPassword.setText("");
                    } else {
                        listener.onPasswordEntered(null);
                    }
                }
            }
        });
    }

    /**
     * Blocking bridge between a background thread that needs a password and
     * the Swing UI: getPassword() shows a PasswordPanel on the EDT and blocks
     * on a lock/condition until onPasswordEntered() is called.
     *
     * NOTE(review): if onPasswordEntered() runs before the caller reaches
     * await(), the signal is not latched by any state flag other than the
     * password field itself — confirm this race is acceptable for callers.
     */
    public static final class PasswordGetter implements IDialogPasswordListener, IPasswordGetter {
        private ReentrantLock getPasswordLock = new ReentrantLock();
        private Condition withPasswordCondition = getPasswordLock.newCondition();
        private SecureCharSequence password;
        private IPasswordGetterDelegate delegate;

        public PasswordGetter() {
            this(null);
        }

        public PasswordGetter(IPasswordGetterDelegate delegate) {
            this.delegate = delegate;
        }

        public void setPassword(SecureCharSequence password) {
            this.password = password;
        }

        public boolean hasPassword() {
            return password != null;
        }

        /**
         * Returns the cached password, or shows the password panel and blocks
         * the calling (non-EDT) thread until the user responds. May return
         * null if the dialog was dismissed.
         */
        public SecureCharSequence getPassword() {
            if (password == null) {
                SwingUtilities.invokeLater(new Runnable() {
                    @Override
                    public void run() {
                        if (delegate != null) {
                            delegate.beforePasswordDialogShow();
                        }
                        PasswordPanel d = new PasswordPanel(PasswordGetter.this);
                        d.showPanel();
                    }
                });
                try {
                    getPasswordLock.lockInterruptibly();
                    withPasswordCondition.await();
                } catch (InterruptedException e) {
                    e.printStackTrace();
                } finally {
                    getPasswordLock.unlock();
                }
            }
            return password;
        }

        /** Callback from the panel: stores the password and wakes the waiting thread. */
        @Override
        public void onPasswordEntered(SecureCharSequence password) {
            setPassword(password);
            try {
                getPasswordLock.lock();
                withPasswordCondition.signal();
            } finally {
                getPasswordLock.unlock();
            }
            if (delegate != null && password != null) {
                delegate.afterPasswordDialogDismiss();
            }
        }

        /** Wipes and forgets the cached password. */
        public void wipe() {
            if (password != null) {
                password.wipe();
                password = null;
            }
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.tez.mapreduce.examples; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.ArrayList; import java.util.EnumSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.StringTokenizer; import java.util.TreeMap; import org.apache.commons.cli.ParseException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.FileAlreadyExistsException; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.Reducer; import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import org.apache.hadoop.mapreduce.lib.input.TextInputFormat; import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat; import org.apache.hadoop.mapreduce.security.TokenCache; import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.GenericOptionsParser; import org.apache.hadoop.util.Tool; import 
org.apache.hadoop.util.ToolRunner; import org.apache.hadoop.yarn.api.records.LocalResource; import org.apache.tez.client.TezClientUtils; import org.apache.tez.client.TezClient; import org.apache.tez.common.TezUtils; import org.apache.tez.common.security.DAGAccessControls; import org.apache.tez.dag.api.DAG; import org.apache.tez.dag.api.DataSourceDescriptor; import org.apache.tez.dag.api.Edge; import org.apache.tez.dag.api.PreWarmVertex; import org.apache.tez.dag.api.ProcessorDescriptor; import org.apache.tez.dag.api.TezConfiguration; import org.apache.tez.dag.api.TezException; import org.apache.tez.dag.api.UserPayload; import org.apache.tez.dag.api.Vertex; import org.apache.tez.dag.api.client.DAGClient; import org.apache.tez.dag.api.client.DAGStatus; import org.apache.tez.dag.api.client.StatusGetOpts; import org.apache.tez.mapreduce.examples.helpers.SplitsInClientOptionParser; import org.apache.tez.mapreduce.hadoop.MRHelpers; import org.apache.tez.mapreduce.hadoop.MRInputHelpers; import org.apache.tez.mapreduce.hadoop.MRJobConfig; import org.apache.tez.mapreduce.input.MRInputLegacy; import org.apache.tez.mapreduce.output.MROutputLegacy; import org.apache.tez.mapreduce.processor.map.MapProcessor; import org.apache.tez.mapreduce.processor.reduce.ReduceProcessor; import org.apache.tez.runtime.library.api.TezRuntimeConfiguration; import org.apache.tez.runtime.library.conf.OrderedPartitionedKVEdgeConfig; import org.apache.tez.runtime.library.partitioner.HashPartitioner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.Maps; /** * An MRR job built on top of word count to return words sorted by * their frequency of occurrence. * * Use -DUSE_TEZ_SESSION=true to run jobs in a session mode. * If multiple input/outputs are provided, this job will process each pair * as a separate DAG in a sequential manner. 
* Use -DINTER_JOB_SLEEP_INTERVAL=<N> where N is the sleep interval in seconds * between the sequential DAGs. */ public class TestOrderedWordCount extends Configured implements Tool { private static Logger LOG = LoggerFactory.getLogger(TestOrderedWordCount.class); private static final String DAG_VIEW_ACLS = "tez.testorderedwordcount.view-acls"; private static final String DAG_MODIFY_ACLS = "tez.testorderedwordcount.modify-acls"; public static class TokenizerMapper extends Mapper<Object, Text, Text, IntWritable>{ private final static IntWritable one = new IntWritable(1); private Text word = new Text(); public void map(Object key, Text value, Context context ) throws IOException, InterruptedException { StringTokenizer itr = new StringTokenizer(value.toString()); while (itr.hasMoreTokens()) { word.set(itr.nextToken()); context.write(word, one); } } } public static class IntSumReducer extends Reducer<Text,IntWritable,IntWritable, Text> { private IntWritable result = new IntWritable(); public void reduce(Text key, Iterable<IntWritable> values, Context context ) throws IOException, InterruptedException { int sum = 0; for (IntWritable val : values) { sum += val.get(); } result.set(sum); context.write(result, key); } } /** * Shuffle ensures ordering based on count of employees per department * hence the final reducer is a no-op and just emits the department name * with the employee count per department. 
*/ public static class MyOrderByNoOpReducer extends Reducer<IntWritable, Text, Text, IntWritable> { public void reduce(IntWritable key, Iterable<Text> values, Context context ) throws IOException, InterruptedException { for (Text word : values) { context.write(word, key); } } } private Credentials credentials = new Credentials(); @VisibleForTesting public DAG createDAG(FileSystem fs, Configuration conf, Map<String, LocalResource> commonLocalResources, Path stagingDir, int dagIndex, String inputPath, String outputPath, boolean generateSplitsInClient, boolean useMRSettings, int intermediateNumReduceTasks) throws Exception { Configuration mapStageConf = new JobConf(conf); mapStageConf.set(MRJobConfig.MAP_CLASS_ATTR, TokenizerMapper.class.getName()); MRHelpers.translateMRConfToTez(mapStageConf, !useMRSettings); Configuration iReduceStageConf = new JobConf(conf); // TODO replace with auto-reduce parallelism iReduceStageConf.setInt(MRJobConfig.NUM_REDUCES, 2); iReduceStageConf.set(MRJobConfig.REDUCE_CLASS_ATTR, IntSumReducer.class.getName()); iReduceStageConf.set(TezRuntimeConfiguration.TEZ_RUNTIME_KEY_CLASS, Text.class.getName()); iReduceStageConf.set(TezRuntimeConfiguration.TEZ_RUNTIME_VALUE_CLASS, IntWritable.class.getName()); iReduceStageConf.setBoolean("mapred.mapper.new-api", true); MRHelpers.translateMRConfToTez(iReduceStageConf, !useMRSettings); Configuration finalReduceConf = new JobConf(conf); finalReduceConf.setInt(MRJobConfig.NUM_REDUCES, 1); finalReduceConf.set(MRJobConfig.REDUCE_CLASS_ATTR, MyOrderByNoOpReducer.class.getName()); finalReduceConf.set(TezRuntimeConfiguration.TEZ_RUNTIME_KEY_CLASS, IntWritable.class.getName()); finalReduceConf.set(TezRuntimeConfiguration.TEZ_RUNTIME_VALUE_CLASS, Text.class.getName()); MRHelpers.translateMRConfToTez(finalReduceConf, !useMRSettings); MRHelpers.configureMRApiUsage(mapStageConf); MRHelpers.configureMRApiUsage(iReduceStageConf); MRHelpers.configureMRApiUsage(finalReduceConf); List<Vertex> vertices = new 
ArrayList<Vertex>(); String mapStageHistoryText = TezUtils.convertToHistoryText("Initial Tokenizer Vertex", mapStageConf); DataSourceDescriptor dsd; if (generateSplitsInClient) { mapStageConf.set(MRJobConfig.INPUT_FORMAT_CLASS_ATTR, TextInputFormat.class.getName()); mapStageConf.set(FileInputFormat.INPUT_DIR, inputPath); mapStageConf.setBoolean("mapred.mapper.new-api", true); dsd = MRInputHelpers.configureMRInputWithLegacySplitGeneration(mapStageConf, stagingDir, true); } else { dsd = MRInputLegacy.createConfigBuilder(mapStageConf, TextInputFormat.class, inputPath).build(); } dsd.getInputDescriptor().setHistoryText(TezUtils.convertToHistoryText( "HDFS Input " + inputPath, mapStageConf)); Map<String, String> mapEnv = Maps.newHashMap(); MRHelpers.updateEnvBasedOnMRTaskEnv(mapStageConf, mapEnv, true); Map<String, String> reduceEnv = Maps.newHashMap(); MRHelpers.updateEnvBasedOnMRTaskEnv(mapStageConf, reduceEnv, false); Vertex mapVertex; ProcessorDescriptor mapProcessorDescriptor = ProcessorDescriptor.create(MapProcessor.class.getName()) .setUserPayload( TezUtils.createUserPayloadFromConf(mapStageConf)) .setHistoryText(mapStageHistoryText); if (!useMRSettings) { mapVertex = Vertex.create("initialmap", mapProcessorDescriptor); } else { mapVertex = Vertex.create("initialmap", mapProcessorDescriptor, -1, MRHelpers.getResourceForMRMapper(mapStageConf)); mapVertex.setTaskLaunchCmdOpts(MRHelpers.getJavaOptsForMRMapper(mapStageConf)); mapVertex.setTaskEnvironment(mapEnv); } mapVertex.addTaskLocalFiles(commonLocalResources) .addDataSource("MRInput", dsd); vertices.add(mapVertex); String iReduceStageHistoryText = TezUtils.convertToHistoryText("Intermediate Summation Vertex", iReduceStageConf); ProcessorDescriptor iReduceProcessorDescriptor = ProcessorDescriptor.create( ReduceProcessor.class.getName()) .setUserPayload(TezUtils.createUserPayloadFromConf(iReduceStageConf)) .setHistoryText(iReduceStageHistoryText); Vertex intermediateVertex; if (!useMRSettings) { intermediateVertex 
= Vertex.create("intermediate_reducer", iReduceProcessorDescriptor, intermediateNumReduceTasks); } else { intermediateVertex = Vertex.create("intermediate_reducer", iReduceProcessorDescriptor, intermediateNumReduceTasks, MRHelpers.getResourceForMRReducer(iReduceStageConf)); intermediateVertex.setTaskLaunchCmdOpts(MRHelpers.getJavaOptsForMRReducer(iReduceStageConf)); intermediateVertex.setTaskEnvironment(reduceEnv); } intermediateVertex.addTaskLocalFiles(commonLocalResources); vertices.add(intermediateVertex); String finalReduceStageHistoryText = TezUtils.convertToHistoryText("Final Sorter Vertex", finalReduceConf); UserPayload finalReducePayload = TezUtils.createUserPayloadFromConf(finalReduceConf); Vertex finalReduceVertex; ProcessorDescriptor finalReduceProcessorDescriptor = ProcessorDescriptor.create( ReduceProcessor.class.getName()) .setUserPayload(finalReducePayload) .setHistoryText(finalReduceStageHistoryText); if (!useMRSettings) { finalReduceVertex = Vertex.create("finalreduce", finalReduceProcessorDescriptor, 1); } else { finalReduceVertex = Vertex.create("finalreduce", finalReduceProcessorDescriptor, 1, MRHelpers.getResourceForMRReducer(finalReduceConf)); finalReduceVertex.setTaskLaunchCmdOpts(MRHelpers.getJavaOptsForMRReducer(finalReduceConf)); finalReduceVertex.setTaskEnvironment(reduceEnv); } finalReduceVertex.addTaskLocalFiles(commonLocalResources); finalReduceVertex.addDataSink("MROutput", MROutputLegacy.createConfigBuilder(finalReduceConf, TextOutputFormat.class, outputPath) .build()); finalReduceVertex.getDataSinks().get(0).getOutputDescriptor().setHistoryText( TezUtils.convertToHistoryText("HDFS Output " + outputPath, finalReduceConf)); vertices.add(finalReduceVertex); DAG dag = DAG.create("OrderedWordCount" + dagIndex); dag.setDAGInfo("{ \"context\": \"Tez\", \"description\": \"TestOrderedWordCount Job\" }"); for (int i = 0; i < vertices.size(); ++i) { dag.addVertex(vertices.get(i)); } OrderedPartitionedKVEdgeConfig edgeConf1 = 
OrderedPartitionedKVEdgeConfig .newBuilder(Text.class.getName(), IntWritable.class.getName(), HashPartitioner.class.getName()).setFromConfiguration(iReduceStageConf) .configureInput().useLegacyInput().done().build(); dag.addEdge( Edge.create(dag.getVertex("initialmap"), dag.getVertex("intermediate_reducer"), edgeConf1.createDefaultEdgeProperty())); OrderedPartitionedKVEdgeConfig edgeConf2 = OrderedPartitionedKVEdgeConfig .newBuilder(IntWritable.class.getName(), Text.class.getName(), HashPartitioner.class.getName()).setFromConfiguration(finalReduceConf) .configureInput().useLegacyInput().done().build(); dag.addEdge( Edge.create(dag.getVertex("intermediate_reducer"), dag.getVertex("finalreduce"), edgeConf2.createDefaultEdgeProperty())); updateDAGACls(conf, dag, dagIndex); return dag; } private void updateDAGACls(Configuration conf, DAG dag, int dagIndex) { LOG.info("Checking DAG specific ACLS"); DAGAccessControls accessControls = null; String suffix = "." + dagIndex; if (conf.get(DAG_VIEW_ACLS + suffix) != null || conf.get(DAG_MODIFY_ACLS + suffix) != null) { accessControls = new DAGAccessControls( conf.get(DAG_VIEW_ACLS + suffix), conf.get(DAG_MODIFY_ACLS + suffix)); } else if (conf.get(DAG_VIEW_ACLS) != null || conf.get(DAG_MODIFY_ACLS) != null) { accessControls = new DAGAccessControls( conf.get(DAG_VIEW_ACLS), conf.get(DAG_MODIFY_ACLS)); } if (accessControls != null) { LOG.info("Setting DAG specific ACLS"); dag.setAccessControls(accessControls); } } private static void printUsage() { String options = " [-generateSplitsInClient true/<false>]"; System.err.println("Usage: testorderedwordcount <in> <out>" + options); System.err.println("Usage (In Session Mode):" + " testorderedwordcount <in1> <out1> ... 
<inN> <outN>" + options); ToolRunner.printGenericCommandUsage(System.err); } @Override public int run(String[] args) throws Exception { Configuration conf = getConf(); String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs(); boolean generateSplitsInClient; SplitsInClientOptionParser splitCmdLineParser = new SplitsInClientOptionParser(); try { generateSplitsInClient = splitCmdLineParser.parse(otherArgs, false); otherArgs = splitCmdLineParser.getRemainingArgs(); } catch (ParseException e1) { System.err.println("Invalid options"); printUsage(); return 2; } boolean useTezSession = conf.getBoolean("USE_TEZ_SESSION", true); long interJobSleepTimeout = conf.getInt("INTER_JOB_SLEEP_INTERVAL", 0) * 1000; boolean retainStagingDir = conf.getBoolean("RETAIN_STAGING_DIR", false); boolean useMRSettings = conf.getBoolean("USE_MR_CONFIGS", true); // TODO needs to use auto reduce parallelism int intermediateNumReduceTasks = conf.getInt("IREDUCE_NUM_TASKS", 2); if (((otherArgs.length%2) != 0) || (!useTezSession && otherArgs.length != 2)) { printUsage(); return 2; } List<String> inputPaths = new ArrayList<String>(); List<String> outputPaths = new ArrayList<String>(); for (int i = 0; i < otherArgs.length; i+=2) { inputPaths.add(otherArgs[i]); outputPaths.add(otherArgs[i+1]); } UserGroupInformation.setConfiguration(conf); TezConfiguration tezConf = new TezConfiguration(conf); TestOrderedWordCount instance = new TestOrderedWordCount(); FileSystem fs = FileSystem.get(conf); String stagingDirStr = conf.get(TezConfiguration.TEZ_AM_STAGING_DIR, TezConfiguration.TEZ_AM_STAGING_DIR_DEFAULT) + Path.SEPARATOR + Long.toString(System.currentTimeMillis()); Path stagingDir = new Path(stagingDirStr); FileSystem pathFs = stagingDir.getFileSystem(tezConf); pathFs.mkdirs(new Path(stagingDirStr)); tezConf.set(TezConfiguration.TEZ_AM_STAGING_DIR, stagingDirStr); stagingDir = pathFs.makeQualified(new Path(stagingDirStr)); TokenCache.obtainTokensForNamenodes(instance.credentials, new 
Path[] {stagingDir}, conf); TezClientUtils.ensureStagingDirExists(tezConf, stagingDir); // No need to add jar containing this class as assumed to be part of // the tez jars. // TEZ-674 Obtain tokens based on the Input / Output paths. For now assuming staging dir // is the same filesystem as the one used for Input/Output. if (useTezSession) { LOG.info("Creating Tez Session"); tezConf.setBoolean(TezConfiguration.TEZ_AM_SESSION_MODE, true); } else { tezConf.setBoolean(TezConfiguration.TEZ_AM_SESSION_MODE, false); } TezClient tezSession = TezClient.create("OrderedWordCountSession", tezConf, null, instance.credentials); tezSession.start(); DAGStatus dagStatus = null; DAGClient dagClient = null; String[] vNames = { "initialmap", "intermediate_reducer", "finalreduce" }; Set<StatusGetOpts> statusGetOpts = EnumSet.of(StatusGetOpts.GET_COUNTERS); try { for (int dagIndex = 1; dagIndex <= inputPaths.size(); ++dagIndex) { if (dagIndex != 1 && interJobSleepTimeout > 0) { try { LOG.info("Sleeping between jobs, sleepInterval=" + (interJobSleepTimeout/1000)); Thread.sleep(interJobSleepTimeout); } catch (InterruptedException e) { LOG.info("Main thread interrupted. 
Breaking out of job loop"); break; } } String inputPath = inputPaths.get(dagIndex-1); String outputPath = outputPaths.get(dagIndex-1); if (fs.exists(new Path(outputPath))) { throw new FileAlreadyExistsException("Output directory " + outputPath + " already exists"); } LOG.info("Running OrderedWordCount DAG" + ", dagIndex=" + dagIndex + ", inputPath=" + inputPath + ", outputPath=" + outputPath); Map<String, LocalResource> localResources = new TreeMap<String, LocalResource>(); DAG dag = instance.createDAG(fs, tezConf, localResources, stagingDir, dagIndex, inputPath, outputPath, generateSplitsInClient, useMRSettings, intermediateNumReduceTasks); boolean doPreWarm = dagIndex == 1 && useTezSession && conf.getBoolean("PRE_WARM_SESSION", true); int preWarmNumContainers = 0; if (doPreWarm) { preWarmNumContainers = conf.getInt("PRE_WARM_NUM_CONTAINERS", 0); if (preWarmNumContainers <= 0) { doPreWarm = false; } } if (doPreWarm) { LOG.info("Pre-warming Session"); PreWarmVertex preWarmVertex = PreWarmVertex.create("PreWarm", preWarmNumContainers, dag .getVertex("initialmap").getTaskResource()); preWarmVertex.addTaskLocalFiles(dag.getVertex("initialmap").getTaskLocalFiles()); preWarmVertex.setTaskEnvironment(dag.getVertex("initialmap").getTaskEnvironment()); preWarmVertex.setTaskLaunchCmdOpts(dag.getVertex("initialmap").getTaskLaunchCmdOpts()); tezSession.preWarm(preWarmVertex); } if (useTezSession) { LOG.info("Waiting for TezSession to get into ready state"); waitForTezSessionReady(tezSession); LOG.info("Submitting DAG to Tez Session, dagIndex=" + dagIndex); dagClient = tezSession.submitDAG(dag); LOG.info("Submitted DAG to Tez Session, dagIndex=" + dagIndex); } else { LOG.info("Submitting DAG as a new Tez Application"); dagClient = tezSession.submitDAG(dag); } while (true) { dagStatus = dagClient.getDAGStatus(statusGetOpts); if (dagStatus.getState() == DAGStatus.State.RUNNING || dagStatus.getState() == DAGStatus.State.SUCCEEDED || dagStatus.getState() == DAGStatus.State.FAILED 
|| dagStatus.getState() == DAGStatus.State.KILLED || dagStatus.getState() == DAGStatus.State.ERROR) { break; } try { Thread.sleep(500); } catch (InterruptedException e) { // continue; } } while (dagStatus.getState() != DAGStatus.State.SUCCEEDED && dagStatus.getState() != DAGStatus.State.FAILED && dagStatus.getState() != DAGStatus.State.KILLED && dagStatus.getState() != DAGStatus.State.ERROR) { if (dagStatus.getState() == DAGStatus.State.RUNNING) { ExampleDriver.printDAGStatus(dagClient, vNames); } try { try { Thread.sleep(1000); } catch (InterruptedException e) { // continue; } dagStatus = dagClient.getDAGStatus(statusGetOpts); } catch (TezException e) { LOG.error("Failed to get application progress. Exiting"); return -1; } } ExampleDriver.printDAGStatus(dagClient, vNames, true, true); LOG.info("DAG " + dagIndex + " completed. " + "FinalState=" + dagStatus.getState()); if (dagStatus.getState() != DAGStatus.State.SUCCEEDED) { LOG.info("DAG " + dagIndex + " diagnostics: " + dagStatus.getDiagnostics()); } } } catch (Exception e) { LOG.error("Error occurred when submitting/running DAGs", e); throw e; } finally { if (!retainStagingDir) { pathFs.delete(stagingDir, true); } LOG.info("Shutting down session"); tezSession.stop(); } if (!useTezSession) { ExampleDriver.printDAGStatus(dagClient, vNames); LOG.info("Application completed. " + "FinalState=" + dagStatus.getState()); } return dagStatus.getState() == DAGStatus.State.SUCCEEDED ? 0 : 1; } private static void waitForTezSessionReady(TezClient tezSession) throws IOException, TezException, InterruptedException { tezSession.waitTillReady(); } public static void main(String[] args) throws Exception { int res = ToolRunner.run(new TezConfiguration(), new TestOrderedWordCount(), args); System.exit(res); } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.gateway.local; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.SegmentInfos; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.InputStreamStreamInput; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.gateway.IndexShardGateway; import org.elasticsearch.index.gateway.IndexShardGatewayRecoveryException; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.index.shard.AbstractIndexShardComponent; import org.elasticsearch.index.shard.IndexShardState; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.service.IndexShard; import org.elasticsearch.index.shard.service.InternalIndexShard; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.index.translog.TranslogStreams; import org.elasticsearch.index.translog.fs.FsTranslog; import org.elasticsearch.indices.recovery.RecoveryState; 
import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.ThreadPool; import java.io.EOFException; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.util.Arrays; import java.util.concurrent.ScheduledFuture; /** * */ public class LocalIndexShardGateway extends AbstractIndexShardComponent implements IndexShardGateway { private final ThreadPool threadPool; private final InternalIndexShard indexShard; private final RecoveryState recoveryState = new RecoveryState(); private volatile ScheduledFuture flushScheduler; private final TimeValue syncInterval; @Inject public LocalIndexShardGateway(ShardId shardId, @IndexSettings Settings indexSettings, ThreadPool threadPool, IndexShard indexShard) { super(shardId, indexSettings); this.threadPool = threadPool; this.indexShard = (InternalIndexShard) indexShard; syncInterval = componentSettings.getAsTime("sync", TimeValue.timeValueSeconds(5)); if (syncInterval.millis() > 0) { this.indexShard.translog().syncOnEachOperation(false); flushScheduler = threadPool.schedule(syncInterval, ThreadPool.Names.SAME, new Sync()); } else if (syncInterval.millis() == 0) { flushScheduler = null; this.indexShard.translog().syncOnEachOperation(true); } else { flushScheduler = null; } } @Override public String toString() { return "local"; } @Override public RecoveryState recoveryState() { return recoveryState; } @Override public void recover(boolean indexShouldExists, RecoveryState recoveryState) throws IndexShardGatewayRecoveryException { recoveryState.getIndex().startTime(System.currentTimeMillis()); recoveryState.setStage(RecoveryState.Stage.INDEX); long version = -1; long translogId = -1; try { SegmentInfos si = null; try { si = Lucene.readSegmentInfos(indexShard.store().directory()); } catch (Throwable e) { String files = "_unknown_"; try { files = Arrays.toString(indexShard.store().directory().listAll()); } catch (Throwable e1) { files += " (failure=" + 
ExceptionsHelper.detailedMessage(e1) + ")"; } if (indexShouldExists && indexShard.store().indexStore().persistent()) { throw new IndexShardGatewayRecoveryException(shardId(), "shard allocated for local recovery (post api), should exist, but doesn't, current files: " + files, e); } } if (si != null) { if (indexShouldExists) { version = si.getVersion(); if (si.getUserData().containsKey(Translog.TRANSLOG_ID_KEY)) { translogId = Long.parseLong(si.getUserData().get(Translog.TRANSLOG_ID_KEY)); } else { translogId = version; } logger.trace("using existing shard data, translog id [{}]", translogId); } else { // it exists on the directory, but shouldn't exist on the FS, its a leftover (possibly dangling) // its a "new index create" API, we have to do something, so better to clean it than use same data logger.trace("cleaning existing shard, shouldn't exists"); IndexWriter writer = new IndexWriter(indexShard.store().directory(), new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER).setOpenMode(IndexWriterConfig.OpenMode.CREATE)); writer.close(); } } } catch (Throwable e) { throw new IndexShardGatewayRecoveryException(shardId(), "failed to fetch index version after copying it over", e); } recoveryState.getIndex().updateVersion(version); recoveryState.getIndex().time(System.currentTimeMillis() - recoveryState.getIndex().startTime()); // since we recover from local, just fill the files and size try { int numberOfFiles = 0; long totalSizeInBytes = 0; for (String name : indexShard.store().directory().listAll()) { numberOfFiles++; long length = indexShard.store().directory().fileLength(name); totalSizeInBytes += length; recoveryState.getIndex().addFileDetail(name, length, length); } recoveryState.getIndex().files(numberOfFiles, totalSizeInBytes, numberOfFiles, totalSizeInBytes); recoveryState.getIndex().recoveredFileCount(numberOfFiles); recoveryState.getIndex().recoveredByteCount(totalSizeInBytes); } catch (Exception e) { // ignore } 
recoveryState.getStart().startTime(System.currentTimeMillis()); recoveryState.setStage(RecoveryState.Stage.START); if (translogId == -1) { // no translog files, bail indexShard.postRecovery("post recovery from gateway, no translog"); // no index, just start the shard and bail recoveryState.getStart().time(System.currentTimeMillis() - recoveryState.getStart().startTime()); recoveryState.getStart().checkIndexTime(indexShard.checkIndexTook()); return; } // move an existing translog, if exists, to "recovering" state, and start reading from it FsTranslog translog = (FsTranslog) indexShard.translog(); String translogName = "translog-" + translogId; String recoverTranslogName = translogName + ".recovering"; File recoveringTranslogFile = null; for (File translogLocation : translog.locations()) { File tmpRecoveringFile = new File(translogLocation, recoverTranslogName); if (!tmpRecoveringFile.exists()) { File tmpTranslogFile = new File(translogLocation, translogName); if (tmpTranslogFile.exists()) { for (int i = 0; i < 3; i++) { if (tmpTranslogFile.renameTo(tmpRecoveringFile)) { recoveringTranslogFile = tmpRecoveringFile; break; } } } } else { recoveringTranslogFile = tmpRecoveringFile; break; } } if (recoveringTranslogFile == null || !recoveringTranslogFile.exists()) { // no translog to recovery from, start and bail // no translog files, bail indexShard.postRecovery("post recovery from gateway, no translog"); // no index, just start the shard and bail recoveryState.getStart().time(System.currentTimeMillis() - recoveryState.getStart().startTime()); recoveryState.getStart().checkIndexTime(indexShard.checkIndexTook()); return; } // recover from the translog file indexShard.performRecoveryPrepareForTranslog(); recoveryState.getStart().time(System.currentTimeMillis() - recoveryState.getStart().startTime()); recoveryState.getStart().checkIndexTime(indexShard.checkIndexTook()); recoveryState.getTranslog().startTime(System.currentTimeMillis()); 
recoveryState.setStage(RecoveryState.Stage.TRANSLOG); FileInputStream fs = null; try { fs = new FileInputStream(recoveringTranslogFile); InputStreamStreamInput si = new InputStreamStreamInput(fs); while (true) { Translog.Operation operation; try { int opSize = si.readInt(); operation = TranslogStreams.readTranslogOperation(si); } catch (EOFException e) { // ignore, not properly written the last op break; } catch (IOException e) { // ignore, not properly written last op break; } try { indexShard.performRecoveryOperation(operation); recoveryState.getTranslog().addTranslogOperations(1); } catch (ElasticsearchException e) { if (e.status() == RestStatus.BAD_REQUEST) { // mainly for MapperParsingException and Failure to detect xcontent logger.info("ignoring recovery of a corrupt translog entry", e); } else { throw e; } } } } catch (Throwable e) { // we failed to recovery, make sure to delete the translog file (and keep the recovering one) indexShard.translog().closeWithDelete(); throw new IndexShardGatewayRecoveryException(shardId, "failed to recover shard", e); } finally { try { fs.close(); } catch (IOException e) { // ignore } } indexShard.performRecoveryFinalization(true); recoveringTranslogFile.delete(); recoveryState.getTranslog().time(System.currentTimeMillis() - recoveryState.getTranslog().startTime()); } @Override public String type() { return "local"; } @Override public void close() { if (flushScheduler != null) { flushScheduler.cancel(false); } } class Sync implements Runnable { @Override public void run() { // don't re-schedule if its closed..., we are done if (indexShard.state() == IndexShardState.CLOSED) { return; } if (indexShard.state() == IndexShardState.STARTED && indexShard.translog().syncNeeded()) { threadPool.executor(ThreadPool.Names.FLUSH).execute(new Runnable() { @Override public void run() { try { indexShard.translog().sync(); } catch (Exception e) { if (indexShard.state() == IndexShardState.STARTED) { logger.warn("failed to sync translog", e); } 
} if (indexShard.state() != IndexShardState.CLOSED) { flushScheduler = threadPool.schedule(syncInterval, ThreadPool.Names.SAME, Sync.this); } } }); } else { flushScheduler = threadPool.schedule(syncInterval, ThreadPool.Names.SAME, Sync.this); } } } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.reseller.model; /** * JSON template for address of a customer. * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the Google Workspace Reseller API. For a detailed * explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class Address extends com.google.api.client.json.GenericJson { /** * A customer's physical address. An address can be composed of one to three lines. The * `addressline2` and `addressLine3` are optional. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String addressLine1; /** * Line 2 of the address. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String addressLine2; /** * Line 3 of the address. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String addressLine3; /** * The customer contact's name. This is required. * The value may be {@code null}. 
*/
/* NOTE(review): auto-generated Google API client model members for Address (keyed fields plus
   fluent getters/setters). Do not hand-edit — regenerate from the API discovery document instead. */
@com.google.api.client.util.Key private java.lang.String contactName; /** * For `countryCode` information, see the ISO 3166 country code elements. Verify that country is * approved for resale of Google products. This property is required when creating a new customer. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String countryCode; /** * Identifies the resource as a customer address. Value: `customers#address` * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String kind; /** * An example of a `locality` value is the city of `San Francisco`. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String locality; /** * The company or company division name. This is required. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String organizationName; /** * A `postalCode` example is a postal zip code such as `94043`. This property is required when * creating a new customer. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String postalCode; /** * An example of a `region` value is `CA` for the state of California. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String region; /** * A customer's physical address. An address can be composed of one to three lines. The * `addressline2` and `addressLine3` are optional. * @return value or {@code null} for none */ public java.lang.String getAddressLine1() { return addressLine1; } /** * A customer's physical address. An address can be composed of one to three lines. The * `addressline2` and `addressLine3` are optional. * @param addressLine1 addressLine1 or {@code null} for none */ public Address setAddressLine1(java.lang.String addressLine1) { this.addressLine1 = addressLine1; return this; } /** * Line 2 of the address. 
* @return value or {@code null} for none */ public java.lang.String getAddressLine2() { return addressLine2; } /** * Line 2 of the address. * @param addressLine2 addressLine2 or {@code null} for none */ public Address setAddressLine2(java.lang.String addressLine2) { this.addressLine2 = addressLine2; return this; } /** * Line 3 of the address. * @return value or {@code null} for none */ public java.lang.String getAddressLine3() { return addressLine3; } /** * Line 3 of the address. * @param addressLine3 addressLine3 or {@code null} for none */ public Address setAddressLine3(java.lang.String addressLine3) { this.addressLine3 = addressLine3; return this; } /** * The customer contact's name. This is required. * @return value or {@code null} for none */ public java.lang.String getContactName() { return contactName; } /** * The customer contact's name. This is required. * @param contactName contactName or {@code null} for none */ public Address setContactName(java.lang.String contactName) { this.contactName = contactName; return this; } /** * For `countryCode` information, see the ISO 3166 country code elements. Verify that country is * approved for resale of Google products. This property is required when creating a new customer. * @return value or {@code null} for none */ public java.lang.String getCountryCode() { return countryCode; } /** * For `countryCode` information, see the ISO 3166 country code elements. Verify that country is * approved for resale of Google products. This property is required when creating a new customer. * @param countryCode countryCode or {@code null} for none */ public Address setCountryCode(java.lang.String countryCode) { this.countryCode = countryCode; return this; } /** * Identifies the resource as a customer address. Value: `customers#address` * @return value or {@code null} for none */ public java.lang.String getKind() { return kind; } /** * Identifies the resource as a customer address. 
 Value: `customers#address` * @param kind kind or {@code null} for none */ public Address setKind(java.lang.String kind) { this.kind = kind; return this; } /** * An example of a `locality` value is the city of `San Francisco`. * @return value or {@code null} for none */ public java.lang.String getLocality() { return locality; } /** * An example of a `locality` value is the city of `San Francisco`. * @param locality locality or {@code null} for none */ public Address setLocality(java.lang.String locality) { this.locality = locality; return this; } /** * The company or company division name. This is required. * @return value or {@code null} for none */ public java.lang.String getOrganizationName() { return organizationName; } /** * The company or company division name. This is required. * @param organizationName organizationName or {@code null} for none */ public Address setOrganizationName(java.lang.String organizationName) { this.organizationName = organizationName; return this; } /** * A `postalCode` example is a postal zip code such as `94043`. This property is required when * creating a new customer. * @return value or {@code null} for none */ public java.lang.String getPostalCode() { return postalCode; } /** * A `postalCode` example is a postal zip code such as `94043`. This property is required when * creating a new customer. * @param postalCode postalCode or {@code null} for none */ public Address setPostalCode(java.lang.String postalCode) { this.postalCode = postalCode; return this; } /** * An example of a `region` value is `CA` for the state of California. * @return value or {@code null} for none */ public java.lang.String getRegion() { return region; } /** * An example of a `region` value is `CA` for the state of California. 
* @param region region or {@code null} for none */ public Address setRegion(java.lang.String region) { this.region = region; return this; } @Override public Address set(String fieldName, Object value) { return (Address) super.set(fieldName, value); } @Override public Address clone() { return (Address) super.clone(); } }
/* The MIT License (MIT) * * Copyright (c) 2013 Jan Kerkenhoff, Miguel Gonzalez * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of * the Software, and to permit persons to whom the Software is furnished to do so, * subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ package nl.fontys.epic.util; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; /** * Response of a single command chain * * @author Miguel Gonzalez <miguel-gonzalez@gmx.de> * @since 1.0 * @version 1.0 */ public class MatrixList<Type extends Indexable> implements Collection<Type> { private int elementSize; private final HashMap<Integer, HashMap<Integer, Type>> chunks; public MatrixList() { elementSize = 0; chunks = new HashMap< >(); } @Override public boolean add(Type element) { if (chunks.containsKey(element.getX())) { HashMap<Integer, Type> yMap = chunks.get(element .getX()); if (!yMap.containsKey(element.getY())) { yMap.put(element.getY(), element); elementSize++; return true; } else { return false; } } else { HashMap<Integer, Type> yChunkMap = new HashMap<>(); yChunkMap.put(element.getY(), element); chunks.put(element.getX(), yChunkMap); elementSize++; return true; } } @Override public boolean addAll(Collection<? extends Type> objects) { boolean changed = false; for (Type object : objects) { if (!changed) { changed = add(object); } } return changed; } @Override public void clear() { chunks.clear(); } @Override public boolean contains(Object object) { if (object instanceof Indexable) { Indexable indexable = (Indexable)object; return contains(indexable.getX(), indexable.getY()); } else { return false; } } @Override public boolean containsAll(Collection<?> objects) { for (Type elem : this) { if (!objects.contains(elem)) { return false; } } return true; } @Override public boolean isEmpty() { return chunks.isEmpty(); } @Override public Iterator<Type> iterator() { return new MatrixIterator<>(chunks.values().iterator()); } @Override public boolean remove(Object object) { if (object instanceof Indexable) { Indexable indexable = (Indexable)object; return remove(indexable.getX(), indexable.getY()); } else { return false; } } @Override public boolean removeAll(Collection<?> objects) { boolean changed = false; for (Object object : objects) 
{ if (!changed) { changed = remove((Type)object); } } return changed; } @Override public boolean retainAll(Collection<?> objects) { boolean changed = false; for (Object o : this) { if (!objects.contains(o)) { remove((Type)o); changed = true; } } return changed; } @Override public int size() { return elementSize; } @Override public Object[] toArray() { return toArray(new Object[size()]); } @SuppressWarnings("unchecked") @Override public <T> T[] toArray(T[] objects) { if (objects.length != size()) { objects = (T[]) new Object[size()]; } int index = 0; for (Type type : this) { objects[index++] = (T) type; } return objects; } public boolean remove(int indexX, int indexY) { HashMap<Integer, Type> yChunkMap = chunks.get(indexX); if (yChunkMap != null) { yChunkMap.remove(indexY); // X axis if (yChunkMap.isEmpty()) { chunks.remove(indexX); } else { return false; } elementSize--; return true; } else { return false; } } public MatrixList<Type> copy() { MatrixList<Type> copyList = new MatrixList<>(); for (Type element : this) { copyList.add(element); } return copyList; } public boolean contains(int indexX, int indexY) { return get(indexX, indexY) != null; } public Type get(int indexX, int indexY) { HashMap<Integer, Type> yChunkMap = chunks.get(indexX); if (yChunkMap != null) { Type element = yChunkMap.get(indexY); if (element != null) { return element; } else { return null; } } else { return null; } } public void set(MatrixList<Type> matrixList) { this.elementSize = matrixList.size(); clear(); for (Type elem : matrixList) { add(elem); } } }
/* * Copyright 2014 The Netty Project * * The Netty Project licenses this file to you under the Apache License, version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package io.netty.handler.codec.http2; import io.netty.buffer.ByteBufAllocator; import io.netty.handler.codec.http.DefaultFullHttpRequest; import io.netty.handler.codec.http.DefaultFullHttpResponse; import io.netty.handler.codec.http.DefaultHttpRequest; import io.netty.handler.codec.http.DefaultHttpResponse; import io.netty.handler.codec.http.FullHttpMessage; import io.netty.handler.codec.http.FullHttpRequest; import io.netty.handler.codec.http.FullHttpResponse; import io.netty.handler.codec.http.HttpHeaderNames; import io.netty.handler.codec.http.HttpHeaderValues; import io.netty.handler.codec.http.HttpHeaders; import io.netty.handler.codec.http.HttpMessage; import io.netty.handler.codec.http.HttpMethod; import io.netty.handler.codec.http.HttpRequest; import io.netty.handler.codec.http.HttpResponse; import io.netty.handler.codec.http.HttpResponseStatus; import io.netty.handler.codec.http.HttpUtil; import io.netty.handler.codec.http.HttpVersion; import io.netty.util.AsciiString; import io.netty.util.internal.UnstableApi; import java.net.URI; import java.util.Iterator; import java.util.Map.Entry; import static io.netty.handler.codec.http.HttpScheme.HTTP; import static io.netty.handler.codec.http.HttpScheme.HTTPS; import static io.netty.handler.codec.http.HttpUtil.isAsteriskForm; import static io.netty.handler.codec.http.HttpUtil.isOriginForm; import static 
io.netty.handler.codec.http2.Http2Error.PROTOCOL_ERROR; import static io.netty.handler.codec.http2.Http2Exception.connectionError; import static io.netty.handler.codec.http2.Http2Exception.streamError; import static io.netty.util.AsciiString.EMPTY_STRING; import static io.netty.util.ByteProcessor.FIND_SEMI_COLON; import static io.netty.util.internal.ObjectUtil.checkNotNull; import static io.netty.util.internal.StringUtil.isNullOrEmpty; import static io.netty.util.internal.StringUtil.length; /** * Provides utility methods and constants for the HTTP/2 to HTTP conversion */ @UnstableApi public final class HttpConversionUtil { /** * The set of headers that should not be directly copied when converting headers from HTTP to HTTP/2. */ private static final CharSequenceMap<AsciiString> HTTP_TO_HTTP2_HEADER_BLACKLIST = new CharSequenceMap<AsciiString>(); static { HTTP_TO_HTTP2_HEADER_BLACKLIST.add(HttpHeaderNames.CONNECTION, EMPTY_STRING); @SuppressWarnings("deprecation") AsciiString keepAlive = HttpHeaderNames.KEEP_ALIVE; HTTP_TO_HTTP2_HEADER_BLACKLIST.add(keepAlive, EMPTY_STRING); @SuppressWarnings("deprecation") AsciiString proxyConnection = HttpHeaderNames.PROXY_CONNECTION; HTTP_TO_HTTP2_HEADER_BLACKLIST.add(proxyConnection, EMPTY_STRING); HTTP_TO_HTTP2_HEADER_BLACKLIST.add(HttpHeaderNames.TRANSFER_ENCODING, EMPTY_STRING); HTTP_TO_HTTP2_HEADER_BLACKLIST.add(HttpHeaderNames.HOST, EMPTY_STRING); HTTP_TO_HTTP2_HEADER_BLACKLIST.add(HttpHeaderNames.UPGRADE, EMPTY_STRING); HTTP_TO_HTTP2_HEADER_BLACKLIST.add(ExtensionHeaderNames.STREAM_ID.text(), EMPTY_STRING); HTTP_TO_HTTP2_HEADER_BLACKLIST.add(ExtensionHeaderNames.SCHEME.text(), EMPTY_STRING); HTTP_TO_HTTP2_HEADER_BLACKLIST.add(ExtensionHeaderNames.PATH.text(), EMPTY_STRING); } /** * This will be the method used for {@link HttpRequest} objects generated out of the HTTP message flow defined in <a * href="http://tools.ietf.org/html/draft-ietf-httpbis-http2-16#section-8.1.">HTTP/2 Spec Message Flow</a> */ public static final 
HttpMethod OUT_OF_MESSAGE_SEQUENCE_METHOD = HttpMethod.OPTIONS; /** * This will be the path used for {@link HttpRequest} objects generated out of the HTTP message flow defined in <a * href="http://tools.ietf.org/html/draft-ietf-httpbis-http2-16#section-8.1.">HTTP/2 Spec Message Flow</a> */ public static final String OUT_OF_MESSAGE_SEQUENCE_PATH = ""; /** * This will be the status code used for {@link HttpResponse} objects generated out of the HTTP message flow defined * in <a href="http://tools.ietf.org/html/draft-ietf-httpbis-http2-16#section-8.1.">HTTP/2 Spec Message Flow</a> */ public static final HttpResponseStatus OUT_OF_MESSAGE_SEQUENCE_RETURN_CODE = HttpResponseStatus.OK; /** * <a href="https://tools.ietf.org/html/rfc7540#section-8.1.2.3">rfc7540, 8.1.2.3</a> states the path must not * be empty, and instead should be {@code /}. */ private static final AsciiString EMPTY_REQUEST_PATH = AsciiString.cached("/"); private HttpConversionUtil() { } /** * Provides the HTTP header extensions used to carry HTTP/2 information in HTTP objects */ public enum ExtensionHeaderNames { /** * HTTP extension header which will identify the stream id from the HTTP/2 event(s) responsible for generating a * {@code HttpObject} * <p> * {@code "x-http2-stream-id"} */ STREAM_ID("x-http2-stream-id"), /** * HTTP extension header which will identify the scheme pseudo header from the HTTP/2 event(s) responsible for * generating a {@code HttpObject} * <p> * {@code "x-http2-scheme"} */ SCHEME("x-http2-scheme"), /** * HTTP extension header which will identify the path pseudo header from the HTTP/2 event(s) responsible for * generating a {@code HttpObject} * <p> * {@code "x-http2-path"} */ PATH("x-http2-path"), /** * HTTP extension header which will identify the stream id used to create this stream in a HTTP/2 push promise * frame * <p> * {@code "x-http2-stream-promise-id"} */ STREAM_PROMISE_ID("x-http2-stream-promise-id"), /** * HTTP extension header which will identify the stream id which 
this stream is dependent on. This stream will * be a child node of the stream id associated with this header value. * <p> * {@code "x-http2-stream-dependency-id"} */ STREAM_DEPENDENCY_ID("x-http2-stream-dependency-id"), /** * HTTP extension header which will identify the weight (if non-default and the priority is not on the default * stream) of the associated HTTP/2 stream responsible for generating a {@code HttpObject} * <p> * {@code "x-http2-stream-weight"} */ STREAM_WEIGHT("x-http2-stream-weight"); private final AsciiString text; ExtensionHeaderNames(String text) { this.text = AsciiString.cached(text); } public AsciiString text() { return text; } } /** * Apply HTTP/2 rules while translating status code to {@link HttpResponseStatus} * * @param status The status from an HTTP/2 frame * @return The HTTP/1.x status * @throws Http2Exception If there is a problem translating from HTTP/2 to HTTP/1.x */ public static HttpResponseStatus parseStatus(CharSequence status) throws Http2Exception { HttpResponseStatus result; try { result = HttpResponseStatus.parseLine(status); /* HTTP/2 has no equivalent of the HTTP/1.1 101 (Switching Protocols) response, so it can never be a valid translated status. */ if (result == HttpResponseStatus.SWITCHING_PROTOCOLS) { throw connectionError(PROTOCOL_ERROR, "Invalid HTTP/2 status code '%d'", result.code()); } } catch (Http2Exception e) { throw e; } catch (Throwable t) { throw connectionError(PROTOCOL_ERROR, t, "Unrecognized HTTP status code '%s' encountered in translation to HTTP/1.x", status); } return result; } /** * Create a new object to contain the response data * * @param streamId The stream associated with the response * @param http2Headers The initial set of HTTP/2 headers to create the response with * @param alloc The {@link ByteBufAllocator} to use to generate the content of the message * @param validateHttpHeaders <ul> * <li>{@code true} to validate HTTP headers in the http-codec</li> * <li>{@code false} not to validate HTTP headers in the http-codec</li> * </ul> * @return A new response object which represents headers/data * @throws 
Http2Exception see {@link #addHttp2ToHttpHeaders(int, Http2Headers, FullHttpMessage, boolean)} */ public static FullHttpResponse toFullHttpResponse(int streamId, Http2Headers http2Headers, ByteBufAllocator alloc, boolean validateHttpHeaders) throws Http2Exception { HttpResponseStatus status = parseStatus(http2Headers.status());
// HTTP/2 does not define a way to carry the version or reason phrase that is included in an
// HTTP/1.1 status line.
FullHttpResponse msg = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, status, alloc.buffer(), validateHttpHeaders); try { addHttp2ToHttpHeaders(streamId, http2Headers, msg, false); } catch (Http2Exception e) { msg.release(); throw e; } catch (Throwable t) { msg.release(); throw streamError(streamId, PROTOCOL_ERROR, t, "HTTP/2 to HTTP/1.x headers conversion error"); } return msg; } /** * Create a new object to contain the request data * * @param streamId The stream associated with the request * @param http2Headers The initial set of HTTP/2 headers to create the request with * @param alloc The {@link ByteBufAllocator} to use to generate the content of the message * @param validateHttpHeaders <ul> * <li>{@code true} to validate HTTP headers in the http-codec</li> * <li>{@code false} not to validate HTTP headers in the http-codec</li> * </ul> * @return A new request object which represents headers/data * @throws Http2Exception see {@link #addHttp2ToHttpHeaders(int, Http2Headers, FullHttpMessage, boolean)} */ public static FullHttpRequest toFullHttpRequest(int streamId, Http2Headers http2Headers, ByteBufAllocator alloc, boolean validateHttpHeaders) throws Http2Exception { // HTTP/2 does not define a way to carry the version identifier that is included in the HTTP/1.1 request line. 
final CharSequence method = checkNotNull(http2Headers.method(), "method header cannot be null in conversion to HTTP/1.x"); final CharSequence path = checkNotNull(http2Headers.path(), "path header cannot be null in conversion to HTTP/1.x"); FullHttpRequest msg = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.valueOf(method .toString()), path.toString(), alloc.buffer(), validateHttpHeaders); try { addHttp2ToHttpHeaders(streamId, http2Headers, msg, false); } catch (Http2Exception e) { msg.release(); throw e; } catch (Throwable t) { msg.release(); throw streamError(streamId, PROTOCOL_ERROR, t, "HTTP/2 to HTTP/1.x headers conversion error"); } return msg; } /** * Create a new object to contain the request data. * * @param streamId The stream associated with the request * @param http2Headers The initial set of HTTP/2 headers to create the request with * @param validateHttpHeaders <ul> * <li>{@code true} to validate HTTP headers in the http-codec</li> * <li>{@code false} not to validate HTTP headers in the http-codec</li> * </ul> * @return A new request object which represents headers for a chunked request * @throws Http2Exception see {@link #addHttp2ToHttpHeaders(int, Http2Headers, FullHttpMessage, boolean)} */ public static HttpRequest toHttpRequest(int streamId, Http2Headers http2Headers, boolean validateHttpHeaders) throws Http2Exception { // HTTP/2 does not define a way to carry the version identifier that is included in the HTTP/1.1 request line. 
final CharSequence method = checkNotNull(http2Headers.method(), "method header cannot be null in conversion to HTTP/1.x"); final CharSequence path = checkNotNull(http2Headers.path(), "path header cannot be null in conversion to HTTP/1.x"); HttpRequest msg = new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.valueOf(method.toString()), path.toString(), validateHttpHeaders); try { addHttp2ToHttpHeaders(streamId, http2Headers, msg.headers(), msg.protocolVersion(), false, true); } catch (Http2Exception e) { throw e; } catch (Throwable t) { throw streamError(streamId, PROTOCOL_ERROR, t, "HTTP/2 to HTTP/1.x headers conversion error"); } return msg; } /** * Create a new object to contain the response data. * * @param streamId The stream associated with the response * @param http2Headers The initial set of HTTP/2 headers to create the response with * @param validateHttpHeaders <ul> * <li>{@code true} to validate HTTP headers in the http-codec</li> * <li>{@code false} not to validate HTTP headers in the http-codec</li> * </ul> * @return A new response object which represents headers for a chunked response * @throws Http2Exception see {@link #addHttp2ToHttpHeaders(int, Http2Headers, * HttpHeaders, HttpVersion, boolean, boolean)} */ public static HttpResponse toHttpResponse(final int streamId, final Http2Headers http2Headers, final boolean validateHttpHeaders) throws Http2Exception { final HttpResponseStatus status = parseStatus(http2Headers.status());
// HTTP/2 does not define a way to carry the version or reason phrase that is included in an
// HTTP/1.1 status line.
final HttpResponse msg = new DefaultHttpResponse(HttpVersion.HTTP_1_1, status, validateHttpHeaders); try { addHttp2ToHttpHeaders(streamId, http2Headers, msg.headers(), msg.protocolVersion(), false, true); } catch (final Http2Exception e) { throw e; } catch (final Throwable t) { throw streamError(streamId, PROTOCOL_ERROR, t, "HTTP/2 to HTTP/1.x headers conversion error"); } return msg; } /** * Translate and add HTTP/2 headers to HTTP/1.x headers. * * @param streamId The stream associated with {@code sourceHeaders}. * @param sourceHeaders The HTTP/2 headers to convert. * @param destinationMessage The object which will contain the resulting HTTP/1.x headers. * @param addToTrailer {@code true} to add to trailing headers. {@code false} to add to initial headers. * @throws Http2Exception If not all HTTP/2 headers can be translated to HTTP/1.x. * @see #addHttp2ToHttpHeaders(int, Http2Headers, HttpHeaders, HttpVersion, boolean, boolean) */ public static void addHttp2ToHttpHeaders(int streamId, Http2Headers sourceHeaders, FullHttpMessage destinationMessage, boolean addToTrailer) throws Http2Exception { addHttp2ToHttpHeaders(streamId, sourceHeaders, addToTrailer ? destinationMessage.trailingHeaders() : destinationMessage.headers(), destinationMessage.protocolVersion(), addToTrailer, destinationMessage instanceof HttpRequest); } /** * Translate and add HTTP/2 headers to HTTP/1.x headers. * * @param streamId The stream associated with {@code sourceHeaders}. * @param inputHeaders The HTTP/2 headers to convert. * @param outputHeaders The object which will contain the resulting HTTP/1.x headers.. * @param httpVersion What HTTP/1.x version {@code outputHeaders} should be treated as when doing the conversion. * @param isTrailer {@code true} if {@code outputHeaders} should be treated as trailing headers. * {@code false} otherwise. * @param isRequest {@code true} if the {@code outputHeaders} will be used in a request message. * {@code false} for response message. 
* @throws Http2Exception If not all HTTP/2 headers can be translated to HTTP/1.x. */ public static void addHttp2ToHttpHeaders(int streamId, Http2Headers inputHeaders, HttpHeaders outputHeaders, HttpVersion httpVersion, boolean isTrailer, boolean isRequest) throws Http2Exception { Http2ToHttpHeaderTranslator translator = new Http2ToHttpHeaderTranslator(streamId, outputHeaders, isRequest); try { for (Entry<CharSequence, CharSequence> entry : inputHeaders) { translator.translate(entry); } } catch (Http2Exception ex) { throw ex; } catch (Throwable t) { throw streamError(streamId, PROTOCOL_ERROR, t, "HTTP/2 to HTTP/1.x headers conversion error"); } /* transfer-encoding and trailer are HTTP/1.x framing headers and must not survive the conversion from HTTP/2. */ outputHeaders.remove(HttpHeaderNames.TRANSFER_ENCODING); outputHeaders.remove(HttpHeaderNames.TRAILER); if (!isTrailer) { outputHeaders.setInt(ExtensionHeaderNames.STREAM_ID.text(), streamId); HttpUtil.setKeepAlive(outputHeaders, httpVersion, true); } } /** * Converts the given HTTP/1.x headers into HTTP/2 headers. * The following headers are only used if they can not be found in from the {@code HOST} header or the * {@code Request-Line} as defined by <a href="https://tools.ietf.org/html/rfc7230">rfc7230</a> * <ul> * <li>{@link ExtensionHeaderNames#SCHEME}</li> * </ul> * {@link ExtensionHeaderNames#PATH} is ignored and instead extracted from the {@code Request-Line}. 
*/ public static Http2Headers toHttp2Headers(HttpMessage in, boolean validateHeaders) { HttpHeaders inHeaders = in.headers(); final Http2Headers out = new DefaultHttp2Headers(validateHeaders, inHeaders.size()); if (in instanceof HttpRequest) { HttpRequest request = (HttpRequest) in; URI requestTargetUri = URI.create(request.uri()); out.path(toHttp2Path(requestTargetUri)); out.method(request.method().asciiName()); setHttp2Scheme(inHeaders, requestTargetUri, out); if (!isOriginForm(requestTargetUri) && !isAsteriskForm(requestTargetUri)) {
// Attempt to take from HOST header before taking from the request-line
String host = inHeaders.getAsString(HttpHeaderNames.HOST); setHttp2Authority((host == null || host.isEmpty()) ? requestTargetUri.getAuthority() : host, out); } } else if (in instanceof HttpResponse) { HttpResponse response = (HttpResponse) in; out.status(response.status().codeAsText()); }
// Add the HTTP headers which have not been consumed above
toHttp2Headers(inHeaders, out); return out; } public static Http2Headers toHttp2Headers(HttpHeaders inHeaders, boolean validateHeaders) { if (inHeaders.isEmpty()) { return EmptyHttp2Headers.INSTANCE; } final Http2Headers out = new DefaultHttp2Headers(validateHeaders, inHeaders.size()); toHttp2Headers(inHeaders, out); return out; } public static void toHttp2Headers(HttpHeaders inHeaders, Http2Headers out) { Iterator<Entry<CharSequence, CharSequence>> iter = inHeaders.iteratorCharSequence(); while (iter.hasNext()) { Entry<CharSequence, CharSequence> entry = iter.next(); final AsciiString aName = AsciiString.of(entry.getKey()).toLowerCase(); if (!HTTP_TO_HTTP2_HEADER_BLACKLIST.contains(aName)) {
// https://tools.ietf.org/html/rfc7540#section-8.1.2.2 makes a special exception for TE
if (aName.contentEqualsIgnoreCase(HttpHeaderNames.TE) && !AsciiString.contentEqualsIgnoreCase(entry.getValue(), HttpHeaderValues.TRAILERS)) { throw new IllegalArgumentException("Invalid value for " + HttpHeaderNames.TE + ": " + 
entry.getValue()); } if (aName.contentEqualsIgnoreCase(HttpHeaderNames.COOKIE)) { AsciiString value = AsciiString.of(entry.getValue());
// split up cookies to allow for better compression
// https://tools.ietf.org/html/rfc7540#section-8.1.2.5
try { int index = value.forEachByte(FIND_SEMI_COLON); if (index != -1) { int start = 0; do { out.add(HttpHeaderNames.COOKIE, value.subSequence(start, index, false));
// skip 2 characters "; " (see https://tools.ietf.org/html/rfc6265#section-4.2.1)
start = index + 2; } while (start < value.length() && (index = value.forEachByte(start, value.length() - start, FIND_SEMI_COLON)) != -1); if (start >= value.length()) { throw new IllegalArgumentException("cookie value is of unexpected format: " + value); } out.add(HttpHeaderNames.COOKIE, value.subSequence(start, value.length(), false)); } else { out.add(HttpHeaderNames.COOKIE, value); } } catch (Exception e) {
// This is not expect to happen because FIND_SEMI_COLON never throws but must be caught
// because of the ByteProcessor interface.
throw new IllegalStateException(e); } } else { out.add(aName, entry.getValue()); } } } } /** * Generate a HTTP/2 {@code :path} from a URI in accordance with * <a href="https://tools.ietf.org/html/rfc7230#section-5.3">rfc7230, 5.3</a>. */ private static AsciiString toHttp2Path(URI uri) { StringBuilder pathBuilder = new StringBuilder(length(uri.getRawPath()) + length(uri.getRawQuery()) + length(uri.getRawFragment()) + 2); if (!isNullOrEmpty(uri.getRawPath())) { pathBuilder.append(uri.getRawPath()); } if (!isNullOrEmpty(uri.getRawQuery())) { pathBuilder.append('?'); pathBuilder.append(uri.getRawQuery()); } if (!isNullOrEmpty(uri.getRawFragment())) { pathBuilder.append('#'); pathBuilder.append(uri.getRawFragment()); } String path = pathBuilder.toString(); return path.isEmpty() ? 
EMPTY_REQUEST_PATH : new AsciiString(path); }
// package-private for testing only
static void setHttp2Authority(String authority, Http2Headers out) {
// The authority MUST NOT include the deprecated "userinfo" subcomponent
if (authority != null) { if (authority.isEmpty()) { out.authority(EMPTY_STRING); } else { int start = authority.indexOf('@') + 1; int length = authority.length() - start; if (length == 0) { throw new IllegalArgumentException("authority: " + authority); } out.authority(new AsciiString(authority, start, length)); } } } private static void setHttp2Scheme(HttpHeaders in, URI uri, Http2Headers out) { String value = uri.getScheme(); if (value != null) { out.scheme(new AsciiString(value)); return; }
// Consume the Scheme extension header if present
CharSequence cValue = in.get(ExtensionHeaderNames.SCHEME.text()); if (cValue != null) { out.scheme(AsciiString.of(cValue)); return; } if (uri.getPort() == HTTPS.port()) { out.scheme(HTTPS.name()); } else if (uri.getPort() == HTTP.port()) { out.scheme(HTTP.name()); } else { throw new IllegalArgumentException(":scheme must be specified. " + "see https://tools.ietf.org/html/rfc7540#section-8.1.2.3"); } } /** * Utility which translates HTTP/2 headers to HTTP/1 headers. */ private static final class Http2ToHttpHeaderTranslator { /** * Translations from HTTP/2 header name to the HTTP/1.x equivalent. 
*/ private static final CharSequenceMap<AsciiString> REQUEST_HEADER_TRANSLATIONS = new CharSequenceMap<AsciiString>(); private static final CharSequenceMap<AsciiString> RESPONSE_HEADER_TRANSLATIONS = new CharSequenceMap<AsciiString>(); static { RESPONSE_HEADER_TRANSLATIONS.add(Http2Headers.PseudoHeaderName.AUTHORITY.value(), HttpHeaderNames.HOST); RESPONSE_HEADER_TRANSLATIONS.add(Http2Headers.PseudoHeaderName.SCHEME.value(), ExtensionHeaderNames.SCHEME.text()); /* Requests reuse the AUTHORITY/SCHEME translations copied here; PATH (added afterwards) applies to responses only, because a request's path is taken from the request-line instead. */ REQUEST_HEADER_TRANSLATIONS.add(RESPONSE_HEADER_TRANSLATIONS); RESPONSE_HEADER_TRANSLATIONS.add(Http2Headers.PseudoHeaderName.PATH.value(), ExtensionHeaderNames.PATH.text()); } private final int streamId; private final HttpHeaders output; private final CharSequenceMap<AsciiString> translations; /** * Create a new instance * * @param output The HTTP/1.x headers object to store the results of the translation * @param request if {@code true}, translates headers using the request translation map. Otherwise uses the * response translation map. */ Http2ToHttpHeaderTranslator(int streamId, HttpHeaders output, boolean request) { this.streamId = streamId; this.output = output; translations = request ? REQUEST_HEADER_TRANSLATIONS : RESPONSE_HEADER_TRANSLATIONS; } public void translate(Entry<CharSequence, CharSequence> entry) throws Http2Exception { final CharSequence name = entry.getKey(); final CharSequence value = entry.getValue(); AsciiString translatedName = translations.get(name); if (translatedName != null) { output.add(translatedName, AsciiString.of(value)); } else if (!Http2Headers.PseudoHeaderName.isPseudoHeader(name)) {
// https://tools.ietf.org/html/rfc7540#section-8.1.2.3
// All headers that start with ':' are only valid in HTTP/2 context
if (name.length() == 0 || name.charAt(0) == ':') { throw streamError(streamId, PROTOCOL_ERROR, "Invalid HTTP/2 header '%s' encountered in translation to HTTP/1.x", name); } if (HttpHeaderNames.COOKIE.equals(name)) {
// combine the cookie values into 1 header entry.
// https://tools.ietf.org/html/rfc7540#section-8.1.2.5
String existingCookie = output.get(HttpHeaderNames.COOKIE); output.set(HttpHeaderNames.COOKIE, (existingCookie != null) ? (existingCookie + "; " + value) : value); } else { output.add(name, value); } } } } }
/**
 * Copyright 2015 Peter Nerg
 *
 * <p>Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
 * except in compliance with the License. You may obtain a copy of the License at
 *
 * <p>http://www.apache.org/licenses/LICENSE-2.0
 *
 * <p>Unless required by applicable law or agreed to in writing, software distributed under the
 * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing permissions and
 * limitations under the License.
 */
package javascalautils;

import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;

import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Modifier;
import java.util.Arrays;
import java.util.Collection;
import java.util.Locale;

/**
 * Base test class.
 *
 * <p>Adds array/collection assertion helpers on top of JUnit's {@link Assert} and pins the JVM
 * locale so logging output is stable across environments.
 *
 * @author Peter Nerg
 */
public class BaseAssert extends Assert {

  static {
    // Configure language for proper logging outputs
    Locale.setDefault(Locale.US);
    System.setProperty("user.country", Locale.US.getCountry());
    System.setProperty("user.language", Locale.US.getLanguage());
    System.setProperty("user.variant", Locale.US.getVariant());
  }

  /** Points the JVM temp directory at the build's target/ directory for the duration of a class. */
  @BeforeClass
  public static final void setTempDirectoryToTarget() {
    System.setProperty("java.io.tmpdir", "target/");
  }

  /** Restores the JVM temp directory after the test class has run. */
  @AfterClass
  public static final void resetTempDirectoryToTarget() {
    System.clearProperty("java.io.tmpdir");
  }

  /**
   * Asserts that the provided class has a private default (non-argument) constructor. <br>
   * This is a stupid workaround to please the coverage tools that otherwise whine about not
   * covering private constructors.
   *
   * @param clazz the class whose no-arg constructor is checked and invoked
   * @throws ReflectiveOperationException if the constructor is missing or cannot be invoked
   */
  public static <T extends Object> void assertPrivateConstructor(Class<T> clazz)
      throws ReflectiveOperationException {
    Constructor<T> constructor = clazz.getDeclaredConstructor();
    assertTrue(Modifier.isPrivate(constructor.getModifiers()));
    try {
      constructor.setAccessible(true);
      constructor.newInstance();
    } finally {
      // Always restore accessibility even if instantiation throws.
      constructor.setAccessible(false);
    }
  }

  /**
   * Assert that the provided arrays do not contain the same data.
   *
   * <p>Two {@code null} references are (for backwards compatibility) treated as "not the same" and
   * pass. A single {@code null} argument now also passes — the arrays clearly differ — where the
   * previous implementation threw a {@link NullPointerException}.
   *
   * @param expected the first array
   * @param actual the second array
   */
  public static void assertNotEquals(byte[] expected, byte[] actual) {
    if (expected == null && actual == null) {
      return;
    }
    // Arrays.equals handles the remaining cases, including exactly one side being
    // null (not equal) and differing lengths, without risking an NPE.
    if (Arrays.equals(expected, actual)) {
      fail("The expected and the actual array are the same");
    }
  }

  /**
   * Assert that the provided arrays contain the same data.
   *
   * <p>Fails with a descriptive message (instead of throwing {@link NullPointerException}) when
   * exactly one of the arguments is {@code null}.
   *
   * @param expected the expected data
   * @param actual the actual data
   */
  public static void assertEquals(byte[] expected, byte[] actual) {
    if (expected == null && actual == null) {
      return;
    }
    assertTrue("One of the arrays is null, the other is not", expected != null && actual != null);
    assertEquals("The length of the arrays do not match", expected.length, actual.length);
    for (int i = 0; i < actual.length; i++) {
      assertEquals("The data on index [" + i + "] does not match", expected[i], actual[i]);
    }
  }

  /**
   * Assert that the provided arrays contain the same data.
   *
   * <p>Fails with a descriptive message (instead of throwing {@link NullPointerException}) when
   * exactly one of the arguments is {@code null}.
   *
   * @param expected the expected data
   * @param actual the actual data
   */
  public static void assertEquals(char[] expected, char[] actual) {
    if (expected == null && actual == null) {
      return;
    }
    assertTrue("One of the arrays is null, the other is not", expected != null && actual != null);
    assertEquals("The length of the arrays do not match", expected.length, actual.length);
    for (int i = 0; i < actual.length; i++) {
      assertEquals("The data on index [" + i + "] does not match", expected[i], actual[i]);
    }
  }

  /**
   * Assert that the provided arrays contain the same data.
   *
   * <p>Fails with a descriptive message (instead of throwing {@link NullPointerException}) when
   * exactly one of the arguments is {@code null}.
   *
   * @param expected the expected data
   * @param actual the actual data
   */
  public static void assertEquals(int[] expected, int[] actual) {
    if (expected == null && actual == null) {
      return;
    }
    assertTrue("One of the arrays is null, the other is not", expected != null && actual != null);
    assertEquals("The length of the arrays do not match", expected.length, actual.length);
    for (int i = 0; i < actual.length; i++) {
      assertEquals("The data on index [" + i + "] does not match", expected[i], actual[i]);
    }
  }

  /**
   * Assert that a collection is empty.
   *
   * @param collection the collection, must be non-null
   */
  public static void assertIsEmpty(Collection<?> collection) {
    assertNotNull(collection);
    assertTrue(collection.isEmpty());
  }

  /**
   * Assert a collection.
   *
   * @param collection the collection, must be non-null
   * @param expectedSize the exact size the collection must have
   */
  public static void assertCollection(Collection<?> collection, int expectedSize) {
    assertNotNull(collection);
    assertEquals(expectedSize, collection.size());
  }

  /**
   * Method that does...nothing. <br>
   * Used in test cases concerning executables that don't fulfill their Promise.
   */
  public static void doNothing() {}

  /**
   * Dummy method to raise a {@link DummyException}. <br>
   * This is used since one cannot raise exceptions directly from Lambda expressions.
   *
   * @return Never ever, since it always raises a DummyException
   */
  public static <T> T throwDummyException() {
    return throwException(new DummyException());
  }

  /**
   * Throws the provided exception. <br>
   * This is used since one cannot raise exceptions directly from Lambda expressions.
   *
   * @param ex The exception to throw
   * @return Never ever, since it always raises a DummyException
   */
  public static <T> T throwException(RuntimeException ex) {
    throw ex;
  }

  /**
   * Method used for testing to either raise a {@link DummyException} or return the provided value.
   *
   * @param arg the value to return; {@code null} triggers a DummyException
   * @return the provided argument, when non-null
   */
  public static final String throwIfNull(String arg) {
    if (arg == null) {
      throwDummyException();
    }
    return arg;
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.oozie.coord;

import java.io.File;
import java.io.IOException;
import java.io.StringReader;
import java.util.Date;

import org.apache.hadoop.conf.Configuration;
import org.apache.oozie.service.Services;
import org.apache.oozie.test.XTestCase;
import org.apache.oozie.util.DateUtils;
import org.apache.oozie.util.ELEvaluator;
import org.apache.oozie.util.XConfiguration;
import org.apache.oozie.util.XmlUtils;
import org.jdom.Element;

/**
 * Tests for {@link CoordELEvaluator}: verifies that the EL evaluator factories resolve
 * coordinator EL functions (frequency expressions, URI templates, data-event instances and
 * lazily-evaluated latest/future functions) as expected.
 */
public class TestCoordELEvaluator extends XTestCase {

    @Override
    public void setUp() throws Exception {
        super.setUp();
        new Services().init();
    }

    @Override
    protected void tearDown() throws Exception {
        // Guard against a test (e.g. testCleanup) having already destroyed the services.
        if (Services.get() != null) {
            Services.get().destroy();
        }
        super.tearDown();
    }

    /**
     * Verifies evaluation of frequency EL functions in the "coord-job-submit-freq" group:
     * hours/days/months are resolved, while daysInMonth/hoursInDay/tzOffset are rejected
     * because frequency must be an absolute value at submit time.
     */
    public void testCreateFreqELValuator() throws Exception {
        Configuration conf = new XConfiguration(new StringReader(getConfString()));
        ELEvaluator eval = CoordELEvaluator.createELEvaluatorForGroup(conf, "coord-job-submit-freq");

        String expr = "<coordinator-app name=\"mycoordinator-app\" start=\"${start}\" end=\"${end}\""
                + " frequency=\"${coord:hours(12)}\"><data-in name=\"A\" dataset=\"a\"></data-in>";
        // Expected output: start/end substituted from the configuration, hours(12) -> 720 minutes.
        String reply = expr.replace("${start}", conf.get("start")).replace(
                "${end}", conf.get("end")).replace("${coord:hours(12)}", "720");
        assertEquals(reply, CoordELFunctions.evalAndWrap(eval, expr));

        expr = "<coordinator-app name=\"mycoordinator-app\" start=\"${start}\" end=\"${end}\""
                + " frequency=\"${coord:days(7)}\"><data-in name=\"A\" dataset=\"a\"></data-in>";
        reply = expr.replace("${start}", conf.get("start")).replace("${end}",
                conf.get("end")).replace("${coord:days(7)}", "7");
        assertEquals(reply, CoordELFunctions.evalAndWrap(eval, expr));

        expr = "<coordinator-app name=\"mycoordinator-app\" start=\"${start}\" end=\"${end}\""
                + " frequency=\"${coord:months(1)}\"><data-in name=\"A\" dataset=\"a\"></data-in>";
        reply = expr.replace("${start}", conf.get("start")).replace("${end}",
                conf.get("end")).replace("${coord:months(1)}", "1");
        assertEquals(reply, CoordELFunctions.evalAndWrap(eval, expr));

        // A plain numeric frequency must evaluate without error.
        String numericExpr = "frequency=60";
        CoordELFunctions.evalAndWrap(eval, numericExpr);

        // The following functions are not allowed in the submit-freq group: each must throw.
        expr = "frequency=${coord:daysInMonth(2)}";
        try {
            CoordELFunctions.evalAndWrap(eval, expr);
            fail();
        }
        catch (Exception ex) {
            // expected: daysInMonth is not a valid submit-time frequency
        }

        expr = "frequency=${coord:hoursInDay(2)}";
        try {
            CoordELFunctions.evalAndWrap(eval, expr);
            fail();
        }
        catch (Exception ex) {
            // expected: hoursInDay is not a valid submit-time frequency
        }

        expr = "frequency=${coord:tzOffset()}";
        try {
            CoordELFunctions.evalAndWrap(eval, expr);
            fail();
        }
        catch (Exception ex) {
            // expected: tzOffset is not a valid submit-time frequency
        }

        // Text without any EL function passes through unchanged.
        expr = "<frequency=120";
        assertEquals(expr, CoordELFunctions.evalAndWrap(eval, expr));
    }

    /**
     * Verifies that the URI evaluator substitutes ${YEAR}/${MONTH}/${DAY}/${HOUR}/${MINUTE}
     * from the given nominal time.
     */
    public void testCreateURIELEvaluator() throws Exception {
        ELEvaluator eval = CoordELEvaluator.createURIELEvaluator("2009-08-09T23:59Z");
        String expr = "hdfs://p1/p2/${YEAR}/${MONTH}/${DAY}/${HOUR}/${MINUTE}/";
        assertEquals("hdfs://p1/p2/2009/08/09/23/59/", CoordELFunctions.evalAndWrap(eval, expr));
        expr = "hdfs://p1/p2/${YEAR}/${MONTH}/${DAY}/${MINUTE}/";
        assertEquals("hdfs://p1/p2/2009/08/09/59/", CoordELFunctions.evalAndWrap(eval, expr));
    }

    /**
     * Verifies that the data evaluator resolves coord:dataIn, coord:actionId, coord:name,
     * coord:nominalTime and coord:actualTime inside a coordinator action definition.
     */
    public void testCreateDataEvaluator() throws Exception {
        String jobXml = "<coordinator-app name=\"mycoordinator-app\" start=\"2009-02-01T01:00GMT\" end=\"2009-02-03T23:59GMT\" timezone=\"UTC\"";
        jobXml += " frequency=\"720\" freq_timeunit=\"MINUTE\"";
        jobXml += " action-nominal-time='2009-09-01T00:00Z' action-actual-time='2010-10-01T00:00Z'>";
        jobXml += "<input-events><data-in name=\"A\" dataset=\"a\"><uris>file:///tmp/coord/US/2009/1/30|file:///tmp/coord/US/2009/1/31</uris>";
        jobXml += "<dataset name=\"a\" frequency=\"1440\" initial-instance=\"2009-01-01T00:00Z\">";
        jobXml += "<uri-template>file:///tmp/coord/US/${YEAR}/${MONTH}/${DAY}</uri-template></dataset></data-in></input-events>";
        jobXml += "<action><workflow><url>http://foobar.com:8080/oozie</url><app-path>hdfs://foobarfoobar.com:9000/usr/tucu/mywf</app-path>";
        jobXml += "<configuration><property><name>inputA</name><value>${coord:dataIn('A')}</value></property>";
        jobXml += "<property><name>ACTIONID</name><value>${coord:actionId()}</value></property>";
        jobXml += "<property><name>NAME</name><value>${coord:name()}</value></property>";
        jobXml += "<property><name>NOMINALTIME</name><value>${coord:nominalTime()}</value></property>";
        jobXml += "<property><name>ACTUALTIME</name><value>${coord:actualTime()}</value></property>";
        jobXml += "</configuration></workflow></action></coordinator-app>";

        // The expected <action> element after all coord:* functions have been resolved.
        String reply = "<action><workflow><url>http://foobar.com:8080/oozie</url><app-path>hdfs://foobarfoobar.com:9000/usr/tucu/mywf</app-path>";
        reply += "<configuration><property><name>inputA</name><value>file:///tmp/coord/US/2009/1/30|file:///tmp/coord/US/2009/1/31</value></property>";
        reply += "<property><name>ACTIONID</name><value>00000-oozie-C@1</value></property>";
        reply += "<property><name>NAME</name><value>mycoordinator-app</value></property>";
        reply += "<property><name>NOMINALTIME</name><value>2009-09-01T00:00Z</value></property>";
        reply += "<property><name>ACTUALTIME</name><value>2010-10-01T00:00Z</value></property>";
        reply += "</configuration></workflow></action>";

        Element eJob = XmlUtils.parseXml(jobXml);
        Configuration conf = new XConfiguration(new StringReader(getConfString()));
        ELEvaluator eval = CoordELEvaluator.createDataEvaluator(eJob, conf, "00000-oozie-C@1");
        Element action = eJob.getChild("action", eJob.getNamespace());
        String str = XmlUtils.prettyPrint(action).toString();
        assertEquals(XmlUtils.prettyPrint(XmlUtils.parseXml(reply)).toString(),
                CoordELFunctions.evalAndWrap(eval, str));
    }

    /**
     * Verifies that coord:current(0) resolves to the dataset instance preceding the
     * action's nominal time.
     */
    public void testCreateInstancesELEvaluator() throws Exception {
        String dataEvntXML = "<data-in name=\"A\" dataset=\"a\"><uris>file:///tmp/coord/US/2009/1/30|file:///tmp/coord/US/2009/1/31</uris>";
        dataEvntXML += "<dataset name=\"a\" frequency=\"1440\" initial-instance=\"2009-01-01T00:00Z\" freq_timeunit=\"MINUTE\" timezone=\"UTC\" end_of_duration=\"NONE\">";
        dataEvntXML += "<uri-template>file:///tmp/coord/US/${YEAR}/${MONTH}/${DAY}</uri-template></dataset></data-in>";
        Element event = XmlUtils.parseXml(dataEvntXML);

        SyncCoordAction appInst = new SyncCoordAction();
        appInst.setNominalTime(DateUtils.parseDateOozieTZ("2009-09-08T01:00Z"));
        appInst.setActualTime(DateUtils.parseDateOozieTZ("2010-10-01T00:00Z"));
        appInst.setTimeUnit(TimeUnit.MINUTE);

        Configuration conf = new XConfiguration(new StringReader(getConfString()));
        ELEvaluator eval = CoordELEvaluator.createInstancesELEvaluator(event, appInst, conf);
        String expr = "${coord:current(0)}";
        assertEquals("2009-09-08T00:00Z", eval.evaluate(expr, String.class));
    }

    /**
     * Verifies lazy evaluation: coord:latest resolves only for instances whose directory
     * (with a _SUCCESS marker) exists, and coord:future scans forward for existing instances.
     */
    public void testCreateLazyEvaluator() throws Exception {
        String testCaseDir = getTestCaseDir();
        Configuration conf = new XConfiguration(new StringReader(getConfString()));
        Date actualTime = DateUtils.parseDateOozieTZ("2009-09-01T01:00Z");
        Date nominalTime = DateUtils.parseDateOozieTZ("2009-09-01T00:00Z");
        String dataEvntXML = "<data-in name=\"A\" dataset=\"a\"><uris>file:///"+testCaseDir+"/US/2009/1/30|file:///tmp/coord/US/2009/1/31</uris>";
        dataEvntXML += "<dataset name=\"a\" frequency=\"1440\" initial-instance=\"2009-01-01T00:00Z\" freq_timeunit=\"MINUTE\" timezone=\"UTC\" end_of_duration=\"NONE\">";
        dataEvntXML += "<uri-template>file:///"+testCaseDir+"/${YEAR}/${MONTH}/${DAY}</uri-template></dataset></data-in>";
        Element dEvent = XmlUtils.parseXml(dataEvntXML);
        ELEvaluator eval = CoordELEvaluator.createLazyEvaluator(actualTime, nominalTime, dEvent, conf);

        // Only 2009/01/02 exists, so latest(0) resolves while latest(-1) stays unresolved.
        createDir(testCaseDir + "/2009/01/02");
        String expr = "${coord:latest(0)} ${coord:latest(-1)}";
        assertEquals("2009-01-02T00:00Z ${coord:latest(-1)}", eval.evaluate(expr, String.class));

        // future(1, 30): the second existing instance after the nominal time.
        createDir(testCaseDir + "/2009/09/04");
        createDir(testCaseDir + "/2009/09/05");
        expr = "${coord:future(1, 30)}";
        assertEquals("2009-09-05T00:00Z", eval.evaluate(expr, String.class));
    }

    public void testCleanup() throws Exception {
        Services.get().destroy();
    }

    /**
     * Creates {@code dir} together with a {@code _SUCCESS} marker directory inside it.
     * <p>
     * Uses {@link File#mkdirs()} instead of shelling out to {@code mkdir -p}: the former is
     * portable, does not break on paths containing spaces and reports failure explicitly
     * (the previous Runtime.exec based version ignored the exit status and swallowed
     * InterruptedException without re-interrupting the thread).
     *
     * @param dir the instance directory to create
     */
    private void createDir(String dir) {
        File success = new File(dir, "_SUCCESS");
        if (!success.mkdirs() && !success.isDirectory()) {
            fail("Could not create test directory " + success);
        }
    }

    /**
     * Builds the XML configuration shared by the tests (dataset location, start/end times,
     * timezone, user and tuning properties).
     *
     * @return the configuration document as a string
     */
    private String getConfString() {
        StringBuilder conf = new StringBuilder();
        conf.append("<configuration> <property><name>baseFsURI</name> <value>file:///tmp/coord/</value> </property>");
        conf.append("<property><name>language</name> <value>en</value> </property>");
        conf.append("<property> <name>country</name> <value>US</value> </property> "
                + "<property> <name>market</name> <value>teens</value> </property> "
                + "<property> <name>app_path</name> <value>file:///tmp/coord/workflows</value> </property> "
                + "<property> <name>start</name> <value>2009-02-01T01:00Z</value> </property>"
                + "<property> <name>end</name> <value>2009-02-03T23:59Z</value> </property> "
                + "<property> <name>timezone</name> <value>UTC</value> </property> "
                + "<property> <name>user.name</name> <value>test_user</value> </property> "
                + "<property> <name>timeout</name> <value>180</value> </property> "
                + "<property> <name>concurrency_level</name> <value>1</value> </property> "
                + "<property> <name>execution_order</name> <value>LIFO</value> </property>"
                + "<property> <name>include_ds_files</name> <value>file:///homes/"
                + getTestUser()
                + "/workspace/oozie-main/core/src/main/java/org/apache/oozie/coord/datasets.xml</value>"
                + " </property></configuration>");
        return conf.toString();
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.internal.cache;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;

import org.apache.logging.log4j.Logger;

import org.apache.geode.DataSerializer;
import org.apache.geode.SystemFailure;
import org.apache.geode.cache.Cache;
import org.apache.geode.distributed.internal.ClusterDistributionManager;
import org.apache.geode.distributed.internal.DistributionManager;
import org.apache.geode.distributed.internal.DistributionMessage;
import org.apache.geode.distributed.internal.HighPriorityDistributionMessage;
import org.apache.geode.distributed.internal.InternalDistributedSystem;
import org.apache.geode.distributed.internal.MessageWithReply;
import org.apache.geode.distributed.internal.ReplyException;
import org.apache.geode.distributed.internal.ReplyMessage;
import org.apache.geode.distributed.internal.ReplyProcessor21;
import org.apache.geode.distributed.internal.ReplySender;
import org.apache.geode.distributed.internal.membership.InternalDistributedMember;
import org.apache.geode.internal.InternalDataSerializer;
import org.apache.geode.internal.cache.partitioned.PartitionMessage;
import org.apache.geode.internal.logging.LogService;

/**
 * Message to all the peers to ask which member hosts the transaction for the given transaction id
 */
public class FindRemoteTXMessage extends HighPriorityDistributionMessage
    implements MessageWithReply {

  private static final Logger logger = LogService.getLogger();

  // Id of the transaction being searched for; serialized in toData/fromData.
  private TXId txId;
  // Reply-processor id on the sender used to route the reply back.
  private int processorId;

  /** For deserialization only; fields are populated by {@link #fromData}. */
  public FindRemoteTXMessage() {
    // do nothing
  }

  public FindRemoteTXMessage(TXId txid, int processorId, Set recipients) {
    super();
    setRecipients(recipients);
    this.txId = txid;
    this.processorId = processorId;
  }

  /**
   * Asks all the peers if they host a transaction for the given txId
   *
   * @param txId the transaction id
   * @return reply processor containing memberId of the member that hosts the transaction and a
   *         recently committed transactionMessage if any
   */
  public static FindRemoteTXMessageReplyProcessor send(Cache cache, TXId txId) {
    final InternalDistributedSystem system =
        (InternalDistributedSystem) cache.getDistributedSystem();
    DistributionManager dm = system.getDistributionManager();
    // Broadcast to every other distribution manager; the processor collects their replies.
    Set recipients = dm.getOtherDistributionManagerIds();
    FindRemoteTXMessageReplyProcessor processor =
        new FindRemoteTXMessageReplyProcessor(dm, recipients, txId);
    FindRemoteTXMessage msg =
        new FindRemoteTXMessage(txId, processor.getProcessorId(), recipients);
    dm.putOutgoing(msg);
    return processor;
  }

  @Override
  public int getDSFID() {
    return FIND_REMOTE_TX_MESSAGE;
  }

  /**
   * Receiver side: answers whether this member hosts (or recently completed) the transaction,
   * and, failing that, forwards any partial commit message tracked locally. On any error a
   * ReplyException is sent back instead so the sender does not hang.
   */
  @Override
  protected void process(ClusterDistributionManager dm) {
    // sendReply stays true until a normal reply has been dispatched; the finally
    // block then uses it to decide whether an error reply is still owed.
    boolean sendReply = true;
    Throwable thr = null;
    try {
      if (logger.isDebugEnabled()) {
        logger.debug("processing {}", this);
      }
      FindRemoteTXMessageReply reply = new FindRemoteTXMessageReply();
      InternalCache cache = dm.getCache();
      if (cache != null) {
        TXManagerImpl mgr = (TXManagerImpl) cache.getCacheTransactionManager();
        mgr.waitForCompletingTransaction(txId); // in case there is a lost commit going on
        reply.isHostingTx = mgr.isHostedTxInProgress(txId) || mgr.isHostedTxRecentlyCompleted(txId);
        if (!reply.isHostingTx) {
          // lookup in CMTTracker if a partial commit message exists
          TXCommitMessage partialMessage = TXCommitMessage.getTracker().getTXCommitMessage(txId);
          if (partialMessage != null) {
            reply.txCommitMessage = partialMessage;
            reply.isPartialCommitMessage = true;
          }
          // cleanup the local txStateProxy fixes bug 43069
          mgr.removeHostedTXState(txId);
        }
      }
      reply.setRecipient(getSender());
      reply.setProcessorId(processorId);
      getReplySender(dm).putOutgoing(reply);
      // Normal reply sent; suppress the fallback reply in the finally block.
      sendReply = false;
      if (logger.isDebugEnabled()) {
        logger.debug("TX: FoundRemoteTXMessage: isHostingTx for txid:{}? {} isPartialCommit? {}",
            txId, reply.isHostingTx, reply.isPartialCommitMessage);
      }
    } catch (VirtualMachineError err) {
      SystemFailure.initiateFailure(err);
      // If this ever returns, rethrow the error. We're poisoned
      // now, so don't let this thread continue.
      throw err;
    } catch (Throwable t) {
      // Whenever you catch Error or Throwable, you must also
      // catch VirtualMachineError (see above). However, there is
      // _still_ a possibility that you are dealing with a cascading
      // error condition, so you also need to check to see if the JVM
      // is still usable:
      SystemFailure.checkFailure();
      if (sendReply) {
        thr = t;
      }
    } finally {
      ReplySender rs = getReplySender(dm);
      // Only send an error reply if no normal reply was dispatched above.
      if (sendReply && (this.processorId != 0 || (rs != dm))) {
        ReplyException rex = null;
        if (thr != null) {
          rex = new ReplyException(thr);
        }
        ReplyMessage.send(getSender(), this.processorId, rex, getReplySender(dm));
      }
    }
  }

  @Override
  public String toString() {
    StringBuilder buff = new StringBuilder();
    String className = getClass().getName();
    buff.append(className.substring(
        className.indexOf(PartitionMessage.PN_TOKEN) + PartitionMessage.PN_TOKEN.length())); // partition.<foo>
    buff.append("(txId=").append(this.txId).append("; sender=").append(getSender())
        .append("; processorId=").append(this.processorId);
    buff.append(")");
    return buff.toString();
  }

  // Wire format: txId then processorId; must stay symmetric with fromData.
  @Override
  public void toData(DataOutput out) throws IOException {
    super.toData(out);
    DataSerializer.writeObject(this.txId, out);
    out.writeInt(this.processorId);
  }

  @Override
  public void fromData(DataInput in) throws IOException, ClassNotFoundException {
    super.fromData(in);
    this.txId = DataSerializer.readObject(in);
    this.processorId = in.readInt();
  }

  /**
   * Collects the peers' replies: remembers the first member that reports hosting the
   * transaction, and accumulates partial commit messages from members that only saw
   * part of the commit.
   */
  public static class FindRemoteTXMessageReplyProcessor extends ReplyProcessor21 {

    private InternalDistributedMember hostingMember;
    // Lazily combined from partialCommitMessages in getTxCommitMessage().
    private TXCommitMessage txCommit;
    private TXId txId;
    private Set<TXCommitMessage> partialCommitMessages = new HashSet<TXCommitMessage>();

    public FindRemoteTXMessageReplyProcessor(DistributionManager dm, Collection initMembers,
        TXId txId) {
      super(dm, initMembers);
      this.txId = txId;
    }

    @Override
    public void process(DistributionMessage msg) {
      if (msg instanceof FindRemoteTXMessageReply) {
        FindRemoteTXMessageReply reply = (FindRemoteTXMessageReply) msg;
        if (reply.isHostingTx) {
          this.hostingMember = msg.getSender();
        } else if (reply.isPartialCommitMessage) {
          this.partialCommitMessages.add(reply.txCommitMessage);
        }
      }
      // Always delegate so the superclass reply accounting stays correct.
      super.process(msg);
    }

    /**
     * @return the member that is hosting the tx
     */
    public InternalDistributedMember getHostingMember() {
      return hostingMember;
    }

    @Override
    public boolean stillWaiting() {
      // Stop waiting for further replies as soon as a hosting member is known.
      return this.hostingMember == null && super.stillWaiting();
    }

    /**
     * @return if hosting member is null, the rebuilt TXCommitMessage from partial TXCommitMessages
     *         distributed to peers during commit processing
     */
    public TXCommitMessage getTxCommitMessage() {
      if (this.txCommit != null) {
        return this.txCommit;
      }
      if (!this.partialCommitMessages.isEmpty()) {
        // Include this member's own partial message, if any, before combining.
        TXCommitMessage localTXMessage = TXCommitMessage.getTracker().getTXCommitMessage(txId);
        if (localTXMessage != null) {
          partialCommitMessages.add(localTXMessage);
        }
        txCommit = TXCommitMessage.combine(partialCommitMessages);
      }
      return txCommit;
    }
  }

  @Override
  public boolean sendViaUDP() {
    return true;
  }

  /**
   * Reply message for {@link FindRemoteTXMessage}. Reply is a boolean to indicate if the recipient
   * hosts or has recently hosted the tx state. If the member did host the txState previously, reply
   * contains the complete TXCommitMessage representing the tx.
   */
  public static class FindRemoteTXMessageReply extends ReplyMessage {
    protected boolean isHostingTx;
    protected boolean isPartialCommitMessage;
    protected TXCommitMessage txCommitMessage;

    public FindRemoteTXMessageReply() {}

    @Override
    public int getDSFID() {
      return FIND_REMOTE_TX_REPLY;
    }

    // Wire format: isHostingTx, then a has-commit-message flag; when true,
    // isPartialCommitMessage followed by the serialized TXCommitMessage.
    @Override
    public void toData(DataOutput out) throws IOException {
      super.toData(out);
      out.writeBoolean(this.isHostingTx);
      boolean sendTXCommitMessage = this.txCommitMessage != null;
      out.writeBoolean(sendTXCommitMessage);
      if (sendTXCommitMessage) {
        out.writeBoolean(this.isPartialCommitMessage);
        // since this message is going to a peer, reset client version
        txCommitMessage.setClientVersion(null); // fixes bug 46529
        InternalDataSerializer.writeDSFID(txCommitMessage, out);
      }
    }

    @Override
    public void fromData(DataInput in) throws IOException, ClassNotFoundException {
      super.fromData(in);
      this.isHostingTx = in.readBoolean();
      if (in.readBoolean()) {
        this.isPartialCommitMessage = in.readBoolean();
        txCommitMessage = (TXCommitMessage) InternalDataSerializer.readDSFID(in);
      }
    }
  }
}
/* * ARX: Powerful Data Anonymization * Copyright 2012 - 2018 Fabian Prasser and contributors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.deidentifier.arx.gui.view.impl.utility; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import org.deidentifier.arx.ARXClassificationConfiguration; import org.deidentifier.arx.ARXFeatureScaling; import org.deidentifier.arx.aggregates.StatisticsBuilderInterruptible; import org.deidentifier.arx.aggregates.StatisticsClassification; import org.deidentifier.arx.aggregates.StatisticsClassification.ROCCurve; import org.deidentifier.arx.gui.Controller; import org.deidentifier.arx.gui.model.ModelEvent; import org.deidentifier.arx.gui.model.ModelEvent.ModelPart; import org.deidentifier.arx.gui.resources.Resources; import org.deidentifier.arx.gui.view.SWTUtil; import org.deidentifier.arx.gui.view.impl.common.ClipboardHandlerTable; import org.deidentifier.arx.gui.view.impl.common.ComponentStatusLabelProgressProvider; import org.deidentifier.arx.gui.view.impl.common.ComponentTitledFolder; import org.deidentifier.arx.gui.view.impl.common.ComponentTitledFolderButtonBar; import org.deidentifier.arx.gui.view.impl.common.async.Analysis; import org.deidentifier.arx.gui.view.impl.common.async.AnalysisContext; import org.deidentifier.arx.gui.view.impl.common.async.AnalysisManager; import org.eclipse.swt.SWT; import 
org.eclipse.swt.custom.SashForm; import org.eclipse.swt.events.ControlAdapter; import org.eclipse.swt.events.ControlEvent; import org.eclipse.swt.events.DisposeEvent; import org.eclipse.swt.events.DisposeListener; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.graphics.Color; import org.eclipse.swt.graphics.Font; import org.eclipse.swt.graphics.FontData; import org.eclipse.swt.graphics.Point; import org.eclipse.swt.graphics.Rectangle; import org.eclipse.swt.layout.FillLayout; import org.eclipse.swt.widgets.Combo; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Control; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Event; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.TableColumn; import org.eclipse.swt.widgets.TableItem; import org.swtchart.Chart; import org.swtchart.IAxis; import org.swtchart.IAxisSet; import org.swtchart.ILineSeries; import org.swtchart.ILineSeries.PlotSymbolType; import org.swtchart.ISeries; import org.swtchart.ISeries.SeriesType; import org.swtchart.ISeriesSet; import org.swtchart.ITitle; import org.swtchart.Range; import de.linearbits.swt.table.DynamicTable; import de.linearbits.swt.table.DynamicTableColumn; /** * This view displays a statistics about the performance of logistic regression classifiers * * @author Fabian Prasser * @author Johanna Eicher */ public abstract class ViewStatisticsClassification extends ViewStatistics<AnalysisContextClassification> { /** Minimal width of a category label. */ private static final int MIN_CATEGORY_WIDTH = 10; /** Internal stuff. 
*/
    private AnalysisManager manager;

    /** View: sibling view kept in sync when the folder selection changes */
    private ViewStatisticsClassification other;
    /** View: titled folder holding the overview and ROC tabs */
    private ComponentTitledFolder folder;
    /** View: table with one row of overall performance metrics per target variable */
    private DynamicTable performanceTableOverview;
    /** View: table with sensitivity/specificity/Brier score per class value */
    private DynamicTable performanceTableSensitivitySpecificity;
    /** View: root composite of the performance tab */
    private Composite performanceRoot;
    /** View: sash splitting the two performance tables */
    private SashForm performanceSash;
    /** View: top-level composite of this view */
    private Composite root;
    /** View: table with AUC values per class value */
    private DynamicTable rocTable;
    /** View: root composite of the ROC tab */
    private Composite rocRoot;
    /** View: sash splitting ROC table and ROC chart */
    private SashForm rocSash;
    /** View: chart rendering the currently selected ROC curves */
    private Chart rocChart;
    /** Widget: combo for choosing the target variable on the ROC tab */
    private Combo rocCombo;
    /** Model: true when this view displays output (anonymized) data, i.e. part != ModelPart.INPUT */
    private boolean isOutput;
    /** Model: per overview column, whether it is rendered as a bar chart */
    private boolean[] columnInOverviewIsBarchart;
    /** Model: target variable -> (class value -> ROC curve) for anonymized output */
    private Map<String, Map<String, ROCCurve>> rocCurves;
    /** Model: target variable -> (class value -> ROC curve) for original input */
    private Map<String, Map<String, ROCCurve>> originalRocCurves;
    /** Model: target variable -> (class value -> ROC curve) for the ZeroR baseline */
    private Map<String, Map<String, ROCCurve>> zerorRocCurves;

    /**
     * Creates a new instance and registers for all model events this view reacts to.
     *
     * @param parent the parent composite
     * @param controller the application controller
     * @param part the model part displayed (input or output)
     */
    public ViewStatisticsClassification(final Composite parent,
                                        final Controller controller,
                                        final ModelPart part) {
        super(parent, controller, part, null, false);
        this.manager = new AnalysisManager(parent.getDisplay());
        this.isOutput = part != ModelPart.INPUT;
        this.rocCurves = new HashMap<>();
        this.originalRocCurves = new HashMap<>();
        this.zerorRocCurves = new HashMap<>();
        controller.addListener(ModelPart.CLASSIFICATION_CONFIGURATION, this);
        controller.addListener(ModelPart.DATA_TYPE, this);
        controller.addListener(ModelPart.SELECTED_ATTRIBUTE, this);
        controller.addListener(ModelPart.SELECTED_CLASS_VALUE, this);
    }

    @Override
    public LayoutUtility.ViewUtilityType getType() {
        return LayoutUtility.ViewUtilityType.CLASSIFICATION_PRECISION_RECALL;
    }

    @Override
    public void update(ModelEvent event) {
        super.update(event);
        // Configuration or data type changed: recompute, or reset when no features/classes are selected
        if (event.part == ModelPart.CLASSIFICATION_CONFIGURATION ||
            event.part == ModelPart.DATA_TYPE) {
            if (getModel() != null && (getModel().getSelectedFeatures().isEmpty() ||
                                       getModel().getSelectedClasses().isEmpty())) {
                doReset();
                return;
            } else {
                triggerUpdate();
            }
        }
        // Selection changes only require re-selecting within the already computed results
        if (event.part == ModelPart.SELECTED_ATTRIBUTE) {
            updateSelectedTarget(getModel().getSelectedAttribute());
        }
        if (event.part == ModelPart.SELECTED_CLASS_VALUE) {
            updateSelectedClassValue(super.getModel().getSelectedClassValue());
        }
    }

    /**
     * Builds the overall performance view (two stacked tables).
     *
     * @param parent the tab composite
     * @return the root control of the performance tab
     */
    private Control createOverviewControl(Composite parent) {

        // Root
        this.performanceRoot = parent;
        this.performanceRoot.setLayout(new FillLayout());

        // Sash
        this.performanceSash = new SashForm(this.performanceRoot, SWT.VERTICAL);

        // Table: performance per target
        this.performanceTableOverview = SWTUtil.createTableDynamic(this.performanceSash,
                                                                   SWT.BORDER | SWT.V_SCROLL | SWT.H_SCROLL | SWT.FULL_SELECTION);
        this.performanceTableOverview.setHeaderVisible(true);
        this.performanceTableOverview.setLinesVisible(true);
        this.performanceTableOverview.setMenu(new ClipboardHandlerTable(performanceTableOverview).getMenu());

        // Columns: +2 leaves slack so all columns fit without a horizontal scrollbar
        String[] columns = getColumnHeadersForPerformanceForOverallPerformanceTable();
        String width = String.valueOf(Math.round(100d / ((double) columns.length + 2) * 100d) / 100d) + "%"; //$NON-NLS-1$
        this.columnInOverviewIsBarchart = getColumnTypesForPerformanceForOverallPerformanceTable();

        // Column for target
        DynamicTableColumn c = new DynamicTableColumn(performanceTableOverview, SWT.LEFT);
        c.setWidth(width, "100px"); //$NON-NLS-1$
        c.setText(Resources.getMessage("ViewStatisticsClassificationInput.0")); //$NON-NLS-1$
        for (int i = 0; i < columns.length; i++) {
            String column = columns[i];
            c = new DynamicTableColumn(performanceTableOverview, SWT.LEFT);
            if (columnInOverviewIsBarchart[i]) {
                SWTUtil.createColumnWithBarCharts(performanceTableOverview, c);
            }
            c.setWidth(width, "100px"); //$NON-NLS-1$
            c.setText(column);
        }
        for (final TableColumn col : performanceTableOverview.getColumns()) {
            col.pack();
        }
        SWTUtil.createGenericTooltip(performanceTableOverview);

        // Clicking a row selects that target variable and broadcasts the selection
        performanceTableOverview.addListener(SWT.MouseDown, new Listener() {
            public void handleEvent(Event event) {
                Rectangle clientArea = performanceTableOverview.getClientArea();
                Point pt = new Point(event.x, event.y);
                int index = performanceTableOverview.getTopIndex();
                while (index < performanceTableOverview.getItemCount()) {
                    boolean visible = false;
                    TableItem item = performanceTableOverview.getItem(index);
                    for (int i = 0; i < performanceTableOverview.getColumnCount(); i++) {
                        Rectangle rect = item.getBounds(i);
                        if (rect.contains(pt)) {
                            String attribute = item.getText(0);
                            getModel().setSelectedAttribute(attribute);
                            updateSelectedTarget(attribute);
                            getController().update(new ModelEvent(ViewStatisticsClassification.this,
                                                                  ModelPart.SELECTED_ATTRIBUTE, attribute));
                            return;
                        }
                        if (!visible && rect.intersects(clientArea)) {
                            visible = true;
                        }
                    }
                    // Stop once rows scroll out of the visible client area
                    if (!visible) return;
                    index++;
                }
            }
        });

        // Table: performance for each class of a target
        this.performanceTableSensitivitySpecificity = SWTUtil.createTableDynamic(this.performanceSash,
                                                                                 SWT.BORDER | SWT.V_SCROLL | SWT.H_SCROLL | SWT.FULL_SELECTION);
        this.performanceTableSensitivitySpecificity.setHeaderVisible(true);
        this.performanceTableSensitivitySpecificity.setLinesVisible(true);
        this.performanceTableSensitivitySpecificity.setMenu(new ClipboardHandlerTable(performanceTableSensitivitySpecificity).getMenu());
        width = String.valueOf(Math.round(100d / ((double) 4) * 100d) / 100d) + "%"; //$NON-NLS-1$

        // Column for class
        c = new DynamicTableColumn(performanceTableSensitivitySpecificity, SWT.LEFT);
        c.setWidth(width, "100px"); //$NON-NLS-1$
        c.setText(Resources.getMessage("ViewStatisticsClassificationInput.22")); //$NON-NLS-1$

        // Column for sensitivity
        c = new DynamicTableColumn(performanceTableSensitivitySpecificity, SWT.LEFT);
        SWTUtil.createColumnWithBarCharts(performanceTableSensitivitySpecificity, c);
        c.setWidth(width, "100px"); //$NON-NLS-1$
        c.setText(Resources.getMessage("ViewStatisticsClassificationInput.11")); //$NON-NLS-1$

        // Column for specificity
        c = new DynamicTableColumn(performanceTableSensitivitySpecificity, SWT.LEFT);
        SWTUtil.createColumnWithBarCharts(performanceTableSensitivitySpecificity, c);
        c.setWidth(width, "100px"); //$NON-NLS-1$
        c.setText(Resources.getMessage("ViewStatisticsClassificationInput.10")); //$NON-NLS-1$

        // Column for brier score
        c = new DynamicTableColumn(performanceTableSensitivitySpecificity, SWT.LEFT);
        SWTUtil.createColumnWithBarCharts(performanceTableSensitivitySpecificity, c);
        c.setWidth(width, "100px"); //$NON-NLS-1$
        c.setText(Resources.getMessage("ViewStatisticsClassificationInput.8")); //$NON-NLS-1$

        for (final TableColumn col : performanceTableSensitivitySpecificity.getColumns()) {
            col.pack();
        }
        SWTUtil.createGenericTooltip(performanceTableSensitivitySpecificity);
        this.performanceSash.setWeights(new int[] {2, 2});
        return this.performanceRoot;
    }

    /**
     * Creates the control for ROC curves (target combo, AUC table, chart).
     *
     * @param parent the tab composite
     * @return the root control of the ROC tab
     */
    private Control createROCControl(Composite parent) {

        // Root
        this.rocRoot = parent;
        this.rocRoot.setLayout(new FillLayout());

        // Sash
        this.rocSash = new SashForm(this.rocRoot, SWT.VERTICAL);
        final Composite composite = new Composite(this.rocSash, SWT.NONE);
        composite.setLayout(SWTUtil.createGridLayout(1));

        // Combo for selecting a target variable
        final Composite composite2 = new Composite(composite, SWT.NONE);
        composite2.setLayoutData(SWTUtil.createFillHorizontallyGridData());
        composite2.setLayout(SWTUtil.createGridLayout(2, false));
        final Label lblTargetVariable = new Label(composite2, SWT.PUSH);
        lblTargetVariable.setText(Resources.getMessage("ViewStatisticsClassificationInput.21")); //$NON-NLS-1$
        this.rocCombo = new Combo(composite2, SWT.READ_ONLY);
        this.rocCombo.setLayoutData(SWTUtil.createFillHorizontallyGridData());
        this.rocCombo.addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(final SelectionEvent arg0) {
                if (rocCombo.getSelectionIndex() >= 0) {
                    String attribute = rocCombo.getItem(rocCombo.getSelectionIndex());
                    getModel().setSelectedAttribute(attribute);
                    updateSelectedTarget(attribute);
                    getController().update(new ModelEvent(ViewStatisticsClassification.this,
                                                          ModelPart.SELECTED_ATTRIBUTE, attribute));
                }
            }
        });

        // Table
        this.rocTable = SWTUtil.createTableDynamic(composite,
                                                   SWT.BORDER | SWT.V_SCROLL | SWT.H_SCROLL | SWT.FULL_SELECTION);
        this.rocTable.setHeaderVisible(true);
        this.rocTable.setLinesVisible(true);
        this.rocTable.setMenu(new ClipboardHandlerTable(rocTable).getMenu());
        this.rocTable.setLayoutData(SWTUtil.createFillGridData(2));

        // Columns: output views show one extra column (relative AUC)
        String[] columns = getColumnHeadersForAUCTable();
        String width = String.valueOf(Math.round(100d / ((double) (getTarget() == ModelPart.OUTPUT ? 4 : 3)) * 100d) / 100d) + "%"; //$NON-NLS-1$

        // Column for class
        DynamicTableColumn c = new DynamicTableColumn(rocTable, SWT.LEFT);
        c.setWidth(width, "100px"); //$NON-NLS-1$
        c.setText(Resources.getMessage("ViewStatisticsClassificationInput.22")); //$NON-NLS-1$
        for (String column : columns) {
            c = new DynamicTableColumn(rocTable, SWT.LEFT);
            SWTUtil.createColumnWithBarCharts(rocTable, c);
            c.setWidth(width, "100px"); //$NON-NLS-1$
            c.setText(column);
        }
        // Fixed: pack and attach the tooltip to the ROC table itself
        // (previously operated on performanceTableOverview — copy/paste from createOverviewControl)
        for (final TableColumn col : rocTable.getColumns()) {
            col.pack();
        }
        SWTUtil.createGenericTooltip(rocTable);

        // Chart and sash
        resetChart();
        this.rocSash.setWeights(new int[] {2, 2});

        // Tool tip: show (FPR, TPR) of the baseline curve under the cursor
        final StringBuilder builder = new StringBuilder();
        this.rocSash.addListener(SWT.MouseMove, new Listener() {
            @Override
            public void handleEvent(Event event) {
                if (rocChart != null) {
                    IAxisSet axisSet = rocChart.getAxisSet();
                    if (axisSet != null) {
                        IAxis xAxis = axisSet.getXAxis(0);
                        IAxis yAxis = axisSet.getYAxis(0);
                        if (xAxis != null && yAxis != null) {
                            Point cursor = rocChart.getPlotArea().toControl(Display.getCurrent().getCursorLocation());
                            if (cursor.x >= 0 && cursor.x < rocChart.getPlotArea().getSize().x &&
                                cursor.y >= 0 && cursor.y < rocChart.getPlotArea().getSize().y) {
                                ISeries[] data = rocChart.getSeriesSet().getSeries();
                                if (data != null && data.length > 0) {
                                    // Use baseline for tool tip
                                    double[] x = data[0].getXSeries();
                                    double[] y = data[0].getYSeries();
                                    int index = getIndex(x, xAxis.getDataCoordinate(cursor.x));
                                    if (index >= 0) {
                                        builder.setLength(0);
                                        builder.append("("); //$NON-NLS-1$
                                        builder.append(Resources.getMessage("ViewStatisticsClassificationInput.20")).append(": "); //$NON-NLS-1$ //$NON-NLS-3$
                                        builder.append(SWTUtil.getPrettyString(x[index])).append(", "); //$NON-NLS-1$
                                        builder.append(Resources.getMessage("ViewStatisticsClassificationInput.19")).append(": "); //$NON-NLS-1$ //$NON-NLS-3$
                                        builder.append(SWTUtil.getPrettyString(y[index]));
                                        builder.append(")"); //$NON-NLS-1$
                                        rocSash.setToolTipText(builder.toString());
                                        return;
                                    }
                                }
                            }
                        }
                    }
                }
                rocSash.setToolTipText(null);
            }
        });

        // Clicking a row selects that class value and updates the plotted curves
        rocTable.addListener(SWT.MouseDown, new Listener() {
            public void handleEvent(Event event) {
                Rectangle clientArea = rocTable.getClientArea();
                Point pt = new Point(event.x, event.y);
                int index = rocTable.getTopIndex();
                while (index < rocTable.getItemCount()) {
                    boolean visible = false;
                    TableItem item = rocTable.getItem(index);
                    for (int i = 0; i < rocTable.getColumnCount(); i++) {
                        Rectangle rect = item.getBounds(i);
                        if (rect.contains(pt)) {
                            String attribute = item.getText(0);
                            getModel().setSelectedClassValue(attribute);
                            updateSelectedClassValue(attribute);
                            getController().update(new ModelEvent(ViewStatisticsClassification.this,
                                                                  ModelPart.SELECTED_CLASS_VALUE, attribute));
                            return;
                        }
                        if (!visible && rect.intersects(clientArea)) {
                            visible = true;
                        }
                    }
                    // Stop once rows scroll out of the visible client area
                    if (!visible) return;
                    index++;
                }
            }
        });
        return this.rocRoot;
    }

    /**
     * Returns the index into the (ascending) data array at or next after the given value,
     * clamped to the last element.
     *
     * @param data sorted x-values of a series
     * @param value the x-value to locate
     * @return index in [0, data.length - 1]
     */
    private int getIndex(double[] data, double value) {
        int index = Arrays.binarySearch(data, value);
        if (index < 0) {
            // binarySearch returns -(insertionPoint) - 1 on a miss, so the insertion
            // point is -index - 1. Fixed: the previous "-index + 1" was off by two.
            index = -index - 1;
        }
        if (index > data.length - 1) {
            index = data.length - 1;
        }
        return index;
    }

    /**
     * Returns the index of the given value in the array, or 0 if it is not found.
     *
     * @param values array to search
     * @param value value to find
     * @return index of the value, 0 if absent
     */
    private int getIndexOf(String[] values, String value) {
        int index = 0;
        for (String element : values) {
            if (element.equals(value)) {
                return index;
            }
            index++;
        }
        return 0;
    }

    /**
     * Disposes any existing chart and creates a fresh, empty ROC chart in the sash.
     */
    private void resetChart() {

        if (rocChart != null) {
            rocChart.dispose();
        }
        rocChart = new Chart(this.rocSash, SWT.NONE);
        rocChart.setOrientation(SWT.HORIZONTAL);

        // Show/hide category labels depending on available width
        rocChart.addControlListener(new ControlAdapter(){
            @Override
            public void controlResized(ControlEvent arg0) {
                updateCategories(rocChart);
            }
        });

        // Update font; dispose it together with the chart
        FontData[] fd = rocChart.getFont().getFontData();
        fd[0].setHeight(8);
        final Font font = new Font(rocChart.getDisplay(), fd[0]);
        rocChart.setFont(font);
        rocChart.addDisposeListener(new DisposeListener(){
            public void widgetDisposed(DisposeEvent arg0) {
                if (font != null && !font.isDisposed()) {
                    font.dispose();
                }
            }
        });

        // Update title
        ITitle graphTitle = rocChart.getTitle();
        graphTitle.setText(""); //$NON-NLS-1$
        graphTitle.setFont(rocChart.getFont());

        // Set colors
        rocChart.setBackground(rocRoot.getBackground());
        rocChart.setForeground(rocRoot.getForeground());

        // OSX workaround: darken the background slightly
        if (System.getProperty("os.name").toLowerCase().contains("mac")){ //$NON-NLS-1$ //$NON-NLS-2$
            int r = rocChart.getBackground().getRed()-13;
            int g = rocChart.getBackground().getGreen()-13;
            int b = rocChart.getBackground().getBlue()-13;
            // Fixed: clamp each channel into its own variable. Previously all three
            // results were assigned to 'r', leaving g/b unclamped (a negative value
            // would make the Color constructor throw) and r set to the blue channel.
            r = r > 0 ? r : 0;
            g = g > 0 ? g : 0;
            b = b > 0 ? b : 0;
            final Color background = new Color(rocChart.getDisplay(), r, g, b);
            rocChart.setBackground(background);
            rocChart.addDisposeListener(new DisposeListener(){
                public void widgetDisposed(DisposeEvent arg0) {
                    if (background != null && !background.isDisposed()) {
                        background.dispose();
                    }
                }
            });
        }

        // Initialize axes
        IAxisSet axisSet = rocChart.getAxisSet();
        IAxis yAxis = axisSet.getYAxis(0);
        IAxis xAxis = axisSet.getXAxis(0);
        ITitle xAxisTitle = xAxis.getTitle();
        xAxisTitle.setText(""); //$NON-NLS-1$
        xAxis.getTitle().setFont(rocChart.getFont());
        yAxis.getTitle().setFont(rocChart.getFont());
        xAxis.getTick().setFont(rocChart.getFont());
        yAxis.getTick().setFont(rocChart.getFont());
        xAxis.getTick().setForeground(rocChart.getForeground());
        yAxis.getTick().setForeground(rocChart.getForeground());
        xAxis.getTitle().setForeground(rocChart.getForeground());
        yAxis.getTitle().setForeground(rocChart.getForeground());

        // Axis titles: y = TPR, x = FPR (localized)
        ITitle yAxisTitle = yAxis.getTitle();
        yAxisTitle.setText(Resources.getMessage("ViewStatisticsClassificationInput.19")); //$NON-NLS-1$
        xAxisTitle.setText(Resources.getMessage("ViewStatisticsClassificationInput.20")); //$NON-NLS-1$
        rocChart.setEnabled(false);
        updateCategories(rocChart);
    }

    /**
     * Enables or disables category labels on the x-axis depending on whether
     * there is enough horizontal space per category.
     *
     * @param chart the chart to update
     */
    private void updateCategories(Chart chart){
        if (chart != null){
            IAxisSet axisSet = chart.getAxisSet();
            if (axisSet != null) {
                IAxis xAxis = axisSet.getXAxis(0);
                if (xAxis != null) {
                    String[] series = xAxis.getCategorySeries();
                    if (series != null) {
                        boolean enoughSpace = chart.getPlotArea().getSize().x / series.length >= MIN_CATEGORY_WIDTH;
                        xAxis.enableCategory(enoughSpace);
                        xAxis.getTick().setVisible(enoughSpace);
                    }
                }
            }
        }
    }

    /**
     * Updates the chart with a new set of ROC curves.
     *
     * @param data [0] = ZeroR baseline, [1] = output (may be null), [2] = original
     */
    private void updateChartSeries(ROCCurve[] data) {

        ROCCurve baseline = data[0];
        ROCCurve output = data[1];
        ROCCurve original = data[2];

        // Init data
        rocChart.setRedraw(false);
        ISeriesSet seriesSet = rocChart.getSeriesSet();
        ISeries[] seriesArray = seriesSet.getSeries();
        // Clear set
        for (ISeries s : seriesArray) {
            rocChart.getSeriesSet().deleteSeries(s.getId());
        }

        // Baseline (ZeroR), blue
        ILineSeries seriesZeror = (ILineSeries) seriesSet.createSeries(SeriesType.LINE,
                Resources.getMessage("ViewStatisticsClassificationInput.12")); // $NON-NLS-1$
        seriesZeror.getLabel().setVisible(false);
        seriesZeror.getLabel().setFont(rocChart.getFont());
        seriesZeror.setLineColor(Display.getDefault().getSystemColor(SWT.COLOR_BLUE));
        seriesZeror.setXSeries(baseline.getFalsePositiveRate());
        seriesZeror.setYSeries(baseline.getTruePositiveRate());
        seriesZeror.setAntialias(SWT.ON);
        seriesZeror.setSymbolType(PlotSymbolType.NONE);
        seriesZeror.enableArea(false);

        // Output (anonymized), black — only present in output views
        if (output != null) {
            ILineSeries seriesOutput = (ILineSeries) seriesSet.createSeries(SeriesType.LINE,
                    Resources.getMessage("ViewStatisticsClassificationInput.26")); // $NON-NLS-1$
            seriesOutput.getLabel().setVisible(false);
            seriesOutput.getLabel().setFont(rocChart.getFont());
            seriesOutput.setLineColor(Display.getDefault().getSystemColor(SWT.COLOR_BLACK));
            seriesOutput.setXSeries(output.getFalsePositiveRate());
            seriesOutput.setYSeries(output.getTruePositiveRate());
            seriesOutput.setAntialias(SWT.ON);
            seriesOutput.setSymbolType(PlotSymbolType.NONE);
            seriesOutput.enableArea(false);
        }

        // Original, red
        ILineSeries series = (ILineSeries) seriesSet.createSeries(SeriesType.LINE,
                isOutput ? Resources.getMessage("ViewStatisticsClassificationInput.25")
                         : Resources.getMessage("ViewStatisticsClassificationInput.26")); // $NON-NLS-1$
        series.getLabel().setVisible(false);
        series.getLabel().setFont(rocChart.getFont());
        series.setLineColor(Display.getDefault().getSystemColor(SWT.COLOR_RED));
        series.setXSeries(original.getFalsePositiveRate());
        series.setYSeries(original.getTruePositiveRate());
        series.setAntialias(SWT.ON);
        series.setSymbolType(PlotSymbolType.NONE);
        series.enableArea(false);

        rocChart.getLegend().setVisible(true);
        rocChart.getLegend().setPosition(SWT.TOP);

        // Both axes span [0, 1]
        IAxisSet axisSet = rocChart.getAxisSet();
        IAxis yAxis = axisSet.getYAxis(0);
        yAxis.setRange(new Range(0d, 1d));
        IAxis xAxis = axisSet.getXAxis(0);
        xAxis.setRange(new Range(0d, 1d));
        xAxis.adjustRange();

        rocChart.setRedraw(true);
        rocChart.updateLayout();
        rocChart.update();
        rocChart.redraw();
    }

    /**
     * Updates the selected class value: selects the matching row in the ROC table
     * and plots its curves.
     *
     * @param classValue the class value to select
     */
    private void updateSelectedClassValue(String classValue) {

        // Check
        if (rocCombo.getItemCount() == 0 || rocTable.getItemCount() == 0) {
            return;
        }

        // Redraw
        root.setRedraw(false);

        // Find and update
        int index = 0;
        ROCCurve[] curve = null;
        for (TableItem item : rocTable.getItems()) {
            // Found
            if (item.getText(0).equals(classValue)) {
                // Select
                rocTable.select(index);
                curve = (ROCCurve[])item.getData();
                break;
            }
            // Next index
            index++;
        }

        // If found
        if (curve != null) {
            updateChartSeries(curve);
        }

        // Redraw
        root.setRedraw(true);
    }

    /**
     * Updates the view when a new target variable has been set:
     * syncs selections in the overview table and ROC combo, rebuilds the details
     * and AUC tables, and selects the first available class value for the target.
     *
     * @param targetAttribute the target variable to select
     */
    private void updateSelectedTarget(String targetAttribute) {

        // Check
        if (performanceTableOverview.getItemCount() == 0) {
            return;
        }

        // Redraw
        this.root.setRedraw(false);

        // ------------------------------------------------------
        // Update selection in performance overview
        // ------------------------------------------------------
        int index = 0;
        boolean selected = false;
        for (TableItem item : performanceTableOverview.getItems()) {
            if (item.getText(0).equals(targetAttribute)) {
                performanceTableOverview.select(index);
                selected = true;
                break;
            }
            index++;
        }

        // Fall back to the first row if the attribute is not a displayed target
        if (!selected) {
            performanceTableOverview.select(0);
            targetAttribute = performanceTableOverview.getItem(0).getText(0);
        }

        // ------------------------------------------------------
        // Clear entries in performance details
        // ------------------------------------------------------
        performanceTableSensitivitySpecificity.setRedraw(false);
        for (final TableItem i : performanceTableSensitivitySpecificity.getItems()) {
            i.dispose();
        }
        performanceTableSensitivitySpecificity.setRedraw(true);

        // Check
        if (originalRocCurves.containsKey(targetAttribute)) {

            // ------------------------------------------------------
            // Update entries in performance details
            // ------------------------------------------------------
            List<String> values = new ArrayList<>(originalRocCurves.get(targetAttribute).keySet());
            Collections.sort(values);

            // Prepare
            List<Double> sensitivities = new ArrayList<Double>();
            List<Double> specificities = new ArrayList<Double>();
            List<Double> brierscores = new ArrayList<Double>();

            // For each class
            for (String clazz : values) {
                ROCCurve c;
                if (isOutput) {
                    c = rocCurves.get(targetAttribute).get(clazz);
                } else {
                    c = originalRocCurves.get(targetAttribute).get(clazz);
                }
                // Create entry
                TableItem item = new TableItem(performanceTableSensitivitySpecificity, SWT.NONE);
                item.setText(0, clazz);
                item.setData("1", c.getSensitivity());
                item.setData("2", c.getSpecificity());
                item.setData("3", c.getBrierScore());
                // Collect measurements
                sensitivities.add(c.getSensitivity());
                specificities.add(c.getSpecificity());
                brierscores.add(c.getBrierScore());
            }

            // Prepare
            double[] min = new double[3];
            double[] avg = new double[3];
            double[] max = new double[3];

            // Determine aggregates
            // NOTE(review): min[] treats 0d as "unset", so a genuine measurement of
            // exactly 0 would be overwritten by the next value — confirm intent.
            for (int i = 0; i < sensitivities.size(); i++) {
                double sensitivity = sensitivities.get(i);
                min[0] = min[0]==0d ? sensitivity : Math.min(min[0], sensitivity);
                max[0] = Math.max(max[0], sensitivity);
                avg[0] += sensitivity;

                double specificity = specificities.get(i);
                min[1] = min[1]==0d ? specificity : Math.min(min[1], specificity);
                max[1] = Math.max(max[1], specificity);
                avg[1] += specificity;

                double brierscore = brierscores.get(i);
                min[2] = min[2]==0d ? brierscore : Math.min(min[2], brierscore);
                max[2] = Math.max(max[2], brierscore);
                avg[2] += brierscore;
            }

            // Minimum
            TableItem item = new TableItem(performanceTableSensitivitySpecificity, SWT.NONE);
            item.setText(0, Resources.getMessage("ViewStatisticsClassificationInput.7"));
            item.setData("1", min[0]);
            item.setData("2", min[1]);
            item.setData("3", min[2]);

            // Average
            item = new TableItem(performanceTableSensitivitySpecificity, SWT.NONE);
            item.setText(0, Resources.getMessage("ViewStatisticsClassificationInput.6"));
            item.setData("1", avg[0] / values.size());
            item.setData("2", avg[1] / values.size());
            item.setData("3", avg[2] / values.size());

            // Maximum
            item = new TableItem(performanceTableSensitivitySpecificity, SWT.NONE);
            item.setText(0, Resources.getMessage("ViewStatisticsClassificationInput.4"));
            item.setData("1", max[0]);
            item.setData("2", max[1]);
            item.setData("3", max[2]);

            // Check
            if (rocCombo.getItemCount() != 0) {

                // ------------------------------------------------------
                // Update ROC combo
                // ------------------------------------------------------
                String[] targetVariables = getModel().getSelectedClassesAsArray();
                int targetIndex = getIndexOf(targetVariables, targetAttribute);
                rocCombo.select(targetIndex);

                // ------------------------------------------------------
                // Clear ROC table
                // ------------------------------------------------------
                for (final TableItem i : rocTable.getItems()) {
                    i.dispose();
                }

                // ------------------------------------------------------
                // Update ROC table
                // ------------------------------------------------------
                List<String> classes = new ArrayList<>(originalRocCurves.get(targetAttribute).keySet());
                Collections.sort(classes);
                for (String value : classes) {

                    // Class; the data array is filled in below and read back by
                    // updateSelectedClassValue() to plot the curves
                    item = new TableItem(rocTable, SWT.NONE);
                    item.setText(0, value);
                    ROCCurve[] data = {null, null, null};
                    item.setData(data);

                    // Baseline AUC
                    ROCCurve rocZeror = zerorRocCurves.get(targetAttribute).get(value);
                    item.setData("1", rocZeror.getAUC());
                    data[0] = rocZeror;

                    // Original AUC
                    ROCCurve rocOriginal = originalRocCurves.get(targetAttribute).get(value);
                    item.setData(isOutput ? "3" : "2", rocOriginal.getAUC());
                    data[2] = rocOriginal;

                    // Output
                    if (isOutput) {

                        // AUC (anonymized)
                        ROCCurve rocOutput = rocCurves.get(targetAttribute).get(value);
                        item.setData("2", rocOutput.getAUC());
                        data[1] = rocOutput;

                        // Relative AUC: gain over baseline, scaled by the original's gain
                        double relativeAUC;
                        if (rocOriginal.getAUC() - rocZeror.getAUC() == 0d) {
                            relativeAUC = rocOutput.getAUC() / rocZeror.getAUC();
                        } else {
                            relativeAUC = (rocOutput.getAUC() - rocZeror.getAUC()) /
                                          (rocOriginal.getAUC() - rocZeror.getAUC());
                        }
                        relativeAUC = Double.isNaN(relativeAUC) ? 0d : relativeAUC;
                        item.setData("4", relativeAUC);
                    }
                }

                // Select the model's class value if present, else the first class
                if (classes.contains(getModel().getSelectedClassValue())) {
                    updateSelectedClassValue(getModel().getSelectedClassValue());
                } else if (!classes.isEmpty()) {
                    updateSelectedClassValue(classes.get(0));
                }
            }
        }

        // Redraw
        this.root.setRedraw(true);
    }

    @Override
    protected Control createControl(Composite parent) {

        // Create top composite
        this.root = new Composite(parent, SWT.NONE);
        root.setLayout(new FillLayout());
        ComponentTitledFolderButtonBar bar = new ComponentTitledFolderButtonBar();

        // Add update button
        bar.add(Resources.getMessage("ViewStatisticsClassificationInput.29"), //$NON-NLS-1$
                getController().getResources().getManagedImage("arrow_refresh.png"), //$NON-NLS-1$
                new Runnable(){
            public void run() {
                triggerUpdate();
            }
        });
        this.folder = new ComponentTitledFolder(root, null, bar, null, true, false);

        // Performance overview
        Composite item1 = folder.createItem(Resources.getMessage("ViewStatisticsClassificationInput.27"), //$NON-NLS-1$
                                            getController().getResources().getManagedImage("precision_recall.png")); //$NON-NLS-1$
        item1.setLayoutData(SWTUtil.createFillGridData());
        this.createOverviewControl(item1);

        // Roc
        Composite item2 = folder.createItem(Resources.getMessage("ViewStatisticsClassificationInput.28"), //$NON-NLS-1$
                                            getController().getResources().getManagedImage("roc.png")); //$NON-NLS-1$
        item2.setLayoutData(SWTUtil.createFillGridData());
        this.createROCControl(item2);

        // Synchronize tab selection with the companion view
        this.folder.addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent arg0) {
                if (other != null) {
                    other.setSelectionIndex(folder.getSelectionIndex());
                }
            }
        });

        // Init
        this.folder.setSelection(0);

        // Return
        return root;
    }

    @Override
    protected AnalysisContextClassification createViewConfig(AnalysisContext context) {
        return new AnalysisContextClassification(context);
    }

    @Override
    protected void doReset() {

        // Manager
        if (this.manager != null) {
            this.manager.stop();
        }

        // Performance overview
        performanceTableOverview.setRedraw(false);
        for (final TableItem i : performanceTableOverview.getItems()) {
            i.dispose();
        }
        performanceTableOverview.setRedraw(true);
        performanceTableSensitivitySpecificity.setRedraw(false);
        for (final TableItem i : performanceTableSensitivitySpecificity.getItems()) {
            i.dispose();
        }
        performanceTableSensitivitySpecificity.setRedraw(true);

        // ROC
        rocTable.setRedraw(false);
        for (final TableItem i : rocTable.getItems()) {
            i.dispose();
        }
        rocTable.setRedraw(true);
        if (rocCombo != null && rocCombo.getItemCount() != 0) {
            rocCombo.removeAll();
        }
        if (rocCurves != null) {
            rocCurves.clear();
        }
        if (originalRocCurves != null) {
            originalRocCurves.clear();
        }
        if (zerorRocCurves != null) {
            zerorRocCurves.clear();
        }
        resetChart();

        // Reset view
        setStatusEmpty();
    }

    @Override
    protected void doUpdate(final AnalysisContextClassification context) {

        // The statistics builder
        final StatisticsBuilderInterruptible builder = context.handle.getStatistics().getInterruptibleInstance();
        final String[] features = context.model.getSelectedFeaturesAsArray();
        final String[] targetVariables = context.model.getSelectedClassesAsArray();
        final ARXClassificationConfiguration<?> config = context.model.getClassificationModel().getCurrentConfiguration();
        final ARXFeatureScaling scaling = context.model.getClassificationModel().getFeatureScaling();

        // Break, if nothing to do
        if (context.model.getSelectedFeatures().isEmpty() ||
            context.model.getSelectedClasses().isEmpty()) {
            doReset();
            return;
        }

        // Create an analysis
        Analysis analysis = new Analysis(){

            private boolean stopped = false;
            private List<List<Double>> values = new ArrayList<>();
            private int progress = 0;

            @Override
            public int getProgress() {
                // One "batch" per target variable; the builder reports within-batch progress
                double result = 0d;
                double perBatch = 100d / (double)targetVariables.length;
                result += (double)progress * perBatch;
                result += (double)builder.getProgress() / 100d * perBatch;
                result = result <= 100d ? result : 100d;
                return (int)result;
            }

            @Override
            public void onError() {
                rocCurves.clear();
                originalRocCurves.clear();
                zerorRocCurves.clear();
                setStatusEmpty();
            }

            @Override
            public void onFinish() {

                // Check
                if (stopped || !isEnabled() || getModel().getSelectedFeatures().isEmpty() ||
                    getModel().getSelectedClasses().isEmpty()) {
                    setStatusEmpty();
                    return;
                }

                // Redraw
                root.setRedraw(false);

                // Update overview table
                for (final TableItem i : performanceTableOverview.getItems()) {
                    i.dispose();
                }
                for (int i = 0; i < targetVariables.length; i++) {
                    TableItem item = new TableItem(performanceTableOverview, SWT.NONE);
                    item.setText(0, targetVariables[i]);
                    for (int j = 0; j < values.get(i).size(); j++) {
                        if (columnInOverviewIsBarchart[j]) {
                            item.setData(String.valueOf(j + 1), values.get(i).get(j));
                        } else {
                            item.setText(j + 1, SWTUtil.getPrettyString(values.get(i).get(j)));
                        }
                    }
                }

                // Update combo box
                rocCombo.setItems(targetVariables);

                // Update complete view
                updateSelectedTarget(getModel().getSelectedAttribute());

                // Layout
                performanceRoot.layout();
                performanceSash.setWeights(new int[] {2, 2});
                rocRoot.layout();
                rocSash.setWeights(new int[] {2, 2});

                // Redraw
                root.setRedraw(true);

                // Done
                setStatusDone();
            }

            @Override
            public void onInterrupt() {
                if (!isEnabled() || getModel().getSelectedFeatures().isEmpty() ||
                    getModel().getSelectedClasses().isEmpty()) {
                    setStatusEmpty();
                } else {
                    setStatusWorking();
                }
            }

            @Override
            public void run() throws InterruptedException {

                // Timestamp
                long time = System.currentTimeMillis();

                // Clear
                rocCurves.clear();
                originalRocCurves.clear();
                zerorRocCurves.clear();

                // Do work
                for (String targetVariable : targetVariables) {

                    // Compute
                    StatisticsClassification result = builder.getClassificationPerformance(features, targetVariable, config, scaling);
                    progress++;
                    if (stopped) {
                        break;
                    }

                    // Collect performance data
                    values.add(getColumnValuesForOverallPerformanceTable(result));

                    // Collect ROC curves
                    if (!originalRocCurves.containsKey(targetVariable)) {
                        originalRocCurves.put(targetVariable, new HashMap<String, ROCCurve>());
                        zerorRocCurves.put(targetVariable, new HashMap<String, ROCCurve>());
                        rocCurves.put(targetVariable, new HashMap<String, ROCCurve>());
                    }
                    for (String c : result.getClassValues()) {
                        originalRocCurves.get(targetVariable).put(c, result.getOriginalROCCurve(c));
                        zerorRocCurves.get(targetVariable).put(c, result.getZeroRROCCurve(c));
                        if (result.getROCCurve(c) != null) {
                            rocCurves.get(targetVariable).put(c, result.getROCCurve(c));
                        }
                    }
                }

                // Our users are patient: enforce a minimal visible working time
                while (System.currentTimeMillis() - time < MINIMAL_WORKING_TIME && !stopped){
                    Thread.sleep(10);
                }
            }

            @Override
            public void stop() {
                builder.interrupt();
                this.stopped = true;
            }
        };
        this.manager.start(analysis);
    }

    /**
     * Returns all column headers for the AUC table
     * @return the headers
     */
    protected abstract String[] getColumnHeadersForAUCTable();

    /**
     * Returns all column headers for the overall performance table
     * @return the headers
     */
    protected abstract String[] getColumnHeadersForPerformanceForOverallPerformanceTable();

    /**
     * Returns all column types, true for display as a barchart
     * @return the column types
     */
    protected abstract boolean[] getColumnTypesForPerformanceForOverallPerformanceTable();

    /**
     * Returns all values for one row of the overall performance table
     * @param result the classification result
     * @return one value per column
     */
    protected abstract List<Double> getColumnValuesForOverallPerformanceTable(StatisticsClassification result);

    @Override
    protected ComponentStatusLabelProgressProvider getProgressProvider() {
        return new ComponentStatusLabelProgressProvider(){
            public int getProgress() {
                if (manager == null) {
                    return 0;
                } else {
                    return manager.getProgress();
                }
            }
        };
    }

    /**
     * Is an analysis running, or are we displaying an empty result
     * @return true if running or empty
     */
    protected boolean isRunning() {
        return (manager != null && manager.isRunning()) || this.isEmpty();
    }

    /**
     * Sets the other view for synchronization
     * @param other the companion view
     */
    protected void setOtherView(ViewStatisticsClassification other) {
        this.other = other;
    }

    /**
     * Sets the selection index in the underlying folder
     * @param index the tab index
     */
    protected void setSelectionIndex(int index) {
        this.folder.setSelection(index);
    }
}
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/devtools/clouddebugger/v2/data.proto
//
// NOTE(review): this file is protoc output. Any behavioral change belongs in
// data.proto; hand edits here will be lost on the next code generation run.

package com.google.devtools.clouddebugger.v2;

/**
 * Represents a contextual status message.
 *
 * The message can indicate an error or informational status, and refer to
 * specific parts of the containing object. For example, the
 * {@code Breakpoint.status} field can indicate an error referring to the
 * {@code BREAKPOINT_SOURCE_LOCATION} with the message {@code Location not found}.
 *
 * Protobuf type {@code google.devtools.clouddebugger.v2.StatusMessage}
 */
public  final class StatusMessage extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.devtools.clouddebugger.v2.StatusMessage)
    StatusMessageOrBuilder {
  // Use StatusMessage.newBuilder() to construct.
  private StatusMessage(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default instance: all proto3 scalar fields at their zero values.
  private StatusMessage() {
    isError_ = false;
    refersTo_ = 0;
  }

  // NOTE(review): always returns the empty set — this generated variant does
  // not preserve unknown fields (they are skipped during parsing below).
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
  }
  // Parsing constructor: reads the wire format tag-by-tag until EOF (tag 0).
  // Unknown tags fall into the default branch and are skipped.
  private StatusMessage(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    int mutable_bitField0_ = 0;
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!input.skipField(tag)) {
              done = true;
            }
            break;
          }
          case 8: {
            // field 1, varint: is_error

            isError_ = input.readBool();
            break;
          }
          case 16: {
            // field 2, varint: refers_to (stored as raw int so unknown enum
            // numbers survive a round trip)
            int rawValue = input.readEnum();

            refersTo_ = rawValue;
            break;
          }
          case 26: {
            // field 3, length-delimited: description; repeated occurrences
            // are merged into the previously parsed message.
            com.google.devtools.clouddebugger.v2.FormatMessage.Builder subBuilder = null;
            if (description_ != null) {
              subBuilder = description_.toBuilder();
            }
            description_ = input.readMessage(com.google.devtools.clouddebugger.v2.FormatMessage.parser(), extensionRegistry);
            if (subBuilder != null) {
              subBuilder.mergeFrom(description_);
              description_ = subBuilder.buildPartial();
            }

            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.devtools.clouddebugger.v2.DataProto.internal_static_google_devtools_clouddebugger_v2_StatusMessage_descriptor;
  }

  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.devtools.clouddebugger.v2.DataProto.internal_static_google_devtools_clouddebugger_v2_StatusMessage_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.devtools.clouddebugger.v2.StatusMessage.class, com.google.devtools.clouddebugger.v2.StatusMessage.Builder.class);
  }

  /**
   * Enumerates references to which the message applies.
   *
   * Protobuf enum {@code google.devtools.clouddebugger.v2.StatusMessage.Reference}
   */
  public enum Reference
      implements com.google.protobuf.ProtocolMessageEnum {
    /** Status doesn't refer to any particular input. {@code UNSPECIFIED = 0;} */
    UNSPECIFIED(0),
    /** Status applies to the breakpoint and is related to its location. {@code BREAKPOINT_SOURCE_LOCATION = 3;} */
    BREAKPOINT_SOURCE_LOCATION(3),
    /** Status applies to the breakpoint and is related to its condition. {@code BREAKPOINT_CONDITION = 4;} */
    BREAKPOINT_CONDITION(4),
    /** Status applies to the breakpoint and is related to its expressions. {@code BREAKPOINT_EXPRESSION = 7;} */
    BREAKPOINT_EXPRESSION(7),
    /** Status applies to the entire variable. {@code VARIABLE_NAME = 5;} */
    VARIABLE_NAME(5),
    /** Status applies to variable value (variable name is valid). {@code VARIABLE_VALUE = 6;} */
    VARIABLE_VALUE(6),
    // Sentinel for enum numbers not known to this generated code.
    UNRECOGNIZED(-1),
    ;

    /** Status doesn't refer to any particular input. {@code UNSPECIFIED = 0;} */
    public static final int UNSPECIFIED_VALUE = 0;
    /** Status applies to the breakpoint and is related to its location. {@code BREAKPOINT_SOURCE_LOCATION = 3;} */
    public static final int BREAKPOINT_SOURCE_LOCATION_VALUE = 3;
    /** Status applies to the breakpoint and is related to its condition. {@code BREAKPOINT_CONDITION = 4;} */
    public static final int BREAKPOINT_CONDITION_VALUE = 4;
    /** Status applies to the breakpoint and is related to its expressions. {@code BREAKPOINT_EXPRESSION = 7;} */
    public static final int BREAKPOINT_EXPRESSION_VALUE = 7;
    /** Status applies to the entire variable. {@code VARIABLE_NAME = 5;} */
    public static final int VARIABLE_NAME_VALUE = 5;
    /** Status applies to variable value (variable name is valid). {@code VARIABLE_VALUE = 6;} */
    public static final int VARIABLE_VALUE_VALUE = 6;


    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        // UNRECOGNIZED has no wire number; callers must use getRefersToValue()
        // on the containing message to read the raw value.
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }

    /**
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static Reference valueOf(int value) {
      return forNumber(value);
    }

    // Maps a wire number to its enum constant; null for unknown numbers.
    public static Reference forNumber(int value) {
      switch (value) {
        case 0: return UNSPECIFIED;
        case 3: return BREAKPOINT_SOURCE_LOCATION;
        case 4: return BREAKPOINT_CONDITION;
        case 7: return BREAKPOINT_EXPRESSION;
        case 5: return VARIABLE_NAME;
        case 6: return VARIABLE_VALUE;
        default: return null;
      }
    }

    public static com.google.protobuf.Internal.EnumLiteMap<Reference>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final com.google.protobuf.Internal.EnumLiteMap<
        Reference> internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<Reference>() {
            public Reference findValueByNumber(int number) {
              return Reference.forNumber(number);
            }
          };

    public final com.google.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return com.google.devtools.clouddebugger.v2.StatusMessage.getDescriptor().getEnumTypes().get(0);
    }

    private static final Reference[] VALUES = values();

    public static Reference valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private Reference(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:google.devtools.clouddebugger.v2.StatusMessage.Reference)
  }

  public static final int IS_ERROR_FIELD_NUMBER = 1;
  private boolean isError_;
  /**
   * Distinguishes errors from informational messages.
   *
   * {@code optional bool is_error = 1;}
   */
  public boolean getIsError() {
    return isError_;
  }

  public static final int REFERS_TO_FIELD_NUMBER = 2;
  private int refersTo_;
  /**
   * Reference to which the message applies, as the raw wire number
   * (valid even when the number is unknown to this generated code).
   *
   * {@code optional .google.devtools.clouddebugger.v2.StatusMessage.Reference refers_to = 2;}
   */
  public int getRefersToValue() {
    return refersTo_;
  }
  /**
   * Reference to which the message applies; {@code UNRECOGNIZED} when the
   * stored number has no matching enum constant.
   *
   * {@code optional .google.devtools.clouddebugger.v2.StatusMessage.Reference refers_to = 2;}
   */
  public com.google.devtools.clouddebugger.v2.StatusMessage.Reference getRefersTo() {
    com.google.devtools.clouddebugger.v2.StatusMessage.Reference result = com.google.devtools.clouddebugger.v2.StatusMessage.Reference.valueOf(refersTo_);
    return result == null ? com.google.devtools.clouddebugger.v2.StatusMessage.Reference.UNRECOGNIZED : result;
  }

  public static final int DESCRIPTION_FIELD_NUMBER = 3;
  private com.google.devtools.clouddebugger.v2.FormatMessage description_;
  /**
   * Status message text.
   *
   * {@code optional .google.devtools.clouddebugger.v2.FormatMessage description = 3;}
   */
  public boolean hasDescription() {
    return description_ != null;
  }
  /**
   * Status message text; never null — returns the default instance when unset.
   *
   * {@code optional .google.devtools.clouddebugger.v2.FormatMessage description = 3;}
   */
  public com.google.devtools.clouddebugger.v2.FormatMessage getDescription() {
    return description_ == null ? com.google.devtools.clouddebugger.v2.FormatMessage.getDefaultInstance() : description_;
  }
  /**
   * Status message text.
   *
   * {@code optional .google.devtools.clouddebugger.v2.FormatMessage description = 3;}
   */
  public com.google.devtools.clouddebugger.v2.FormatMessageOrBuilder getDescriptionOrBuilder() {
    return getDescription();
  }

  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    // No required fields in proto3, so always initialized once checked.
    memoizedIsInitialized = 1;
    return true;
  }

  // Proto3 serialization: fields at their default value are not written.
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (isError_ != false) {
      output.writeBool(1, isError_);
    }
    if (refersTo_ != com.google.devtools.clouddebugger.v2.StatusMessage.Reference.UNSPECIFIED.getNumber()) {
      output.writeEnum(2, refersTo_);
    }
    if (description_ != null) {
      output.writeMessage(3, getDescription());
    }
  }

  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (isError_ != false) {
      size += com.google.protobuf.CodedOutputStream
        .computeBoolSize(1, isError_);
    }
    if (refersTo_ != com.google.devtools.clouddebugger.v2.StatusMessage.Reference.UNSPECIFIED.getNumber()) {
      size += com.google.protobuf.CodedOutputStream
        .computeEnumSize(2, refersTo_);
    }
    if (description_ != null) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(3, getDescription());
    }
    memoizedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.devtools.clouddebugger.v2.StatusMessage)) {
      return super.equals(obj);
    }
    com.google.devtools.clouddebugger.v2.StatusMessage other = (com.google.devtools.clouddebugger.v2.StatusMessage) obj;

    boolean result = true;
    result = result && (getIsError()
        == other.getIsError());
    result = result && refersTo_ == other.refersTo_;
    result = result && (hasDescription() == other.hasDescription());
    if (hasDescription()) {
      result = result && getDescription()
          .equals(other.getDescription());
    }
    return result;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptorForType().hashCode();
    hash = (37 * hash) + IS_ERROR_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
        getIsError());
    hash = (37 * hash) + REFERS_TO_FIELD_NUMBER;
    hash = (53 * hash) + refersTo_;
    if (hasDescription()) {
      hash = (37 * hash) + DESCRIPTION_FIELD_NUMBER;
      hash = (53 * hash) + getDescription().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Standard generated parse entry points delegating to PARSER.
  public static com.google.devtools.clouddebugger.v2.StatusMessage parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.devtools.clouddebugger.v2.StatusMessage parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.devtools.clouddebugger.v2.StatusMessage parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.devtools.clouddebugger.v2.StatusMessage parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.devtools.clouddebugger.v2.StatusMessage parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.devtools.clouddebugger.v2.StatusMessage parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.devtools.clouddebugger.v2.StatusMessage parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.devtools.clouddebugger.v2.StatusMessage parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.devtools.clouddebugger.v2.StatusMessage parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.devtools.clouddebugger.v2.StatusMessage parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.devtools.clouddebugger.v2.StatusMessage prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Mutable builder for {@code google.devtools.clouddebugger.v2.StatusMessage}.
   *
   * Represents a contextual status message. The message can indicate an error
   * or informational status, and refer to specific parts of the containing
   * object. For example, the {@code Breakpoint.status} field can indicate an
   * error referring to the {@code BREAKPOINT_SOURCE_LOCATION} with the message
   * {@code Location not found}.
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.devtools.clouddebugger.v2.StatusMessage)
      com.google.devtools.clouddebugger.v2.StatusMessageOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.devtools.clouddebugger.v2.DataProto.internal_static_google_devtools_clouddebugger_v2_StatusMessage_descriptor;
    }

    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.devtools.clouddebugger.v2.DataProto.internal_static_google_devtools_clouddebugger_v2_StatusMessage_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.devtools.clouddebugger.v2.StatusMessage.class, com.google.devtools.clouddebugger.v2.StatusMessage.Builder.class);
    }

    // Construct using com.google.devtools.clouddebugger.v2.StatusMessage.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    public Builder clear() {
      super.clear();
      isError_ = false;

      refersTo_ = 0;

      if (descriptionBuilder_ == null) {
        description_ = null;
      } else {
        description_ = null;
        descriptionBuilder_ = null;
      }
      return this;
    }

    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.devtools.clouddebugger.v2.DataProto.internal_static_google_devtools_clouddebugger_v2_StatusMessage_descriptor;
    }

    public com.google.devtools.clouddebugger.v2.StatusMessage getDefaultInstanceForType() {
      return com.google.devtools.clouddebugger.v2.StatusMessage.getDefaultInstance();
    }

    public com.google.devtools.clouddebugger.v2.StatusMessage build() {
      com.google.devtools.clouddebugger.v2.StatusMessage result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    public com.google.devtools.clouddebugger.v2.StatusMessage buildPartial() {
      com.google.devtools.clouddebugger.v2.StatusMessage result = new com.google.devtools.clouddebugger.v2.StatusMessage(this);
      result.isError_ = isError_;
      result.refersTo_ = refersTo_;
      if (descriptionBuilder_ == null) {
        result.description_ = description_;
      } else {
        result.description_ = descriptionBuilder_.build();
      }
      onBuilt();
      return result;
    }

    public Builder clone() {
      return (Builder) super.clone();
    }
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.setField(field, value);
    }
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.devtools.clouddebugger.v2.StatusMessage) {
        return mergeFrom((com.google.devtools.clouddebugger.v2.StatusMessage)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Proto3 merge semantics: scalar fields only overwrite when non-default;
    // the description sub-message is merged field-by-field.
    public Builder mergeFrom(com.google.devtools.clouddebugger.v2.StatusMessage other) {
      if (other == com.google.devtools.clouddebugger.v2.StatusMessage.getDefaultInstance()) return this;
      if (other.getIsError() != false) {
        setIsError(other.getIsError());
      }
      if (other.refersTo_ != 0) {
        setRefersToValue(other.getRefersToValue());
      }
      if (other.hasDescription()) {
        mergeDescription(other.getDescription());
      }
      onChanged();
      return this;
    }

    public final boolean isInitialized() {
      return true;
    }

    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.devtools.clouddebugger.v2.StatusMessage parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Keep whatever was successfully parsed before the failure.
        parsedMessage = (com.google.devtools.clouddebugger.v2.StatusMessage) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private boolean isError_ ;
    /**
     * Distinguishes errors from informational messages.
     *
     * {@code optional bool is_error = 1;}
     */
    public boolean getIsError() {
      return isError_;
    }
    /**
     * Distinguishes errors from informational messages.
     *
     * {@code optional bool is_error = 1;}
     */
    public Builder setIsError(boolean value) {
      
      isError_ = value;
      onChanged();
      return this;
    }
    /**
     * Distinguishes errors from informational messages.
     *
     * {@code optional bool is_error = 1;}
     */
    public Builder clearIsError() {
      
      isError_ = false;
      onChanged();
      return this;
    }

    private int refersTo_ = 0;
    /**
     * Reference to which the message applies, as the raw wire number.
     *
     * {@code optional .google.devtools.clouddebugger.v2.StatusMessage.Reference refers_to = 2;}
     */
    public int getRefersToValue() {
      return refersTo_;
    }
    /**
     * Sets the reference by raw wire number (no validation against known values).
     *
     * {@code optional .google.devtools.clouddebugger.v2.StatusMessage.Reference refers_to = 2;}
     */
    public Builder setRefersToValue(int value) {
      refersTo_ = value;
      onChanged();
      return this;
    }
    /**
     * Reference to which the message applies; {@code UNRECOGNIZED} for unknown numbers.
     *
     * {@code optional .google.devtools.clouddebugger.v2.StatusMessage.Reference refers_to = 2;}
     */
    public com.google.devtools.clouddebugger.v2.StatusMessage.Reference getRefersTo() {
      com.google.devtools.clouddebugger.v2.StatusMessage.Reference result = com.google.devtools.clouddebugger.v2.StatusMessage.Reference.valueOf(refersTo_);
      return result == null ? com.google.devtools.clouddebugger.v2.StatusMessage.Reference.UNRECOGNIZED : result;
    }
    /**
     * Sets the reference from an enum constant; rejects null.
     *
     * {@code optional .google.devtools.clouddebugger.v2.StatusMessage.Reference refers_to = 2;}
     */
    public Builder setRefersTo(com.google.devtools.clouddebugger.v2.StatusMessage.Reference value) {
      if (value == null) {
        throw new NullPointerException();
      }
      
      refersTo_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     * Resets the reference to {@code UNSPECIFIED}.
     *
     * {@code optional .google.devtools.clouddebugger.v2.StatusMessage.Reference refers_to = 2;}
     */
    public Builder clearRefersTo() {
      
      refersTo_ = 0;
      onChanged();
      return this;
    }

    // description is held either directly (description_) or via the lazily
    // created nested builder (descriptionBuilder_) — never both at once.
    private com.google.devtools.clouddebugger.v2.FormatMessage description_ = null;
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.devtools.clouddebugger.v2.FormatMessage, com.google.devtools.clouddebugger.v2.FormatMessage.Builder, com.google.devtools.clouddebugger.v2.FormatMessageOrBuilder> descriptionBuilder_;
    /**
     * Status message text.
     *
     * {@code optional .google.devtools.clouddebugger.v2.FormatMessage description = 3;}
     */
    public boolean hasDescription() {
      return descriptionBuilder_ != null || description_ != null;
    }
    /**
     * Status message text; never null — returns the default instance when unset.
     *
     * {@code optional .google.devtools.clouddebugger.v2.FormatMessage description = 3;}
     */
    public com.google.devtools.clouddebugger.v2.FormatMessage getDescription() {
      if (descriptionBuilder_ == null) {
        return description_ == null ? com.google.devtools.clouddebugger.v2.FormatMessage.getDefaultInstance() : description_;
      } else {
        return descriptionBuilder_.getMessage();
      }
    }
    /**
     * Status message text.
     *
     * {@code optional .google.devtools.clouddebugger.v2.FormatMessage description = 3;}
     */
    public Builder setDescription(com.google.devtools.clouddebugger.v2.FormatMessage value) {
      if (descriptionBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        description_ = value;
        onChanged();
      } else {
        descriptionBuilder_.setMessage(value);
      }

      return this;
    }
    /**
     * Status message text.
     *
     * {@code optional .google.devtools.clouddebugger.v2.FormatMessage description = 3;}
     */
    public Builder setDescription(
        com.google.devtools.clouddebugger.v2.FormatMessage.Builder builderForValue) {
      if (descriptionBuilder_ == null) {
        description_ = builderForValue.build();
        onChanged();
      } else {
        descriptionBuilder_.setMessage(builderForValue.build());
      }

      return this;
    }
    /**
     * Merges {@code value} into any existing description (field-by-field).
     *
     * {@code optional .google.devtools.clouddebugger.v2.FormatMessage description = 3;}
     */
    public Builder mergeDescription(com.google.devtools.clouddebugger.v2.FormatMessage value) {
      if (descriptionBuilder_ == null) {
        if (description_ != null) {
          description_ =
            com.google.devtools.clouddebugger.v2.FormatMessage.newBuilder(description_).mergeFrom(value).buildPartial();
        } else {
          description_ = value;
        }
        onChanged();
      } else {
        descriptionBuilder_.mergeFrom(value);
      }

      return this;
    }
    /**
     * Status message text.
     *
     * {@code optional .google.devtools.clouddebugger.v2.FormatMessage description = 3;}
     */
    public Builder clearDescription() {
      if (descriptionBuilder_ == null) {
        description_ = null;
        onChanged();
      } else {
        description_ = null;
        descriptionBuilder_ = null;
      }

      return this;
    }
    /**
     * Status message text (switches this field to builder-backed storage).
     *
     * {@code optional .google.devtools.clouddebugger.v2.FormatMessage description = 3;}
     */
    public com.google.devtools.clouddebugger.v2.FormatMessage.Builder getDescriptionBuilder() {
      
      onChanged();
      return getDescriptionFieldBuilder().getBuilder();
    }
    /**
     * Status message text.
     *
     * {@code optional .google.devtools.clouddebugger.v2.FormatMessage description = 3;}
     */
    public com.google.devtools.clouddebugger.v2.FormatMessageOrBuilder getDescriptionOrBuilder() {
      if (descriptionBuilder_ != null) {
        return descriptionBuilder_.getMessageOrBuilder();
      } else {
        return description_ == null ?
            com.google.devtools.clouddebugger.v2.FormatMessage.getDefaultInstance() : description_;
      }
    }
    /**
     * Lazily creates the nested field builder and moves ownership of the
     * current description into it.
     *
     * {@code optional .google.devtools.clouddebugger.v2.FormatMessage description = 3;}
     */
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.devtools.clouddebugger.v2.FormatMessage, com.google.devtools.clouddebugger.v2.FormatMessage.Builder, com.google.devtools.clouddebugger.v2.FormatMessageOrBuilder> 
        getDescriptionFieldBuilder() {
      if (descriptionBuilder_ == null) {
        descriptionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
            com.google.devtools.clouddebugger.v2.FormatMessage, com.google.devtools.clouddebugger.v2.FormatMessage.Builder, com.google.devtools.clouddebugger.v2.FormatMessageOrBuilder>(
                getDescription(),
                getParentForChildren(),
                isClean());
        description_ = null;
      }
      return descriptionBuilder_;
    }
    // NOTE(review): unknown fields are intentionally discarded by this
    // generated variant — both setters are no-ops.
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }

    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }


    // @@protoc_insertion_point(builder_scope:google.devtools.clouddebugger.v2.StatusMessage)
  }

  // @@protoc_insertion_point(class_scope:google.devtools.clouddebugger.v2.StatusMessage)
  private static final com.google.devtools.clouddebugger.v2.StatusMessage DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.devtools.clouddebugger.v2.StatusMessage();
  }

  public static com.google.devtools.clouddebugger.v2.StatusMessage getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<StatusMessage>
      PARSER = new com.google.protobuf.AbstractParser<StatusMessage>() {
    public StatusMessage parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
        return new StatusMessage(input, extensionRegistry);
    }
  };

  public static com.google.protobuf.Parser<StatusMessage> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<StatusMessage> getParserForType() {
    return PARSER;
  }

  public com.google.devtools.clouddebugger.v2.StatusMessage getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
package org.ovirt.engine.ui.uicommonweb.models;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;

import org.ovirt.engine.core.common.mode.ApplicationMode;
import org.ovirt.engine.core.common.queries.ConfigurationValues;
import org.ovirt.engine.core.compat.Version;
import org.ovirt.engine.ui.uicommonweb.Configurator;
import org.ovirt.engine.ui.uicommonweb.ICommandTarget;
import org.ovirt.engine.ui.uicommonweb.ILogger;
import org.ovirt.engine.ui.uicommonweb.TypeResolver;
import org.ovirt.engine.ui.uicommonweb.UICommand;
import org.ovirt.engine.ui.uicommonweb.dataprovider.AsyncDataProvider;
import org.ovirt.engine.ui.uicommonweb.help.HelpTag;
import org.ovirt.engine.ui.uicommonweb.models.common.ProgressModel;
import org.ovirt.engine.ui.uicompat.ConstantsManager;
import org.ovirt.engine.ui.uicompat.Event;
import org.ovirt.engine.ui.uicompat.EventArgs;
import org.ovirt.engine.ui.uicompat.IEventListener;
import org.ovirt.engine.ui.uicompat.IProvidePropertyChangedEvent;
import org.ovirt.engine.ui.uicompat.ObservableCollection;
import org.ovirt.engine.ui.uicompat.PropertyChangedEventArgs;
import org.ovirt.engine.ui.uicompat.ProvidePropertyChangedEvent;

import com.google.gwt.event.shared.EventBus;
import com.google.gwt.event.shared.GwtEvent;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.event.shared.HasHandlers;
import com.google.inject.Inject;

/**
 * Base class for UiCommon view models. Provides property-change notification
 * ({@link #onPropertyChanged}), window/dialog management, validity and
 * availability state, command handling, progress indication, GWT event bus
 * integration, and tab-validity tracking for tabbed dialogs.
 * <p>
 * Setters that fire {@code PropertyChangedEventArgs} with a string name do so
 * only when the value actually changes; views listen on those names.
 */
public class Model implements IEventListener<EventArgs>, ICommandTarget, IProvidePropertyChangedEvent, HasHandlers, IModel {

    /**
     * The GWT event bus.
     */
    private EventBus eventBus;

    /**
     * Set of invalid tabs, empty if the model doesn't support tabs.
     */
    private final Set<TabName> invalidTabs = new HashSet<>();

    // Registrations collected via registerHandler() so they can all be
    // released in unregisterHandlers() when the event bus is unset.
    private final List<HandlerRegistration> handlerRegistrations = new ArrayList<>();

    public static final String CANCEL_COMMAND = "Cancel"; //$NON-NLS-1$

    // Raised whenever a bound property of this model changes.
    private Event<PropertyChangedEventArgs> propertyChangedEvent;

    @Override
    public Event<PropertyChangedEventArgs> getPropertyChangedEvent() {
        return propertyChangedEvent;
    }

    private void setPropertyChangedEvent(Event<PropertyChangedEventArgs> value) {
        propertyChangedEvent = value;
    }

    // Currently open child dialog model, or null when no dialog is shown.
    private Model window;

    @Override
    public Model getWindow() {
        return window;
    }

    @Override
    public void setWindow(Model value) {
        if (window != value) {
            window = value;
            onPropertyChanged(new PropertyChangedEventArgs("Window")); //$NON-NLS-1$
        }
    }

    // Currently open confirmation dialog model, or null.
    private Model confirmWindow;

    @Override
    public Model getConfirmWindow() {
        return confirmWindow;
    }

    @Override
    public void setConfirmWindow(Model value) {
        if (confirmWindow != value) {
            confirmWindow = value;
            onPropertyChanged(new PropertyChangedEventArgs("ConfirmWindow")); //$NON-NLS-1$
        }
    }

    private Model widgetModel;

    @Override
    public Model getWidgetModel() {
        return widgetModel;
    }

    public void setWidgetModel(Model value) {
        if (widgetModel != value) {
            widgetModel = value;
            onPropertyChanged(new PropertyChangedEventArgs("WidgetModel")); //$NON-NLS-1$
        }
    }

    // Resolved once in the constructor via TypeResolver.
    private Configurator configurator;

    public Configurator getConfigurator() {
        return configurator;
    }

    private void setConfigurator(Configurator value) {
        configurator = value;
    }

    // Resolved once in the constructor via TypeResolver.
    private ILogger logger;

    protected ILogger getLogger() {
        return logger;
    }

    private void setLogger(ILogger value) {
        logger = value;
    }

    // Last command passed to executeCommand(); no change event is fired.
    private UICommand lastExecutedCommand;

    @Override
    public UICommand getLastExecutedCommand() {
        return lastExecutedCommand;
    }

    private void setLastExecutedCommand(UICommand value) {
        lastExecutedCommand = value;
    }

    private ProgressModel progress;

    /**
     * Represents a progress operation on the model.
     */
    @Override
    public ProgressModel getProgress() {
        return progress;
    }

    private void setProgress(ProgressModel value) {
        if (progress != value) {
            progress = value;
            onPropertyChanged(new PropertyChangedEventArgs(PropertyChangedEventArgs.PROGRESS));
        }
    }

    // Toggle between basic and advanced editing mode; no change event fired.
    private EntityModel<Boolean> advancedMode;

    public EntityModel<Boolean> getAdvancedMode() {
        return advancedMode;
    }

    public void setAdvancedMode(EntityModel<Boolean> advancedMode) {
        this.advancedMode = advancedMode;
    }

    // URL hash fragment name; note the setter fires unconditionally.
    private String hashName;

    @Override
    public String getHashName() {
        return hashName;
    }

    public void setHashName(String value) {
        hashName = value;
        onPropertyChanged(new PropertyChangedEventArgs("HashName")); //$NON-NLS-1$
    }

    private HelpTag helpTag;

    /**
     * Set the help tag for this model. This is used to connect context-sensitive help to a model/dialog.
     * <b>IMPORTANT</b>: only use values from {@code HelpTag}.
     * Also installs the "OpenDocumentation" command used to open the help page.
     * @param helpTag unique id from {@code HelpTag}
     */
    public void setHelpTag(HelpTag helpTag) {
        this.helpTag = helpTag;
        setOpenDocumentationCommand(new UICommand("OpenDocumentation", this)); //$NON-NLS-1$
        onPropertyChanged(new PropertyChangedEventArgs("HelpTag")); //$NON-NLS-1$
    }

    public HelpTag getHelpTag() {
        return helpTag;
    }

    private String applicationPlace;

    /**
     * Returns the logical application place associated with this model or {@code null} if this model
     * has no such place associated.
     */
    public String getApplicationPlace() {
        return applicationPlace;
    }

    protected void setApplicationPlace(String applicationPlace) {
        this.applicationPlace = applicationPlace;
    }

    /**
     * Gets or sets the title of this model. There is no specific purpose for this property, use it on your convenience.
     */
    private String title;

    public String getTitle() {
        return title;
    }

    public Model setTitle(String value) {
        if (!Objects.equals(title, value)) {
            title = value;
            onPropertyChanged(new PropertyChangedEventArgs("Title")); //$NON-NLS-1$
        }
        return this;
    }

    private boolean isValid;

    public boolean getIsValid() {
        return isValid;
    }

    // Becoming valid clears any previously recorded invalidity reasons.
    public void setIsValid(boolean value) {
        if (isValid != value) {
            isValid = value;
            onPropertyChanged(new PropertyChangedEventArgs("IsValid")); //$NON-NLS-1$
            if (isValid) {
                getInvalidityReasons().clear();
            }
        }
    }

    // Human-readable reasons why the model is currently invalid.
    private List<String> invalidityReasons;

    public List<String> getInvalidityReasons() {
        return invalidityReasons;
    }

    public void setInvalidityReasons(List<String> value) {
        invalidityReasons = value;
    }

    // Bitmask of ApplicationMode values in which this model is usable.
    private int availableInModes;

    public int getAvailableInModes() {
        return availableInModes;
    }

    public void setAvailableInModes(int value) {
        if (availableInModes != value) {
            availableInModes = value;
            onPropertyChanged(new PropertyChangedEventArgs("AvailableInModes")); //$NON-NLS-1$
        }
    }

    // Convenience overload taking a single ApplicationMode.
    public void setAvailableInModes(ApplicationMode uiMode) {
        int value = uiMode.getValue();
        setAvailableInModes(value);
    }

    private boolean isAvailable;

    // Availability combines the explicit flag with the current UI mode.
    @Override
    public boolean getIsAvailable() {
        return isAvailable && ApplicationModeHelper.isAvailableInMode(getAvailableInModes());
    }

    public void setIsAvailable(boolean value) {
        if (isAvailable != value) {
            isAvailable = value;
            onIsAvailableChanged();
            onPropertyChanged(new PropertyChangedEventArgs("IsAvailable")); //$NON-NLS-1$
        }
    }

    private boolean isChangable;

    public boolean getIsChangable() {
        return isChangable;
    }

    // Becoming changeable clears any previously set prohibition reason.
    public Model setIsChangeable(boolean value) {
        if (isChangable != value) {
            isChangable = value;
            onPropertyChanged(new PropertyChangedEventArgs("IsChangable")); //$NON-NLS-1$
            if (isChangable) {
                setChangeProhibitionReason(null);
            }
        }
        return this;
    }

    /**
     * If {@code value} parameter is true, {@code reason} parameter is ignored and prohibition reason is set to
     * {@code null}.
     */
    public Model setIsChangeable(boolean value, String reason) {
        setIsChangeable(value);
        if (!value) {
            setChangeProhibitionReason(reason);
        }
        return this;
    }

    // Tooltip-style explanation of why the model cannot be changed.
    private String changeProhibitionReason;

    public String getChangeProhibitionReason() {
        return changeProhibitionReason;
    }

    public void setChangeProhibitionReason(String value) {
        if (!Objects.equals(changeProhibitionReason, value)) {
            changeProhibitionReason = value;
            onPropertyChanged(new PropertyChangedEventArgs("ChangeProhibitionReason")); //$NON-NLS-1$
        }
    }

    /**
     * Update Model's changeability based on the support of given feature in given version.
     *
     * @param feature {@link org.ovirt.engine.core.common.queries.ConfigurationValues} [SomeFeature]Supported value
     * @param version compatibility version to check the feature against
     */
    public void updateChangeability(ConfigurationValues feature, Version version) {
        boolean featureSupported = (Boolean) AsyncDataProvider.getInstance().getConfigValuePreConverted(feature, version.getValue());
        setIsChangeable(featureSupported);
        // NOTE(review): the prohibition reason is set unconditionally, even when
        // featureSupported is true (setIsChangeable(true) clears it first, then this
        // re-sets it). Verify whether the reason should only be set when unsupported.
        setChangeProhibitionReason(ConstantsManager.getInstance().getMessages().optionNotSupportedClusterVersionTooOld(version.getValue()));
    }

    private boolean isSelected;

    public boolean getIsSelected() {
        return isSelected;
    }

    public void setIsSelected(boolean value) {
        if (isSelected != value) {
            isSelected = value;
            onPropertyChanged(new PropertyChangedEventArgs("IsSelected")); //$NON-NLS-1$
        }
    }

    private boolean isSelectable;

    public boolean getIsSelectable() {
        return isSelectable;
    }

    public void setIsSelectable(boolean value) {
        if (isSelectable != value) {
            isSelectable = value;
            onPropertyChanged(new PropertyChangedEventArgs("IsSelectable")); //$NON-NLS-1$
        }
    }

    // Commands (buttons) offered by this model; no change event is fired.
    private List<UICommand> commands;

    public List<UICommand> getCommands() {
        return commands;
    }

    public void setCommands(List<UICommand> value) {
        commands = value;
    }

    /** Returns the first command flagged as default, or {@code null}. */
    public UICommand getDefaultCommand() {
        List<UICommand> commands = getCommands();
        if (commands != null) {
            for (UICommand c : commands) {
                if (c.getIsDefault()) {
                    return c;
                }
            }
        }
        return null;
    }

    /** Returns the first command flagged as cancel, or {@code null}. */
    public UICommand getCancelCommand() {
        List<UICommand> commands = getCommands();
        if (commands != null) {
            for (UICommand c : commands) {
                if (c.getIsCancel()) {
                    return c;
                }
            }
        }
        return null;
    }

    // Command installed by setHelpTag() that opens context-sensitive help.
    private UICommand openDocumentationCommand;

    public UICommand getOpenDocumentationCommand() {
        return openDocumentationCommand;
    }

    public void setOpenDocumentationCommand(UICommand value) {
        openDocumentationCommand = value;
    }

    // Free-form message shown by the view for this model.
    private String message;

    public String getMessage() {
        return message;
    }

    public void setMessage(String value) {
        if (!Objects.equals(message, value)) {
            message = value;
            onPropertyChanged(new PropertyChangedEventArgs("Message")); //$NON-NLS-1$
        }
    }

    /**
     * Initializes the model with sane defaults: valid, changeable, available,
     * selectable, visible in all application modes, with empty command and
     * invalidity-reason collections.
     */
    public Model() {
        setPropertyChangedEvent(new Event<PropertyChangedEventArgs>(ProvidePropertyChangedEvent.definition));

        // Resolve commonly used types.
        setConfigurator(lookupConfigurator());
        setLogger(lookupLogger());

        setInvalidityReasons(new ObservableCollection<String>());
        setIsValid(true);

        setIsChangeable(true);
        setAvailableInModes(ApplicationMode.AllModes);
        setIsAvailable(true);

        setIsSelectable(true);

        setCommands(new ObservableCollection<UICommand>());
    }

    protected ILogger lookupLogger() {
        return (ILogger) TypeResolver.getInstance().resolve(ILogger.class);
    }

    protected Configurator lookupConfigurator() {
        return (Configurator) TypeResolver.getInstance().resolve(Configurator.class);
    }

    /**
     * Override this method to initialize model, for example populate some properties with data here rather than in
     * constructor. But instantiation still should be done in constructor.
     */
    public void initialize() {
    }

    // Hook for subclasses; called from setIsAvailable() before the change event.
    protected void onIsAvailableChanged() {
    }

    protected void onPropertyChanged(PropertyChangedEventArgs e) {
        getPropertyChangedEvent().raise(this, e);
    }

    // Default no-op; subclasses override to react to subscribed events.
    @Override
    public void eventRaised(Event<? extends EventArgs> ev, Object sender, EventArgs args) {
    }

    @Override
    public void executeCommand(UICommand command) {
        setLastExecutedCommand(command);
        if (command == getOpenDocumentationCommand()) {
            onPropertyChanged(new PropertyChangedEventArgs("OpenDocumentation")); //$NON-NLS-1$
        }
    }

    // Base implementation ignores the extra parameters.
    @Override
    public void executeCommand(UICommand uiCommand, Object... parameters) {
        executeCommand(uiCommand);
    }

    public void startProgress() {
        startProgress(null);
    }

    /**
     * Assigns a new instance to the Progress property, indicating start of some operation on the model.
     */
    public void startProgress(String operation) {
        ProgressModel tempVar = new ProgressModel();
        tempVar.setCurrentOperation(operation);
        setProgress(tempVar);
    }

    /**
     * Assigns null to the Progress property, indicating end of some operation on the model.
     */
    public void stopProgress() {
        setProgress(null);
    }

    /**
     * Get the GWT event bus.
     * @return The {@code EventBus}, can be null.
     */
    protected final EventBus getEventBus() {
        return eventBus;
    }

    /**
     * Set the GWT event bus.
     * @param eventBus The {@code EventBus}, can be null.
     */
    @Inject
    public final void setEventBus(EventBus eventBus) {
        this.eventBus = eventBus;
        registerHandlers();
    }

    /**
     * Unset the GWT event bus, use this when cleaning up models.
     */
    public final void unsetEventBus() {
        unregisterHandlers();
        this.eventBus = null;
    }

    /**
     * Allows one to check if the event bus has been set.
     * @return {@code true} if the event bus is set already, {@code false} otherwise.
     */
    public final boolean hasEventBusSet() {
        return this.eventBus != null;
    }

    /**
     * Register handlers after the {@code EventBus} has been set.
     * <p>
     * Make sure to use {@link #registerHandler} to ensure proper
     * handler cleanup when {@link #unsetEventBus} is called.
     */
    protected void registerHandlers() {
        // No-op, override as necessary
    }

    /**
     * Register a handler.
     * @param reg The {@code HandlerRegistration} returned from registering a handler.
     */
    public final void registerHandler(HandlerRegistration reg) {
        if (reg != null && !handlerRegistrations.contains(reg)) {
            handlerRegistrations.add(reg);
        }
    }

    /**
     * Unregister all registered handlers.
     */
    public final void unregisterHandlers() {
        for (HandlerRegistration reg: handlerRegistrations) {
            reg.removeHandler(); // can't call unregisterHandler(reg) as that would modify the list during iteration
        }
        handlerRegistrations.clear();
    }

    /**
     * Unregister a specific handler using its {@code HandlerRegistration}.
     * @param reg The {@code HandlerRegistration} to use to remove the handler.
     */
    public final void unregisterHandler(HandlerRegistration reg) {
        if (reg != null) {
            reg.removeHandler();
            handlerRegistrations.remove(reg);
        }
    }

    // Fires the event from this model as source on the shared event bus.
    @Override
    public void fireEvent(GwtEvent<?> event) {
        getEventBus().fireEventFromSource(event, this);
    }

    public Set<TabName> getInvalidTabs() {
        return invalidTabs;
    }

    // value == true marks the tab valid (removes it from the invalid set).
    public void setValidTab(TabName tab, boolean value) {
        if (value) {
            invalidTabs.remove(tab);
        } else {
            invalidTabs.add(tab);
        }
    }

    public boolean isValidTab(TabName tab) {
        return !invalidTabs.contains(tab);
    }

    public boolean allTabsValid() {
        return invalidTabs.isEmpty();
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied.  See the License for the specific language governing
 * permissions and limitations under the License.
 */
package org.apache.avro.repo;

import java.io.BufferedWriter;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.RandomAccessFile;
import java.io.Writer;
import java.nio.channels.FileChannel;
import java.nio.channels.FileLock;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Properties;
import java.util.Scanner;

import javax.inject.Inject;
import javax.inject.Named;

/**
 * A {@link Repository} that persists content to file. <br/>
 * <br/>
 * The {@link Repository} stores all of its data in a single base directory.
 * Within this directory each {@link Subject} is represented by a nested
 * directory with the same name as the {@link Subject}. Within each
 * {@link Subject} directory there are three file types: <li>
 * A properties file named 'subject.properties' containing the configured
 * properties for the Subject. At this time, the only used property is
 * "avro.repo.validator.class".</li> <li>
 * A text file named 'schema_ids' containing the schema ids, in order of their
 * creation, delimited by newline, encoded in UTF-8. This is used to track the
 * order of schema registration for {@link Subject#latest()} and
 * {@link Subject#allEntries()}</li> <li>
 * One file per schema the contents of which are the schema encoded in UTF-8 and
 * the name of which is the schema id followed by the postfix '.schema'.</li>
 *
 */
public class FileRepository implements Repository, Closeable {

  private static final String LOCKFILE = ".repo.lock";
  private static final String SUBJECT_PROPERTIES = "subject.properties";
  private static final String SCHEMA_IDS = "schema_ids";
  private static final String SCHEMA_POSTFIX = ".schema";

  // In-memory view of the subjects on disk; populated eagerly at construction.
  private final InMemorySubjectCache subjects = new InMemorySubjectCache();
  private final ValidatorFactory validators;

  private final File rootDir;
  private final FileChannel lockChannel;
  private final FileLock fileLock;
  // Guarded by "synchronized" on all public methods; once true the
  // repository rejects further operations (see isValid()).
  private boolean closed = false;

  /**
   * Create a FileRepository in the directory path provided. Locks a file
   * "repository.lock" to ensure no other object or process is running a
   * FileRepository from the same place. The lock is released if
   * {@link #close()} is called, the object is finalized, or the JVM exits.
   *
   * Not all platforms support file locks. See {@link FileLock}
   *
   * @param repoPath
   *          the base directory in which all repository data is stored;
   *          created (including parents) if it does not exist
   * @param validators
   *          factory used to wrap subjects with schema validation
   * @throws RuntimeException
   *           if the directory cannot be created or is not a directory
   * @throws IllegalStateException
   *           if the repository lock file cannot be acquired
   */
  @Inject
  public FileRepository(@Named("avro.repo.file-repo-path") String repoPath,
      ValidatorFactory validators) {
    this.validators = validators;
    this.rootDir = new File(repoPath);
    if ((!rootDir.exists() && !rootDir.mkdirs()) || !rootDir.isDirectory()) {
      throw new RuntimeException(
          "Unable to create repo directory, or not a directory: "
              + rootDir.getAbsolutePath());
    }
    // lock repository
    try {
      File lockfile = new File(rootDir, LOCKFILE);
      lockfile.createNewFile();
      @SuppressWarnings("resource")
      // raf is closed when lockChannel is closed
      RandomAccessFile raf = new RandomAccessFile(lockfile, "rw");
      lockChannel = raf.getChannel();
      fileLock = lockChannel.tryLock();
      if (fileLock != null) {
        lockfile.deleteOnExit();
      } else {
        throw new IllegalStateException("Failed to lock file: "
            + lockfile.getAbsolutePath());
      }
    } catch (IOException e) {
      throw new IllegalStateException("Unable to lock repository directory: "
          + rootDir.getAbsolutePath(), e);
    }
    // eagerly load up subjects
    loadSubjects(rootDir, subjects);
  }

  // Scans the repo directory; every child directory is a subject.
  private void loadSubjects(File repoDir, SubjectCache subjects) {
    File[] children = repoDir.listFiles();
    if (children == null) {
      // listFiles() returns null on I/O error (the directory's existence was
      // already validated in the constructor); nothing to load in that case.
      return;
    }
    for (File file : children) {
      if (file.isDirectory()) {
        subjects.add(new FileSubject(file));
      }
    }
  }

  // Throws if the repository has been closed; called by every operation.
  private void isValid() {
    if (closed) {
      throw new IllegalStateException("FileRepository is closed");
    }
  }

  /** Releases the repository lock and marks this instance closed. Idempotent. */
  @Override
  public synchronized void close() {
    if (closed) {
      return;
    }
    try {
      fileLock.release();
    } catch (IOException e) {
      // nothing to do here -- it was already released
      // or there are underlying errors we cannot recover from
    } finally {
      closed = true;
      try {
        lockChannel.close();
      } catch (IOException e) {
        // nothing to do here -- underlying errors but recovery
        // not possible here or in client, and already closed
      }
    }
  }

  /**
   * Returns the existing subject of that name, or creates a new one on disk.
   * Newly created subjects are wrapped with schema validation.
   */
  @Override
  public synchronized Subject register(String subjectName, SubjectConfig config) {
    isValid();
    Subject subject = subjects.lookup(subjectName);
    if (null == subject) {
      subject = subjects.add(Subject.validatingSubject(
          createNewFileSubject(subjectName, config), validators));
    }
    return subject;
  }

  /** Returns the subject of that name, or {@code null} if none exists. */
  @Override
  public synchronized Subject lookup(String subjectName) {
    isValid();
    return subjects.lookup(subjectName);
  }

  /** Returns all subjects currently known to this repository. */
  @Override
  public synchronized Iterable<Subject> subjects() {
    isValid();
    return subjects.values();
  }

  // Creates the on-disk layout for a brand-new subject, then wraps it.
  private FileSubject createNewFileSubject(String subject, SubjectConfig config) {
    File subjectDir = new File(rootDir, subject);
    createNewSubjectDir(subjectDir, config);
    return new FileSubject(subjectDir);
  }

  // create a new empty subject directory with its id file and properties file
  private static void createNewSubjectDir(File subjectDir, SubjectConfig config) {
    if (subjectDir.exists()) {
      throw new RuntimeException(
          "Cannot create a FileSubject, directory already exists: "
              + subjectDir.getAbsolutePath());
    }
    if (!subjectDir.mkdir()) {
      throw new RuntimeException("Cannot create a FileSubject dir: "
          + subjectDir.getAbsolutePath());
    }
    createNewFileInDir(subjectDir, SCHEMA_IDS);
    File subjectProperties = createNewFileInDir(subjectDir, SUBJECT_PROPERTIES);
    Properties props = new Properties();
    props.putAll(RepositoryUtil.safeConfig(config).asMap());
    writePropertyFile(subjectProperties, props);
  }

  // Creates a new, empty file; fails if it already exists.
  private static File createNewFileInDir(File dir, String filename) {
    File result = new File(dir, filename);
    try {
      if (!result.createNewFile()) {
        throw new RuntimeException(result.getAbsolutePath()
            + " already exists");
      }
    } catch (IOException e) {
      throw new RuntimeException("Unable to create file: "
          + result.getAbsolutePath(), e);
    }
    return result;
  }

  /**
   * Runs {@code op} against a UTF-8 writer on {@code file}, flushing and
   * closing on success. The underlying stream is closed best-effort on
   * failure so a failed write does not leak a file handle.
   */
  private static void writeToFile(File file, WriteOp op, boolean append) {
    FileOutputStream out;
    try {
      out = new FileOutputStream(file, append);
    } catch (FileNotFoundException e) {
      // FIX: preserve the cause so callers can see the underlying I/O error.
      throw new RuntimeException("Could not open file for write: "
          + file.getAbsolutePath(), e);
    }
    try {
      OutputStreamWriter writer = new OutputStreamWriter(out, "UTF-8");
      BufferedWriter bwriter = new BufferedWriter(writer);
      op.write(bwriter);
      bwriter.flush();
      bwriter.close();
      writer.close();
      out.close();
    } catch (IOException e) {
      // FIX: close the stream on failure (previously leaked) and keep the cause.
      try {
        out.close();
      } catch (IOException ignored) {
        // best effort -- the original failure below is what matters
      }
      throw new RuntimeException("Failed to write and close file "
          + file.getAbsolutePath(), e);
    }
  }

  // Overwrites the subject.properties file with the given properties.
  private static void writePropertyFile(File file, final Properties prop) {
    writeToFile(file, new WriteOp() {
      @Override
      protected void write(Writer writer) throws IOException {
        prop.store(writer, "Schema Repository Subject Properties");
      }
    }, false);
  }

  // Appends a single newline-terminated line (used for the schema_ids file).
  private static void appendLineToFile(File file, final String line) {
    writeToFile(file, new WriteOp() {
      @Override
      protected void write(Writer writer) throws IOException {
        writer.append(line).append('\n');
      }
    }, true);
  }

  private static void dirExists(File dir) {
    if (!dir.exists() || !dir.isDirectory()) {
      throw new RuntimeException(
          "directory does not exist or is not a directory: " + dir.toString());
    }
  }

  private static void fileReadable(File file) {
    if (!file.canRead()) {
      throw new RuntimeException("file does not exist or is not readable: "
          + file.toString());
    }
  }

  private static void fileWriteable(File file) {
    if (!file.canWrite()) {
      throw new RuntimeException("file does not exist or is not writeable: "
          + file.toString());
    }
  }

  // Callback used by writeToFile() so the open/flush/close boilerplate
  // lives in one place.
  private abstract static class WriteOp {
    protected abstract void write(Writer writer) throws IOException;
  }

  /**
   * A {@link Subject} backed by one directory: a 'subject.properties' file,
   * a 'schema_ids' ordering file, and one '.schema' file per schema.
   * Schema ids are non-negative integers assigned in increasing order.
   */
  private class FileSubject extends Subject {
    private final File subjectDir;
    private final File idFile;
    private final File propertyFile;
    private final SubjectConfig config;
    // Largest schema id seen so far; next registration uses largestId + 1.
    private int largestId = -1;
    private SchemaEntry latest;

    private FileSubject(File dir) {
      super(dir.getName());
      this.subjectDir = dir;
      this.idFile = new File(dir, SCHEMA_IDS);
      this.propertyFile = new File(dir, SUBJECT_PROPERTIES);
      dirExists(subjectDir);
      fileReadable(idFile);
      fileWriteable(idFile);
      fileReadable(propertyFile);
      fileWriteable(propertyFile);
      // read from config file
      Properties props = new Properties();
      try {
        // FIX: close the properties stream (previously leaked).
        FileInputStream propStream = new FileInputStream(propertyFile);
        try {
          props.load(propStream);
        } finally {
          propStream.close();
        }
        config = RepositoryUtil.configFromProperties(props);
        // Validate the directory: every id in schema_ids must have a readable
        // schema file, ids must be unique, and no orphan .schema files may exist.
        Integer lastId = null;
        HashSet<String> schemaFileNames = getSchemaFiles();
        HashSet<Integer> foundIds = new HashSet<Integer>();
        for (Integer id : getSchemaIds()) {
          if (id > largestId) {
            largestId = id;
          }
          lastId = id;
          if (!foundIds.add(id)) {
            throw new RuntimeException("Corrupt id file, id '" + id
                + "' duplicated in " + idFile.getAbsolutePath());
          }
          fileReadable(getSchemaFile(id));
          schemaFileNames.remove(getSchemaFileName(id));
        }
        if (schemaFileNames.size() > 0) {
          throw new RuntimeException("Schema files found in subject directory "
              + subjectDir.getAbsolutePath()
              + " that are not referenced in the " + SCHEMA_IDS + " file: "
              + schemaFileNames.toString());
        }
        if (lastId != null) {
          latest = new SchemaEntry(lastId.toString(),
              readSchemaForId(lastId.toString()));
        }
      } catch (IOException e) {
        throw new RuntimeException("error initializing subject: "
            + subjectDir.getAbsolutePath(), e);
      }
    }

    @Override
    public SubjectConfig getConfig() {
      return config;
    }

    /**
     * Registers a schema, reusing the existing entry if the identical schema
     * was registered before; otherwise writes a new schema file and appends
     * its id to the schema_ids file.
     */
    @Override
    public synchronized SchemaEntry register(String schema)
        throws SchemaValidationException {
      isValid();
      RepositoryUtil.validateSchemaOrSubject(schema);
      SchemaEntry entry = lookupBySchema(schema);
      if (entry == null) {
        entry = createNewSchemaFile(schema);
        appendLineToFile(idFile, entry.getId());
        latest = entry;
      }
      return entry;
    }

    // Writes the schema to a new '<id>.schema' file and advances largestId.
    private synchronized SchemaEntry createNewSchemaFile(String schema) {
      try {
        int newId = largestId + 1;
        File f = getSchemaFile(String.valueOf(newId));
        if (!f.exists() && f.createNewFile()) {
          // FIX: write UTF-8 explicitly -- schemas are documented as UTF-8 and
          // are read back as UTF-8, but FileWriter used the platform charset.
          Writer output = new BufferedWriter(new OutputStreamWriter(
              new FileOutputStream(f), "UTF-8"));
          try {
            output.write(schema);
            output.flush();
          } finally {
            output.close();
          }
          latest = new SchemaEntry(String.valueOf(newId), schema);
          largestId++;
          return latest;
        } else {
          throw new RuntimeException(
              "Unable to register schema, schema file either exists already "
                  + " or couldn't create new file");
        }
      } catch (NumberFormatException e) {
        throw new RuntimeException(
            "Unable to register schema, invalid schema latest schema id ", e);
      } catch (IOException e) {
        throw new RuntimeException(
            "Unable to register schema, couldn't create schema file ", e);
      }
    }

    /**
     * Registers the schema only if {@code latest} matches this subject's
     * current latest entry (both may be null); returns {@code null} otherwise.
     */
    @Override
    public synchronized SchemaEntry registerIfLatest(String schema,
        SchemaEntry latest) throws SchemaValidationException {
      isValid();
      if (latest == this.latest // both null
          || (latest != null && latest.equals(this.latest))) {
        return register(schema);
      } else {
        return null;
      }
    }

    /** Linear scan over all schema files for an exact content match. */
    @Override
    public synchronized SchemaEntry lookupBySchema(String schema) {
      isValid();
      RepositoryUtil.validateSchemaOrSubject(schema);
      for (Integer id : getSchemaIds()) {
        String idStr = id.toString();
        String schemaInFile = readSchemaForIdOrNull(idStr);
        if (schema.equals(schemaInFile)) {
          return new SchemaEntry(idStr, schema);
        }
      }
      return null;
    }

    /** Returns the entry for the given id, or {@code null} if unreadable/absent. */
    @Override
    public synchronized SchemaEntry lookupById(String id) {
      isValid();
      String schema = readSchemaForIdOrNull(id);
      if (schema != null) {
        return new SchemaEntry(id, schema);
      }
      return null;
    }

    @Override
    public synchronized SchemaEntry latest() {
      isValid();
      return latest;
    }

    /** Returns all entries, newest first (reverse of registration order). */
    @Override
    public synchronized Iterable<SchemaEntry> allEntries() {
      isValid();
      List<SchemaEntry> entries = new ArrayList<SchemaEntry>();
      for (Integer id : getSchemaIds()) {
        String idStr = id.toString();
        String schema = readSchemaForId(idStr);
        entries.add(new SchemaEntry(idStr, schema));
      }
      Collections.reverse(entries);
      return entries;
    }

    @Override
    public boolean integralKeys() {
      return true;
    }

    private String readSchemaForIdOrNull(String id) {
      try {
        return readSchemaForId(id);
      } catch (Exception e) {
        return null;
      }
    }

    private String readSchemaForId(String id) {
      File schemaFile = getSchemaFile(id);
      return readSchemaFile(schemaFile);
    }

    private String readSchemaFile(File schemaFile) {
      try {
        return readAllAsString(schemaFile);
      } catch (FileNotFoundException e) {
        throw new RuntimeException("Could not read schema contents at: "
            + schemaFile.getAbsolutePath(), e);
      }
    }

    private final String endOfLine = System.getProperty("line.separator");

    // Reads the whole file as a UTF-8 string, normalizing line separators
    // to the platform separator. Throws if the file is empty.
    private String readAllAsString(File file) throws FileNotFoundException {
      Scanner s = new Scanner(file, "UTF-8").useDelimiter(endOfLine);
      StringBuilder strBuilder = new StringBuilder();
      try {
        while (s.hasNext()) {
          strBuilder.append(s.nextLine());
          if (s.hasNext()) {
            strBuilder.append(endOfLine);
          }
        }
        return strBuilder.toString();
      } catch (NoSuchElementException e) {
        throw new RuntimeException("file is empty: " + file.getAbsolutePath(),
            e);
      } finally {
        s.close();
      }
    }

    // Names of all '*.schema' files currently present in the subject dir.
    private HashSet<String> getSchemaFiles() {
      String[] files = subjectDir.list(new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name) {
          if (null != name && name.endsWith(SCHEMA_POSTFIX)) {
            return true;
          }
          return false;
        }
      });
      if (files == null) {
        // FIX: list() returns null on I/O error; avoid Arrays.asList(null) NPE.
        return new HashSet<String>();
      }
      return new HashSet<String>(Arrays.asList(files));
    }

    // schema ids from the schema id file, in order from oldest to newest
    private List<Integer> getSchemaIds() {
      Scanner s = getIdFileScanner();
      List<Integer> ids = new ArrayList<Integer>();
      try {
        while (s.hasNextLine()) {
          if (s.hasNext()) { // only read non-empty lines
            ids.add(s.nextInt());
          }
          s.nextLine();
        }
        return ids;
      } finally {
        s.close();
      }
    }

    private Scanner getIdFileScanner() {
      try {
        return new Scanner(idFile, "UTF-8");
      } catch (FileNotFoundException e) {
        throw new RuntimeException("Unable to read schema id file: "
            + idFile.getAbsolutePath(), e);
      }
    }

    private File getSchemaFile(String id) {
      return new File(subjectDir, getSchemaFileName(id));
    }

    private File getSchemaFile(int id) {
      return getSchemaFile(String.valueOf(id));
    }

    private String getSchemaFileName(String id) {
      return id + SCHEMA_POSTFIX;
    }

    private String getSchemaFileName(int id) {
      return getSchemaFileName(String.valueOf(id));
    }
  }
}
package mytown.protection;

import cpw.mods.fml.common.FMLCommonHandler;
import myessentials.entities.BlockPos;
import myessentials.entities.Volume;
import mytown.MyTown;
import mytown.datasource.MyTownDatasource;
import mytown.datasource.MyTownUniverse;
import mytown.entities.*;
import mytown.entities.flag.FlagType;
import mytown.protection.segment.*;
import mytown.protection.segment.enums.BlockType;
import mytown.protection.segment.enums.EntityType;
import mytown.protection.segment.enums.ItemType;
import mytown.proxies.DatasourceProxy;
import mytown.util.exceptions.ConditionException;
import mytown.util.exceptions.GetterException;
import net.minecraft.block.Block;
import net.minecraft.entity.Entity;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.item.ItemStack;
import net.minecraft.network.play.server.S23PacketBlockChange;
import net.minecraft.server.MinecraftServer;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.world.World;
import net.minecraftforge.common.util.ForgeDirection;
import net.minecraftforge.event.entity.player.PlayerInteractEvent;
import org.apache.commons.lang3.exception.ExceptionUtils;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

/**
 * An object which offers protection for a specific mod and version.
 * Segments (per tile entity / entity / item / block) describe what to check
 * and which flag governs the action; the check* methods return true when the
 * action is PROTECTED (i.e. should be cancelled).
 */
public class Protection {

    // Mod identifier and (optional, may be "") version this protection targets.
    public final String modid;
    public final String version;

    // Segments bucketed by kind for fast dispatch from the event handlers.
    public final List<SegmentTileEntity> segmentsTiles = new ArrayList<SegmentTileEntity>();
    public final List<SegmentEntity> segmentsEntities = new ArrayList<SegmentEntity>();
    public final List<SegmentItem> segmentsItems = new ArrayList<SegmentItem>();
    public final List<SegmentBlock> segmentsBlocks = new ArrayList<SegmentBlock>();

    public Protection(String modid, List<Segment> segments) {
        this(modid, "", segments);
    }

    /** Sorts the mixed segment list into the per-kind buckets above. */
    public Protection(String modid, String version, List<Segment> segments) {
        for(Segment segment : segments) {
            if(segment instanceof SegmentTileEntity)
                segmentsTiles.add((SegmentTileEntity)segment);
            else if(segment instanceof SegmentEntity)
                segmentsEntities.add((SegmentEntity) segment);
            else if(segment instanceof SegmentItem)
                segmentsItems.add((SegmentItem)segment);
            else if(segment instanceof SegmentBlock)
                segmentsBlocks.add((SegmentBlock)segment);
        }
        this.modid = modid;
        this.version = version;
    }

    /**
     * Checks a tile entity against the tile segments.
     *
     * @return true if the tile entity is NOT allowed at its position (protected)
     */
    public boolean checkTileEntity(TileEntity te) {
        for(Iterator<SegmentTileEntity> it = segmentsTiles.iterator(); it.hasNext();) {
            SegmentTileEntity segment = it.next();
            if(segment.getCheckClass().isAssignableFrom(te.getClass())) {
                try {
                    if(segment.checkCondition(te)) {
                        // Bounding box of blocks the tile entity affects, as defined by the segment.
                        Volume teBox = new Volume(segment.getX1(te), segment.getY1(te), segment.getZ1(te), segment.getX2(te), segment.getY2(te), segment.getZ2(te));
                        int dim = te.getWorldObj().provider.dimensionId;
                        Resident owner = segment.hasOwner() ? Protections.instance.getOwnerForTileEntity(te) : null;
                        if (!hasPermission(owner, segment, dim, teBox)) {
                            return true;
                        }
                    }
                } catch (Exception ex) {
                    MyTown.instance.LOG.error("Failed to check tile entity: {} ({}, {}, {}, Dim: {})", te.getClass().getSimpleName(), te.xCoord, te.yCoord, te.zCoord, te.getWorldObj().provider.dimensionId);
                    MyTown.instance.LOG.error(ExceptionUtils.getStackTrace(ex));
                    // Disabling protection if something errors.
                    if(ex instanceof GetterException || ex instanceof ConditionException) {
                        this.disableSegment(it, segment, ex.getMessage());
                    } else {
                        MyTown.instance.LOG.error("Skipping...");
                    }
                }
                // NOTE(review): this return sits INSIDE the loop, so only the first
                // class-matching segment is ever evaluated; the other check* methods
                // keep iterating. Confirm whether this early exit is intentional.
                return false;
            }
        }
        return false;
    }

    // TODO: Add condition check
    /**
     * Checks a TRACKED entity against the entity segments.
     *
     * @return true if the entity is NOT allowed at its position (protected)
     */
    public boolean checkEntity(Entity entity) {
        for(SegmentEntity segment : segmentsEntities) {
            if (segment.getCheckClass().isAssignableFrom(entity.getClass())) {
                if(segment.getType() == EntityType.TRACKED) {
                    if (segment.checkCondition(entity)) {
                        int range = segment.getRange(entity);
                        Resident owner = segment.getOwner(entity);
                        int dim = entity.dimension;
                        int x = (int) Math.floor(entity.posX);
                        int y = (int) Math.floor(entity.posY);
                        int z = (int) Math.floor(entity.posZ);
                        if(range == 0) {
                            // Point check at the entity's block position.
                            if (!hasPermission(owner, segment, dim, x, y, z)) {
                                return true;
                            }
                        } else {
                            // Cube check of +/- range blocks around the entity.
                            Volume rangeBox = new Volume(x-range, y-range, z-range, x+range, y+range, z+range);
                            if (!hasPermission(owner, segment, dim, rangeBox)) {
                                return true;
                            }
                        }
                    }
                }
            }
        }
        return false;
    }

    /**
     * Checking item usage for left or right click on block.
     *
     * @param face the clicked block face; used to offset to the adjacent block
     *             when the segment targets the placement position
     * @return true if the item use is protected (and a client resync was sent)
     */
    public boolean checkItem(ItemStack item, ItemType type, Resident res, BlockPos bp, int face) {
        for(Iterator<SegmentItem> it = segmentsItems.iterator(); it.hasNext();) {
            SegmentItem segment = it.next();
            if(segment.getType() == type && segment.getCheckClass().isAssignableFrom(item.getItem().getClass())) {
                ForgeDirection direction = ForgeDirection.getOrientation(face);
                if(segment.isOnAdjacent()) {
                    // Check the block NEXT to the clicked face instead of the clicked one.
                    bp = new BlockPos(bp.getX() + direction.offsetX, bp.getY() + direction.offsetY, bp.getZ() + direction.offsetZ, bp.getDim());
                }
                if (!segment.isDirectionalClientUpdate()) {
                    direction = null;
                }
                try {
                    if (segment.checkCondition(item)) {
                        int range = segment.getRange(item);
                        int dim = bp.getDim();
                        int x = bp.getX();
                        int y = bp.getY();
                        int z = bp.getZ();
                        boolean isProtected;
                        if(range == 0) {
                            isProtected = !hasPermission(res, segment, dim, x, y, z);
                        } else {
                            Volume rangeBox = new Volume(x-range, y-range, z-range, x+range, y+range, z+range);
                            isProtected = !hasPermission(res, segment, dim, rangeBox);
                        }
                        if(isProtected) {
                            if (segment.hasClientUpdate()) {
                                // Resync the client's blocks so a cancelled action doesn't leave ghosts.
                                sendClientUpdate(segment.getClientUpdateCoords(), bp, (EntityPlayerMP) res.getPlayer(), direction);
                            }
                            return true;
                        }
                    }
                } catch (Exception ex) {
                    MyTown.instance.LOG.error("Failed to check item use on {} at the player {} ({})", item.getDisplayName(), res.getPlayerName(), bp);
                    MyTown.instance.LOG.error(ExceptionUtils.getStackTrace(ex));
                    if(ex instanceof GetterException || ex instanceof ConditionException) {
                        this.disableSegment(it, segment, ex.getMessage());
                    }
                }
            }
        }
        return false;
    }

    /**
     * Checking item usage for right click on entity.
     * First checks PROTECT entity segments, then RIGHT_CLICK_ENTITY item segments.
     *
     * @return true if the interaction is protected
     */
    public boolean checkEntityRightClick(ItemStack item, Resident res, Entity entity) {
        for(Iterator<SegmentEntity> it = segmentsEntities.iterator(); it.hasNext();) {
            SegmentEntity segment = it.next();
            if(segment.getType() == EntityType.PROTECT && segment.getCheckClass().isAssignableFrom(entity.getClass())) {
                int dim = entity.dimension;
                int x = (int) Math.floor(entity.posX);
                int y = (int) Math.floor(entity.posY);
                int z = (int) Math.floor(entity.posZ);
                if (!hasPermission(res, segment, dim, x, y, z)) {
                    return true;
                }
            }
        }
        // Clicking with an empty hand: no item segments to check.
        if(item == null)
            return false;
        for(Iterator<SegmentItem> it = segmentsItems.iterator(); it.hasNext();) {
            SegmentItem segment = it.next();
            if(segment.getType() == ItemType.RIGHT_CLICK_ENTITY && segment.getCheckClass().isAssignableFrom(item.getItem().getClass())) {
                try {
                    if (segment.checkCondition(item)) {
                        int range = segment.getRange(item);
                        int dim = entity.dimension;
                        int x = (int) Math.floor(entity.posX);
                        int y = (int) Math.floor(entity.posY);
                        int z = (int) Math.floor(entity.posZ);
                        if(range == 0) {
                            if (!hasPermission(res, segment, dim, x, y, z)) {
                                return true;
                            }
                        } else {
                            Volume rangeBox = new Volume(x-range, y-range, z-range, x+range, y+range, z+range);
                            if (!hasPermission(res, segment, dim, rangeBox)) {
                                return true;
                            }
                        }
                    }
                } catch (Exception ex) {
                    MyTown.instance.LOG.error("Failed to check item use on {} at the player {} ({}, {}, {} | Dim: {})", item.getDisplayName(), res.getPlayerName(), entity.posX, entity.posY, entity.posZ, entity.dimension);
                    MyTown.instance.LOG.error(ExceptionUtils.getStackTrace(ex));
                    if(ex instanceof GetterException || ex instanceof ConditionException) {
                        this.disableSegment(it, segment, ex.getMessage());
                    }
                }
            }
        }
        return false;
    }

    /**
     * Checking item usage for right click on air.
     * Position is taken from the player holding the item.
     *
     * @return true if the item use is protected
     */
    public boolean checkItem(ItemStack item, Resident res) {
        for(Iterator<SegmentItem> it = segmentsItems.iterator(); it.hasNext();) {
            SegmentItem segment = it.next();
            if(segment.getType() == ItemType.RIGHT_CLICK_AIR && segment.getCheckClass().isAssignableFrom(item.getItem().getClass())) {
                EntityPlayer entity = res.getPlayer();
                try {
                    if (segment.checkCondition(item)) {
                        int range = segment.getRange(item);
                        int dim = entity.dimension;
                        int x = (int) Math.floor(entity.posX);
                        int y = (int) Math.floor(entity.posY);
                        int z = (int) Math.floor(entity.posZ);
                        if(range == 0) {
                            if (!hasPermission(res, segment, dim, x, y, z)) {
                                return true;
                            }
                        } else {
                            Volume rangeBox = new Volume(x-range, y-range, z-range, x+range, y+range, z+range);
                            if (!hasPermission(res, segment, dim, rangeBox)) {
                                return true;
                            }
                        }
                    }
                } catch (Exception ex) {
                    // NOTE(review): unlike the other catch blocks, this one does not log
                    // ExceptionUtils.getStackTrace(ex) — likely an omission; confirm.
                    MyTown.instance.LOG.error("Failed to check item use on {} at the player {} ({}, {}, {} | Dim: {})", item.getDisplayName(), res.getPlayerName(), entity.posX, entity.posY, entity.posZ, entity.dimension);
                    if(ex instanceof GetterException || ex instanceof ConditionException) {
                        this.disableSegment(it, segment, ex.getMessage());
                    }
                }
            }
        }
        return false;
    }

    /**
     * Checking right click actions on blocks.
     *
     * @return true if the block interaction is protected (and a client resync was sent)
     */
    public boolean checkBlockInteraction(Resident res, BlockPos bp, PlayerInteractEvent.Action action) {
        Block blockType = MinecraftServer.getServer().worldServerForDimension(bp.getDim()).getBlock(bp.getX(), bp.getY(), bp.getZ());
        for(SegmentBlock segment : segmentsBlocks) {
            // Match block class, metadata (-1 = any) and the click type of the segment.
            if(segment.getCheckClass().isAssignableFrom(blockType.getClass())
                    && (segment.getMeta() == -1 || segment.getMeta() == MinecraftServer.getServer().worldServerForDimension(bp.getDim()).getBlockMetadata(bp.getX(), bp.getY(), bp.getZ()))
                    && (segment.getType() == BlockType.ANY_CLICK
                        || segment.getType() == BlockType.RIGHT_CLICK && action == PlayerInteractEvent.Action.RIGHT_CLICK_BLOCK
                        || segment.getType() == BlockType.LEFT_CLICK && action == PlayerInteractEvent.Action.LEFT_CLICK_BLOCK)) {
                int dim = bp.getDim();
                int x = bp.getX();
                int y = bp.getY();
                int z = bp.getZ();
                if (!hasPermission(res, segment, dim, x, y, z)) {
                    if(segment.hasClientUpdate())
                        sendClientUpdate(segment.getClientUpdateCoords(), bp, (EntityPlayerMP) res.getPlayer(), null);
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Resends real block data for every block in updateVolume (relative to center)
     * so the client discards its predicted changes after a cancelled action.
     *
     * @param face when non-null, the volume is rotated to face that direction first
     */
    public void sendClientUpdate(Volume updateVolume, BlockPos center, EntityPlayerMP player, ForgeDirection face) {
        World world = MinecraftServer.getServer().worldServerForDimension(center.getDim());
        int x, y, z;
        if(face != null)
            updateVolume = translateVolume(updateVolume, face);
        for (int i = updateVolume.getMinX(); i <= updateVolume.getMaxX(); i++) {
            for (int j = updateVolume.getMinY(); j <= updateVolume.getMaxY(); j++) {
                for (int k = updateVolume.getMinZ(); k <= updateVolume.getMaxZ(); k++) {
                    x = center.getX() + i;
                    y = center.getY() + j;
                    z = center.getZ() + k;
                    S23PacketBlockChange packet = new S23PacketBlockChange(x, y, z, world);
                    // field_148884_e is the packet's obfuscated block-metadata field.
                    packet.field_148884_e = world.getBlockMetadata(x, y, z);
                    // NOTE(review): broadcasts to ALL players although a specific player
                    // is passed in — confirm whether per-player send was intended.
                    FMLCommonHandler.instance().getMinecraftServerInstance().getConfigurationManager().sendPacketToAllPlayers(packet);
                }
            }
        }
    }

    /**
     * Rotates a SOUTH-oriented relative volume so it faces the given direction.
     * SOUTH is the identity; UNKNOWN returns the volume unchanged.
     */
    public Volume translateVolume(Volume volume, ForgeDirection direction) {
        if(direction == ForgeDirection.UNKNOWN)
            return volume;
        switch (direction) {
            case DOWN:
                volume = new Volume(volume.getMinX(), -volume.getMaxZ(), volume.getMinY(), volume.getMaxX(), volume.getMinZ(), volume.getMaxY());
                break;
            case UP:
                volume = new Volume(volume.getMinX(), volume.getMinZ(), volume.getMinY(), volume.getMaxX(), volume.getMaxZ(), volume.getMaxY());
                break;
            case NORTH:
                volume = new Volume(volume.getMinX(), volume.getMinY(), - volume.getMaxZ(), volume.getMaxX(), volume.getMaxY(), volume.getMinZ());
                break;
            case WEST:
                volume = new Volume(- volume.getMaxZ(), volume.getMinY(), volume.getMinX(), volume.getMinZ(), volume.getMaxY(), volume.getMaxX());
                break;
            case EAST:
                volume = new Volume(volume.getMinZ(), volume.getMinY(), volume.getMinX(), volume.getMaxZ(), volume.getMaxY(), volume.getMaxX());
                break;
            case SOUTH:
                // The orientation on South is already the correct one, no translation needed.
                break;
        }
        return volume;
    }

    /**
     * Permission check for a single block position.
     * Falls back to Wild flags outside any town; sends a denial message to the
     * resident when permission is refused.
     *
     * @param res acting resident, or null for ownerless/environment actions
     * @return true when the action is allowed
     */
    public boolean hasPermission(Resident res, Segment segment, int dim, int x, int y, int z) {
        TownBlock townBlock = MyTownUniverse.instance.blocks.get(dim, x >> 4, z >> 4);
        if(townBlock == null) {
            if (res == null) {
                return !Wild.instance.flagsContainer.getValue(segment.getFlag()).equals(segment.getDenialValue());
            } else {
                if (!Wild.instance.hasPermission(res, segment.getFlag(), segment.getDenialValue())) {
                    res.sendMessage(segment.getFlag().getLocalizedProtectionDenial());
                    return false;
                }
            }
        } else {
            Town town = townBlock.getTown();
            if (res == null) {
                return !town.getValueAtCoords(dim, x, y, z, segment.getFlag()).equals(segment.getDenialValue());
            } else {
                if (!town.hasPermission(res, segment.getFlag(), segment.getDenialValue(), dim, x, y, z)) {
                    res.protectionDenial(segment.getFlag(), town.formatOwners(dim, x, y, z));
                    return false;
                }
            }
        }
        return true;
    }

    /**
     * Permission check over a volume: every chunk the volume touches is checked,
     * plot-by-plot; town-level flags cover any area not owned by a plot, and
     * Wild flags cover chunks outside any town.
     *
     * @return true when the action is allowed everywhere inside the volume
     */
    public boolean hasPermission(Resident res, Segment segment, int dim, Volume area) {
        boolean inWild = false;
        for (int townBlockX = area.getMinX() >> 4; townBlockX <= area.getMaxX() >> 4; townBlockX++) {
            for (int townBlockZ = area.getMinZ() >> 4; townBlockZ <= area.getMaxZ() >> 4; townBlockZ++) {
                TownBlock townBlock = MyTownUniverse.instance.blocks.get(dim, townBlockX, townBlockZ);
                if (townBlock == null) {
                    inWild = true;
                } else {
                    Town town = townBlock.getTown();
                    // Clip the area to this chunk.
                    Volume rangeBox = townBlock.getAreaLimit(area);
                    int totalIntersectArea = 0;
                    // Check every plot in the current TownBlock and sum all plot areas
                    for (Plot plot : townBlock.plotsContainer) {
                        int plotIntersectArea = plot.getIntersectingArea(rangeBox);
                        if (plotIntersectArea > 0) {
                            if (res == null) {
                                if (plot.flagsContainer.getValue(segment.getFlag()).equals(segment.getDenialValue())) {
                                    return false;
                                }
                            } else {
                                if (!plot.hasPermission(res, segment.getFlag(), segment.getDenialValue())) {
                                    res.protectionDenial(segment.getFlag(), MyTown.instance.LOCAL.getLocalization("mytown.notification.town.owners", town.residentsMap.getMayor() == null ? "SERVER ADMINS" : town.residentsMap.getMayor().getPlayerName()));
                                    return false;
                                }
                            }
                        }
                        totalIntersectArea += plotIntersectArea;
                    }
                    // If plot area sum is not equal to range area, check town permission
                    if (totalIntersectArea != getArea(rangeBox)) {
                        if (res == null) {
                            if (town.flagsContainer.getValue(segment.getFlag()).equals(segment.getDenialValue())) {
                                return false;
                            }
                        } else {
                            if (!town.hasPermission(res, segment.getFlag(), segment.getDenialValue())) {
                                res.protectionDenial(segment.getFlag(), MyTown.instance.LOCAL.getLocalization("mytown.notification.town.owners", town.residentsMap.getMayor() == null ? "SERVER ADMINS" : town.residentsMap.getMayor().getPlayerName()));
                                return false;
                            }
                        }
                    }
                }
            }
        }
        if (inWild) {
            if (res == null) {
                if (Wild.instance.flagsContainer.getValue(segment.getFlag()).equals(segment.getDenialValue())) {
                    return false;
                }
            } else {
                if (!Wild.instance.hasPermission(res, segment.getFlag(), segment.getDenialValue())) {
                    res.sendMessage(segment.getFlag().getLocalizedProtectionDenial());
                    return false;
                }
            }
        }
        return true;
    }

    /** Inclusive block count of a volume (both bounds counted on each axis). */
    public int getArea(Volume rangeBox) {
        return ((rangeBox.getMaxX() - rangeBox.getMinX()) + 1) * ((rangeBox.getMaxY() - rangeBox.getMinY()) + 1) * ((rangeBox.getMaxZ() - rangeBox.getMinZ()) + 1);
    }

    /**
     * Gets the flags which the type of TileEntity is checked against.
     */
    public List<FlagType> getFlagsForTile(Class<? extends TileEntity> te) {
        List<FlagType> flags = new ArrayList<FlagType>();
        for(SegmentTileEntity segment : segmentsTiles) {
            if(segment.getCheckClass().isAssignableFrom(te))
                flags.add(segment.getFlag());
        }
        return flags;
    }

    /** First matching entity segment's type for the class, or null when untracked. */
    public EntityType getEntityType(Class<? extends Entity> entity) {
        for(SegmentEntity segment : segmentsEntities) {
            if (segment.getCheckClass().isAssignableFrom(entity)) {
                return segment.getType();
            }
        }
        return null;
    }

    public boolean isEntityProtected(Class<? extends Entity> entity) {
        EntityType type = getEntityType(entity);
        return type != null && type == EntityType.PROTECT;
    }

    /**
     * Returns whether or not the entity should be checked each tick.
     */
    public boolean isEntityTracked(Class<? extends Entity> entity) {
        EntityType type = getEntityType(entity);
        return type != null && (type == EntityType.TRACKED);
    }

    public boolean isEntityOwnable(Class<? extends Entity> entity) {
        for(SegmentEntity segment : segmentsEntities) {
            if(segment.getCheckClass().isAssignableFrom(entity) && segment.hasOwner())
                return true;
        }
        return false;
    }

    public boolean isTileTracked(Class<? extends TileEntity> te) {
        for(SegmentTileEntity segment : segmentsTiles) {
            if(segment.getCheckClass().isAssignableFrom(te))
                return true;
        }
        return false;
    }

    /** True when any block segment matches the class and metadata (-1 = any meta). */
    public boolean isBlockTracked(Class<? extends Block> block, int meta) {
        for(SegmentBlock segment : segmentsBlocks) {
            if(segment.getCheckClass().isAssignableFrom(block) &&( segment.getMeta() == -1 || segment.getMeta() == meta))
                return true;
        }
        return false;
    }

    public boolean isTileEntityOwnable(Class<? extends TileEntity> te) {
        for(SegmentTileEntity segment : segmentsTiles) {
            if(segment.getCheckClass().isAssignableFrom(te) && segment.hasOwner())
                return true;
        }
        return false;
    }

    /** True when a PVP segment matches the entity class (entity bypasses trespass rules in pvp). */
    public boolean canEntityTrespassPvp(Class<? extends Entity> entity) {
        for(SegmentEntity segment : segmentsEntities) {
            if(segment.getCheckClass().isAssignableFrom(entity) && segment.getType() == EntityType.PVP) {
                return true;
            }
        }
        return false;
    }

    /* ---- Protection instance utilities ---- */

    // Removes the faulty segment via the live iterator (safe during iteration) and logs why.
    private void disableSegment(Iterator<? extends Segment> it, Segment segment, String message) {
        it.remove();
        MyTown.instance.LOG.error(message);
        MyTown.instance.LOG.error("Disabling segment for {} in protection {}.", segment.getCheckClass().getName(), this.modid);
        MyTown.instance.LOG.info("Reload protections to enable it again.");
    }

    // Unregisters this whole protection.
    private void disable() {
        Protections.instance.removeProtection(this);
    }

    public static MyTownDatasource getDatasource() {
        return DatasourceProxy.getDatasource();
    }
}
/* * Copyright (C) 2004-2011 John Currier * Copyright (C) 2018 Nils Petzaell * * This file is a part of the SchemaSpy project (http://schemaspy.org). * * SchemaSpy is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * SchemaSpy is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ package org.schemaspy; import org.schemaspy.model.ForeignKeyConstraint; import org.schemaspy.model.Table; import java.util.*; /** * Sorts {@link Table}s by their referential integrity constraints. * The intent is to have a list of tables in an order that can be used * to insert or delete them from a database. * * @author John Currier * @author Nils Petzaell */ public class TableOrderer { /** * Returns a list of <code>Table</code>s ordered such that parents are listed first * and child tables are listed last. * * <code>recursiveConstraints</code> gets populated with <code>TableConstraint</code>s * that had to be removed to resolve the returned list. * @param tables Tables to order * @param recursiveConstraints gets populated with TableConstraints that had to be removed to resolve the returned list * @return Returns a list of <code>Table</code>s ordered such that parents are listed first and child tables are listed last. 
*/ public List<Table> getTablesOrderedByRI(Collection<Table> tables, Collection<ForeignKeyConstraint> recursiveConstraints) { List<Table> heads = new ArrayList<>(); List<Table> tails = new ArrayList<>(); List<Table> remainingTables = new ArrayList<>(tables); List<Table> unattached = new ArrayList<>(); // first pass to gather the 'low hanging fruit' removeRemotesAndUnattached(remainingTables, unattached); unattached = sortTrimmedLevel(unattached); boolean prunedNonReals = false; while (!remainingTables.isEmpty()) { int tablesLeft = remainingTables.size(); tails.addAll(0, trimLeaves(remainingTables)); heads.addAll(trimRoots(remainingTables)); // if we could't trim anything then there's recursion.... // resolve it by removing a constraint, one by one, 'till the tables are all trimmed if (tablesLeft == remainingTables.size()) { if (!prunedNonReals) { // get ride of everything that isn't explicitly specified by the database for (Table table : remainingTables) { table.removeNonRealForeignKeys(); } prunedNonReals = true; continue; } boolean foundSimpleRecursion = removeSelfReferencingConstraints(remainingTables, recursiveConstraints); removeAForeignKeyConstraint(recursiveConstraints, remainingTables, foundSimpleRecursion); } } // we've gathered all the heads and tails, so combine them here moving 'unattached' tables to the end List<Table> ordered = new ArrayList<>(heads.size() + tails.size()); ordered.addAll(heads); ordered.addAll(tails); ordered.addAll(unattached); return ordered; } private static void removeRemotesAndUnattached(List<Table> remainingTables, List<Table> unattached) { for (Iterator<Table> iter = remainingTables.iterator(); iter.hasNext(); ) { Table table = iter.next(); if (table.isRemote()) { // ignore remote tables since there's no way to deal with them table.unlinkParents(); table.unlinkChildren(); iter.remove(); } else if (table.isLeaf() && table.isRoot()) { // floater, so add it to 'unattached' unattached.add(table); iter.remove(); } } } /** * Remove 
the leaf nodes (tables w/o children) * * @param tables tables to remove leafs from * @return tables removed */ private static List<Table> trimLeaves(List<Table> tables) { List<Table> leaves = new ArrayList<>(); Iterator<Table> iter = tables.iterator(); while (iter.hasNext()) { Table leaf = iter.next(); if (leaf.isLeaf()) { leaves.add(leaf); iter.remove(); } } // now sort them so the ones with large numbers of children show up first (not required, but cool) leaves = sortTrimmedLevel(leaves); iter = leaves.iterator(); while (iter.hasNext()) { // do this after the previous loop to prevent getting leaves before they're ready // and so we can sort them correctly iter.next().unlinkParents(); } return leaves; } /** * Remove the root nodes (tables w/o parents) * * @param tables to trim roots from * @return tables removed */ private static List<Table> trimRoots(List<Table> tables) { List<Table> roots = new ArrayList<>(); Iterator<Table> iter = tables.iterator(); while (iter.hasNext()) { Table root = iter.next(); if (root.isRoot()) { roots.add(root); iter.remove(); } } // now sort them so the ones with large numbers of children show up first (not required, but cool) roots = sortTrimmedLevel(roots); iter = roots.iterator(); while (iter.hasNext()) { // do this after the previous loop to prevent getting roots before they're ready // and so we can sort them correctly iter.next().unlinkChildren(); } return roots; } /** * this doesn't change the logical output of the program because all of these (leaves or roots) are at the same logical level */ private static List<Table> sortTrimmedLevel(List<Table> tables) { /* order by <ul> <li>number of kids (descending) <li>number of parents (ascending) <li>alpha name (ascending) </ul> */ final class TrimComparator implements Comparator<Table> { public int compare(Table table1, Table table2) { // have to keep track of and use the 'max' versions because // by the time we get here we'll (probably?) 
have no parents or children int rc = table2.getMaxChildren() - table1.getMaxChildren(); if (rc == 0) rc = table1.getMaxParents() - table2.getMaxParents(); if (rc == 0) rc = table1.compareTo(table2); return rc; } } Set<Table> sorter = new TreeSet<>(new TrimComparator()); sorter.addAll(tables); return new ArrayList<>(sorter); } private static boolean removeSelfReferencingConstraints(List<Table> remainingTables, Collection<ForeignKeyConstraint> recursiveConstraints) { boolean foundSimpleRecursion = false; for (Table potentialRecursiveTable : remainingTables) { ForeignKeyConstraint recursiveConstraint = potentialRecursiveTable.removeSelfReferencingConstraint(); if (recursiveConstraint != null) { recursiveConstraints.add(recursiveConstraint); foundSimpleRecursion = true; } } return foundSimpleRecursion; } private static void removeAForeignKeyConstraint(Collection<ForeignKeyConstraint> recursiveConstraints, List<Table> remainingTables, boolean foundSimpleRecursion) { if (!foundSimpleRecursion) { // expensive comparison, but we're down to the end of the tables so it shouldn't really matter Set<Table> byParentChildDelta = new TreeSet<>((t1, t2) -> { // sort on the delta between number of parents and kids so we can // target the tables with the biggest delta and therefore the most impact // on reducing the smaller of the two int rc = Math.abs(t2.getNumChildren() - t2.getNumParents()) - Math.abs(t1.getNumChildren() - t1.getNumParents()); if (rc == 0) rc = t1.compareTo(t2); return rc; }); byParentChildDelta.addAll(remainingTables); Table recursiveTable = byParentChildDelta.iterator().next(); // this one has the largest delta ForeignKeyConstraint removedConstraint = recursiveTable.removeAForeignKeyConstraint(); recursiveConstraints.add(removedConstraint); } } }
/* * RED5 Open Source Media Server - https://github.com/Red5/ * * Copyright 2006-2016 by respective authors (see below). All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.red5.server.so; import java.io.IOException; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.CopyOnWriteArraySet; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import org.red5.io.object.Deserializer; import org.red5.io.object.Input; import org.red5.io.object.Output; import org.red5.io.object.Serializer; import org.red5.server.AttributeStore; import org.red5.server.api.IAttributeStore; import org.red5.server.api.Red5; import org.red5.server.api.event.IEventListener; import org.red5.server.api.persistence.IPersistable; import org.red5.server.api.persistence.IPersistenceStore; import org.red5.server.api.scope.ScopeType; import org.red5.server.api.statistics.ISharedObjectStatistics; import org.red5.server.api.statistics.support.StatisticsCounter; import org.red5.server.net.rtmp.RTMPConnection; import org.red5.server.net.rtmp.codec.RTMP; import org.red5.server.net.rtmp.message.Constants; import org.red5.server.so.ISharedObjectEvent.Type; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Represents shared object on server-side. 
Shared Objects in Flash are like cookies that are stored on client side. In Red5 and Flash Media Server there's one more special type of SOs : remote Shared Objects. * * These are shared by multiple clients and synchronized between them automatically on each data change. This is done asynchronously, used as events handling and is widely used in multiplayer Flash online games. * * Shared object can be persistent or transient. The difference is that first are saved to the disk and can be accessed later on next connection, transient objects are not saved and get lost each time they last client disconnects from it. * * Shared Objects has name identifiers and path on server's HD (if persistent). On deeper level server-side Shared Object in this implementation actually uses IPersistenceStore to delegate all (de)serialization work. * * SOs store data as simple map, that is, "name-value" pairs. Each value in turn can be complex object or map. * * All access to methods that change properties in the SO must be properly synchronized for multi-threaded access. */ public class SharedObject extends AttributeStore implements ISharedObjectStatistics, IPersistable, Constants { protected static Logger log = LoggerFactory.getLogger(SharedObject.class); /** * Shared Object name (identifier) */ protected String name = ""; /** * SO path */ protected String path = ""; /** * true if the SharedObject was stored by the persistence framework and can be used later on reconnection */ protected boolean persistent; /** * Object that is delegated with all storage work for persistent SOs */ protected IPersistenceStore storage; /** * Version. Used on synchronization purposes. */ protected AtomicInteger version = new AtomicInteger(1); /** * Number of pending update operations */ protected AtomicInteger updateCounter = new AtomicInteger(); /** * Has changes? 
flag */ protected AtomicBoolean modified = new AtomicBoolean(); /** * Last modified timestamp */ protected long lastModified = -1; /** * Owner event */ protected SharedObjectMessage ownerMessage; /** * Synchronization events */ protected transient volatile ConcurrentLinkedQueue<ISharedObjectEvent> syncEvents = new ConcurrentLinkedQueue<ISharedObjectEvent>(); /** * Listeners */ protected transient volatile CopyOnWriteArraySet<IEventListener> listeners = new CopyOnWriteArraySet<IEventListener>(); /** * Event listener, actually RTMP connection */ protected IEventListener source; /** * Number of times the SO has been acquired */ protected AtomicInteger acquireCount = new AtomicInteger(); /** * Timestamp the scope was created. */ private long creationTime; /** * Manages listener statistics. */ protected transient StatisticsCounter listenerStats = new StatisticsCounter(); /** * Counts number of "change" events. */ protected AtomicInteger changeStats = new AtomicInteger(); /** * Counts number of "delete" events. */ protected AtomicInteger deleteStats = new AtomicInteger(); /** * Counts number of "send message" events. */ protected AtomicInteger sendStats = new AtomicInteger(); /** * Whether or not this shared object is closed */ protected AtomicBoolean closed = new AtomicBoolean(false); /** Constructs a new SharedObject. 
*/ public SharedObject() { // This is used by the persistence framework super(); ownerMessage = new SharedObjectMessage(null, null, -1, false); creationTime = System.currentTimeMillis(); } /** * Constructs new SO from Input object * * @param input * Input source * @throws IOException * I/O exception * * @see org.red5.io.object.Input */ public SharedObject(Input input) throws IOException { this(); deserialize(input); } /** * Creates new SO from given data map, name, path and persistence option * * @param name * SO name * @param path * SO path * @param persistent * SO persistence */ public SharedObject(String name, String path, boolean persistent) { super(); this.name = name; this.path = path; this.persistent = persistent; ownerMessage = new SharedObjectMessage(null, name, 0, persistent); creationTime = System.currentTimeMillis(); } /** * Creates new SO from given data map, name, path, storage object and persistence option * * @param name * SO name * @param path * SO path * @param persistent * SO persistence * @param storage * Persistence storage */ public SharedObject(String name, String path, boolean persistent, IPersistenceStore storage) { this(name, path, persistent); setStore(storage); } /** * Creates new SO from given data map, name, path and persistence option * * @param data * Data * @param name * SO name * @param path * SO path * @param persistent * SO persistence */ public SharedObject(Map<String, Object> data, String name, String path, boolean persistent) { this(name, path, persistent); attributes.putAll(data); } /** * Creates new SO from given data map, name, path, storage object and persistence option * * @param data * Data * @param name * SO name * @param path * SO path * @param persistent * SO persistence * @param storage * Persistence storage */ public SharedObject(Map<String, Object> data, String name, String path, boolean persistent, IPersistenceStore storage) { this(data, name, path, persistent); setStore(storage); } /** {@inheritDoc} */ public 
String getName() {
        return name;
    }

    /** {@inheritDoc} */
    public void setName(String name) {
        // renaming is intentionally unsupported once the SO exists
        throw new UnsupportedOperationException(String.format("Name change not supported; current name: %s", getName()));
    }

    /** {@inheritDoc} */
    public String getPath() {
        return path;
    }

    /** {@inheritDoc} */
    public void setPath(String path) {
        this.path = path;
    }

    /** {@inheritDoc} */
    public String getType() {
        return ScopeType.SHARED_OBJECT.toString();
    }

    /** {@inheritDoc} */
    public long getLastModified() {
        return lastModified;
    }

    /** {@inheritDoc} */
    public boolean isPersistent() {
        return persistent;
    }

    /** {@inheritDoc} */
    public void setPersistent(boolean persistent) {
        log.debug("setPersistent: {}", persistent);
        this.persistent = persistent;
    }

    /**
     * Send update notification over data channel of RTMP connection.
     * Drains at most MAXIMUM_EVENTS_PER_UPDATE owner events (sent back to the
     * originating connection) and sync events (broadcast to every other
     * connected listener); delivery happens asynchronously via submitTask.
     */
    protected void sendUpdates() {
        log.debug("sendUpdates");
        // get the current version
        final int currentVersion = getVersion();
        log.debug("Current version: {}", currentVersion);
        // get the name
        final String name = getName();
        //get owner events
        ConcurrentLinkedQueue<ISharedObjectEvent> ownerEvents = ownerMessage.getEvents();
        if (!ownerEvents.isEmpty()) {
            // get all current owner events
            final ConcurrentLinkedQueue<ISharedObjectEvent> events = new ConcurrentLinkedQueue<ISharedObjectEvent>();
            if (ownerEvents.size() > SharedObjectService.MAXIMUM_EVENTS_PER_UPDATE) {
                log.debug("Owner events exceed max: {}", ownerEvents.size());
                // cap the batch; leftover events remain queued for the next pass
                for (int i = 0; i < SharedObjectService.MAXIMUM_EVENTS_PER_UPDATE; i++) {
                    events.add(ownerEvents.poll());
                }
            } else {
                events.addAll(ownerEvents);
                ownerEvents.removeAll(events);
            }
            // send update to "owner" of this update request
            if (source != null) {
                final RTMPConnection con = (RTMPConnection) source;
                // create a worker
                SharedObjectService.submitTask(new Runnable() {
                    public void run() {
                        // bind the connection to the worker thread for the duration of the send
                        Red5.setConnectionLocal(con);
                        con.sendSharedObjectMessage(name, currentVersion, persistent, events);
                        Red5.setConnectionLocal(null);
                    }
                });
            }
        } else if (log.isTraceEnabled()) {
            log.trace("No owner events to send");
        }
        // tell all the listeners
        if (!syncEvents.isEmpty()) {
            // get all current sync events
            final ConcurrentLinkedQueue<ISharedObjectEvent> events = new ConcurrentLinkedQueue<ISharedObjectEvent>();
            if (syncEvents.size() > SharedObjectService.MAXIMUM_EVENTS_PER_UPDATE) {
                log.debug("Sync events exceed max: {}", syncEvents.size());
                for (int i = 0; i < SharedObjectService.MAXIMUM_EVENTS_PER_UPDATE; i++) {
                    events.add(syncEvents.poll());
                }
            } else {
                events.addAll(syncEvents);
                syncEvents.removeAll(events);
            }
            // get the listeners
            Set<IEventListener> listeners = getListeners();
            if (log.isDebugEnabled()) {
                log.debug("Listeners: {}", listeners);
            }
            // updates all registered clients of this shared object
            for (IEventListener listener : listeners) {
                if (listener != source) {
                    if (listener instanceof RTMPConnection) {
                        final RTMPConnection con = (RTMPConnection) listener;
                        // skip connections that are not fully connected
                        if (con.getStateCode() == RTMP.STATE_CONNECTED) {
                            // create a worker
                            SharedObjectService.submitTask(new Runnable() {
                                public void run() {
                                    Red5.setConnectionLocal(con);
                                    con.sendSharedObjectMessage(name, currentVersion, persistent, events);
                                    Red5.setConnectionLocal(null);
                                }
                            });
                        } else {
                            log.debug("Skipping unconnected connection");
                        }
                    } else {
                        log.warn("Can't send sync message to unknown connection {}", listener);
                    }
                } else {
                    // don't re-send update to active client
                    log.debug("Skipped {}", source);
                }
            }
        } else if (log.isTraceEnabled()) {
            log.trace("No sync events to send");
        }
    }

    /**
     * Send notification about modification of SO. No-op while a
     * beginUpdate/endUpdate batch is still open (updateCounter > 0); otherwise
     * bumps the version, persists (if a store is set) and flushes updates.
     */
    protected void notifyModified() {
        log.debug("notifyModified - modified: {} update counter: {}", modified.get(), updateCounter.get());
        if (updateCounter.get() == 0) {
            if (modified.get()) {
                // client sent at least one update -> increase version of SO
                updateVersion();
                lastModified = System.currentTimeMillis();
                if (storage == null || !storage.save(this)) {
                    log.warn("Could not store shared object");
                }
            }
            sendUpdates();
            modified.compareAndSet(true, false);
        }
    }

    /**
     *
Return an error message to the client.
     *
     * @param message
     *            message
     */
    protected void returnError(String message) {
        ownerMessage.addEvent(Type.CLIENT_STATUS, "error", message);
    }

    /**
     * Return an attribute value to the owner.
     *
     * @param name
     *            name
     */
    protected void returnAttributeValue(String name) {
        ownerMessage.addEvent(Type.CLIENT_UPDATE_DATA, name, getAttribute(name));
    }

    /**
     * Return attribute by name and set if it doesn't exist yet.
     *
     * @param name
     *            Attribute name
     * @param value
     *            Value to set if attribute doesn't exist
     * @return Attribute value
     */
    @Override
    public Object getAttribute(String name, Object value) {
        log.debug("getAttribute - name: {} value: {}", name, value);
        Object result = null;
        if (name != null) {
            // atomic insert-if-absent; only a real insert triggers sync/notify
            result = attributes.putIfAbsent(name, value);
            if (result == null) {
                // no previous value
                modified.set(true);
                ownerMessage.addEvent(Type.CLIENT_UPDATE_DATA, name, value);
                syncEvents.add(new SharedObjectEvent(Type.CLIENT_UPDATE_DATA, name, value));
                notifyModified();
                changeStats.incrementAndGet();
                result = value;
            }
        }
        return result;
    }

    /** {@inheritDoc} */
    @Override
    public boolean setAttribute(String name, Object value) {
        log.debug("setAttribute - name: {} value: {}", name, value);
        boolean result = true;
        // confirmation event is queued to the owner regardless of outcome
        ownerMessage.addEvent(Type.CLIENT_UPDATE_ATTRIBUTE, name, null);
        if (value == null && super.removeAttribute(name)) {
            // Setting a null value removes the attribute
            modified.set(true);
            syncEvents.add(new SharedObjectEvent(Type.CLIENT_DELETE_DATA, name, null));
            deleteStats.incrementAndGet();
        } else if (value != null) {
            boolean setAttr = super.setAttribute(name, value);
            log.debug("Set attribute?: {} modified: {}", setAttr, modified.get());
            // only sync if the attribute changed
            // NOTE(review): the comment above is stale — modified is set and a sync
            // event queued unconditionally here, even when setAttr is false; confirm
            // whether unchanged values should really be re-broadcast.
            modified.set(true);
            syncEvents.add(new SharedObjectEvent(Type.CLIENT_UPDATE_DATA, name, value));
            changeStats.incrementAndGet();
        } else {
            // value == null but no such attribute existed
            result = false;
        }
        notifyModified();
        return result;
    }

    /** {@inheritDoc} */
    @Override
    public boolean setAttributes(Map<String, Object> values) {
        int successes = 0;
        if (values != null) {
            // batch the individual setAttribute calls into one update cycle
            beginUpdate();
            try {
                for (Map.Entry<String, Object> entry : values.entrySet()) {
                    if (setAttribute(entry.getKey(), entry.getValue())) {
                        successes++;
                    }
                }
            } finally {
                endUpdate();
            }
        }
        // expect every value to have been added
        // NOTE(review): when values == null this line throws NPE on values.size();
        // likely should return false inside a null guard — confirm intended contract.
        return (successes == values.size());
    }

    /** {@inheritDoc} */
    @Override
    public boolean setAttributes(IAttributeStore values) {
        if (values != null) {
            return setAttributes(values.getAttributes());
        }
        return false;
    }

    /**
     * Removes attribute with given name
     *
     * @param name
     *            Attribute
     * @return <pre>
     * true
     * </pre>
     *
     *         if there's such an attribute and it was removed,
     *
     *         <pre>
     * false
     * </pre>
     *
     *         otherwise
     */
    @Override
    public boolean removeAttribute(String name) {
        boolean result = true;
        // Send confirmation to client
        ownerMessage.addEvent(Type.CLIENT_DELETE_DATA, name, null);
        if (super.removeAttribute(name)) {
            modified.set(true);
            syncEvents.add(new SharedObjectEvent(Type.CLIENT_DELETE_DATA, name, null));
            deleteStats.incrementAndGet();
        } else {
            result = false;
        }
        notifyModified();
        return result;
    }

    /**
     * Broadcast event to event handler
     *
     * @param handler
     *            Event handler
     * @param arguments
     *            Arguments
     */
    protected void sendMessage(String handler, List<?> arguments) {
        if (ownerMessage.addEvent(Type.CLIENT_SEND_MESSAGE, handler, arguments)) {
            syncEvents.add(new SharedObjectEvent(Type.CLIENT_SEND_MESSAGE, handler, arguments));
            sendStats.incrementAndGet();
            if (log.isTraceEnabled()) {
                log.trace("Send message: {}", arguments);
            }
        }
    }

    /**
     * Getter for data.
     *
     * @return SO data as unmodifiable map
     */
    public Map<String, Object> getData() {
        return getAttributes();
    }

    /**
     * Getter for version.
     *
     * @return SO version.
     */
    public int getVersion() {
        return version.get();
    }

    /**
     * Increases version by one
     */
    private void updateVersion() {
        version.incrementAndGet();
    }

    /**
     * Remove all attributes (clear Shared Object)
     */
    @Override
    public void removeAttributes() {
        // TODO: there must be a direct way to clear the SO on the client side...
Set<String> names = getAttributeNames();
        // one delete event per attribute, for both owner and listeners
        for (String key : names) {
            ownerMessage.addEvent(Type.CLIENT_DELETE_DATA, key, null);
            syncEvents.add(new SharedObjectEvent(Type.CLIENT_DELETE_DATA, key, null));
        }
        deleteStats.addAndGet(names.size());
        // clear data
        super.removeAttributes();
        // mark as modified
        modified.set(true);
        // broadcast 'modified' event
        notifyModified();
    }

    /**
     * Register event listener
     *
     * @param listener
     *            Event listener
     * @return true if listener was added
     */
    protected boolean register(IEventListener listener) {
        log.debug("register - listener: {}", listener);
        boolean registered = listeners.add(listener);
        if (registered) {
            listenerStats.increment();
            // prepare response for new client
            ownerMessage.addEvent(Type.CLIENT_INITIAL_DATA, null, null);
            if (!isPersistent()) {
                ownerMessage.addEvent(Type.CLIENT_CLEAR_DATA, null, null);
            }
            if (!attributes.isEmpty()) {
                ownerMessage.addEvent(new SharedObjectEvent(Type.CLIENT_UPDATE_DATA, null, getAttributes()));
            }
            // we call notifyModified here to send response if we're not in a beginUpdate block
            notifyModified();
        }
        return registered;
    }

    /**
     * Unregister event listener
     *
     * @param listener
     *            Event listener
     */
    protected void unregister(IEventListener listener) {
        log.debug("unregister - listener: {}", listener);
        listeners.remove(listener);
        listenerStats.decrement();
    }

    /**
     * Check if shared object must be released. Non-persistent SOs with no
     * listeners left and no outstanding acquisitions are removed from storage
     * and closed.
     */
    protected void checkRelease() {
        if (!isPersistent() && listeners.isEmpty() && !isAcquired()) {
            log.info("Deleting shared object {} because all clients disconnected and it is no longer acquired.", name);
            if (storage != null) {
                if (!storage.remove(this)) {
                    log.error("Could not remove shared object");
                }
            }
            close();
        }
    }

    /**
     * Get event listeners.
     *
     * @return Value for property 'listeners'.
     */
    public Set<IEventListener> getListeners() {
        return Collections.unmodifiableSet(listeners);
    }

    /**
     * Begin update of this Shared Object. Increases number of pending update operations
     */
    protected void beginUpdate() {
        log.debug("beginUpdate");
        beginUpdate(source);
    }

    /**
     * Begin update of this Shared Object and setting listener
     *
     * @param listener
     *            Update with listener
     */
    protected void beginUpdate(IEventListener listener) {
        log.debug("beginUpdate - listener: {}", listener);
        source = listener;
        // increase number of pending updates
        updateCounter.incrementAndGet();
    }

    /**
     * End update of this Shared Object. Decreases number of pending update operations and broadcasts modified event if it is equal to zero (i.e. no more pending update operations).
     */
    protected void endUpdate() {
        log.debug("endUpdate");
        // decrease number of pending updates
        if (updateCounter.decrementAndGet() == 0) {
            notifyModified();
            source = null;
        }
    }

    /** {@inheritDoc} */
    public void serialize(Output output) throws IOException {
        log.debug("serialize - name: {}", name);
        // persisted form is simply the name followed by the attribute map
        Serializer.serialize(output, getName());
        Map<String, Object> map = getAttributes();
        if (log.isTraceEnabled()) {
            log.trace("Attributes: {}", map);
        }
        Serializer.serialize(output, map);
    }

    /** {@inheritDoc} */
    @SuppressWarnings({ "unchecked", "rawtypes" })
    public void deserialize(Input input) throws IOException {
        log.debug("deserialize");
        name = Deserializer.deserialize(input, String.class);
        log.trace("Name: {}", name);
        // anything read back from storage is by definition persistent
        persistent = true;
        Map<String, Object> map = Deserializer.<Map> deserialize(input, Map.class);
        if (log.isTraceEnabled()) {
            log.trace("Attributes: {}", map);
        }
        super.setAttributes(map);
        ownerMessage.setName(name);
        ownerMessage.setPersistent(persistent);
    }

    /** {@inheritDoc} */
    public void setStore(IPersistenceStore store) {
        this.storage = store;
    }

    /** {@inheritDoc} */
    public IPersistenceStore getStore() {
        return storage;
    }

    /**
     * Deletes all the attributes and sends a clear event to all listeners. The persistent data object is also removed from a persistent shared object.
 *
     * @return <pre>
     * true
     * </pre>
     *
     *         on success,
     *
     *         <pre>
     * false
     * </pre>
     *
     *         otherwise
     */
    protected boolean clear() {
        log.debug("clear");
        super.removeAttributes();
        // send confirmation to client
        ownerMessage.addEvent(Type.CLIENT_CLEAR_DATA, name, null);
        notifyModified();
        changeStats.incrementAndGet();
        return true;
    }

    /**
     * Detaches a reference from this shared object, reset it's state, this will destroy the reference immediately. This is useful when you don't want to proxy a shared object any longer.
     */
    protected void close() {
        log.debug("close");
        closed.compareAndSet(false, true);
        // clear collections
        super.removeAttributes();
        listeners.clear();
        syncEvents.clear();
        ownerMessage.getEvents().clear();
    }

    /**
     * Prevent shared object from being released. Each call to
     *
     * <pre>
     * acquire
     * </pre>
     *
     * must be paired with a call to
     *
     * <pre>
     * release
     * </pre>
     *
     * so the SO isn't held forever. This is only valid for non-persistent SOs.
     */
    public void acquire() {
        log.debug("acquire");
        acquireCount.incrementAndGet();
    }

    /**
     * Check if shared object currently is acquired.
     *
     * @return <pre>
     * true
     * </pre>
     *
     *         if the SO is acquired, otherwise
     *
     *         <pre>
     * false
     * </pre>
     */
    public boolean isAcquired() {
        return acquireCount.get() > 0;
    }

    /**
     * Release previously acquired shared object. If the SO is non-persistent, no more clients are connected the SO isn't acquired any more, the data is released.
     */
    public void release() {
        log.debug("release");
        // releasing without a matching acquire is a programming error
        if (acquireCount.get() == 0) {
            throw new RuntimeException("The shared object was not acquired before.");
        }
        if (acquireCount.decrementAndGet() == 0) {
            checkRelease();
        }
    }

    /**
     * Whether this shared object has been closed.
     *
     * @return true once close() has been called
     */
    public boolean isClosed() {
        return closed.get();
    }

    /** {@inheritDoc} */
    public long getCreationTime() {
        return creationTime;
    }

    /** {@inheritDoc} */
    public int getTotalListeners() {
        return listenerStats.getTotal();
    }

    /** {@inheritDoc} */
    public int getMaxListeners() {
        return listenerStats.getMax();
    }

    /** {@inheritDoc} */
    public int getActiveListeners() {
        return listenerStats.getCurrent();
    }

    /** {@inheritDoc} */
    public int getTotalChanges() {
        return changeStats.intValue();
    }

    /** {@inheritDoc} */
    public int getTotalDeletes() {
        return deleteStats.intValue();
    }

    /** {@inheritDoc} */
    public int getTotalSends() {
        return sendStats.intValue();
    }

    /**
     * Sets a modified or dirty property on this object to indicate whether or not a modification has been made.
     *
     * @param dirty
     *            true if modified and false otherwise
     */
    public void setDirty(boolean dirty) {
        log.trace("setDirty: {}", dirty);
        modified.set(dirty);
    }
}
/** * $URL$ * $Id$ * * Copyright (c) 2009 The Sakai Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sakaiproject.basiclti.impl; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; import java.util.Stack; import java.util.Properties; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.ServletOutputStream; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.imsglobal.basiclti.BasicLTIUtil; import org.sakaiproject.authz.cover.SecurityService; import org.sakaiproject.entity.api.Entity; import org.sakaiproject.entity.api.EntityAccessOverloadException; import org.sakaiproject.entity.api.EntityCopyrightException; import org.sakaiproject.entity.cover.EntityManager; import org.sakaiproject.entity.api.EntityNotDefinedException; import org.sakaiproject.entity.api.EntityPermissionException; import org.sakaiproject.entity.api.EntityProducer; import org.sakaiproject.entity.api.HttpAccess; import org.sakaiproject.entity.api.Reference; import org.sakaiproject.entity.api.ResourceProperties; import org.sakaiproject.tool.cover.SessionManager; import org.sakaiproject.tool.api.Session; import org.sakaiproject.tool.cover.ToolManager; import org.sakaiproject.site.api.Site; import 
org.sakaiproject.site.cover.SiteService;
import org.sakaiproject.component.cover.ComponentManager;
import org.sakaiproject.component.cover.ServerConfigurationService;
import org.sakaiproject.util.StringUtil;
import org.sakaiproject.util.FormattedText;
import org.sakaiproject.exception.IdUnusedException;
import org.sakaiproject.exception.PermissionException;
import org.sakaiproject.util.ResourceLoader;
import org.sakaiproject.event.api.Event;
import org.sakaiproject.event.api.NotificationService;
import org.sakaiproject.lti.api.LTIService;

//import org.sakaiproject.event.cover.EventTrackingService;
// NOTE(review): duplicate import of ComponentManager below (harmless but redundant)
import org.sakaiproject.component.cover.ComponentManager;
import org.sakaiproject.util.Validator;
import org.sakaiproject.util.Web;
import org.sakaiproject.site.api.SitePage;
import org.sakaiproject.site.api.ToolConfiguration;
import org.sakaiproject.util.foorm.SakaiFoorm;
import org.sakaiproject.basiclti.LocalEventTrackingService;
import org.sakaiproject.basiclti.util.SakaiBLTIUtil;
import org.sakaiproject.basiclti.impl.BasicLTIArchiveBean;

/**
 * EntityProducer that serves Basic LTI launch URLs under /access/basiclti and
 * supports site archive/merge of Basic LTI tool placements.
 */
@SuppressWarnings("deprecation")
public class BasicLTISecurityServiceImpl implements EntityProducer {
    public static final String SERVICE_NAME = BasicLTISecurityServiceImpl.class.getName();

    private static ResourceLoader rb = new ResourceLoader("basicltisvc");

    public static final String MIME_TYPE_BLTI = "ims/basiclti";

    public static final String REFERENCE_ROOT = "/basiclti";

    public static final String APPLICATION_ID = "sakai:basiclti";

    public static final String TOOL_REGISTRATION = "sakai.basiclti";

    public static final String EVENT_BASICLTI_LAUNCH = "basiclti.launch";

    protected static SakaiFoorm foorm = new SakaiFoorm();

    // Note: security needs a proper Resource reference

    /*******************************************************************************
     * Dependencies and their setter methods
     *******************************************************************************/

    /**
     * Dependency: a logger component.
     */
    private Log logger = LogFactory.getLog(BasicLTISecurityServiceImpl.class);

    /**
     * Check security for this entity: access is allowed if the current user
     * may visit the site referenced by the entity's context.
     *
     * @param ref
     *            The Reference to the entity.
     * @return true if allowed, false if not.
     */
    protected boolean checkSecurity(Reference ref) {
        String contextId = ref.getContext();
        try {
            Site site = SiteService.getSiteVisit(contextId);
            if ( site != null ) return true;
        } catch(IdUnusedException ex) {
            // no such site
            return false;
        } catch(PermissionException ex) {
            // user may not visit the site
            return false;
        }
        // System.out.println("ID="+ref.getId());
        // System.out.println("Type="+ref.getType());
        // System.out.println("SubType="+ref.getSubType());
        return false;
    }

    /*******************************************************************************
     * Init and Destroy
     *******************************************************************************/

    /** A service */
    protected static LTIService ltiService = null;

    /**
     * Final initialization, once all dependencies are set.
     * Registers this class as an entity producer and looks up the LTIService.
     */
    public void init() {
        logger.info(this +".init()");
        if (ServerConfigurationService.getString(SakaiBLTIUtil.BASICLTI_ENCRYPTION_KEY, null) == null) {
            logger.warn("BasicLTI secrets in database unencrypted, please set "+ SakaiBLTIUtil.BASICLTI_ENCRYPTION_KEY);
        }
        try {
            // register as an entity producer
            EntityManager.registerEntityProducer(this,REFERENCE_ROOT);
        } catch (Throwable t) {
            logger.warn("init(): ", t);
        }
        if ( ltiService == null ) ltiService = (LTIService) ComponentManager.get("org.sakaiproject.lti.api.LTIService");
    }

    /**
     * Final cleanup.
*/
    public void destroy() {
        logger.info(this +".destroy()");
    }

    /**
     *
     */
    public BasicLTISecurityServiceImpl() {
        super();
    }

    /**
     * Whether the given user is a Sakai super user.
     *
     * @param userId the user to check
     * @return true for super users
     */
    public boolean isSuperUser(String userId) {
        return SecurityService.isSuperUser(userId);
    }

    /*******************************************************************************************************************************
     * EntityProducer
     ******************************************************************************************************************************/

    /**
     * {@inheritDoc}
     *
     * /access/basiclti/site/12-siteid-456/98-placement-id
     *
     * /access/basiclti/content/ --- content path ---- (Future)
     */
    public boolean parseEntityReference(String reference, Reference ref) {
        if (reference.startsWith(REFERENCE_ROOT)) {
            // we will get null, simplelti, site, <context>, <placement>
            // we will store the context, and the ContentHosting reference in our id field.
            String id = null;
            String context = null;
            String[] parts = StringUtil.split(reference, Entity.SEPARATOR);
            if ( parts.length == 5 && parts[2].equals("site") ) {
                context = parts[3];
                id = parts[4];
                //Should the slashes below be entityseparator
                // id = "/" + StringUtil.unsplit(parts, 2, parts.length - 2, "/");
            }
            ref.set(APPLICATION_ID, "site", id, null, context);
            return true;
        }
        return false;
    }

    /**
     * Write a complete, non-cacheable XHTML page wrapping the given body
     * fragment to the response.
     *
     * @param res  response to write to
     * @param body HTML fragment placed inside &lt;body&gt;
     */
    private void sendHTMLPage(HttpServletResponse res, String body) {
        try {
            res.setContentType("text/html; charset=UTF-8");
            res.setCharacterEncoding("utf-8");
            // expire a year in the past plus no-cache headers to defeat caching
            res.addDateHeader("Expires", System.currentTimeMillis() - (1000L * 60L * 60L * 24L * 365L));
            res.addDateHeader("Last-Modified", System.currentTimeMillis());
            res.addHeader("Cache-Control", "no-store, no-cache, must-revalidate, max-age=0, post-check=0, pre-check=0");
            res.addHeader("Pragma", "no-cache");
            java.io.PrintWriter out = res.getWriter();
            out.println("<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">");
            out.println("<html xmlns=\"http://www.w3.org/1999/xhtml\" lang=\"en\" xml:lang=\"en\">");
            // NOTE(review): this emits a second, nested <html> element right after the
            // one above — almost certainly unintended (invalid XHTML); confirm and drop.
            out.println("<html>\n<head>");
            out.println("<meta http-equiv=\"Content-Type\" content=\"text/html; charset=UTF-8\" />");
            out.println("</head>\n<body>\n");
            out.println(body);
            out.println("\n</body>\n</html>");
        } catch (Exception e) {
            // NOTE(review): errors are only dumped to stderr; consider logger.warn
            e.printStackTrace();
        }
    }

    /**
     * Render the "splash" interstitial page shown before redirecting the user
     * to the external tool. The continue button reloads the same URI with
     * ?splash=bypass so the next request skips the splash.
     */
    private void doSplash(HttpServletRequest req, HttpServletResponse res, String splash, ResourceLoader rb) {
        // req.getRequestURL()=http://localhost:8080/access/basiclti/site/85fd092b-1755-4aa9-8abc-e6549527dce0/content:0
        // req.getRequestURI()=/access/basiclti/site/85fd092b-1755-4aa9-8abc-e6549527dce0/content:0
        String acceptPath = req.getRequestURI().toString() + "?splash=bypass";
        String body = "<div align=\"center\" style=\"text-align:left;width:80%;margin-top:5px;margin-left:auto;margin-right:auto;border-width:1px 1px 1px 1px;border-style:solid;border-color: gray;padding:.5em;font-family:Verdana,Arial,Helvetica,sans-serif;font-size:.8em\">";
        body += splash+"</div><p>";
        // NOTE(review): local 'txt' below is never used; the second rb.getString call
        // (with a slightly different default) supplies the button label instead.
        String txt = rb.getString("launch.button", "Press to continue to external tool.");
        body += "<form><input type=\"submit\" onclick=\"window.location='"+acceptPath+"';return false;\" value=\"";
        body += rb.getString("launch.button", "Press to continue to proceed to external tool.");
        body += "\"></form></p>\n";
        sendHTMLPage(res, body);
    }

    /**
     * {@inheritDoc}
     */
    public HttpAccess getHttpAccess() {
        return new HttpAccess() {
            @SuppressWarnings("unchecked")
            public void handleAccess(HttpServletRequest req, HttpServletResponse res, Reference ref, Collection copyrightAcceptedRefs)
                    throws EntityPermissionException, EntityNotDefinedException, EntityAccessOverloadException, EntityCopyrightException {
                // decide on security
                if (!checkSecurity(ref)) {
                    throw new EntityPermissionException(SessionManager.getCurrentSessionUserId(), "basiclti", ref.getReference());
                }
                String refId = ref.getId();
                String [] retval = null;
                if ( refId.startsWith("deploy:") && refId.length() > 7 ) {
                    // deployment (re)registration is never allowed from the admin site
                    if ("!admin".equals(ref.getContext()) ) {
                        throw new
EntityPermissionException(SessionManager.getCurrentSessionUserId(), "basiclti", ref.getReference());
                    }
                    Map<String,Object> deploy = null;
                    String deployStr = refId.substring(7);
                    Long deployKey = foorm.getLongKey(deployStr);
                    if ( deployKey >= 0 ) deploy = ltiService.getDeployDao(deployKey);
                    String placementId = req.getParameter("placement");
                    // System.out.println("deployStr="+deployStr+" deployKey="+deployKey+" placementId="+placementId);
                    // System.out.println(deploy);
                    // NOTE(review): if deployKey < 0 or the DAO lookup fails, 'deploy'
                    // is still null here and deploy.get(...) throws NPE — confirm a
                    // null guard / error page is needed.
                    Long reg_state = foorm.getLongKey(deploy.get(LTIService.LTI_REG_STATE));
                    if ( reg_state == 0 ) {
                        retval = SakaiBLTIUtil.postRegisterHTML(deployKey, deploy, rb, placementId);
                    } else {
                        retval = SakaiBLTIUtil.postReRegisterHTML(deployKey, deploy, rb, placementId);
                    }
                } else if ( refId.startsWith("content:") && refId.length() > 8 ) {
                    Map<String,Object> content = null;
                    Map<String,Object> tool = null;
                    String contentStr = refId.substring(8);
                    Long contentKey = foorm.getLongKey(contentStr);
                    if ( contentKey >= 0 ) {
                        content = ltiService.getContentDao(contentKey,ref.getContext());
                        if ( content != null ) {
                            // content must belong to the site being accessed
                            String siteId = (String) content.get(LTIService.LTI_SITE_ID);
                            if ( siteId == null || ! siteId.equals(ref.getContext()) ) {
                                content = null;
                            }
                        }
                        if ( content != null ) {
                            Long toolKey = foorm.getLongKey(content.get(LTIService.LTI_TOOL_ID));
                            if ( toolKey >= 0 ) tool = ltiService.getToolDao(toolKey, ref.getContext());
                            if ( tool != null ) {
                                // SITE_ID can be null for the tool
                                String siteId = (String) tool.get(LTIService.LTI_SITE_ID);
                                if ( siteId != null && ! siteId.equals(ref.getContext()) ) {
                                    tool = null;
                                }
                            }
                        }
                        // Adjust the content items based on the tool items
                        if ( tool != null || content != null ) {
                            ltiService.filterContent(content, tool);
                        }
                    }
                    String splash = null;
                    if ( tool != null ) splash = (String) tool.get("splash");
                    String splashParm = req.getParameter("splash");
                    String siteId = null;
                    if ( tool != null ) siteId = (String) tool.get(LTIService.LTI_SITE_ID);
                    if ( splashParm == null && splash != null && splash.trim().length() > 1 ) {
                        // XSS Note: Administrator-created tools can put HTML in the splash.
                        if ( siteId != null ) splash = FormattedText.escapeHtml(splash,false);
                        doSplash(req, res, splash, rb);
                        return;
                    }
                    retval = SakaiBLTIUtil.postLaunchHTML(content, tool, ltiService, rb);
                } else {
                    String splashParm = req.getParameter("splash");
                    if ( splashParm == null ) {
                        ToolConfiguration placement = SiteService.findTool(refId);
                        Properties config = placement == null ? null : placement.getConfig();
                        if ( placement != null ) {
                            // XSS Note: Only the Administrator can set overridesplash - so we allow HTML
                            String splash = SakaiBLTIUtil.toNull(SakaiBLTIUtil.getCorrectProperty(config,"overridesplash", placement));
                            if ( splash == null ) {
                                // This may be user-set so no HTML
                                splash = SakaiBLTIUtil.toNull(SakaiBLTIUtil.getCorrectProperty(config,"splash", placement));
                                if ( splash != null ) splash = FormattedText.escapeHtml(splash,false);
                            }
                            // XSS Note: Only the Administrator can set defaultsplash - so we allow HTML
                            if ( splash == null ) {
                                splash = SakaiBLTIUtil.toNull(SakaiBLTIUtil.getCorrectProperty(config,"defaultsplash", placement));
                            }
                            if ( splash != null && splash.trim().length() > 1 ) {
                                doSplash(req, res, splash, rb);
                                return;
                            }
                        }
                    }
                    // Get the post data for the placement
                    retval = SakaiBLTIUtil.postLaunchHTML(refId, rb);
                }
                try {
                    sendHTMLPage(res, retval[0]);
                    String refstring = ref.getReference();
                    if ( retval.length > 1 ) refstring = retval[1];
                    // Cool 2.6 Event call
                    Event event =
LocalEventTrackingService.newEvent(EVENT_BASICLTI_LAUNCH, refstring, ref.getContext(), false, NotificationService.NOTI_OPTIONAL);
                    // 2.5 Event call
                    // Event event = EventTrackingService.newEvent(EVENT_BASICLTI_LAUNCH, refstring, false);
                    // SAK-24069 - Extend Sakai session lifetime on LTI tool launch
                    Session session = SessionManager.getCurrentSession();
                    if (session !=null) {
                        int seconds = ServerConfigurationService.getInt(SakaiBLTIUtil.BASICLTI_LAUNCH_SESSION_TIMEOUT, 10800);
                        if ( seconds != 0 ) session.setMaxInactiveInterval(seconds);
                    }
                    LocalEventTrackingService.post(event);
                } catch (Exception e) {
                    // NOTE(review): launch already rendered; failure here only affects
                    // event tracking — still, prefer logger over printStackTrace
                    e.printStackTrace();
                }
            }
        };
    }

    /**
     * {@inheritDoc}
     */
    public Entity getEntity(Reference ref) {
        return null;
    }

    /**
     * {@inheritDoc}
     */
    public Collection<String> getEntityAuthzGroups(Reference ref, String userId) {
        // Since we handle security ourself, we won't support anyone else asking
        return null;
    }

    /**
     * {@inheritDoc}
     */
    public String getEntityDescription(Reference ref) {
        return null;
    }

    /**
     * {@inheritDoc}
     */
    public ResourceProperties getEntityResourceProperties(Reference ref) {
        return null;
    }

    /**
     * {@inheritDoc}
     */
    public String getEntityUrl(Reference ref) {
        return ServerConfigurationService.getAccessUrl() + ref.getReference();
    }

    /**
     * {@inheritDoc}
     */
    public String getLabel() {
        return "basiclti";
    }

    /**
     * This producer participates in archive merge.
     *
     * @return always true
     */
    public boolean willArchiveMerge() {
        return true;
    }

    /**
     * Merge archived Basic LTI placements into the target site: for every
     * &lt;basicLTI&gt; node a new page and tool placement are created and the
     * site is saved.
     */
    @SuppressWarnings("unchecked")
    public String merge(String siteId, Element root, String archivePath, String fromSiteId, Map attachmentNames, Map userIdTrans, Set userListAllowImport) {
        StringBuilder results = new StringBuilder("Merging BasicLTI ");
        org.w3c.dom.NodeList nodeList = root.getElementsByTagName("basicLTI");
        try {
            Site site = SiteService.getSite(siteId);
            for(int i=0; i < nodeList.getLength(); i++) {
                BasicLTIArchiveBean basicLTI = new BasicLTIArchiveBean(nodeList.item(i));
                logger.info("BASIC LTI: " + basicLTI);
                results.append(", merging basicLTI tool " + basicLTI.getPageTitle());
                SitePage sitePage = site.addPage();
                sitePage.setTitle(basicLTI.getPageTitle());
                // This property affects both the Tool and SitePage.
                sitePage.setTitleCustom(true);
                ToolConfiguration toolConfiguration = sitePage.addTool();
                toolConfiguration.setTool(TOOL_REGISTRATION, ToolManager.getTool(TOOL_REGISTRATION));
                toolConfiguration.setTitle(basicLTI.getToolTitle());
                for(Object key: basicLTI.getSiteToolProperties().keySet()) {
                    toolConfiguration.getPlacementConfig().setProperty((String)key, (String)basicLTI.getSiteToolProperties().get(key));
                }
                // NOTE(review): site is saved once per merged tool; a single save
                // after the loop would likely suffice — confirm before changing.
                SiteService.save(site);
            }
        } catch (IdUnusedException ie) {
            // This would be thrown by SiteService.getSite(siteId)
            ie.printStackTrace();
        } catch (PermissionException pe) {
            // This would be thrown by SiteService.save(site)
            pe.printStackTrace();
        } catch (Exception e) {
            // This is a generic exception that would be thrown by the BasicLTIArchiveBean constructor.
            e.printStackTrace();
        }
        results.append(".");
        return results.toString();
    }

    /**
     * Archive every Basic LTI tool placement in the site as a child of a
     * single container element appended to the archive document.
     */
    @SuppressWarnings("unchecked")
    public String archive(String siteId, Document doc, Stack stack, String archivePath, List attachments) {
        logger.info("-------basic-lti-------- archive('" + StringUtils.join(new Object[] { siteId, doc, stack, archivePath, attachments }, "','") + "')");
        StringBuilder results = new StringBuilder("archiving basiclti "+siteId+"\n");
        int count = 0;
        try {
            Site site = SiteService.getSite(siteId);
            logger.info("SITE: " + site.getId() + " : " + site.getTitle());
            Element basicLtiList = doc.createElement("org.sakaiproject.basiclti.service.BasicLTISecurityService");
            for (SitePage sitePage : site.getPages()) {
                for (ToolConfiguration toolConfiguration : sitePage.getTools()) {
                    if ( toolConfiguration.getTool() == null ) continue;
                    if (toolConfiguration.getTool().getId().equals( TOOL_REGISTRATION)) {
                        // results.append(" tool=" + toolConfiguration.getId() + "\n");
                        count++;
                        BasicLTIArchiveBean basicLTIArchiveBean = new BasicLTIArchiveBean();
                        basicLTIArchiveBean.setPageTitle(sitePage.getTitle());
                        basicLTIArchiveBean.setToolTitle(toolConfiguration.getTitle());
                        basicLTIArchiveBean.setSiteToolProperties(toolConfiguration.getConfig());
                        Node newNode = basicLTIArchiveBean.toNode(doc);
                        basicLtiList.appendChild(newNode);
                    }
                }
            }
            ((Element) stack.peek()).appendChild(basicLtiList);
            // NOTE(review): push immediately followed by pop leaves the stack
            // unchanged — looks like a vestigial pattern from other producers.
            stack.push(basicLtiList);
            stack.pop();
        } catch (IdUnusedException iue) {
            logger.info("SITE ID " + siteId + " DOES NOT EXIST.");
            results.append("Basic LTI Site does not exist\n");
        }
        // Something we did not expect
        catch (Exception e) {
            e.printStackTrace();
            results.append("basiclti exception:"+e.getClass().getName()+"\n");
        }
        results.append("archiving basiclti ("+count+") tools archived\n");
        return results.toString();
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.operator;

import com.facebook.presto.ExceededMemoryLimitException;
import com.facebook.presto.RowPagesBuilder;
import com.facebook.presto.metadata.MetadataManager;
import com.facebook.presto.metadata.Signature;
import com.facebook.presto.operator.HashAggregationOperator.HashAggregationOperatorFactory;
import com.facebook.presto.operator.aggregation.InternalAggregationFunction;
import com.facebook.presto.spi.Page;
import com.facebook.presto.spi.block.BlockBuilder;
import com.facebook.presto.spi.block.BlockBuilderStatus;
import com.facebook.presto.spi.block.PageBuilderStatus;
import com.facebook.presto.spi.type.StandardTypes;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.sql.planner.plan.AggregationNode.Step;
import com.facebook.presto.testing.MaterializedResult;
import com.google.common.collect.ImmutableList;
import com.google.common.primitives.Ints;
import io.airlift.slice.Slices;
import io.airlift.units.DataSize;
import io.airlift.units.DataSize.Unit;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.ExecutorService;

import static com.facebook.presto.RowPagesBuilder.rowPagesBuilder;
import static com.facebook.presto.SessionTestUtils.TEST_SESSION;
import static com.facebook.presto.metadata.FunctionType.AGGREGATE;
import static com.facebook.presto.operator.OperatorAssertion.assertOperatorEqualsIgnoreOrder;
import static com.facebook.presto.operator.OperatorAssertion.dropChannel;
import static com.facebook.presto.operator.OperatorAssertion.toMaterializedResult;
import static com.facebook.presto.operator.OperatorAssertion.toPages;
import static com.facebook.presto.operator.OperatorAssertion.without;
import static com.facebook.presto.operator.aggregation.AverageAggregations.LONG_AVERAGE;
import static com.facebook.presto.operator.aggregation.CountAggregation.COUNT;
import static com.facebook.presto.operator.aggregation.LongSumAggregation.LONG_SUM;
import static com.facebook.presto.spi.block.BlockBuilderStatus.DEFAULT_MAX_BLOCK_SIZE_IN_BYTES;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.spi.type.BooleanType.BOOLEAN;
import static com.facebook.presto.spi.type.DoubleType.DOUBLE;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;
import static com.facebook.presto.testing.MaterializedResult.resultBuilder;
import static com.facebook.presto.testing.TestingTaskContext.createTaskContext;
import static io.airlift.concurrent.Threads.daemonThreadsNamed;
import static io.airlift.slice.SizeOf.SIZE_OF_DOUBLE;
import static io.airlift.slice.SizeOf.SIZE_OF_LONG;
import static io.airlift.testing.Assertions.assertEqualsIgnoreOrder;
import static io.airlift.units.DataSize.Unit.MEGABYTE;
import static java.util.concurrent.Executors.newCachedThreadPool;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;

/**
 * Tests for {@link HashAggregationOperator}: result correctness, memory
 * limits, hash-table resizing, multi-slice output and partial flushes.
 * Each test runs twice via the "hashEnabledValues" data provider, with and
 * without a precomputed hash channel.
 */
@Test(singleThreaded = true)
public class TestHashAggregationOperator
{
    private ExecutorService executor;
    private DriverContext driverContext;

    @BeforeMethod
    public void setUp()
    {
        // Fresh executor + driver context per test; several tests build their
        // own driver context instead to impose a custom memory limit.
        executor = newCachedThreadPool(daemonThreadsNamed("test-%s"));
        driverContext = createTaskContext(executor, TEST_SESSION)
                .addPipelineContext(true, true)
                .addDriverContext();
    }

    // Runs each test with hashEnabled = true and false.
    @DataProvider(name = "hashEnabledValues")
    public static Object[][] hashEnabledValuesProvider()
    {
        return new Object[][] {
                { true },
                { false }
        };
    }

    @AfterMethod
    public void tearDown()
    {
        executor.shutdownNow();
    }

    /**
     * Groups three 10-row sequence pages by the varchar channel 1 and checks
     * count/sum/avg/max and per-type counts for each of the ten groups.
     */
    @Test(dataProvider = "hashEnabledValues")
    public void testHashAggregation(boolean hashEnabled)
            throws Exception
    {
        MetadataManager metadata = MetadataManager.createTestMetadataManager();
        InternalAggregationFunction countVarcharColumn = metadata.getFunctionRegistry().getAggregateFunctionImplementation(new Signature("count", AGGREGATE, StandardTypes.BIGINT, StandardTypes.VARCHAR));
        InternalAggregationFunction countBooleanColumn = metadata.getFunctionRegistry().getAggregateFunctionImplementation(new Signature("count", AGGREGATE, StandardTypes.BIGINT, StandardTypes.BOOLEAN));
        InternalAggregationFunction maxVarcharColumn = metadata.getFunctionRegistry().getAggregateFunctionImplementation(new Signature("max", AGGREGATE, StandardTypes.VARCHAR, StandardTypes.VARCHAR));
        List<Integer> hashChannels = Ints.asList(1);
        RowPagesBuilder rowPagesBuilder = rowPagesBuilder(hashEnabled, hashChannels, VARCHAR, VARCHAR, VARCHAR, BIGINT, BOOLEAN);
        List<Page> input = rowPagesBuilder
                .addSequencePage(10, 100, 0, 100, 0, 500)
                .addSequencePage(10, 100, 0, 200, 0, 500)
                .addSequencePage(10, 100, 0, 300, 0, 500)
                .build();

        HashAggregationOperatorFactory operatorFactory = new HashAggregationOperatorFactory(
                0,
                ImmutableList.of(VARCHAR),
                hashChannels,
                Step.SINGLE,
                ImmutableList.of(COUNT.bind(ImmutableList.of(0), Optional.empty(), Optional.empty(), 1.0),
                        LONG_SUM.bind(ImmutableList.of(3), Optional.empty(), Optional.empty(), 1.0),
                        LONG_AVERAGE.bind(ImmutableList.of(3), Optional.empty(), Optional.empty(), 1.0),
                        maxVarcharColumn.bind(ImmutableList.of(2), Optional.empty(), Optional.empty(), 1.0),
                        countVarcharColumn.bind(ImmutableList.of(0), Optional.empty(), Optional.empty(), 1.0),
                        countBooleanColumn.bind(ImmutableList.of(4), Optional.empty(), Optional.empty(), 1.0)),
                Optional.<Integer>empty(),
                rowPagesBuilder.getHashChannel(),
                100_000,
                new DataSize(16, MEGABYTE));

        Operator operator = operatorFactory.createOperator(driverContext);

        MaterializedResult expected = resultBuilder(driverContext.getSession(), VARCHAR, BIGINT, BIGINT, DOUBLE, VARCHAR, BIGINT, BIGINT)
                .row("0", 3, 0, 0.0, "300", 3, 3)
                .row("1", 3, 3, 1.0, "301", 3, 3)
                .row("2", 3, 6, 2.0, "302", 3, 3)
                .row("3", 3, 9, 3.0, "303", 3, 3)
                .row("4", 3, 12, 4.0, "304", 3, 3)
                .row("5", 3, 15, 5.0, "305", 3, 3)
                .row("6", 3, 18, 6.0, "306", 3, 3)
                .row("7", 3, 21, 7.0, "307", 3, 3)
                .row("8", 3, 24, 8.0, "308", 3, 3)
                .row("9", 3, 27, 9.0, "309", 3, 3)
                .build();

        assertOperatorEqualsIgnoreOrder(operator, input, expected, hashEnabled, Optional.of(hashChannels.size()));
    }

    /**
     * With a 10-byte task memory limit, driving the operator must fail with
     * {@link ExceededMemoryLimitException} (checked via the annotation).
     */
    @Test(dataProvider = "hashEnabledValues", expectedExceptions = ExceededMemoryLimitException.class, expectedExceptionsMessageRegExp = "Query exceeded local memory limit of 10B")
    public void testMemoryLimit(boolean hashEnabled)
    {
        MetadataManager metadata = MetadataManager.createTestMetadataManager();
        InternalAggregationFunction maxVarcharColumn = metadata.getFunctionRegistry().getAggregateFunctionImplementation(new Signature("max", AGGREGATE, StandardTypes.VARCHAR, StandardTypes.VARCHAR));
        List<Integer> hashChannels = Ints.asList(1);
        RowPagesBuilder rowPagesBuilder = rowPagesBuilder(hashEnabled, hashChannels, VARCHAR, VARCHAR, VARCHAR, BIGINT);
        List<Page> input = rowPagesBuilder
                .addSequencePage(10, 100, 0, 100, 0)
                .addSequencePage(10, 100, 0, 200, 0)
                .addSequencePage(10, 100, 0, 300, 0)
                .build();

        // Dedicated context with a deliberately tiny (10 byte) memory limit.
        DriverContext driverContext = createTaskContext(executor, TEST_SESSION, new DataSize(10, Unit.BYTE))
                .addPipelineContext(true, true)
                .addDriverContext();

        HashAggregationOperatorFactory operatorFactory = new HashAggregationOperatorFactory(
                0,
                ImmutableList.of(VARCHAR),
                hashChannels,
                Step.SINGLE,
                ImmutableList.of(COUNT.bind(ImmutableList.of(0), Optional.empty(), Optional.empty(), 1.0),
                        LONG_SUM.bind(ImmutableList.of(3), Optional.empty(), Optional.empty(), 1.0),
                        LONG_AVERAGE.bind(ImmutableList.of(3), Optional.empty(), Optional.empty(), 1.0),
                        maxVarcharColumn.bind(ImmutableList.of(2), Optional.empty(), Optional.empty(), 1.0)),
                Optional.<Integer>empty(),
                rowPagesBuilder.getHashChannel(),
                100_000,
                new DataSize(16, MEGABYTE));

        Operator operator = operatorFactory.createOperator(driverContext);
        toPages(operator, input);
    }

    /**
     * Feeds one oversized varchar value so the group-by hash must grow its
     * backing block; succeeds as long as no exception escapes.
     */
    @Test(dataProvider = "hashEnabledValues")
    public void testHashBuilderResize(boolean hashEnabled)
    {
        BlockBuilder builder = VARCHAR.createBlockBuilder(new BlockBuilderStatus(), 1, DEFAULT_MAX_BLOCK_SIZE_IN_BYTES);
        VARCHAR.writeSlice(builder, Slices.allocate(200_000)); // this must be larger than DEFAULT_MAX_BLOCK_SIZE, 64K
        builder.build();

        List<Integer> hashChannels = Ints.asList(0);
        RowPagesBuilder rowPagesBuilder = rowPagesBuilder(hashEnabled, hashChannels, VARCHAR);
        List<Page> input = rowPagesBuilder
                .addSequencePage(10, 100)
                .addBlocksPage(builder.build())
                .addSequencePage(10, 100)
                .build();

        DriverContext driverContext = createTaskContext(executor, TEST_SESSION, new DataSize(10, MEGABYTE))
                .addPipelineContext(true, true)
                .addDriverContext();

        HashAggregationOperatorFactory operatorFactory = new HashAggregationOperatorFactory(
                0,
                ImmutableList.of(VARCHAR),
                hashChannels,
                Step.SINGLE,
                ImmutableList.of(COUNT.bind(ImmutableList.of(0), Optional.empty(), Optional.empty(), 1.0)),
                Optional.<Integer>empty(),
                rowPagesBuilder.getHashChannel(),
                100_000,
                new DataSize(16, MEGABYTE));

        Operator operator = operatorFactory.createOperator(driverContext);
        toPages(operator, input);
    }

    /**
     * Same resize scenario, but the 5MB value exceeds the 3MB task limit, so
     * the resize must trigger {@link ExceededMemoryLimitException}.
     */
    @Test(dataProvider = "hashEnabledValues", expectedExceptions = ExceededMemoryLimitException.class, expectedExceptionsMessageRegExp = "Query exceeded local memory limit of 3MB")
    public void testHashBuilderResizeLimit(boolean hashEnabled)
    {
        BlockBuilder builder = VARCHAR.createBlockBuilder(new BlockBuilderStatus(), 1, DEFAULT_MAX_BLOCK_SIZE_IN_BYTES);
        VARCHAR.writeSlice(builder, Slices.allocate(5_000_000)); // this must be larger than DEFAULT_MAX_BLOCK_SIZE, 64K
        builder.build();

        List<Integer> hashChannels = Ints.asList(0);
        RowPagesBuilder rowPagesBuilder = rowPagesBuilder(hashEnabled, hashChannels, VARCHAR);
        List<Page> input = rowPagesBuilder
                .addSequencePage(10, 100)
                .addBlocksPage(builder.build())
                .addSequencePage(10, 100)
                .build();

        DriverContext driverContext = createTaskContext(executor, TEST_SESSION, new DataSize(3, MEGABYTE))
                .addPipelineContext(true, true)
                .addDriverContext();

        HashAggregationOperatorFactory operatorFactory = new HashAggregationOperatorFactory(
                0,
                ImmutableList.of(VARCHAR),
                hashChannels,
                Step.SINGLE,
                ImmutableList.of(COUNT.bind(ImmutableList.of(0), Optional.empty(), Optional.empty(), 1.0)),
                Optional.<Integer>empty(),
                rowPagesBuilder.getHashChannel(),
                100_000,
                new DataSize(16, MEGABYTE));

        Operator operator = operatorFactory.createOperator(driverContext);
        toPages(operator, input);
    }

    /**
     * Sizes the input so the aggregation result spans more than one output
     * page, and asserts exactly two pages are produced.
     */
    @Test(dataProvider = "hashEnabledValues")
    public void testMultiSliceAggregationOutput(boolean hashEnabled)
    {
        // estimate the number of entries required to create 1.5 pages of results
        int fixedWidthSize = SIZE_OF_LONG + SIZE_OF_DOUBLE + SIZE_OF_DOUBLE;
        int multiSlicePositionCount = (int) (1.5 * PageBuilderStatus.DEFAULT_MAX_PAGE_SIZE_IN_BYTES / fixedWidthSize);
        multiSlicePositionCount = Math.min((int) (1.5 * DEFAULT_MAX_BLOCK_SIZE_IN_BYTES / SIZE_OF_DOUBLE), multiSlicePositionCount);

        List<Integer> hashChannels = Ints.asList(1);
        RowPagesBuilder rowPagesBuilder = rowPagesBuilder(hashEnabled, hashChannels, BIGINT, BIGINT);
        List<Page> input = rowPagesBuilder
                .addSequencePage(multiSlicePositionCount, 0, 0)
                .build();

        HashAggregationOperatorFactory operatorFactory = new HashAggregationOperatorFactory(
                0,
                ImmutableList.of(BIGINT),
                hashChannels,
                Step.SINGLE,
                ImmutableList.of(COUNT.bind(ImmutableList.of(0), Optional.empty(), Optional.empty(), 1.0),
                        LONG_AVERAGE.bind(ImmutableList.of(1), Optional.empty(), Optional.empty(), 1.0)),
                Optional.<Integer>empty(),
                rowPagesBuilder.getHashChannel(),
                100_000,
                new DataSize(16, MEGABYTE));

        Operator operator = operatorFactory.createOperator(driverContext);

        assertEquals(toPages(operator, input).size(), 2);
    }

    /**
     * With a 1KB limit and Step.PARTIAL, the operator must flush
     * intermediate results, accept more input afterwards, and still produce
     * the full partial-aggregation output across all flushes.
     */
    @Test(dataProvider = "hashEnabledValues")
    public void testMultiplePartialFlushes(boolean hashEnabled)
            throws Exception
    {
        List<Integer> hashChannels = Ints.asList(0);
        RowPagesBuilder rowPagesBuilder = rowPagesBuilder(hashEnabled, hashChannels, BIGINT);
        List<Page> input = rowPagesBuilder
                .addSequencePage(500, 0)
                .addSequencePage(500, 500)
                .addSequencePage(500, 1000)
                .addSequencePage(500, 1500)
                .build();

        HashAggregationOperatorFactory operatorFactory = new HashAggregationOperatorFactory(
                0,
                ImmutableList.of(BIGINT),
                hashChannels,
                Step.PARTIAL,
                ImmutableList.of(LONG_SUM.bind(ImmutableList.of(0), Optional.empty(), Optional.empty(), 1.0)),
                Optional.<Integer>empty(),
                rowPagesBuilder.getHashChannel(),
                100_000,
                new DataSize(16, MEGABYTE));

        DriverContext driverContext = createTaskContext(executor, TEST_SESSION, new DataSize(1, Unit.KILOBYTE))
                .addPipelineContext(true, true)
                .addDriverContext();
        Operator operator = operatorFactory.createOperator(driverContext);

        // Every input value is a distinct group, so partial output is (value, value).
        List<Page> expectedPages = rowPagesBuilder(BIGINT, BIGINT)
                .addSequencePage(2000, 0, 0)
                .build();
        MaterializedResult expected = resultBuilder(driverContext.getSession(), BIGINT, BIGINT)
                .pages(expectedPages)
                .build();

        Iterator<Page> inputIterator = input.iterator();

        // Fill up the aggregation
        while (operator.needsInput() && inputIterator.hasNext()) {
            operator.addInput(inputIterator.next());
        }

        // Drain the output (partial flush)
        List<Page> outputPages = new ArrayList<>();
        while (true) {
            Page output = operator.getOutput();
            if (output == null) {
                break;
            }
            outputPages.add(output);
        }

        // There should be some pages that were drained
        assertTrue(!outputPages.isEmpty());

        // The operator need input again since this was a partial flush
        assertTrue(operator.needsInput());

        // Now, drive the operator to completion
        outputPages.addAll(toPages(operator, inputIterator));

        MaterializedResult actual;
        if (hashEnabled) {
            // Drop the hashChannel for all pages
            List<Page> actualPages = dropChannel(outputPages, hashChannels);
            List<Type> expectedTypes = without(operator.getTypes(), hashChannels);
            actual = toMaterializedResult(operator.getOperatorContext().getSession(), expectedTypes, actualPages);
        }
        else {
            actual = toMaterializedResult(operator.getOperatorContext().getSession(), operator.getTypes(), outputPages);
        }

        assertEquals(actual.getTypes(), expected.getTypes());
        assertEqualsIgnoreOrder(actual.getMaterializedRows(), expected.getMaterializedRows());
    }
}
package jokrey.utilities.encoder.tag_based.tests.performance;

import jokrey.utilities.date_time.ExactDateTime;
import jokrey.utilities.debug_analysis_helper.AverageCallTimeMarker;
import jokrey.utilities.debug_analysis_helper.TimeDiffMarker;
import jokrey.utilities.encoder.tag_based.additional.functionality.wrapper.delegation.cached.CachedTagBasedEncoderBytes;
import jokrey.utilities.encoder.tag_based.additional.functionality.wrapper.delegation.remote.RemoteEncoderBytes;
import jokrey.utilities.encoder.tag_based.additional.functionality.wrapper.delegation.remote.RemoteEncoderServer;
import jokrey.utilities.encoder.tag_based.additional.functionality.wrapper.delegation.remote.authenticated.AuthenticatedRemoteEncoderBytes;
import jokrey.utilities.encoder.tag_based.additional.functionality.wrapper.delegation.remote.authenticated.AuthenticatedRemoteEncoderServer;
import jokrey.utilities.encoder.tag_based.additional.functionality.wrapper.delegation.thread_safe.SynchronizingTagBasedEncoder;
import jokrey.utilities.encoder.tag_based.implementation.length_indicator.LITagCachedEncoder;
import jokrey.utilities.encoder.tag_based.implementation.length_indicator.bytes.LITagBytesEncoder;
import jokrey.utilities.encoder.tag_based.implementation.length_indicator.bytes.remote.encoder.LIRemoteEncoderServer;
import jokrey.utilities.encoder.tag_based.implementation.length_indicator.bytes.remote.encoder.authenticated.LIAuthenticatedRemoteEncoderServer;
import jokrey.utilities.encoder.tag_based.implementation.length_indicator.bytes.remote.encoder.authenticated.LIAuthenticatedRemoteEncoderServer_MultiFile;
import jokrey.utilities.encoder.tag_based.implementation.length_indicator.string.LITagStringEncoder;
import jokrey.utilities.debug_analysis_helper.ConcurrentPoolTester;
import jokrey.utilities.encoder.tag_based.tests.AuthenticatedRemoteEncoderTest;
import jokrey.utilities.transparent_storage.bytes.file.FileStorage;
import jokrey.utilities.transparent_storage.bytes.non_persistent.ByteArrayStorage;
import jokrey.utilities.transparent_storage.bytes.non_persistent.ByteArrayStorageLegacy;
import jokrey.utilities.transparent_storage.bytes.remote.RemoteStorage;
import jokrey.utilities.transparent_storage.bytes.remote.server.RemoteStorageServer;
import org.junit.BeforeClass;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runners.MethodSorters;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.util.concurrent.TimeUnit;

/**
 * Performance benchmarks for the various tag-based encoder implementations
 * (RAM, file, remote and cached storage back-ends, plus concurrent-user
 * scenarios). Results are appended to a timestamped log file on the desktop.
 *
 * <p>Note: if the log file cannot be opened in {@link #init()},
 * {@code result_writer} stays {@code null} and result logging is silently
 * skipped — every write path must therefore null-check the writer.
 */
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public class PerformanceTests {
    /** Timestamped results log on the user's desktop. */
    private static final File result_log_file = new File(System.getProperty("user.home")+"/Desktop/perf_test_results_log_"+ ExactDateTime.todayandnow().asFileTimeStamp() +".txt");
    /** Null when the log file could not be opened; all writes must guard against that. */
    private static FileWriter result_writer;

    // Pool of ports handed out one-by-one so parallel server tests don't collide.
    private static final int[] available_ports = {58241, 58242, 58243, 58244, 58245, 58246, 58247, 58248, 58249, 58250, 58251, 58252, 58253, 58254, 58255};
    private static int port_pointer = 0;
    private static int getNextPort() {
        return available_ports[port_pointer++];
    }

    @BeforeClass
    public static void init() {
        try {
            result_writer = new FileWriter(result_log_file);
        } catch (IOException e) {
            // Deliberate best-effort: benchmarks still run, results just aren't logged.
            result_writer = null;
            e.printStackTrace();
        }
    }

    /** Appends pre-built text to the result log if it is available (null-safe). */
    private static void appendToLog(String text) throws IOException {
        if (result_writer != null) {
            result_writer.append(text).flush();
        }
    }

    // Warm-up run: JIT/class-loading noise reducer, results are discarded (null consumer).
    @Test public void a0_lame_attempt_to_remove_class_loader_impact_on_measurement() {
        GenericPerformanceTest.run_standard_test(null, null, new LITagBytesEncoder());
    }

    @Test public void litbe_ramStorage() {
        GenericPerformanceTest.run_standard_test("LITagBytesEncoder | RAM storage", PerformanceTests::writeResults, new LITagBytesEncoder());
    }
    @Test public void litbe_ramStorage_threadsafeWrapper() {
        GenericPerformanceTest.run_standard_test("SynchronizedTagBasedEncoder | RAM storage", PerformanceTests::writeResults, new SynchronizingTagBasedEncoder<>(new LITagBytesEncoder()));
    }
    @Test public void litbe_ramStorage_legacy() {
        GenericPerformanceTest.run_standard_test("OLD VERSION | LITagBytesEncoder | RAM storage | OLD VERSION", PerformanceTests::writeResults, new LITagBytesEncoder(new ByteArrayStorageLegacy()));
    }

    @Test public void x1_use_test() {
        GenericPerformanceTest.run_standard_test_string("LITagStringEncoder", PerformanceTests::writeResults, new LITagStringEncoder());
    }

    @Test public void x2_litbe_ramStorage_memory_efficient() {
        GenericPerformanceTest.run_standard_test("LITagBytesEncoder | RAM storage | Memory over Performance", PerformanceTests::writeResults, new LITagBytesEncoder(new ByteArrayStorage(true, new byte[0], 0)));
    }

    @Test public void x3_litbe_fileStorage_fastDrive() throws IOException {
        File f = new File(System.getProperty("user.home")+"/Desktop/litbe_file_storage_perf_test.litbe");
        try(FileStorage storage = new FileStorage(f)) {
            GenericPerformanceTest.run_standard_test_short("LITagBytesEncoder | FILE storage", PerformanceTests::writeResults, new LITagBytesEncoder(storage));
        }
    }

    @Test public void x6_litbe_remoteStorage_ram() throws IOException {
        int port = getNextPort();
        try(RemoteStorageServer ignored = new RemoteStorageServer(port, new ByteArrayStorage());
            RemoteStorage storage = new RemoteStorage("localhost", port) ) {
            GenericPerformanceTest.run_standard_test_short("LITagBytesEncoder | REMOTE storage | RAM", PerformanceTests::writeResults, new LITagBytesEncoder(storage));
        }
    }
    @Test public void x6_litbe_remoteStorage_file() throws IOException {
        int port = getNextPort();
        File f = new File(System.getProperty("user.home")+"/Desktop/litbe_remote_file_storage_perf_test.litbe");
        try(RemoteStorageServer server = new RemoteStorageServer(port, f);
            RemoteStorage storage = new RemoteStorage("localhost", port) ) {
            server.clear(); // reuse the same backing file across runs
            GenericPerformanceTest.run_standard_test_short("LITagBytesEncoder | REMOTE storage | FILE", PerformanceTests::writeResults, new LITagBytesEncoder(storage));
        }
    }

    // NOTE(review): hard-coded drive letter — only meaningful on the author's machine.
    @Test public void x3_litbe_fileStorage_slowishDrive() throws IOException {
        File f = new File("F:/litbe_file_storage_perf_test.litbe");
        try(FileStorage storage = new FileStorage(f)) {
            GenericPerformanceTest.run_standard_test_short("LITagBytesEncoder | FILE storage(slower drive)", PerformanceTests::writeResults, new LITagBytesEncoder(storage));
        }
    }

    @Test public void x4_rbae_ram() throws IOException {
        int port = getNextPort();
        try(RemoteEncoderServer server = new LIRemoteEncoderServer(port, new ByteArrayStorage());
            RemoteEncoderBytes rbae = new RemoteEncoderBytes("localhost", port) ) {
            // Measure both ends of the connection.
            GenericPerformanceTest.run_standard_test("RemoteEncoderBytes | RAM | Client", PerformanceTests::writeResults, rbae);
            GenericPerformanceTest.run_standard_test("RemoteEncoderBytes | RAM | Server", PerformanceTests::writeResults, server);
        }
    }
    @Test public void x4_rbae_file() throws IOException {
        File f = new File(System.getProperty("user.home")+"/Desktop/rbae_file_storage_perf_test.litbe");
        int port = getNextPort();
        try(RemoteEncoderServer server = new LIRemoteEncoderServer(port, f);
            RemoteEncoderBytes rbae = new RemoteEncoderBytes("localhost", port) ) {
            server.clear();
            GenericPerformanceTest.run_standard_test_short("RemoteEncoderBytes | File | Client", PerformanceTests::writeResults, rbae);
            GenericPerformanceTest.run_standard_test_short("RemoteEncoderBytes | File | Server", PerformanceTests::writeResults, server);
        }
    }

    @Test public void x5_areb_ram() throws Exception {
        int port = getNextPort();
        try(AuthenticatedRemoteEncoderServer server = new LIAuthenticatedRemoteEncoderServer(port, new ByteArrayStorage());
            AuthenticatedRemoteEncoderBytes areb = AuthenticatedRemoteEncoderBytes.register("localhost", port, "performer", "test") ) {
            GenericPerformanceTest.run_standard_test("AREB - Authenticated Remote Encoder RAM", PerformanceTests::writeResults, areb);
            GenericPerformanceTest.run_standard_test("AREB - User view on server side - RAM", PerformanceTests::writeResults, server.getSubEncoder("performer"));
        }
    }
    @Test public void x5_areb_file() throws IOException {
        File f = new File(System.getProperty("user.home")+"/Desktop/areb_file_storage_perf_test.litbe");
        int port = getNextPort();
        try(AuthenticatedRemoteEncoderServer server = new LIAuthenticatedRemoteEncoderServer(port, f);
            AuthenticatedRemoteEncoderBytes areb = AuthenticatedRemoteEncoderBytes.register("localhost", port, "performer", "test") ) {
            server.clear();
            GenericPerformanceTest.run_standard_test_short("AREB - Authenticated Remote Encoder FILE", PerformanceTests::writeResults, areb);
            GenericPerformanceTest.run_standard_test_short("AREB - User view on server side - FILE", PerformanceTests::writeResults, server.getSubEncoder("performer"));
        }
    }
    @Test public void x5_areb_multi_file() throws IOException {
        File dir = new File(System.getProperty("user.home")+"/Desktop/areb_file_storage_perf_test_dir/");
        dir.mkdirs();
        int port = getNextPort();
        try(AuthenticatedRemoteEncoderServer server = new LIAuthenticatedRemoteEncoderServer_MultiFile(port, dir);
            AuthenticatedRemoteEncoderBytes areb = AuthenticatedRemoteEncoderBytes.register("localhost", port, "performer", "test") ) {
            server.clear();
            GenericPerformanceTest.run_standard_test_short("AREB - Authenticated Remote Encoder MULTI-FILE", PerformanceTests::writeResults, areb);
            GenericPerformanceTest.run_standard_test_short("AREB - User view on server side - MULTI-FILE", PerformanceTests::writeResults, server.getSubEncoder("performer"));
        }
    }

    @Test public void x5_areb_file_concurrent_users_tag_system_test() throws Throwable {
        File f = new File(System.getProperty("user.home")+"/Desktop/areb_file_storage_perf_test_concurrent_users.litbe");
        int port = 10008;
        try(AuthenticatedRemoteEncoderServer server = new LIAuthenticatedRemoteEncoderServer(port, f)) {
            server.clear();
            TimeDiffMarker.println_setMark_d("");

            int concurrent_users = 50;
            int max_threads = 100;
            int number_threads = Math.min(max_threads, concurrent_users);
            ConcurrentPoolTester executor = new ConcurrentPoolTester(number_threads);
            for(int i = 0; i < concurrent_users; i++) {
                // took roughly 1h 20m 21s with 250 once - todo: why. why is the discrepency to multi file THIS big? Especially considering they both fully block currently. Maybe: LIse usage in tags in single...
                // took roughly 11m 11s with 25 once
                int thread_id = i;
                executor.execute(() -> AuthenticatedRemoteEncoderTest.simple_test_tag_system("localhost", port,thread_id + "x" + thread_id));
            }
            System.out.println("Waiting for all threads to finish");
            executor.shutdown();
            executor.awaitTermination(Long.MAX_VALUE, TimeUnit.MINUTES);
            executor.throwLatestException();
            // FIX: result_writer may be null if init() failed — route through the null-safe helper
            // instead of dereferencing it directly (previously an NPE).
            appendToLog("\n\n"+concurrent_users+" concurrent users on "+number_threads+" areb SINGLE file\n" + TimeDiffMarker.getDiffFor_as_string_d() + "\n\n");
        }
    }
    @Test public void x5_areb_multiFile_concurrent_users_tag_system_test() throws Throwable {
        File dir = new File(System.getProperty("user.home")+"/Desktop/areb_file_storage_perf_test_dir_concurrent/");
        dir.mkdirs();
        int port = 10009;
        try(AuthenticatedRemoteEncoderServer server = new LIAuthenticatedRemoteEncoderServer_MultiFile(port, dir)) {
            server.clear();
            TimeDiffMarker.println_setMark_d("");

            int concurrent_users = 250;
            int max_threads = 100;
            int number_threads = Math.min(max_threads, concurrent_users);
            ConcurrentPoolTester executor = new ConcurrentPoolTester(number_threads);
            for(int i = 0; i < concurrent_users; i++) {
                //took roughly 30s with 250 once.
                //took roughly 5s with 25 once
                int thread_id = i;
                executor.execute(() -> AuthenticatedRemoteEncoderTest.simple_test_tag_system("localhost", port,thread_id + "x" + thread_id));
            }
            System.out.println("Waiting for all threads to finish");
            executor.shutdown();
            executor.awaitTermination(Long.MAX_VALUE, TimeUnit.MINUTES);
            executor.throwLatestException();
            // FIX: null-safe result logging (see single-file variant above).
            appendToLog("\n\n"+concurrent_users+" concurrent users on "+number_threads+" areb MULTI file\n" + TimeDiffMarker.getDiffFor_as_string_d() + "\n\n");
        }
    }

    @Test public void a1_litbe_liPositionCached_ramStorage() {
        GenericPerformanceTest.run_standard_test("LITagCachedEncoder | LIPositionCached RAM storage", PerformanceTests::writeResults, new LITagCachedEncoder<>(new LITagBytesEncoder()));
    }
    @Test public void x5_litbe_liPositionCached_fileStorage() throws FileNotFoundException {
        File f = new File(System.getProperty("user.home")+"/Desktop/litbe_lipositioncache_file_storage_perf_test.litbe");
        GenericPerformanceTest.run_standard_test("LITagCachedEncoder | LIPositionCached | FILE storage", PerformanceTests::writeResults, new LITagCachedEncoder<>(new LITagBytesEncoder(new FileStorage(f))));
    }
    @Test public void x5_litbe_liPositionCached_fileStorage_short() throws FileNotFoundException {
        File f = new File(System.getProperty("user.home")+"/Desktop/litbe_lipositioncache_file_storage_perf_test.litbe");
        GenericPerformanceTest.run_standard_test_short("LITagCachedEncoder | LIPositionCached | FILE storage", PerformanceTests::writeResults, new LITagCachedEncoder<>(new LITagBytesEncoder(new FileStorage(f))));
    }
    // NOTE(review): label says "RAM" but the server is file-backed — confirm which was intended.
    @Test public void x6_litbe_liPositionCached_remoteStorage_ram_short() throws IOException {
        int port = getNextPort();
        File f = new File(System.getProperty("user.home")+"/Desktop/litbe_liPositionCached_remote_file_storage_perf_test.litbe");
        try(RemoteStorageServer server = new RemoteStorageServer(port, f);
            RemoteStorage storage = new RemoteStorage("localhost", port) ) {
            server.clear();
            GenericPerformanceTest.run_standard_test_short("LITagCachedEncoder | LIPositionCached | REMOTE storage | RAM", PerformanceTests::writeResults, new LITagCachedEncoder<>(new LITagBytesEncoder(storage)));
        }
    }

    @Test public void x5_areb_ram_cached_comparison() throws Exception {
        int port = getNextPort();
        try(AuthenticatedRemoteEncoderServer server = new LIAuthenticatedRemoteEncoderServer(port, new ByteArrayStorage());
            AuthenticatedRemoteEncoderBytes areb = AuthenticatedRemoteEncoderBytes.register("localhost", port, "performer", "test") ) {
            CachedTagBasedEncoderBytes cached = new CachedTagBasedEncoderBytes(areb);
            // Same workload with and without the cache wrapper; server cleared between runs.
            GenericPerformanceTest.run_standard_test_big("AREB - Authenticated Remote Encoder | RAM", PerformanceTests::writeResults, areb);
            server.clear();
            GenericPerformanceTest.run_standard_test_big("AREB - Authenticated Remote Encoder | Cached RAM", PerformanceTests::writeResults, cached);
            server.clear();
            GenericPerformanceTest.run_standard_test_many("AREB - Authenticated Remote Encoder | RAM", PerformanceTests::writeResults, areb);
            server.clear();
            GenericPerformanceTest.run_standard_test_many("AREB - Authenticated Remote Encoder | Cached RAM", PerformanceTests::writeResults, cached);
        }
    }

    @Test public void x3_litbe_fileStorage_fastDrive_cached() throws IOException {
        File f = new File(System.getProperty("user.home")+"/Desktop/litbe_file_storage_perf_test_cached.litbe");
        try(FileStorage storage = new FileStorage(f)) {
            CachedTagBasedEncoderBytes cached = new CachedTagBasedEncoderBytes(new LITagBytesEncoder(storage));
            GenericPerformanceTest.run_standard_test_short("LITagBytesEncoder | Cached FILE storage", PerformanceTests::writeResults, cached);
        }
    }
    @Test public void x3_litbe_ram_cached() {
        CachedTagBasedEncoderBytes cached = new CachedTagBasedEncoderBytes(new LITagBytesEncoder());
        GenericPerformanceTest.run_standard_test("LITagBytesEncoder | Cached RAM", PerformanceTests::writeResults, cached);
    }

    /**
     * Result sink passed to the benchmark runner: writes one titled block of
     * call-time averages to the log. Null-safe w.r.t. {@code result_writer}.
     */
    private static void writeResults(String introduction, AverageCallTimeMarker.Call_Count_Average[] combined_res) {
        if(result_writer!=null) {
            try {
                result_writer.append(introduction).append("\n");
                for(AverageCallTimeMarker.Call_Count_Average c:combined_res)
                    result_writer.append(c.toString()).append("\n");
                result_writer.append("==================================================\n");
                result_writer.append("\n");
                result_writer.append("\n");
                result_writer.flush();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}
/** * Copyright 2011 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.devcoin.core; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.UnsupportedEncodingException; import java.util.HashMap; import java.util.Map; import static com.google.devcoin.core.Utils.*; /** * <p>Methods to serialize and de-serialize messages to the Bitcoin network format as defined in * <a href="https://en.devcoin.it/wiki/Protocol_specification">the protocol specification</a>.</p> * * <p>To be able to serialize and deserialize new Message subclasses the following criteria needs to be met.</p> * * <ul> * <li>The proper Class instance needs to be mapped to it's message name in the names variable below</li> * <li>There needs to be a constructor matching: NetworkParameters params, byte[] payload</li> * <li>Message.devcoinSerializeToStream() needs to be properly subclassed</li> * </ul> */ public class BitcoinSerializer { private static final Logger log = LoggerFactory.getLogger(BitcoinSerializer.class); private static final int COMMAND_LEN = 12; private NetworkParameters params; private boolean parseLazy = false; private boolean parseRetain = false; private static Map<Class<? extends Message>, String> names = new HashMap<Class<? 
extends Message>, String>(); static { names.put(VersionMessage.class, "version"); names.put(InventoryMessage.class, "inv"); names.put(Block.class, "block"); names.put(GetDataMessage.class, "getdata"); names.put(Transaction.class, "tx"); names.put(AddressMessage.class, "addr"); names.put(Ping.class, "ping"); names.put(Pong.class, "pong"); names.put(VersionAck.class, "verack"); names.put(GetBlocksMessage.class, "getblocks"); names.put(GetHeadersMessage.class, "getheaders"); names.put(GetAddrMessage.class, "getaddr"); names.put(HeadersMessage.class, "headers"); names.put(BloomFilter.class, "filterload"); names.put(FilteredBlock.class, "merkleblock"); names.put(NotFoundMessage.class, "notfound"); names.put(MemoryPoolMessage.class, "mempool"); } /** * Constructs a BitcoinSerializer with the given behavior. * * @param params networkParams used to create Messages instances and termining packetMagic */ public BitcoinSerializer(NetworkParameters params) { this(params, false, false); } /** * Constructs a BitcoinSerializer with the given behavior. * * @param params networkParams used to create Messages instances and termining packetMagic * @param parseLazy deserialize messages in lazy mode. * @param parseRetain retain the backing byte array of a message for fast reserialization. */ public BitcoinSerializer(NetworkParameters params, boolean parseLazy, boolean parseRetain) { this.params = params; this.parseLazy = parseLazy; this.parseRetain = parseRetain; } /** * Writes message to to the output stream. */ public void serialize(String name, byte[] message, OutputStream out) throws IOException { byte[] header = new byte[4 + COMMAND_LEN + 4 + 4 /* checksum */]; uint32ToByteArrayBE(params.getPacketMagic(), header, 0); // The header array is initialized to zero by Java so we don't have to worry about // NULL terminating the string here. 
for (int i = 0; i < name.length() && i < COMMAND_LEN; i++) { header[4 + i] = (byte) (name.codePointAt(i) & 0xFF); } Utils.uint32ToByteArrayLE(message.length, header, 4 + COMMAND_LEN); byte[] hash = doubleDigest(message); System.arraycopy(hash, 0, header, 4 + COMMAND_LEN + 4, 4); out.write(header); out.write(message); if (log.isDebugEnabled()) log.debug("Sending {} message: {}", name, bytesToHexString(header) + bytesToHexString(message)); } /** * Writes message to to the output stream. */ public void serialize(Message message, OutputStream out) throws IOException { String name = names.get(message.getClass()); if (name == null) { throw new Error("BitcoinSerializer doesn't currently know how to serialize " + message.getClass()); } serialize(name, message.devcoinSerialize(), out); } /** * Reads a message from the given InputStream and returns it. */ public Message deserialize(InputStream in) throws ProtocolException, IOException { // A Bitcoin protocol message has the following format. // // - 4 byte magic number: 0xfabfb5da for the testnet or // 0xf9beb4d9 for production // - 12 byte command in ASCII // - 4 byte payload size // - 4 byte checksum // - Payload data // // The checksum is the first 4 bytes of a SHA256 hash of the message payload. It isn't // present for all messages, notably, the first one on a connection. // // Satoshi's implementation ignores garbage before the magic header bytes. We have to do the same because // sometimes it sends us stuff that isn't part of any message. seekPastMagicBytes(in); BitcoinPacketHeader header = new BitcoinPacketHeader(in); // Now try to read the whole message. return deserializePayload(header, in); } /** * Deserializes only the header in case packet meta data is needed before decoding * the payload. 
This method assumes you have already called seekPastMagicBytes() */ public BitcoinPacketHeader deserializeHeader(InputStream in) throws ProtocolException, IOException { return new BitcoinPacketHeader(in); } /** * Deserialize payload only. You must provide a header, typically obtained by calling * {@link BitcoinSerializer#deserializeHeader}. */ public Message deserializePayload(BitcoinPacketHeader header, InputStream in) throws ProtocolException, IOException { int readCursor = 0; byte[] payloadBytes = new byte[header.size]; while (readCursor < payloadBytes.length - 1) { int bytesRead = in.read(payloadBytes, readCursor, header.size - readCursor); if (bytesRead == -1) { throw new IOException("Socket is disconnected"); } readCursor += bytesRead; } // Verify the checksum. byte[] hash; hash = doubleDigest(payloadBytes); if (header.checksum[0] != hash[0] || header.checksum[1] != hash[1] || header.checksum[2] != hash[2] || header.checksum[3] != hash[3]) { throw new ProtocolException("Checksum failed to verify, actual " + bytesToHexString(hash) + " vs " + bytesToHexString(header.checksum)); } if (log.isDebugEnabled()) { log.debug("Received {} byte '{}' message: {}", new Object[]{ header.size, header.command, Utils.bytesToHexString(payloadBytes) }); } try { return makeMessage(header.command, header.size, payloadBytes, hash, header.checksum); } catch (Exception e) { throw new ProtocolException("Error deserializing message " + Utils.bytesToHexString(payloadBytes) + "\n", e); } } private Message makeMessage(String command, int length, byte[] payloadBytes, byte[] hash, byte[] checksum) throws ProtocolException { // We use an if ladder rather than reflection because reflection is very slow on Android. 
Message message;
        // Dispatch on the wire command string. Commands whose messages carry no
        // checksum state of interest return immediately; the rest fall through so
        // the checksum can be attached below.
        if (command.equals("version")) {
            return new VersionMessage(params, payloadBytes);
        } else if (command.equals("inv")) {
            message = new InventoryMessage(params, payloadBytes, parseLazy, parseRetain, length);
        } else if (command.equals("block")) {
            message = new Block(params, payloadBytes, parseLazy, parseRetain, length);
        } else if (command.equals("merkleblock")) {
            message = new FilteredBlock(params, payloadBytes);
        } else if (command.equals("getdata")) {
            message = new GetDataMessage(params, payloadBytes, parseLazy, parseRetain, length);
        } else if (command.equals("tx")) {
            Transaction tx = new Transaction(params, payloadBytes, null, parseLazy, parseRetain, length);
            // The transaction hash was already computed during checksum verification;
            // reuse it (byte-reversed to display order) instead of rehashing.
            if (hash != null)
                tx.setHash(new Sha256Hash(Utils.reverseBytes(hash)));
            message = tx;
        } else if (command.equals("addr")) {
            message = new AddressMessage(params, payloadBytes, parseLazy, parseRetain, length);
        } else if (command.equals("ping")) {
            message = new Ping(params, payloadBytes);
        } else if (command.equals("pong")) {
            message = new Pong(params, payloadBytes);
        } else if (command.equals("verack")) {
            return new VersionAck(params, payloadBytes);
        } else if (command.equals("headers")) {
            return new HeadersMessage(params, payloadBytes);
        } else if (command.equals("alert")) {
            return new AlertMessage(params, payloadBytes);
        } else if (command.equals("filterload")) {
            return new BloomFilter(params, payloadBytes);
        } else if (command.equals("notfound")) {
            return new NotFoundMessage(params, payloadBytes);
        } else if (command.equals("mempool")) {
            return new MemoryPoolMessage();
        } else {
            // Unknown commands are tolerated rather than fatal: wrap the raw bytes.
            log.warn("No support for deserializing message with name {}", command);
            return new UnknownMessage(params, command, payloadBytes);
        }
        if (checksum != null)
            message.setChecksum(checksum);
        return message;
    }

    /**
     * Consumes bytes from the stream until the 4-byte packet magic has been seen,
     * discarding any garbage that precedes it.
     */
    public void seekPastMagicBytes(InputStream in) throws IOException {
        int magicCursor = 3;  // Which byte of the magic we're looking for currently.
        while (true) {
            int b = in.read();  // Read a byte.
            if (b == -1) {
                // There's no more data to read.
                throw new IOException("Socket is disconnected");
            }
            // We're looking for a run of bytes that is the same as the packet magic but we want to ignore partial
            // magics that aren't complete. So we keep track of where we're up to with magicCursor.
            int expectedByte = 0xFF & (int) (params.getPacketMagic() >>> (magicCursor * 8));
            if (b == expectedByte) {
                magicCursor--;
                if (magicCursor < 0) {
                    // We found the magic sequence.
                    return;
                } else {
                    // We still have further to go to find the next message.
                }
            } else {
                magicCursor = 3;
            }
        }
    }

    /**
     * Whether the serializer will produce lazy parse mode Messages
     */
    public boolean isParseLazyMode() {
        return parseLazy;
    }

    /**
     * Whether the serializer will produce cached mode Messages
     */
    public boolean isParseRetainMode() {
        return parseRetain;
    }

    /**
     * Parsed form of the fixed-size portion of a packet that follows the magic
     * bytes: 12-byte command, 4-byte payload size, 4-byte checksum.
     */
    public static class BitcoinPacketHeader {
        public final byte[] header;
        public final String command;
        public final int size;
        public final byte[] checksum;

        public BitcoinPacketHeader(InputStream in) throws ProtocolException, IOException {
            header = new byte[COMMAND_LEN + 4 + 4];
            int readCursor = 0;
            while (readCursor < header.length) {
                int bytesRead = in.read(header, readCursor, header.length - readCursor);
                if (bytesRead == -1) {
                    // There's no more data to read.
                    throw new IOException("Incomplete packet in underlying stream");
                }
                readCursor += bytesRead;
            }

            int cursor = 0;

            // The command is a NULL terminated string, unless the command fills all twelve bytes
            // in which case the termination is implicit.
            int mark = cursor;
            for (; header[cursor] != 0 && cursor - mark < COMMAND_LEN; cursor++) ;
            byte[] commandBytes = new byte[cursor - mark];
            System.arraycopy(header, mark, commandBytes, 0, cursor - mark);
            try {
                command = new String(commandBytes, "US-ASCII");
            } catch (UnsupportedEncodingException e) {
                throw new RuntimeException(e);  // Cannot happen.
            }
            cursor = mark + COMMAND_LEN;

            size = (int) readUint32(header, cursor);
            cursor += 4;

            if (size > Message.MAX_SIZE)
                throw new ProtocolException("Message size too large: " + size);

            // Old clients don't send the checksum.
            checksum = new byte[4];
            // Copy out the 4 checksum bytes that terminate the header; the payload size
            // read above counts only the payload, not these bytes.
            System.arraycopy(header, cursor, checksum, 0, 4);
            cursor += 4;
        }
    }
}
/* * Copyright 2017 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.workbench.common.stunner.standalone.client.screens; import java.util.logging.Level; import java.util.logging.Logger; import javax.annotation.PostConstruct; import javax.enterprise.context.Dependent; import javax.inject.Inject; import com.google.gwt.logging.client.LogConfiguration; import com.google.gwt.user.client.ui.IsWidget; import org.kie.workbench.common.stunner.client.widgets.presenters.Viewer; import org.kie.workbench.common.stunner.client.widgets.presenters.diagram.DiagramEditor; import org.kie.workbench.common.stunner.client.widgets.presenters.diagram.DiagramPresenterFactory; import org.kie.workbench.common.stunner.client.widgets.presenters.diagram.DiagramViewer; import org.kie.workbench.common.stunner.client.widgets.views.session.ScreenErrorView; import org.kie.workbench.common.stunner.client.widgets.views.session.ScreenPanelView; import org.kie.workbench.common.stunner.core.client.service.ClientRuntimeError; import org.kie.workbench.common.stunner.core.client.service.ServiceCallback; import org.kie.workbench.common.stunner.core.diagram.Diagram; import org.uberfire.client.annotations.WorkbenchContextId; import org.uberfire.client.annotations.WorkbenchMenu; import org.uberfire.client.annotations.WorkbenchPartTitle; import org.uberfire.client.annotations.WorkbenchPartView; import org.uberfire.client.annotations.WorkbenchScreen; import 
org.uberfire.ext.widgets.common.client.common.BusyPopup;
import org.uberfire.lifecycle.OnClose;
import org.uberfire.lifecycle.OnOpen;
import org.uberfire.lifecycle.OnStartup;
import org.uberfire.mvp.PlaceRequest;
import org.uberfire.workbench.model.menu.MenuFactory;
import org.uberfire.workbench.model.menu.Menus;

/**
 * This screen wraps the diagram viewer & editor widgets
 * and provides the menu items for both loading or editing a diagram.
 */
@Dependent
@WorkbenchScreen(identifier = DiagramPresenterScreen.SCREEN_ID)
public class DiagramPresenterScreen {

    // A per-class constant: declared static final so it cannot be reassigned
    // (the original was a mutable static field).
    private static final Logger LOGGER = Logger.getLogger(DiagramPresenterScreen.class.getName());

    public static final String SCREEN_ID = "DiagramPresenterScreen";
    public static final String DIAGRAM_NAME = "evaluation2";

    /**
     * A loader helper instance for loading the diagram that will be used by <code>diagramViewer</code>
     */
    @Inject
    ShowcaseDiagramService diagramLoader;

    /**
     * The diagram presenter factory instance.
     */
    @Inject
    DiagramPresenterFactory<Diagram> diagramPresenterFactory;

    @Inject
    ScreenPanelView screenPanelView;

    @Inject
    ScreenErrorView screenErrorView;

    private Menus menu = null;
    // The currently displayed viewer or editor, if any; disposed before each load.
    private Viewer<Diagram, ?, ?, ?> presenter;

    @PostConstruct
    public void init() {
    }

    @OnStartup
    public void onStartup(final PlaceRequest placeRequest) {
        this.menu = makeMenuBar();
    }

    private Menus makeMenuBar() {
        return MenuFactory
                .newTopLevelMenu("View " + DIAGRAM_NAME)
                .respondsWith(this::show)
                .endMenu()
                .newTopLevelMenu("Edit " + DIAGRAM_NAME)
                .respondsWith(this::edit)
                .endMenu()
                .build();
    }

    /**
     * Common preamble for both viewing and editing, previously duplicated in
     * show() and edit(): raise Stunner log verbosity, show the busy indicator
     * and dispose of any presenter currently on screen.
     */
    private void prepareForLoad() {
        Logger.getLogger("org.kie.workbench.common.stunner").setLevel(Level.FINE);
        BusyPopup.showMessage("Loading");
        destroy();
    }

    /**
     * Loads the diagram and displays it in a read-only viewer.
     */
    private void show() {
        prepareForLoad();
        diagramLoader.loadByName(DIAGRAM_NAME,
                                 new ServiceCallback<Diagram>() {
                                     @Override
                                     public void onSuccess(final Diagram diagram) {
                                         final DiagramViewer<Diagram, ?> diagramViewer = diagramPresenterFactory.newViewer(diagram);
                                         screenPanelView.setWidget(diagramViewer.getView());
                                         DiagramPresenterScreen.this.presenter = diagramViewer;
                                         diagramViewer.open(diagram,
                                                            new ScreenViewerCallback());
                                     }

                                     @Override
                                     public void onError(final ClientRuntimeError error) {
                                         showError(error);
                                     }
                                 });
    }

    /**
     * Loads the diagram and displays it in an editor.
     */
    private void edit() {
        prepareForLoad();
        diagramLoader.loadByName(DIAGRAM_NAME,
                                 new ServiceCallback<Diagram>() {
                                     @Override
                                     public void onSuccess(final Diagram diagram) {
                                         final DiagramEditor<Diagram, ?> diagramEditor = diagramPresenterFactory.newEditor(diagram);
                                         screenPanelView.setWidget(diagramEditor.getView());
                                         DiagramPresenterScreen.this.presenter = diagramEditor;
                                         diagramEditor.open(diagram,
                                                            new ScreenViewerCallback());
                                     }

                                     @Override
                                     public void onError(final ClientRuntimeError error) {
                                         showError(error);
                                     }
                                 });
    }

    @OnOpen
    public void onOpen() {
    }

    @OnClose
    public void onClose() {
        clear();
    }

    @WorkbenchMenu
    public Menus getMenu() {
        return menu;
    }

    @WorkbenchPartTitle
    public String getTitle() {
        return "Diagram Presenter";
    }

    @WorkbenchPartView
    public IsWidget getWidget() {
        return screenPanelView;
    }

    @WorkbenchContextId
    public String getMyContextRef() {
        return "diagramPresenterScreenContext";
    }

    /**
     * Shared open() callback: closes the busy indicator on success and routes
     * failures to the screen-level error view.
     */
    private final class ScreenViewerCallback implements DiagramViewer.DiagramViewerCallback<Diagram> {

        @Override
        public void afterCanvasInitialized() {
        }

        @Override
        public void onSuccess() {
            LOGGER.log(Level.FINE, DIAGRAM_NAME + " loaded!.");
            BusyPopup.close();
        }

        @Override
        public void onError(final ClientRuntimeError error) {
            showError(error);
        }
    }

    private void clear() {
        if (null != presenter) {
            presenter.clear();
        }
    }

    private void destroy() {
        if (null != presenter) {
            presenter.destroy();
        }
    }

    private void showError(final ClientRuntimeError error) {
        screenErrorView.showError(error);
        screenPanelView.setWidget(screenErrorView.asWidget());
        log(Level.SEVERE, DIAGRAM_NAME + " cannot be loaded! [Error=" + error + "]");
        BusyPopup.close();
    }

    private void log(final Level level, final String message) {
        if (LogConfiguration.loggingIsEnabled()) {
            LOGGER.log(level, message);
        }
    }
}
/* * Jitsi, the OpenSource Java VoIP and Instant Messaging client. * * Distributable under LGPL license. * See terms of license at gnu.org. */ package org.jitsi.impl.neomedia.device; import static org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi.WASAPI.*; import java.util.*; import javax.media.*; import javax.media.format.*; import org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi.*; import org.jitsi.impl.neomedia.jmfext.media.renderer.audio.*; import org.jitsi.service.neomedia.codec.*; import org.jitsi.util.*; /** * Implements an <tt>AudioSystem</tt> using Windows Audio Session API (WASAPI) * and related Core Audio APIs such as Multimedia Device (MMDevice) API. * * @author Lyubomir Marinov */ public class WASAPISystem extends AudioSystem { /** * A GUID which identifies the audio session that streams belong to. */ private static String audioSessionGuid; /** * The protocol of the <tt>MediaLocator</tt> identifying * <tt>CaptureDeviceInfo</tt> contributed by <tt>WASAPISystem</tt>. */ private static final String LOCATOR_PROTOCOL = LOCATOR_PROTOCOL_WASAPI; /** * The logger used by the <tt>WASAPISystem</tt> class and its instances to * log debugging information. */ private static final Logger logger = Logger.getLogger(WASAPISystem.class); /** * The pointer to the native <tt>IMMDeviceEnumerator</tt> interface instance * which this <tt>WASAPISystem</tt> uses to enumerate the audio endpoint * devices. */ private long iMMDeviceEnumerator; /** * The <tt>IMMNotificationClient</tt> which is to notify this * <tt>WASAPISystem</tt> when an audio endpoint device is added or removed, * when the state or properties of an endpoint device change, or when there * is a change in the default role assigned to an endpoint device. 
*/ private IMMNotificationClient pNotify; /** * A <tt>WAVEFORMATEX</tt> instance allocated in {@link #preInitialize()}, * freed in {@link #postInitialize()} and made available during the * execution of {@link #doInitialize()} in order to minimize memory * fragmentation. */ private long waveformatex; /** * Initializes a new <tt>WASAPISystem</tt> instance. * * @throws Exception if anything goes wrong while initializing the new * <tt>WASAPISystem</tt> instance */ WASAPISystem() throws Exception { super( LOCATOR_PROTOCOL, FEATURE_NOTIFY_AND_PLAYBACK_DEVICES | FEATURE_REINITIALIZE); } /** * {@inheritDoc} */ protected void doInitialize() throws Exception { /* * XXX Multiple threads may invoke the initialization of a DeviceSystem * so we cannot be sure that the COM library has been initialized for * the current thread. */ CoInitializeEx(0, COINIT_MULTITHREADED); if (iMMDeviceEnumerator == 0) { iMMDeviceEnumerator = CoCreateInstance( CLSID_MMDeviceEnumerator, 0, CLSCTX_ALL, IID_IMMDeviceEnumerator); if (iMMDeviceEnumerator == 0) throw new IllegalStateException("iMMDeviceEnumerator"); /* * Register this DeviceSystem to be notified when an audio endpoint * device is added or removed, when the state or properties of an * endpoint device change, or when there is a change in the default * role assigned to an endpoint device. 
*/ MMNotificationClient.RegisterEndpointNotificationCallback(pNotify); } long iMMDeviceCollection = IMMDeviceEnumerator_EnumAudioEndpoints( iMMDeviceEnumerator, eAll, DEVICE_STATE_ACTIVE); List<CaptureDeviceInfo2> captureDevices; List<CaptureDeviceInfo2> playbackDevices; if (iMMDeviceCollection == 0) { throw new RuntimeException( "IMMDeviceEnumerator_EnumAudioEndpoints"); } try { int count = IMMDeviceCollection_GetCount(iMMDeviceCollection); captureDevices = new ArrayList<CaptureDeviceInfo2>(count); playbackDevices = new ArrayList<CaptureDeviceInfo2>(count); for (int i = 0; i < count; i++) { long iMMDevice = IMMDeviceCollection_Item(iMMDeviceCollection, i); if (iMMDevice == 0) throw new RuntimeException("IMMDeviceCollection_Item"); try { doInitializeIMMDevice( iMMDevice, captureDevices, playbackDevices); } catch (Throwable t) { if (t instanceof ThreadDeath) throw (ThreadDeath) t; /* * We do not want the initialization of one IMMDevice to * prevent the initialization of other IMMDevices. */ logger.error( "Failed to doInitialize for IMMDevice at index " + i, t); } finally { IMMDevice_Release(iMMDevice); } } } finally { IMMDeviceCollection_Release(iMMDeviceCollection); } setCaptureDevices(captureDevices); setPlaybackDevices(playbackDevices); } /** * Implements the part of {@link #doInitialize()} related to a specific * <tt>IMMDevice</tt>. 
* * @param iMMDevice the <tt>IMMDevice</tt> to initialize as part of the * invocation of <tt>doInitialize()</tt> on this instance * @throws HResultException if an error occurs while initializing the * specified <tt>iMMDevice</tt> in a native WASAPI function which returns an * <tt>HRESULT</tt> value * @param captureDevices the state of the execution of * <tt>doInitialize()</tt> which stores the <tt>CaptureDeviceInfo2</tt>s of * the capture devices discovered by this <tt>WASAPISystem</tt> * @param playbackDevices the state of the execution of * <tt>doInitialize()</tt> which stores the <tt>CaptureDeviceInfo2</tt>s of * the playback devices discovered by this <tt>WASAPISystem</tt> */ private void doInitializeIMMDevice( long iMMDevice, List<CaptureDeviceInfo2> captureDevices, List<CaptureDeviceInfo2> playbackDevices) throws HResultException { String id = IMMDevice_GetId(iMMDevice); /* * The ID of the IMMDevice is required because it will be used within * the MediaLocator of its representative CaptureDeviceInfo. 
*/ if (id == null) throw new RuntimeException("IMMDevice_GetId"); long iAudioClient = IMMDevice_Activate(iMMDevice, IID_IAudioClient, CLSCTX_ALL, 0); List<AudioFormat> formats; if (iAudioClient == 0) throw new RuntimeException("IMMDevice_Activate"); try { formats = getIAudioClientSupportedFormats(iAudioClient); } finally { IAudioClient_Release(iAudioClient); } if ((formats != null) && !formats.isEmpty()) { String name = null; try { name = getIMMDeviceFriendlyName(iMMDevice); } catch (Throwable t) { if (t instanceof ThreadDeath) throw (ThreadDeath) t; logger.warn( "Failed to retrieve the PKEY_Device_FriendlyName" + " of IMMDevice " + id, t); } if ((name == null) || (name.length() == 0)) name = id; int dataFlow = getIMMDeviceDataFlow(iMMDevice); CaptureDeviceInfo2 cdi2 = new CaptureDeviceInfo2( name, new MediaLocator(LOCATOR_PROTOCOL + ":" + id), formats.toArray(new Format[formats.size()]), id, /* transportType */ null, /* modelIdentifier */ null); switch (dataFlow) { case eCapture: captureDevices.add(cdi2); break; case eRender: playbackDevices.add(cdi2); break; default: logger.error( "Failed to retrieve dataFlow from IMMEndpoint " + id); break; } } } /** * {@inheritDoc} */ @Override protected void finalize() throws Throwable { try { if (iMMDeviceEnumerator != 0) { IMMDeviceEnumerator_Release(iMMDeviceEnumerator); iMMDeviceEnumerator = 0; } } finally { super.finalize(); } } /** * Gets an array of alternative <tt>AudioFormat</tt>s based on * <tt>format</tt> with which an attempt is to be made to initialize a new * <tt>IAudioClient</tt> instance. * * @param format the <tt>AudioFormat</tt> on which the alternative * <tt>AudioFormat</tt>s are to be based * @return an array of alternative <tt>AudioFormat</tt>s based on * <tt>format</tt> with which an attempt is to be made to initialize a new * <tt>IAudioClient</tt> instance */ public static AudioFormat[] getFormatsToInitializeIAudioClient( AudioFormat format) { // We are able to convert between mono and stereo. 
int channels; switch (format.getChannels()) { case 1: channels = 2; break; case 2: channels = 1; break; default: return new AudioFormat[] { format }; } return new AudioFormat[] { /* * Regardless of the differences in the states of the * support of mono and stereo in the library at the time * of this writing, try to initialize a new IAudioClient * instance with a format which will not require * conversion between mono and stereo. */ format, new AudioFormat( format.getEncoding(), format.getSampleRate(), format.getSampleSizeInBits(), channels, AudioFormat.LITTLE_ENDIAN, AudioFormat.SIGNED, Format.NOT_SPECIFIED /* frameSizeInBits */, Format.NOT_SPECIFIED /* frameRate */, format.getDataType()) }; } /** * Gets a <tt>List</tt> of the <tt>AudioFormat</tt>s supported by a specific * <tt>IAudioClient</tt>. * * @param iAudioClient the <tt>IAudioClient</tt> to get the <tt>List</tt> of * supported <tt>AudioFormat</tt>s of * @return a <tt>List</tt> of the <tt>AudioFormat</tt>s supported by the * specified <tt>iAudioClient</tt> * @throws HResultException if an error occurs while retrieving the * <tt>List</tt> of <tt>AudioFormat</tt>s supported by the specified * <tt>iAudioClient</tt> in a native WASAPI function which returns an * <tt>HRESULT</tt> value */ private List<AudioFormat> getIAudioClientSupportedFormats(long iAudioClient) throws HResultException { char cbSize = 0; List<AudioFormat> supportedFormats = new ArrayList<AudioFormat>(); for (char nChannels = 1; nChannels <= 2; nChannels++) { for (int i = 0; i < Constants.AUDIO_SAMPLE_RATES.length; i++) { int nSamplesPerSec = (int) Constants.AUDIO_SAMPLE_RATES[i]; for (char wBitsPerSample = 16; wBitsPerSample > 0; wBitsPerSample -= 8) { char nBlockAlign = (char) ((nChannels * wBitsPerSample) / 8); WASAPI.WAVEFORMATEX_fill( waveformatex, WAVE_FORMAT_PCM, nChannels, nSamplesPerSec, nSamplesPerSec * nBlockAlign, nBlockAlign, wBitsPerSample, cbSize); long pClosestMatch = IAudioClient_IsFormatSupported( iAudioClient, 
AUDCLNT_SHAREMODE_SHARED, waveformatex); if (pClosestMatch == 0) // not supported continue; try { /* * Succeeded with a closest match to the specified * format? */ if (pClosestMatch != waveformatex) { // We support AutioFormat.LINEAR only. if (WAVEFORMATEX_getWFormatTag(pClosestMatch) != WAVE_FORMAT_PCM) continue; nChannels = WAVEFORMATEX_getNChannels(pClosestMatch); nSamplesPerSec = WAVEFORMATEX_getNSamplesPerSec( pClosestMatch); wBitsPerSample = WAVEFORMATEX_getWBitsPerSample( pClosestMatch); } AudioFormat supportedFormat; /* * We are able to convert between mono and stereo. * Additionally, the stereo support within the library * is not as advanced as the mono support at the time of * this writing. */ if (nChannels == 2) { supportedFormat = new AudioFormat( AudioFormat.LINEAR, nSamplesPerSec, wBitsPerSample, /* channels */ 1, AudioFormat.LITTLE_ENDIAN, AudioFormat.SIGNED, /* frameSizeInBits */ Format.NOT_SPECIFIED, /* frameRate */ Format.NOT_SPECIFIED, Format.byteArray); if (!supportedFormats.contains(supportedFormat)) supportedFormats.add(supportedFormat); } supportedFormat = new AudioFormat( AudioFormat.LINEAR, nSamplesPerSec, wBitsPerSample, nChannels, AudioFormat.LITTLE_ENDIAN, AudioFormat.SIGNED, /* frameSizeInBits */ Format.NOT_SPECIFIED, /* frameRate */ Format.NOT_SPECIFIED, Format.byteArray); if (!supportedFormats.contains(supportedFormat)) supportedFormats.add(supportedFormat); } finally { if (pClosestMatch != waveformatex) CoTaskMemFree(pClosestMatch); } } } } return supportedFormats; } /** * Gets an audio endpoint device that is identified by a specific endpoint * ID string. 
* * @param id the endpoing ID string which identifies the audio endpoint * device to be retrieved * @return an <tt>IMMDevice</tt> instance which represents the audio * endpoint device that is identified by the specified enpoint ID string * @throws HResultException if an error occurs while retrieving the audio * endpoint device that is identified by the specified endpoint ID string in * a native WASAPI function which returns an <tt>HRESULT</tt> value */ public long getIMMDevice(String id) throws HResultException { long iMMDeviceEnumerator = this.iMMDeviceEnumerator; if (iMMDeviceEnumerator == 0) throw new IllegalStateException("iMMDeviceEnumerator"); else return IMMDeviceEnumerator_GetDevice(iMMDeviceEnumerator, id); } /** * Gets the data flow of a specific <tt>IMMDevice</tt> in the form of an * <tt>EDataFlow</tt> value. * * @param iMMDevice the <tt>IMMDevice</tt> to get the data flow of * @return an <tt>EDataFlow</tt> value which represents the data flow of the * specified <tt>IMMDevice</tt> * @throws HResultException if an error occurs while retrieving the data * flow of the specified <tt>iMMDevice</tt> in a native WASAPI function * which returns an <tt>HRESULT</tt> value */ public int getIMMDeviceDataFlow(long iMMDevice) throws HResultException { long iMMEndpoint = IMMDevice_QueryInterface(iMMDevice, IID_IMMEndpoint); int dataFlow; if (iMMEndpoint == 0) throw new RuntimeException("IMMDevice_QueryInterface"); try { dataFlow = IMMEndpoint_GetDataFlow(iMMEndpoint); } finally { IMMEndpoint_Release(iMMEndpoint); } switch (dataFlow) { case eAll: case eCapture: case eRender: return dataFlow; default: throw new RuntimeException("IMMEndpoint_GetDataFlow"); } } /** * Gets the <tt>PKEY_Device_FriendlyName</tt> of a specific * <tt>IMMDevice</tt> which represents the human-readable name of the device * (interface). 
* * @param iMMDevice the <tt>IMMDevice</tt> to get the * friendly/human-readable name of * @return the friendly/human-readable name of the specified * <tt>iMMDevice</tt> * @throws HResultException if an error occurs while retrieving the friendly * name of the specified <tt>iMMDevice</tt> in a native WASAPI function * which returns an <tt>HRESULT</tt> value */ private String getIMMDeviceFriendlyName(long iMMDevice) throws HResultException { long iPropertyStore = IMMDevice_OpenPropertyStore(iMMDevice, STGM_READ); if (iPropertyStore == 0) throw new RuntimeException("IMMDevice_OpenPropertyStore"); String deviceFriendlyName; try { deviceFriendlyName = IPropertyStore_GetString( iPropertyStore, PKEY_Device_FriendlyName); } finally { IPropertyStore_Release(iPropertyStore); } return deviceFriendlyName; } /** * {@inheritDoc} */ @Override protected String getRendererClassName() { return WASAPIRenderer.class.getName(); } /** * Gets the size in bytes of an audio sample of a specific * <tt>AudioFormat</tt>. * * @param format the <tt>AudioFormat</tt> to get the size in bytes of an * audio sample of * @return the size in bytes of an audio sample of the specified * <tt>format</tt> */ public static int getSampleSizeInBytes(AudioFormat format) { int sampleSizeInBits = format.getSampleSizeInBits(); switch (sampleSizeInBits) { case 8: return 1; case 16: return 2; default: return sampleSizeInBits / 8; } } /** * Initializes a new <tt>IAudioClient</tt> instance for an audio endpoint * device identified by a specific <tt>MediaLocator</tt>. The initialization * is performed to an extent suitable for the operation of * {@link WASAPIRenderer} and {@link WASAPIStream}. 
* * @param locator a <tt>MediaLocator</tt> which identifies the audio * endpoint device to initialize a new <tt>IAudioClient</tt> instance for * @param dataFlow the flow of media data to be supported by the audio * endpoint device identified by the specified <tt>locator</tt> * @param eventHandle * @param hnsBufferDuration * @param formats an array of alternative <tt>AudioFormat</tt>s with which * initialization of a new <tt>IAudioClient</tt> instance is to be * attempted. The first element of the <tt>formats</tt> array which is * supported by the new <tt>IAudioClient</tt> instance is used to initialize * it and any preceding elements are set to <tt>null</tt> to signify that * they are not supported and to make it possible to retrieve the * <tt>AudioFormat</tt> with which the new <tt>IAudioClient</tt> instance * has been initialized. * @return a new <tt>IAudioClient</tt> instance initialized for the audio * endpoint device identified by the specified <tt>locator</tt> * @throws HResultException if an error occurs while initializing a new * <tt>IAudioClient</tt> for the audio endpoint device identified by the * specified <tt>locator</tt> in a native WASAPI function which returns an * <tt>HRESULT</tt> value */ public long initializeIAudioClient( MediaLocator locator, DataFlow dataFlow, long eventHandle, long hnsBufferDuration, AudioFormat[] formats) throws HResultException { String id = locator.getRemainder(); long iMMDevice = getIMMDevice(id); if (iMMDevice == 0) { throw new RuntimeException( "Failed to retrieve audio endpoint device " + "with endpoint ID string " + id); } long ret = 0; try { /* * Assert that the audio endpoint device identified by the specified * locator supports the specified dataFlow. 
*/ int iMMDeviceDataFlow = getIMMDeviceDataFlow(iMMDevice); switch (dataFlow) { case CAPTURE: if ((iMMDeviceDataFlow != eAll) && (iMMDeviceDataFlow != eCapture)) throw new IllegalArgumentException("dataFlow"); break; case NOTIFY: case PLAYBACK: if ((iMMDeviceDataFlow != eAll) && (iMMDeviceDataFlow != eRender)) throw new IllegalArgumentException("dataFlow"); break; } long iAudioClient = IMMDevice_Activate( iMMDevice, IID_IAudioClient, CLSCTX_ALL, 0); if (iAudioClient == 0) throw new RuntimeException("IMMDevice_Activate"); try { long waveformatex = WAVEFORMATEX_alloc(); if (waveformatex == 0) throw new OutOfMemoryError("WAVEFORMATEX_alloc"); try { int shareMode = AUDCLNT_SHAREMODE_SHARED; boolean waveformatexIsInitialized = false; for (int i = 0; i < formats.length; i++) { WAVEFORMATEX_fill(waveformatex, formats[i]); long pClosestMatch = IAudioClient_IsFormatSupported( iAudioClient, shareMode, waveformatex); if (pClosestMatch == 0) // not supported formats[i] = null; else { try { if (pClosestMatch == waveformatex) { waveformatexIsInitialized = true; break; } else { /* * Succeeded with a closest match to the * specified format. */ formats[i] = null; } } finally { if (pClosestMatch != waveformatex) CoTaskMemFree(pClosestMatch); } } } if (!waveformatexIsInitialized) throw new IllegalArgumentException("formats"); int streamFlags = AUDCLNT_STREAMFLAGS_NOPERSIST; if (eventHandle != 0) eventHandle |= AUDCLNT_STREAMFLAGS_EVENTCALLBACK; int hresult = IAudioClient_Initialize( iAudioClient, shareMode, streamFlags, hnsBufferDuration, /* hnsPeriodicity */ 0, waveformatex, audioSessionGuid); if (hresult != S_OK) { /* * The execution is not expected to reach here. Anyway, * be prepared to handle even such a case for the sake * of completeness. 
*/ throw new HResultException(hresult); } if (((streamFlags & AUDCLNT_STREAMFLAGS_EVENTCALLBACK) == AUDCLNT_STREAMFLAGS_EVENTCALLBACK) && (eventHandle != 0)) { IAudioClient_SetEventHandle(iAudioClient, eventHandle); } ret = iAudioClient; iAudioClient = 0; } finally { CoTaskMemFree(waveformatex); } } finally { if (iAudioClient != 0) IAudioClient_Release(iAudioClient); } } finally { if (iMMDevice != 0) IMMDevice_Release(iMMDevice); } return ret; } /** * {@inheritDoc} */ @Override protected void postInitialize() { try { super.postInitialize(); } finally { if (waveformatex != 0) { CoTaskMemFree(waveformatex); waveformatex = 0; } } } /** * {@inheritDoc} */ @Override protected void preInitialize() { super.preInitialize(); /* * Make sure a WAVEFORMATEX instance is available during the execution * of doInitialize(). The field has been introduced to minimize memory * fragmentation. */ if (waveformatex != 0) { CoTaskMemFree(waveformatex); waveformatex = 0; } waveformatex = WAVEFORMATEX_alloc(); if (waveformatex == 0) throw new OutOfMemoryError("WAVEFORMATEX_alloc"); if (pNotify == null) { pNotify = new IMMNotificationClient() { public void OnDefaultDeviceChanged( int flow, int role, String pwstrDefaultDevice) { } public void OnDeviceAdded(String pwstrDeviceId) { reinitialize(pwstrDeviceId); } public void OnDeviceRemoved(String pwstrDeviceId) { reinitialize(pwstrDeviceId); } public void OnDeviceStateChanged( String pwstrDeviceId, int dwNewState) { reinitialize(pwstrDeviceId); } public void OnPropertyValueChanged( String pwstrDeviceId, long key) { } }; } /* * Generate a GUID to identify an audio session that steams to be * initialized will belong to. */ if (audioSessionGuid == null) { try { audioSessionGuid = CoCreateGuid(); } catch (HResultException hre) { /* * The application/library will work with the default audio * session GUID. */ logger.warn("Failed to generate a new audio session GUID", hre); } } } /** * Reinitializes this <tt>WASAPISystem</tt>. 
 The implementation assumes that
     * the invocation is performed by the Multimedia Device (MMDevice) API and
     * swallows any thrown <tt>Exception</tt>.
     *
     * @param deviceId the endpoint ID string that identifies the audio endpoint
     * device which is related to the decision to reinitialize this
     * <tt>WASAPISystem</tt>
     */
    private void reinitialize(String deviceId)
    {
        try
        {
            /*
             * XXX Invoke the initialize() method asynchronously in order to
             * allow the Multimedia Device (MMDevice) callback to return
             * immediately. Otherwise, the execution would freeze in the
             * IAudioClient_Release function. Besides, the callback
             * dispatches the notifications after the respective changes have
             * been realized anyway.
             */
            invokeDeviceSystemInitialize(this, true);
        }
        catch (Exception e)
        {
            logger.error("Failed to reinitialize " + getClass().getName(), e);
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String toString()
    {
        return "Windows Audio Session API (WASAPI)";
    }

    /**
     * Sets the fields of a specific <tt>WAVEFORMATEX</tt> instance from a
     * specific <tt>AudioFormat</tt> instance so that the two of them are
     * equivalent in terms of the formats of audio data that they describe.
     *
     * @param waveformatex the <tt>WAVEFORMATEX</tt> instance to set the fields
     * of from the specified <tt>audioFormat</tt>
     * @param audioFormat the <tt>AudioFormat</tt> instance to set the fields of
     * the specified <tt>waveformatex</tt> from
     */
    public static void WAVEFORMATEX_fill(
            long waveformatex,
            AudioFormat audioFormat)
    {
        // Only signed linear PCM is representable here.
        if (!AudioFormat.LINEAR.equals(audioFormat.getEncoding()))
            throw new IllegalArgumentException("audioFormat.encoding");

        int channels = audioFormat.getChannels();

        if (channels == Format.NOT_SPECIFIED)
            throw new IllegalArgumentException("audioFormat.channels");

        int sampleRate = (int) audioFormat.getSampleRate();

        if (sampleRate == Format.NOT_SPECIFIED)
            throw new IllegalArgumentException("audioFormat.sampleRate");

        int sampleSizeInBits = audioFormat.getSampleSizeInBits();

        if (sampleSizeInBits == Format.NOT_SPECIFIED)
            throw new IllegalArgumentException("audioFormat.sampleSizeInBits");

        // Bytes per frame across all channels; also used to derive the
        // average bytes-per-second field below.
        char nBlockAlign = (char) ((channels * sampleSizeInBits) / 8);

        WASAPI.WAVEFORMATEX_fill(
                waveformatex,
                WAVE_FORMAT_PCM,
                (char) channels,
                sampleRate,
                sampleRate * nBlockAlign,
                nBlockAlign,
                (char) sampleSizeInBits,
                /* cbSize */ (char) 0);
    }
}
/* * Copyright (C) 2011 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.rokolabs.app.common.image; import java.io.BufferedInputStream; import java.io.BufferedWriter; import java.io.Closeable; import java.io.EOFException; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.FileWriter; import java.io.FilterOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.Reader; import java.io.StringWriter; import java.io.Writer; import java.lang.reflect.Array; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Map; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; /** ****************************************************************************** * Taken from the JB source code, can be found in: * libcore/luni/src/main/java/libcore/io/DiskLruCache.java or direct link: * https: * //android.googlesource.com/platform/libcore/+/android-4.1.1_r1/luni/src/ * main/java/libcore/io/DiskLruCache.java 
****************************************************************************** * * A cache that uses a bounded amount of space on a filesystem. Each cache entry * has a string key and a fixed number of values. Values are byte sequences, * accessible as streams or files. Each value must be between {@code 0} and * {@code Integer.MAX_VALUE} bytes in length. * * <p> * The cache stores its data in a directory on the filesystem. This directory * must be exclusive to the cache; the cache may delete or overwrite files from * its directory. It is an error for multiple processes to use the same cache * directory at the same time. * * <p> * This cache limits the number of bytes that it will store on the filesystem. * When the number of stored bytes exceeds the limit, the cache will remove * entries in the background until the limit is satisfied. The limit is not * strict: the cache may temporarily exceed it while waiting for files to be * deleted. The limit does not include filesystem overhead or the cache journal * so space-sensitive applications should set a conservative limit. * * <p> * Clients call {@link #edit} to create or update the values of an entry. An * entry may have only one editor at one time; if a value is not available to be * edited then {@link #edit} will return null. * <ul> * <li>When an entry is being <strong>created</strong> it is necessary to supply * a full set of values; the empty value should be used as a placeholder if * necessary. * <li>When an entry is being <strong>edited</strong>, it is not necessary to * supply data for every value; values default to their previous value. * </ul> * Every {@link #edit} call must be matched by a call to {@link Editor#commit} * or {@link Editor#abort}. Committing is atomic: a read observes the full set * of values as they were before or after the commit, but never a mix of values. * * <p> * Clients call {@link #get} to read a snapshot of an entry. 
/*
 * (Tail of the class description, continued from the Javadoc above.)
 *
 * The read will observe the value at the time that get() was called. Updates
 * and removals after the call do not impact ongoing reads.
 *
 * This class is tolerant of some I/O errors. If files are missing from the
 * filesystem, the corresponding entries will be dropped from the cache. If an
 * error occurs while writing a cache value, the edit will fail silently.
 * Callers should handle other problems by catching IOException and responding
 * appropriately.
 */
public final class DiskLruCache implements Closeable {
    static final String JOURNAL_FILE = "journal";
    static final String JOURNAL_FILE_TMP = "journal.tmp";
    static final String MAGIC = "libcore.io.DiskLruCache";
    static final String VERSION_1 = "1";
    static final long ANY_SEQUENCE_NUMBER = -1;
    private static final String CLEAN = "CLEAN";
    private static final String DIRTY = "DIRTY";
    private static final String REMOVE = "REMOVE";
    private static final String READ = "READ";
    private static final Charset UTF_8 = Charset.forName("UTF-8");
    private static final int IO_BUFFER_SIZE = 8 * 1024;

    /*
     * This cache uses a journal file named "journal". A typical journal file
     * looks like this: libcore.io.DiskLruCache 1 100 2
     *
     * CLEAN 3400330d1dfc7f3f7f4b8d4d803dfcf6 832 21054 DIRTY
     * 335c4c6028171cfddfbaae1a9c313c52 CLEAN 335c4c6028171cfddfbaae1a9c313c52
     * 3934 2342 REMOVE 335c4c6028171cfddfbaae1a9c313c52 DIRTY
     * 1ab96a171faeeee38496d8b330771a7a CLEAN 1ab96a171faeeee38496d8b330771a7a
     * 1600 234 READ 335c4c6028171cfddfbaae1a9c313c52 READ
     * 3400330d1dfc7f3f7f4b8d4d803dfcf6
     *
     * The first five lines of the journal form its header. They are the
     * constant string "libcore.io.DiskLruCache", the disk cache's version, the
     * application's version, the value count, and a blank line.
     *
     * Each of the subsequent lines in the file is a record of the state of a
     * cache entry. Each line contains space-separated values: a state, a key,
     * and optional state-specific values. o DIRTY lines track that an entry is
     * actively being created or updated. Every successful DIRTY action should
     * be followed by a CLEAN or REMOVE action. DIRTY lines without a matching
     * CLEAN or REMOVE indicate that temporary files may need to be deleted. o
     * CLEAN lines track a cache entry that has been successfully published and
     * may be read. A publish line is followed by the lengths of each of its
     * values. o READ lines track accesses for LRU. o REMOVE lines track entries
     * that have been deleted.
     *
     * The journal file is appended to as cache operations occur. The journal
     * may occasionally be compacted by dropping redundant lines. A temporary
     * file named "journal.tmp" will be used during compaction; that file should
     * be deleted if it exists when the cache is opened.
     */

    private final File directory;
    private final File journalFile;
    private final File journalFileTmp;
    private final int appVersion;
    private final long maxSize;
    private final int valueCount;
    private long size = 0;
    private Writer journalWriter;
    // accessOrder=true makes iteration order the LRU order, which trimToSize()
    // relies on when evicting the eldest entry.
    private final LinkedHashMap<String, Entry> lruEntries = new LinkedHashMap<String, Entry>(0, 0.75f, true);
    private int redundantOpCount;

    /**
     * To differentiate between old and current snapshots, each entry is given a
     * sequence number each time an edit is committed. A snapshot is stale if
     * its sequence number is not equal to its entry's sequence number.
     */
    private long nextSequenceNumber = 0;

    /* From java.util.Arrays */
    @SuppressWarnings("unchecked")
    private static <T> T[] copyOfRange(T[] original, int start, int end) {
        final int originalLength = original.length; // For exception priority compatibility.
        if (start > end) {
            throw new IllegalArgumentException();
        }
        if (start < 0 || start > originalLength) {
            throw new ArrayIndexOutOfBoundsException();
        }
        final int resultLength = end - start;
        final int copyLength = Math.min(resultLength, originalLength - start);
        final T[] result = (T[]) Array.newInstance(original.getClass().getComponentType(), resultLength);
        System.arraycopy(original, start, result, 0, copyLength);
        return result;
    }

    /**
     * Returns the remainder of 'reader' as a string, closing it when done.
     */
    public static String readFully(Reader reader) throws IOException {
        try {
            StringWriter writer = new StringWriter();
            char[] buffer = new char[1024];
            int count;
            while ((count = reader.read(buffer)) != -1) {
                writer.write(buffer, 0, count);
            }
            return writer.toString();
        } finally {
            reader.close();
        }
    }

    /**
     * Returns the ASCII characters up to but not including the next "\r\n", or
     * "\n".
     *
     * @throws java.io.EOFException
     *             if the stream is exhausted before the next newline character.
     */
    public static String readAsciiLine(InputStream in) throws IOException {
        // TODO: support UTF-8 here instead
        StringBuilder result = new StringBuilder(80);
        while (true) {
            int c = in.read();
            if (c == -1) {
                throw new EOFException();
            } else if (c == '\n') {
                break;
            }
            result.append((char) c);
        }
        // Strip a trailing '\r' so both "\r\n" and "\n" terminators work.
        int length = result.length();
        if (length > 0 && result.charAt(length - 1) == '\r') {
            result.setLength(length - 1);
        }
        return result.toString();
    }

    /**
     * Closes 'closeable', ignoring any checked exceptions. Does nothing if
     * 'closeable' is null.
     */
    public static void closeQuietly(Closeable closeable) {
        if (closeable != null) {
            try {
                closeable.close();
            } catch (RuntimeException rethrown) {
                throw rethrown;
            } catch (Exception ignored) {
                // Checked close() failures are deliberately swallowed.
            }
        }
    }

    /**
     * Recursively delete everything in {@code dir}.
     */
    // TODO: this should specify paths as Strings rather than as Files
    public static void deleteContents(File dir) throws IOException {
        File[] files = dir.listFiles();
        if (files == null) {
            // Not a directory (or I/O error listing it); nothing to delete.
            // throw new IllegalArgumentException("not a directory: " + dir);
            return;
        }
        for (File file : files) {
            if (file.isDirectory()) {
                deleteContents(file);
            }
            if (!file.delete()) {
                throw new IOException("failed to delete file: " + file);
            }
        }
    }

    /** This cache uses a single background thread to evict entries. */
    private final ExecutorService executorService = new ThreadPoolExecutor(0, 1, 60L, TimeUnit.SECONDS,
            new LinkedBlockingQueue<Runnable>());
    private final Callable<Void> cleanupCallable = new Callable<Void>() {
        @Override
        public Void call() throws Exception {
            synchronized (DiskLruCache.this) {
                if (journalWriter == null) {
                    return null; // closed
                }
                trimToSize();
                if (journalRebuildRequired()) {
                    rebuildJournal();
                    redundantOpCount = 0;
                }
            }
            return null;
        }
    };

    private DiskLruCache(File directory, int appVersion, int valueCount, long maxSize) {
        this.directory = directory;
        this.appVersion = appVersion;
        this.journalFile = new File(directory, JOURNAL_FILE);
        this.journalFileTmp = new File(directory, JOURNAL_FILE_TMP);
        this.valueCount = valueCount;
        this.maxSize = maxSize;
    }

    /**
     * Opens the cache in {@code directory}, creating a cache if none exists
     * there.
     *
     * @param directory
     *            a writable directory
     * @param appVersion
     *            the application's version; a mismatch with the journal header
     *            causes the cache to be rebuilt from scratch
     * @param valueCount
     *            the number of values per cache entry. Must be positive.
     * @param maxSize
     *            the maximum number of bytes this cache should use to store
     * @throws IOException
     *             if reading or writing the cache directory fails
     */
    public static DiskLruCache open(File directory, int appVersion, int valueCount, long maxSize) throws IOException {
        if (maxSize <= 0) {
            throw new IllegalArgumentException("maxSize <= 0");
        }
        if (valueCount <= 0) {
            throw new IllegalArgumentException("valueCount <= 0");
        }

        // prefer to pick up where we left off
        DiskLruCache cache = new DiskLruCache(directory, appVersion, valueCount, maxSize);
        if (cache.journalFile.exists()) {
            try {
                cache.readJournal();
                cache.processJournal();
                cache.journalWriter = new BufferedWriter(new FileWriter(cache.journalFile, true), IO_BUFFER_SIZE);
                return cache;
            } catch (IOException journalIsCorrupt) {
                // System.logW("DiskLruCache " + directory + " is corrupt: "
                // + journalIsCorrupt.getMessage() + ", removing");
                cache.delete();
            }
        }

        // create a new empty cache
        directory.mkdirs();
        cache = new DiskLruCache(directory, appVersion, valueCount, maxSize);
        cache.rebuildJournal();
        return cache;
    }

    private void readJournal() throws IOException {
        InputStream in = new BufferedInputStream(new FileInputStream(journalFile), IO_BUFFER_SIZE);
        try {
            String magic = readAsciiLine(in);
            String version = readAsciiLine(in);
            String appVersionString = readAsciiLine(in);
            String valueCountString = readAsciiLine(in);
            String blank = readAsciiLine(in);
            if (!MAGIC.equals(magic) || !VERSION_1.equals(version)
                    || !Integer.toString(appVersion).equals(appVersionString)
                    || !Integer.toString(valueCount).equals(valueCountString) || !"".equals(blank)) {
                // FIX: the original message omitted appVersionString even
                // though an app-version mismatch is one of the triggers,
                // making such failures impossible to diagnose from the log.
                throw new IOException("unexpected journal header: [" + magic + ", " + version + ", "
                        + appVersionString + ", " + valueCountString + ", " + blank + "]");
            }

            while (true) {
                try {
                    readJournalLine(readAsciiLine(in));
                } catch (EOFException endOfJournal) {
                    break;
                }
            }
        } finally {
            closeQuietly(in);
        }
    }

    private void readJournalLine(String line) throws IOException {
        String[] parts = line.split(" ");
        if (parts.length < 2) {
            throw new IOException("unexpected journal line: " + line);
        }

        String key = parts[1];
        if (parts[0].equals(REMOVE) && parts.length == 2) {
            lruEntries.remove(key);
            return;
        }

        Entry entry = lruEntries.get(key);
        if (entry == null) {
            entry = new Entry(key);
            lruEntries.put(key, entry);
        }

        if (parts[0].equals(CLEAN) && parts.length == 2 + valueCount) {
            entry.readable = true;
            entry.currentEditor = null;
            entry.setLengths(copyOfRange(parts, 2, parts.length));
        } else if (parts[0].equals(DIRTY) && parts.length == 2) {
            entry.currentEditor = new Editor(entry);
        } else if (parts[0].equals(READ) && parts.length == 2) {
            // this work was already done by calling lruEntries.get()
        } else {
            throw new IOException("unexpected journal line: " + line);
        }
    }

    /**
     * Computes the initial size and collects garbage as a part of opening the
     * cache. Dirty entries are assumed to be inconsistent and will be deleted.
     */
    private void processJournal() throws IOException {
        deleteIfExists(journalFileTmp);
        for (Iterator<Entry> i = lruEntries.values().iterator(); i.hasNext();) {
            Entry entry = i.next();
            if (entry.currentEditor == null) {
                for (int t = 0; t < valueCount; t++) {
                    size += entry.lengths[t];
                }
            } else {
                entry.currentEditor = null;
                for (int t = 0; t < valueCount; t++) {
                    deleteIfExists(entry.getCleanFile(t));
                    deleteIfExists(entry.getDirtyFile(t));
                }
                i.remove();
            }
        }
    }

    /**
     * Creates a new journal that omits redundant information. This replaces the
     * current journal if it exists.
     */
    private synchronized void rebuildJournal() throws IOException {
        if (journalWriter != null) {
            journalWriter.close();
        }

        Writer writer = new BufferedWriter(new FileWriter(journalFileTmp), IO_BUFFER_SIZE);
        writer.write(MAGIC);
        writer.write("\n");
        writer.write(VERSION_1);
        writer.write("\n");
        writer.write(Integer.toString(appVersion));
        writer.write("\n");
        writer.write(Integer.toString(valueCount));
        writer.write("\n");
        writer.write("\n");

        for (Entry entry : lruEntries.values()) {
            if (entry.currentEditor != null) {
                writer.write(DIRTY + ' ' + entry.key + '\n');
            } else {
                writer.write(CLEAN + ' ' + entry.key + entry.getLengths() + '\n');
            }
        }

        writer.close();
        journalFileTmp.renameTo(journalFile);
        journalWriter = new BufferedWriter(new FileWriter(journalFile, true), IO_BUFFER_SIZE);
    }

    private static void deleteIfExists(File file) throws IOException {
        // try {
        // Libcore.os.remove(file.getPath());
        // } catch (ErrnoException errnoException) {
        // if (errnoException.errno != OsConstants.ENOENT) {
        // throw errnoException.rethrowAsIOException();
        // }
        // }
        if (file.exists() && !file.delete()) {
            throw new IOException();
        }
    }

    /**
     * Returns the clean file for {@code key} at {@code index}, or null if the
     * entry does not exist or has never been published. Unlike
     * {@link #get(String)}, this does not record a READ journal entry.
     */
    public synchronized File get(String key, int index) {
        checkNotClosed();
        validateKey(key);
        Entry entry = lruEntries.get(key);
        if (entry == null) {
            return null;
        }
        if (!entry.readable) {
            return null;
        }
        return entry.getCleanFile(index);
    }

    /**
     * Returns true if the entry named {@code key} is readable, all of its
     * clean files exist, and the file at {@code index} is non-empty.
     */
    public synchronized boolean has(String key, int index) {
        checkNotClosed();
        validateKey(key);
        Entry entry = lruEntries.get(key);
        if (entry == null) {
            return false;
        }
        if (!entry.readable) {
            return false;
        }
        boolean hasFile = false;
        for (int i = 0; i < valueCount; i++) {
            File f = entry.getCleanFile(i);
            if (!f.exists())
                return false;
            if (i == index && f.length() > 0)
                hasFile = true;
        }
        return hasFile;
    }

    /**
     * Returns a snapshot of the entry named {@code key}, or null if it doesn't
     * exist or is not currently readable. If a value is returned, it is moved
     * to the head of the LRU queue.
     */
    public synchronized Snapshot get(String key) throws IOException {
        checkNotClosed();
        validateKey(key);
        Entry entry = lruEntries.get(key);
        if (entry == null) {
            return null;
        }

        if (!entry.readable) {
            return null;
        }

        /*
         * Open all streams eagerly to guarantee that we see a single published
         * snapshot. If we opened streams lazily then the streams could come
         * from different edits.
         */
        InputStream[] ins = new InputStream[valueCount];
        try {
            for (int i = 0; i < valueCount; i++) {
                ins[i] = new FileInputStream(entry.getCleanFile(i));
            }
        } catch (FileNotFoundException e) {
            // a file must have been deleted manually!
            return null;
        }

        redundantOpCount++;
        journalWriter.append(READ + ' ' + key + '\n');
        if (journalRebuildRequired()) {
            executorService.submit(cleanupCallable);
        }

        return new Snapshot(key, entry.sequenceNumber, ins);
    }

    /**
     * Returns an editor for the entry named {@code key}, or null if another
     * edit is in progress.
     */
    public Editor edit(String key) throws IOException {
        return edit(key, ANY_SEQUENCE_NUMBER);
    }

    private synchronized Editor edit(String key, long expectedSequenceNumber) throws IOException {
        checkNotClosed();
        validateKey(key);
        Entry entry = lruEntries.get(key);
        if (expectedSequenceNumber != ANY_SEQUENCE_NUMBER
                && (entry == null || entry.sequenceNumber != expectedSequenceNumber)) {
            return null; // snapshot is stale
        }
        if (entry == null) {
            entry = new Entry(key);
            lruEntries.put(key, entry);
        } else if (entry.currentEditor != null) {
            return null; // another edit is in progress
        }

        Editor editor = new Editor(entry);
        entry.currentEditor = editor;

        // flush the journal before creating files to prevent file leaks
        journalWriter.write(DIRTY + ' ' + key + '\n');
        journalWriter.flush();
        return editor;
    }

    /**
     * Returns the directory where this cache stores its data.
     */
    public File getDirectory() {
        return directory;
    }

    /**
     * Returns the maximum number of bytes that this cache should use to store
     * its data.
     */
    public long maxSize() {
        return maxSize;
    }

    /**
     * Returns the number of bytes currently being used to store the values in
     * this cache. This may be greater than the max size if a background
     * deletion is pending.
     */
    public synchronized long size() {
        return size;
    }

    private synchronized void completeEdit(Editor editor, boolean success) throws IOException {
        Entry entry = editor.entry;
        if (entry.currentEditor != editor) {
            throw new IllegalStateException();
        }

        // if this edit is creating the entry for the first time, every index must have a value
        if (success && !entry.readable) {
            for (int i = 0; i < valueCount; i++) {
                if (!entry.getDirtyFile(i).exists()) {
                    editor.abort();
                    throw new IllegalStateException("edit didn't create file " + i);
                }
            }
        }

        for (int i = 0; i < valueCount; i++) {
            File dirty = entry.getDirtyFile(i);
            if (success) {
                if (dirty.exists()) {
                    File clean = entry.getCleanFile(i);
                    dirty.renameTo(clean);
                    long oldLength = entry.lengths[i];
                    long newLength = clean.length();
                    entry.lengths[i] = newLength;
                    size = size - oldLength + newLength;
                }
            } else {
                deleteIfExists(dirty);
            }
        }

        redundantOpCount++;
        entry.currentEditor = null;
        // An already-published entry stays readable even when this edit failed.
        // (Was the bitwise `|`; `||` short-circuits and is the boolean idiom.)
        if (entry.readable || success) {
            entry.readable = true;
            journalWriter.write(CLEAN + ' ' + entry.key + entry.getLengths() + '\n');
            if (success) {
                entry.sequenceNumber = nextSequenceNumber++;
            }
        } else {
            lruEntries.remove(entry.key);
            journalWriter.write(REMOVE + ' ' + entry.key + '\n');
        }

        if (size > maxSize || journalRebuildRequired()) {
            executorService.submit(cleanupCallable);
        }
    }

    /**
     * We only rebuild the journal when it will halve the size of the journal
     * and eliminate at least 2000 ops.
     */
    private boolean journalRebuildRequired() {
        final int REDUNDANT_OP_COMPACT_THRESHOLD = 2000;
        return redundantOpCount >= REDUNDANT_OP_COMPACT_THRESHOLD && redundantOpCount >= lruEntries.size();
    }

    /**
     * Drops the entry for {@code key} if it exists and can be removed. Entries
     * actively being edited cannot be removed.
     *
     * @return true if an entry was removed.
     */
    public synchronized boolean remove(String key) throws IOException {
        checkNotClosed();
        validateKey(key);
        Entry entry = lruEntries.get(key);
        if (entry == null || entry.currentEditor != null) {
            return false;
        }

        for (int i = 0; i < valueCount; i++) {
            File file = entry.getCleanFile(i);
            if (!file.delete()) {
                throw new IOException("failed to delete " + file);
            }
            size -= entry.lengths[i];
            entry.lengths[i] = 0;
        }

        redundantOpCount++;
        journalWriter.append(REMOVE + ' ' + key + '\n');
        lruEntries.remove(key);

        if (journalRebuildRequired()) {
            executorService.submit(cleanupCallable);
        }

        return true;
    }

    /**
     * Returns true if this cache has been closed.
     */
    public boolean isClosed() {
        return journalWriter == null;
    }

    private void checkNotClosed() {
        if (journalWriter == null) {
            throw new IllegalStateException("cache is closed");
        }
    }

    /**
     * Force buffered operations to the filesystem.
     */
    public synchronized void flush() throws IOException {
        checkNotClosed();
        trimToSize();
        journalWriter.flush();
    }

    /**
     * Closes this cache. Stored values will remain on the filesystem.
     */
    public synchronized void close() throws IOException {
        if (journalWriter == null) {
            return; // already closed
        }
        for (Entry entry : new ArrayList<Entry>(lruEntries.values())) {
            if (entry.currentEditor != null) {
                entry.currentEditor.abort();
            }
        }
        trimToSize();
        journalWriter.close();
        journalWriter = null;
    }

    private void trimToSize() throws IOException {
        while (size > maxSize) {
            // Map.Entry<String, Entry> toEvict = lruEntries.eldest();
            // Access-ordered LinkedHashMap: the first entry is the eldest.
            final Map.Entry<String, Entry> toEvict = lruEntries.entrySet().iterator().next();
            remove(toEvict.getKey());
        }
    }

    /**
     * Closes the cache and deletes all of its stored values. This will delete
     * all files in the cache directory including files that weren't created by
     * the cache.
     */
    public void delete() throws IOException {
        close();
        deleteContents(directory);
    }

    private void validateKey(String key) {
        if (key.contains(" ") || key.contains("\n") || key.contains("\r")) {
            throw new IllegalArgumentException("keys must not contain spaces or newlines: \"" + key + "\"");
        }
    }

    private static String inputStreamToString(InputStream in) throws IOException {
        return readFully(new InputStreamReader(in, UTF_8));
    }

    /**
     * A snapshot of the values for an entry.
     */
    public final class Snapshot implements Closeable {
        private final String key;
        private final long sequenceNumber;
        private final InputStream[] ins;

        private Snapshot(String key, long sequenceNumber, InputStream[] ins) {
            this.key = key;
            this.sequenceNumber = sequenceNumber;
            this.ins = ins;
        }

        /**
         * Returns an editor for this snapshot's entry, or null if either the
         * entry has changed since this snapshot was created or if another edit
         * is in progress.
         */
        public Editor edit() throws IOException {
            return DiskLruCache.this.edit(key, sequenceNumber);
        }

        /**
         * Returns the unbuffered stream with the value for {@code index}.
         */
        public InputStream getInputStream(int index) {
            return ins[index];
        }

        /**
         * Returns the string value for {@code index}.
         */
        public String getString(int index) throws IOException {
            return inputStreamToString(getInputStream(index));
        }

        @Override
        public void close() {
            for (InputStream in : ins) {
                closeQuietly(in);
            }
        }
    }

    /**
     * Edits the values for an entry.
     */
    public final class Editor {
        private final Entry entry;
        private boolean hasErrors;

        private Editor(Entry entry) {
            this.entry = entry;
        }

        /**
         * Returns an unbuffered input stream to read the last committed value,
         * or null if no value has been committed.
         */
        public InputStream newInputStream(int index) throws IOException {
            synchronized (DiskLruCache.this) {
                if (entry.currentEditor != this) {
                    throw new IllegalStateException();
                }
                if (!entry.readable) {
                    return null;
                }
                return new FileInputStream(entry.getCleanFile(index));
            }
        }

        /**
         * Returns the last committed value as a string, or null if no value has
         * been committed.
         */
        public String getString(int index) throws IOException {
            InputStream in = newInputStream(index);
            return in != null ? inputStreamToString(in) : null;
        }

        /**
         * Returns a new unbuffered output stream to write the value at
         * {@code index}. If the underlying output stream encounters errors when
         * writing to the filesystem, this edit will be aborted when
         * {@link #commit} is called. The returned output stream does not throw
         * IOExceptions.
         */
        public OutputStream newOutputStream(int index) throws IOException {
            synchronized (DiskLruCache.this) {
                if (entry.currentEditor != this) {
                    throw new IllegalStateException();
                }
                return new FaultHidingOutputStream(new FileOutputStream(entry.getDirtyFile(index)));
            }
        }

        /**
         * Sets the value at {@code index} to {@code value}.
         */
        public void set(int index, String value) throws IOException {
            Writer writer = null;
            try {
                writer = new OutputStreamWriter(newOutputStream(index), UTF_8);
                writer.write(value);
            } finally {
                closeQuietly(writer);
            }
        }

        /**
         * Commits this edit so it is visible to readers. This releases the edit
         * lock so another edit may be started on the same key.
         */
        public void commit() throws IOException {
            if (hasErrors) {
                completeEdit(this, false);
                remove(entry.key); // the previous entry is stale
            } else {
                completeEdit(this, true);
            }
        }

        /**
         * Aborts this edit. This releases the edit lock so another edit may be
         * started on the same key.
         */
        public void abort() throws IOException {
            completeEdit(this, false);
        }

        /** Swallows write failures and flags them so commit() aborts the edit. */
        private class FaultHidingOutputStream extends FilterOutputStream {
            private FaultHidingOutputStream(OutputStream out) {
                super(out);
            }

            @Override
            public void write(int oneByte) {
                try {
                    out.write(oneByte);
                } catch (IOException e) {
                    hasErrors = true;
                }
            }

            @Override
            public void write(byte[] buffer, int offset, int length) {
                try {
                    out.write(buffer, offset, length);
                } catch (IOException e) {
                    hasErrors = true;
                }
            }

            @Override
            public void close() {
                try {
                    out.close();
                } catch (IOException e) {
                    hasErrors = true;
                }
            }

            @Override
            public void flush() {
                try {
                    out.flush();
                } catch (IOException e) {
                    hasErrors = true;
                }
            }
        }
    }

    private final class Entry {
        private final String key;

        /** Lengths of this entry's files. */
        private final long[] lengths;

        /** True if this entry has ever been published */
        private boolean readable;

        /** The ongoing edit or null if this entry is not being edited. */
        private Editor currentEditor;

        /**
         * The sequence number of the most recently committed edit to this
         * entry.
         */
        private long sequenceNumber;

        private Entry(String key) {
            this.key = key;
            this.lengths = new long[valueCount];
        }

        public String getLengths() throws IOException {
            StringBuilder result = new StringBuilder();
            for (long size : lengths) {
                result.append(' ').append(size);
            }
            return result.toString();
        }

        /**
         * Set lengths using decimal numbers like "10123".
         */
        private void setLengths(String[] strings) throws IOException {
            if (strings.length != valueCount) {
                throw invalidLengths(strings);
            }

            try {
                for (int i = 0; i < strings.length; i++) {
                    lengths[i] = Long.parseLong(strings[i]);
                }
            } catch (NumberFormatException e) {
                throw invalidLengths(strings);
            }
        }

        private IOException invalidLengths(String[] strings) throws IOException {
            throw new IOException("unexpected journal line: " + Arrays.toString(strings));
        }

        public File getCleanFile(int i) {
            return new File(directory, key + "." + i);
        }

        public File getDirtyFile(int i) {
            return new File(directory, key + "." + i + ".tmp");
        }
    }
}
package com.sequenceiq.redbeams.api.endpoint.v4.databaseserver.requests; import static com.sequenceiq.cloudbreak.validation.ValidCrn.Effect.DENY; import java.util.HashMap; import java.util.Map; import javax.validation.Valid; import javax.validation.constraints.NotNull; import javax.validation.constraints.Pattern; import javax.validation.constraints.Size; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.sequenceiq.cloudbreak.auth.crn.CrnResourceDescriptor; import com.sequenceiq.cloudbreak.common.mappable.Mappable; import com.sequenceiq.cloudbreak.common.mappable.ProviderParametersBase; import com.sequenceiq.cloudbreak.validation.ValidCrn; import com.sequenceiq.redbeams.api.endpoint.v4.stacks.DatabaseServerV4StackRequest; import com.sequenceiq.redbeams.api.endpoint.v4.stacks.NetworkV4StackRequest; import com.sequenceiq.redbeams.api.endpoint.v4.stacks.aws.AwsDBStackV4Parameters; import com.sequenceiq.redbeams.api.endpoint.v4.stacks.azure.AzureDBStackV4Parameters; import com.sequenceiq.redbeams.api.endpoint.v4.stacks.gcp.GcpDBStackV4Parameters; import com.sequenceiq.redbeams.doc.ModelDescriptions; import com.sequenceiq.redbeams.doc.ModelDescriptions.DBStack; import com.sequenceiq.redbeams.doc.ModelDescriptions.DatabaseServer; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; @ApiModel(description = ModelDescriptions.ALLOCATE_DATABASE_SERVER_REQUEST) @JsonIgnoreProperties(ignoreUnknown = true) public class AllocateDatabaseServerV4Request extends ProviderParametersBase { public static final int RDS_NAME_MAX_LENGTH = 40; @Size(max = RDS_NAME_MAX_LENGTH, min = 5, message = "The length of the name must be between 5 to " + RDS_NAME_MAX_LENGTH + " inclusive") @Pattern(regexp = "(^[a-z][-a-z0-9]*[a-z0-9]$)", message = "The name can only contain lowercase alphanumeric characters and hyphens and must start with an alphanumeric character") @ApiModelProperty(value = DBStack.STACK_NAME) private String name; @NotNull 
@ValidCrn(resource = CrnResourceDescriptor.ENVIRONMENT) @ApiModelProperty(value = DatabaseServer.ENVIRONMENT_CRN, required = true) private String environmentCrn; @NotNull @ValidCrn(resource = { CrnResourceDescriptor.ENVIRONMENT }, effect = DENY) @ApiModelProperty(value = DatabaseServer.CLUSTER_CRN, required = true) private String clusterCrn; @Valid @ApiModelProperty(DBStack.NETWORK) private NetworkV4StackRequest network; @NotNull @Valid @ApiModelProperty(value = DBStack.DATABASE_SERVER, required = true) private DatabaseServerV4StackRequest databaseServer; @ApiModelProperty(DBStack.AWS_PARAMETERS) private AwsDBStackV4Parameters aws; @ApiModelProperty(DBStack.AZURE_PARAMETERS) private AzureDBStackV4Parameters azure; @ApiModelProperty(DBStack.AZURE_PARAMETERS) private GcpDBStackV4Parameters gcp; @ApiModelProperty(DatabaseServer.SSL_CONFIG) private SslConfigV4Request sslConfig; @ApiModelProperty(DatabaseServer.TAGS) private Map<String, String> tags = new HashMap<>(); public String getName() { return name; } public void setName(String name) { this.name = name; } public String getEnvironmentCrn() { return environmentCrn; } public void setEnvironmentCrn(String environmentCrn) { this.environmentCrn = environmentCrn; } public NetworkV4StackRequest getNetwork() { return network; } public void setNetwork(NetworkV4StackRequest network) { this.network = network; } public DatabaseServerV4StackRequest getDatabaseServer() { return databaseServer; } public void setDatabaseServer(DatabaseServerV4StackRequest databaseServer) { this.databaseServer = databaseServer; } public String getClusterCrn() { return clusterCrn; } public void setClusterCrn(String clusterCrn) { this.clusterCrn = clusterCrn; } public SslConfigV4Request getSslConfig() { return sslConfig; } public void setSslConfig(SslConfigV4Request sslConfig) { this.sslConfig = sslConfig; } @Override public AwsDBStackV4Parameters createAws() { if (aws == null) { aws = new AwsDBStackV4Parameters(); } return aws; } public void 
setAws(AwsDBStackV4Parameters aws) { this.aws = aws; } @Override public Mappable createGcp() { if (gcp == null) { gcp = new GcpDBStackV4Parameters(); } return gcp; } public GcpDBStackV4Parameters getGcp() { return gcp; } public void setGcp(GcpDBStackV4Parameters gcp) { this.gcp = gcp; } @Override public Mappable createAzure() { if (azure == null) { azure = new AzureDBStackV4Parameters(); } return azure; } public void setAzure(AzureDBStackV4Parameters azure) { this.azure = azure; } @Override public Mappable createYarn() { return null; } @Override public Mappable createMock() { if (aws == null) { aws = new AwsDBStackV4Parameters(); } return aws; } public Map<String, String> getTags() { return tags; } public void setTags(Map<String, String> tags) { this.tags = tags; } public AwsDBStackV4Parameters getAws() { return aws; } @Override public String toString() { return "AllocateDatabaseServerV4Request{" + "name='" + name + '\'' + ", environmentCrn='" + environmentCrn + '\'' + ", clusterCrn='" + clusterCrn + '\'' + ", network=" + network + ", databaseServer=" + databaseServer + ", aws=" + aws + ", azure=" + azure + ", gcp=" + gcp + ", sslConfig=" + sslConfig + ", tags=" + tags + '}'; } }
/*
 * Copyright 2021 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.dialogflow.v2.stub;

import static com.google.cloud.dialogflow.v2.ConversationProfilesClient.ListConversationProfilesPagedResponse;

import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.grpc.GrpcCallSettings;
import com.google.api.gax.grpc.GrpcStubCallableFactory;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.dialogflow.v2.ConversationProfile;
import com.google.cloud.dialogflow.v2.CreateConversationProfileRequest;
import com.google.cloud.dialogflow.v2.DeleteConversationProfileRequest;
import com.google.cloud.dialogflow.v2.GetConversationProfileRequest;
import com.google.cloud.dialogflow.v2.ListConversationProfilesRequest;
import com.google.cloud.dialogflow.v2.ListConversationProfilesResponse;
import com.google.cloud.dialogflow.v2.UpdateConversationProfileRequest;
import com.google.common.collect.ImmutableMap;
import com.google.longrunning.stub.GrpcOperationsStub;
import com.google.protobuf.Empty;
import io.grpc.MethodDescriptor;
import io.grpc.protobuf.ProtoUtils;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;

// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
 * gRPC stub implementation for the ConversationProfiles service API.
 *
 * <p>This class is for advanced usage and reflects the underlying API directly.
 */
@Generated("by gapic-generator-java")
public class GrpcConversationProfilesStub extends ConversationProfilesStub {
  // One static MethodDescriptor per RPC: fully-qualified method name plus protobuf
  // request/response marshallers. All five RPCs are unary.
  private static final MethodDescriptor<
          ListConversationProfilesRequest, ListConversationProfilesResponse>
      listConversationProfilesMethodDescriptor =
          MethodDescriptor
              .<ListConversationProfilesRequest, ListConversationProfilesResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.dialogflow.v2.ConversationProfiles/ListConversationProfiles")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(ListConversationProfilesRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(ListConversationProfilesResponse.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<GetConversationProfileRequest, ConversationProfile>
      getConversationProfileMethodDescriptor =
          MethodDescriptor.<GetConversationProfileRequest, ConversationProfile>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.dialogflow.v2.ConversationProfiles/GetConversationProfile")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(GetConversationProfileRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(ConversationProfile.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<CreateConversationProfileRequest, ConversationProfile>
      createConversationProfileMethodDescriptor =
          MethodDescriptor.<CreateConversationProfileRequest, ConversationProfile>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.dialogflow.v2.ConversationProfiles/CreateConversationProfile")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(CreateConversationProfileRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(ConversationProfile.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<UpdateConversationProfileRequest, ConversationProfile>
      updateConversationProfileMethodDescriptor =
          MethodDescriptor.<UpdateConversationProfileRequest, ConversationProfile>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.dialogflow.v2.ConversationProfiles/UpdateConversationProfile")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(UpdateConversationProfileRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(ConversationProfile.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<DeleteConversationProfileRequest, Empty>
      deleteConversationProfileMethodDescriptor =
          MethodDescriptor.<DeleteConversationProfileRequest, Empty>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.dialogflow.v2.ConversationProfiles/DeleteConversationProfile")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(DeleteConversationProfileRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Empty.getDefaultInstance()))
              .build();

  // Callables wired up in the constructor from the descriptors above.
  private final UnaryCallable<ListConversationProfilesRequest, ListConversationProfilesResponse>
      listConversationProfilesCallable;
  private final UnaryCallable<
          ListConversationProfilesRequest, ListConversationProfilesPagedResponse>
      listConversationProfilesPagedCallable;
  private final UnaryCallable<GetConversationProfileRequest, ConversationProfile>
      getConversationProfileCallable;
  private final UnaryCallable<CreateConversationProfileRequest, ConversationProfile>
      createConversationProfileCallable;
  private final UnaryCallable<UpdateConversationProfileRequest, ConversationProfile>
      updateConversationProfileCallable;
  private final UnaryCallable<DeleteConversationProfileRequest, Empty>
      deleteConversationProfileCallable;

  private final BackgroundResource backgroundResources;
  private final GrpcOperationsStub operationsStub;
  private final GrpcStubCallableFactory callableFactory;

  /** Creates a stub from the given settings, building a fresh ClientContext from them. */
  public static final GrpcConversationProfilesStub create(ConversationProfilesStubSettings settings)
      throws IOException {
    return new GrpcConversationProfilesStub(settings, ClientContext.create(settings));
  }

  /** Creates a stub with default settings bound to an existing ClientContext. */
  public static final GrpcConversationProfilesStub create(ClientContext clientContext)
      throws IOException {
    return new GrpcConversationProfilesStub(
        ConversationProfilesStubSettings.newBuilder().build(), clientContext);
  }

  /** Creates a stub with default settings, an existing ClientContext and a custom factory. */
  public static final GrpcConversationProfilesStub create(
      ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException {
    return new GrpcConversationProfilesStub(
        ConversationProfilesStubSettings.newBuilder().build(), clientContext, callableFactory);
  }

  /**
   * Constructs an instance of GrpcConversationProfilesStub, using the given settings. This is
   * protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected GrpcConversationProfilesStub(
      ConversationProfilesStubSettings settings, ClientContext clientContext) throws IOException {
    this(settings, clientContext, new GrpcConversationProfilesCallableFactory());
  }

  /**
   * Constructs an instance of GrpcConversationProfilesStub, using the given settings. This is
   * protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected GrpcConversationProfilesStub(
      ConversationProfilesStubSettings settings,
      ClientContext clientContext,
      GrpcStubCallableFactory callableFactory)
      throws IOException {
    this.callableFactory = callableFactory;
    this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory);

    // Each transport-settings object pairs a method descriptor with a params extractor;
    // the extractor populates request routing headers (e.g. "parent", "name") from the request.
    GrpcCallSettings<ListConversationProfilesRequest, ListConversationProfilesResponse>
        listConversationProfilesTransportSettings =
            GrpcCallSettings
                .<ListConversationProfilesRequest, ListConversationProfilesResponse>newBuilder()
                .setMethodDescriptor(listConversationProfilesMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                      params.put("parent", String.valueOf(request.getParent()));
                      return params.build();
                    })
                .build();
    GrpcCallSettings<GetConversationProfileRequest, ConversationProfile>
        getConversationProfileTransportSettings =
            GrpcCallSettings.<GetConversationProfileRequest, ConversationProfile>newBuilder()
                .setMethodDescriptor(getConversationProfileMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                      params.put("name", String.valueOf(request.getName()));
                      return params.build();
                    })
                .build();
    GrpcCallSettings<CreateConversationProfileRequest, ConversationProfile>
        createConversationProfileTransportSettings =
            GrpcCallSettings.<CreateConversationProfileRequest, ConversationProfile>newBuilder()
                .setMethodDescriptor(createConversationProfileMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                      params.put("parent", String.valueOf(request.getParent()));
                      return params.build();
                    })
                .build();
    GrpcCallSettings<UpdateConversationProfileRequest, ConversationProfile>
        updateConversationProfileTransportSettings =
            GrpcCallSettings.<UpdateConversationProfileRequest, ConversationProfile>newBuilder()
                .setMethodDescriptor(updateConversationProfileMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                      params.put(
                          "conversation_profile.name",
                          String.valueOf(request.getConversationProfile().getName()));
                      return params.build();
                    })
                .build();
    GrpcCallSettings<DeleteConversationProfileRequest, Empty>
        deleteConversationProfileTransportSettings =
            GrpcCallSettings.<DeleteConversationProfileRequest, Empty>newBuilder()
                .setMethodDescriptor(deleteConversationProfileMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                      params.put("name", String.valueOf(request.getName()));
                      return params.build();
                    })
                .build();

    this.listConversationProfilesCallable =
        callableFactory.createUnaryCallable(
            listConversationProfilesTransportSettings,
            settings.listConversationProfilesSettings(),
            clientContext);
    this.listConversationProfilesPagedCallable =
        callableFactory.createPagedCallable(
            listConversationProfilesTransportSettings,
            settings.listConversationProfilesSettings(),
            clientContext);
    this.getConversationProfileCallable =
        callableFactory.createUnaryCallable(
            getConversationProfileTransportSettings,
            settings.getConversationProfileSettings(),
            clientContext);
    this.createConversationProfileCallable =
        callableFactory.createUnaryCallable(
            createConversationProfileTransportSettings,
            settings.createConversationProfileSettings(),
            clientContext);
    this.updateConversationProfileCallable =
        callableFactory.createUnaryCallable(
            updateConversationProfileTransportSettings,
            settings.updateConversationProfileSettings(),
            clientContext);
    this.deleteConversationProfileCallable =
        callableFactory.createUnaryCallable(
            deleteConversationProfileTransportSettings,
            settings.deleteConversationProfileSettings(),
            clientContext);

    // Aggregates all channel/executor resources so lifecycle calls below fan out to each.
    this.backgroundResources =
        new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }

  public GrpcOperationsStub getOperationsStub() {
    return operationsStub;
  }

  @Override
  public UnaryCallable<ListConversationProfilesRequest, ListConversationProfilesResponse>
      listConversationProfilesCallable() {
    return listConversationProfilesCallable;
  }

  @Override
  public UnaryCallable<ListConversationProfilesRequest, ListConversationProfilesPagedResponse>
      listConversationProfilesPagedCallable() {
    return listConversationProfilesPagedCallable;
  }

  @Override
  public UnaryCallable<GetConversationProfileRequest, ConversationProfile>
      getConversationProfileCallable() {
    return getConversationProfileCallable;
  }

  @Override
  public UnaryCallable<CreateConversationProfileRequest, ConversationProfile>
      createConversationProfileCallable() {
    return createConversationProfileCallable;
  }

  @Override
  public UnaryCallable<UpdateConversationProfileRequest, ConversationProfile>
      updateConversationProfileCallable() {
    return updateConversationProfileCallable;
  }

  @Override
  public UnaryCallable<DeleteConversationProfileRequest, Empty>
      deleteConversationProfileCallable() {
    return deleteConversationProfileCallable;
  }

  @Override
  public final void close() {
    try {
      backgroundResources.close();
    } catch (RuntimeException e) {
      throw e;
    } catch (Exception e) {
      // Checked exceptions from resource shutdown are wrapped; close() itself declares none.
      throw new IllegalStateException("Failed to close resource", e);
    }
  }

  @Override
  public void shutdown() {
    backgroundResources.shutdown();
  }

  @Override
  public boolean isShutdown() {
    return backgroundResources.isShutdown();
  }

  @Override
  public boolean isTerminated() {
    return backgroundResources.isTerminated();
  }

  @Override
  public void shutdownNow() {
    backgroundResources.shutdownNow();
  }

  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return backgroundResources.awaitTermination(duration, unit);
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.xbean.osgi.bundle.util; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Dictionary; import java.util.Enumeration; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.concurrent.CopyOnWriteArrayList; import org.osgi.framework.Bundle; import org.osgi.framework.BundleContext; import org.osgi.framework.BundleException; import org.osgi.framework.Constants; import org.osgi.framework.ServiceReference; import org.osgi.framework.Version; import org.osgi.framework.wiring.BundleCapability; import org.osgi.framework.wiring.BundleRevision; import org.osgi.framework.wiring.BundleWiring; /** * Bundle that delegates ClassLoader operations to a collection of {@link Bundle} objects. 
* * @version $Rev$ $Date$ */ public class DelegatingBundle implements Bundle { private static final String PACKAGE_CACHE = DelegatingBundle.class.getName() + ".packageCache"; private static final String RESOURCE_CACHE_SIZE = DelegatingBundle.class.getName() + ".resourceCacheSize"; private static final URL NOT_FOUND_RESOURCE; static { try { NOT_FOUND_RESOURCE = new URL("file://foo"); } catch (MalformedURLException e) { throw new Error(e); } } private CopyOnWriteArrayList<Bundle> bundles; private Bundle bundle; private BundleContext bundleContext; private final boolean hasDynamicImports; private final Map<String, URL> resourceCache; private final boolean packageCacheEnabled; private Map<String, Bundle> packageCache; public DelegatingBundle(Collection<Bundle> bundles) { if (bundles.isEmpty()) { throw new IllegalArgumentException("At least one bundle is required"); } this.bundles = new CopyOnWriteArrayList<Bundle>(bundles); Iterator<Bundle> iterator = bundles.iterator(); // assume first Bundle is the main bundle this.bundle = iterator.next(); this.bundleContext = new DelegatingBundleContext(this, bundle.getBundleContext()); this.hasDynamicImports = hasDynamicImports(iterator); this.resourceCache = initResourceCache(); this.packageCacheEnabled = initPackageCacheEnabled(); } public DelegatingBundle(Bundle bundle) { this(Collections.singletonList(bundle)); } private static Map<String, URL> initResourceCache() { String value = System.getProperty(RESOURCE_CACHE_SIZE, "250"); int size = Integer.parseInt(value); if (size > 0) { return Collections.synchronizedMap(new Cache<String, URL>(size)); } else { return null; } } private static boolean initPackageCacheEnabled() { String value = System.getProperty(PACKAGE_CACHE, "true"); boolean enabled = Boolean.parseBoolean(value); return enabled; } /* * Returns true if a single bundle has Dynamic-ImportPackage: *. False, otherwise. 
*/ private boolean hasDynamicImports(Iterator<Bundle> iterator) { while (iterator.hasNext()) { Bundle delegate = iterator.next(); if (hasWildcardDynamicImport(delegate)) { return true; } } return false; } private synchronized Map<String, Bundle> getPackageBundleMap() { if (packageCache == null) { packageCache = buildPackageBundleMap(); } return packageCache; } private synchronized void reset() { resourceCache.clear(); packageCache = null; } private Map<String, Bundle> buildPackageBundleMap() { Map<String, Bundle> map = new HashMap<String, Bundle>(); Iterator<Bundle> iterator = bundles.iterator(); // skip first bundle iterator.next(); // attempt to load the class from the remaining bundles while (iterator.hasNext()) { Bundle bundle = iterator.next(); BundleWiring wiring = bundle.adapt(BundleWiring.class); if (wiring != null) { List<BundleCapability> capabilities = wiring.getCapabilities(BundleRevision.PACKAGE_NAMESPACE); if (capabilities != null && !capabilities.isEmpty()) { for (BundleCapability capability : capabilities) { Map<String, Object> attributes = capability.getAttributes(); if (attributes != null) { String packageName = String.valueOf(attributes.get(BundleRevision.PACKAGE_NAMESPACE)); if (!map.containsKey(packageName)) { map.put(packageName, bundle); } } } } } } return map; } public Bundle getMainBundle() { return bundle; } public Class<?> loadClass(String name) throws ClassNotFoundException { try { Class<?> clazz = bundle.loadClass(name); return clazz; } catch (ClassNotFoundException cnfe) { if (name.startsWith("java.")) { throw cnfe; } int index = name.lastIndexOf('.'); if (index > 0 && bundles.size() > 1) { String packageName = name.substring(0, index); if (packageCacheEnabled) { return findCachedClass(name, packageName, cnfe); } else { return findClass(name, packageName, cnfe); } } throw cnfe; } } private Class<?> findCachedClass(String className, String packageName, ClassNotFoundException cnfe) throws ClassNotFoundException { Map<String, Bundle> map 
= getPackageBundleMap(); Bundle bundle = map.get(packageName); if (bundle == null) { // Work-around for Introspector always looking for classes in sun.beans.infos if (packageName.equals("sun.beans.infos") && className.endsWith("BeanInfo")) { throw cnfe; } return findClass(className, packageName, cnfe); } else { return bundle.loadClass(className); } } private Class<?> findClass(String className, String packageName, ClassNotFoundException cnfe) throws ClassNotFoundException { Iterator<Bundle> iterator = bundles.iterator(); // skip first bundle iterator.next(); while (iterator.hasNext()) { Bundle delegate = iterator.next(); if (hasDynamicImports && hasWildcardDynamicImport(delegate)) { // skip any bundles with Dynamic-ImportPackage: * to avoid unnecessary wires continue; } try { return delegate.loadClass(className); } catch (ClassNotFoundException e) { // ignore } } throw cnfe; } private static boolean hasWildcardDynamicImport(Bundle bundle) { Dictionary<String, String> headers = bundle.getHeaders(); if (headers != null) { String value = headers.get(Constants.DYNAMICIMPORT_PACKAGE); if (value == null) { return false; } else { return "*".equals(value.trim()); } } else { return false; } } public void addBundle(Bundle b) { bundles.add(b); reset(); } public void removeBundle(Bundle b) { bundles.remove(b); reset(); } public URL getResource(String name) { URL resource = null; if (resourceCache == null) { resource = findResource(name); } else { resource = findCachedResource(name); } return resource; } private URL findCachedResource(String name) { URL resource = bundle.getResource(name); if (resource == null) { resource = resourceCache.get(name); if (resource == null) { Iterator<Bundle> iterator = bundles.iterator(); // skip first bundle iterator.next(); // look for resource in the remaining bundles resource = findResource(name, iterator); resourceCache.put(name, (resource == null) ? 
NOT_FOUND_RESOURCE : resource); } else if (resource == NOT_FOUND_RESOURCE) { resource = null; } } return resource; } private URL findResource(String name) { Iterator<Bundle> iterator = bundles.iterator(); return findResource(name, iterator); } private URL findResource(String name, Iterator<Bundle> iterator) { URL resource = null; while (iterator.hasNext() && resource == null) { Bundle delegate = iterator.next(); resource = delegate.getResource(name); } return resource; } public Enumeration<URL> getResources(String name) throws IOException { ArrayList<URL> allResources = new ArrayList<URL>(); for (Bundle bundle : bundles) { Enumeration<URL> e = bundle.getResources(name); addToList(allResources, e); } return Collections.enumeration(allResources); } private static void addToList(List<URL> list, Enumeration<URL> enumeration) { if (enumeration != null) { while (enumeration.hasMoreElements()) { list.add(enumeration.nextElement()); } } } public BundleContext getBundleContext() { return bundleContext; } public Enumeration findEntries(String arg0, String arg1, boolean arg2) { return bundle.findEntries(arg0, arg1, arg2); } public long getBundleId() { return bundle.getBundleId(); } public URL getEntry(String arg0) { return bundle.getEntry(arg0); } public Enumeration getEntryPaths(String arg0) { return bundle.getEntryPaths(arg0); } public Dictionary getHeaders() { return bundle.getHeaders(); } public Dictionary getHeaders(String arg0) { return bundle.getHeaders(arg0); } public long getLastModified() { return bundle.getLastModified(); } public String getLocation() { return bundle.getLocation(); } public ServiceReference[] getRegisteredServices() { return bundle.getRegisteredServices(); } public ServiceReference[] getServicesInUse() { return bundle.getServicesInUse(); } public Map getSignerCertificates(int arg0) { return bundle.getSignerCertificates(arg0); } public int getState() { return bundle.getState(); } public String getSymbolicName() { return bundle.getSymbolicName(); } 
public Version getVersion() { return bundle.getVersion(); } public boolean hasPermission(Object arg0) { return bundle.hasPermission(arg0); } public void start() throws BundleException { bundle.start(); } public void start(int arg0) throws BundleException { bundle.start(arg0); } public void stop() throws BundleException { bundle.stop(); } public void stop(int arg0) throws BundleException { bundle.stop(arg0); } public void uninstall() throws BundleException { bundle.uninstall(); } public void update() throws BundleException { bundle.update(); } public void update(InputStream arg0) throws BundleException { bundle.update(arg0); } public int compareTo(Bundle other) { return bundle.compareTo(other); } public <A> A adapt(Class<A> type) { return bundle.adapt(type); } public File getDataFile(String filename) { return bundle.getDataFile(filename); } public String toString() { return "[DelegatingBundle: " + bundles + "]"; } private static class Cache<K, V> extends LinkedHashMap<K, V> { private final int maxSize; public Cache(int maxSize) { this(16, maxSize, 0.75f); } public Cache(int initialSize, int maxSize, float loadFactor) { super(initialSize, loadFactor, true); this.maxSize = maxSize; } @Override protected boolean removeEldestEntry(Map.Entry<K, V> eldest) { if (size() > maxSize) { return true; } else { return false; } } } }
/*
 * The contents of this file are subject to the terms
 * of the Common Development and Distribution License
 * (the "License").  You may not use this file except
 * in compliance with the License.
 *
 * You can obtain a copy of the license at
 * https://jwsdp.dev.java.net/CDDLv1.0.html
 * See the License for the specific language governing
 * permissions and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL
 * HEADER in each file and include the License file at
 * https://jwsdp.dev.java.net/CDDLv1.0.html  If applicable,
 * add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your
 * own identifying information: Portions Copyright [yyyy]
 * [name of copyright owner]
 */

package com.github.cxfplus.com.sun.xml.txw2;

import com.github.cxfplus.com.sun.xml.txw2.output.XmlSerializer;

import java.util.Map;
import java.util.HashMap;

/**
 * Coordinates the entire writing process.
 *
 * @author Kohsuke Kawaguchi (kohsuke.kawaguchi@sun.com)
 */
public final class Document {

    private final XmlSerializer out;

    /**
     * Set to true once we invoke {@link XmlSerializer#startDocument()}.
     *
     * <p>
     * This is so that we can defer the writing as much as possible.
     */
    private boolean started=false;

    /**
     * Currently active writer.
     *
     * <p>
     * This points to the last written token.
     */
    private Content current = null;

    // user-object -> string conversion strategies, keyed by the object's class
    private final Map<Class,DatatypeWriter> datatypeWriters = new HashMap<Class,DatatypeWriter>();

    /**
     * Used to generate unique namespace prefix.
     */
    private int iota = 1;

    /**
     * Used to keep track of in-scope namespace bindings declared in ancestors.
     */
    private final NamespaceSupport inscopeNamespace = new NamespaceSupport();

    /**
     * Remembers the namespace declarations of the last unclosed start tag,
     * so that we can fix up dummy prefixes in {@link Pcdata}.
     */
    private NamespaceDecl activeNamespaces;


    Document(XmlSerializer out) {
        this.out = out;
        // seed with the built-in converters (String, numbers, etc.)
        for( DatatypeWriter dw : DatatypeWriter.BUILTIN )
            datatypeWriters.put(dw.getType(),dw);
    }

    void flush() {
        out.flush();
    }

    // Installs the sentinel StartDocument token and links the first real content after it.
    void setFirstContent(Content c) {
        assert current==null;
        current = new StartDocument();
        current.setNext(this,c);
    }

    /**
     * Defines additional user object -&gt; string conversion logic.
     *
     * <p>
     * Applications can add their own {@link DatatypeWriter} so that
     * application-specific objects can be turned into {@link String}
     * for output.
     *
     * @param dw
     *      The {@link DatatypeWriter} to be added. Must not be null.
     */
    public void addDatatypeWriter( DatatypeWriter<?> dw ) {
        datatypeWriters.put(dw.getType(),dw);
    }

    /**
     * Performs the output as much as possible
     */
    void run() {
        // walk the token chain, emitting each token that is ready to commit;
        // stops at the first unready token so output can resume later
        while(true) {
            Content next = current.getNext();
            if(next==null || !next.isReadyToCommit())
                return;
            next.accept(visitor);
            next.written();
            current = next;
        }
    }

    /**
     * Appends the given object to the end of the given buffer.
     *
     * @param nsResolver
     *      use
     */
    void writeValue( Object obj, NamespaceResolver nsResolver, StringBuilder buf ) {
        if(obj==null)
            throw new IllegalArgumentException("argument contains null");

        // arrays and iterables are flattened recursively, space-separated
        if(obj instanceof Object[]) {
            for( Object o : (Object[])obj )
                writeValue(o,nsResolver,buf);
            return;
        }
        if(obj instanceof Iterable) {
            for( Object o : (Iterable<?>)obj )
                writeValue(o,nsResolver,buf);
            return;
        }

        if(buf.length()>0) buf.append(' ');

        // walk up the class hierarchy looking for a registered converter
        Class c = obj.getClass();
        while(c!=null) {
            DatatypeWriter dw = datatypeWriters.get(c);
            if(dw!=null) {
                dw.print(obj,nsResolver,buf);
                return;
            }
            c = c.getSuperclass();
        }

        // if nothing applies, just use toString
        buf.append(obj);
    }

    // I wanted to hide those write method from users
    private final ContentVisitor visitor = new ContentVisitor() {
        public void onStartDocument() {
            // the startDocument token is used as the sentry, so this method shall never
            // be called.
            // out.startDocument() is invoked when we write the start tag of the root element.
            throw new IllegalStateException();
        }

        public void onEndDocument() {
            out.endDocument();
        }

        public void onEndTag() {
            out.endTag();
            inscopeNamespace.popContext();
            activeNamespaces = null;
        }

        public void onPcdata(StringBuilder buffer) {
            // dummy prefixes can only appear while a start tag is still open
            if(activeNamespaces!=null)
                buffer = fixPrefix(buffer);
            out.text(buffer);
        }

        public void onCdata(StringBuilder buffer) {
            if(activeNamespaces!=null)
                buffer = fixPrefix(buffer);
            out.cdata(buffer);
        }

        public void onComment(StringBuilder buffer) {
            if(activeNamespaces!=null)
                buffer = fixPrefix(buffer);
            out.comment(buffer);
        }

        public void onStartTag(String nsUri, String localName, Attribute attributes, NamespaceDecl namespaces) {
            assert nsUri!=null;
            assert localName!=null;

            activeNamespaces = namespaces;

            if(!started) {
                started = true;
                out.startDocument();
            }

            inscopeNamespace.pushContext();

            // declare the explicitly bound namespaces
            for( NamespaceDecl ns=namespaces; ns!=null; ns=ns.next ) {
                ns.declared = false;    // reset this flag

                if(ns.prefix!=null) {
                    String uri = inscopeNamespace.getURI(ns.prefix);
                    if(uri!=null && uri.equals(ns.uri))
                        ;   // already declared
                    else {
                        // declare this new binding
                        inscopeNamespace.declarePrefix(ns.prefix,ns.uri);
                        ns.declared = true;
                    }
                }
            }

            // then use in-scope namespace to assign prefixes to others
            for( NamespaceDecl ns=namespaces; ns!=null; ns=ns.next ) {
                if(ns.prefix==null) {
                    if(inscopeNamespace.getURI("").equals(ns.uri))
                        ns.prefix="";
                    else {
                        String p = inscopeNamespace.getPrefix(ns.uri);
                        if(p==null) {
                            // assign a new one, skipping any prefix already taken
                            while(inscopeNamespace.getURI(p=newPrefix())!=null)
                                ;
                            ns.declared = true;
                            inscopeNamespace.declarePrefix(p,ns.uri);
                        }
                        ns.prefix = p;
                    }
                }
            }

            // the first namespace decl must be the one for the element
            assert namespaces.uri.equals(nsUri);
            assert namespaces.prefix!=null : "a prefix must have been all allocated";
            out.beginStartTag(nsUri,localName,namespaces.prefix);

            // declare namespaces
            for( NamespaceDecl ns=namespaces; ns!=null; ns=ns.next ) {
                if(ns.declared)
                    out.writeXmlns( ns.prefix, ns.uri );
            }

            // write attributes
            for( Attribute a=attributes; a!=null; a=a.next) {
                String prefix;
                if(a.nsUri.length()==0) prefix="";
                else                    prefix=inscopeNamespace.getPrefix(a.nsUri);
                out.writeAttribute( a.nsUri, a.localName, prefix, fixPrefix(a.value) );
            }

            out.endStartTag(nsUri,localName,namespaces.prefix);
        }
    };

    /**
     * Used by {@link #newPrefix()}.
     */
    private final StringBuilder prefixSeed = new StringBuilder("ns");

    private int prefixIota = 0;

    /**
     * Allocates a new unique prefix.
     */
    private String newPrefix() {
        prefixSeed.setLength(2);
        prefixSeed.append(++prefixIota);
        return prefixSeed.toString();
    }

    /**
     * Replaces dummy prefixes in the value to the real ones
     * by using {@link #activeNamespaces}.
     *
     * <p>
     * A dummy prefix is encoded as a {@link #MAGIC} character followed by the
     * {@link NamespaceDecl#uniqueId} of the binding it refers to.
     *
     * @return
     *      the buffer passed as the <tt>buf</tt> parameter.
     */
    private StringBuilder fixPrefix(StringBuilder buf) {
        assert activeNamespaces!=null;

        int i;
        int len=buf.length();
        for(i=0;i<len;i++)
            if( buf.charAt(i)==MAGIC )
                break;

        // typically it doesn't contain any prefix.
        // just return the original buffer in that case
        if(i==len)
            return buf;

        while(i<len) {
            // look up the namespace binding by its unique id
            char uriIdx = buf.charAt(i+1);
            NamespaceDecl ns = activeNamespaces;
            while(ns!=null && ns.uniqueId!=uriIdx)
                ns=ns.next;

            if(ns==null)
                throw new IllegalStateException("Unexpected use of prefixes "+buf);

            int length = 2;
            String prefix = ns.prefix;
            if(prefix.length()==0) {
                // empty (default) prefix: also swallow the following ':'
                if(buf.length()<=i+2 || buf.charAt(i+2)!=':')
                    throw new IllegalStateException("Unexpected use of prefixes "+buf);
                length=3;
            }

            buf.replace(i,i+length,prefix);
            len += prefix.length()-length;

            // advance to the next MAGIC marker, if any
            while(i<len && buf.charAt(i)!=MAGIC)
                i++;
        }

        return buf;
    }

    /**
     * The first char of the dummy prefix.
     */
    static final char MAGIC = '\u0000';

    char assignNewId() {
        return (char)iota++;
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. package com.azure.ai.metricsadvisor; import com.azure.ai.metricsadvisor.administration.MetricsAdvisorAdministrationClientBuilder; import com.azure.ai.metricsadvisor.models.Alert; import com.azure.ai.metricsadvisor.models.Anomaly; import com.azure.ai.metricsadvisor.models.DimensionKey; import com.azure.ai.metricsadvisor.models.EnrichmentStatus; import com.azure.ai.metricsadvisor.models.ErrorCodeException; import com.azure.ai.metricsadvisor.models.Incident; import com.azure.ai.metricsadvisor.models.IncidentRootCause; import com.azure.ai.metricsadvisor.models.ListAlertOptions; import com.azure.ai.metricsadvisor.models.ListAnomaliesAlertedOptions; import com.azure.ai.metricsadvisor.models.ListAnomaliesDetectedOptions; import com.azure.ai.metricsadvisor.models.ListIncidentsAlertedOptions; import com.azure.ai.metricsadvisor.models.ListIncidentsDetectedOptions; import com.azure.ai.metricsadvisor.models.ListMetricDimensionValuesOptions; import com.azure.ai.metricsadvisor.models.ListMetricEnrichmentStatusOptions; import com.azure.ai.metricsadvisor.models.ListMetricFeedbackOptions; import com.azure.ai.metricsadvisor.models.ListMetricSeriesDataOptions; import com.azure.ai.metricsadvisor.models.ListMetricSeriesDefinitionOptions; import com.azure.ai.metricsadvisor.models.ListValuesOfDimensionWithAnomaliesOptions; import com.azure.ai.metricsadvisor.models.MetricEnrichedSeriesData; import com.azure.ai.metricsadvisor.models.MetricFeedback; import com.azure.ai.metricsadvisor.models.MetricSeriesData; import com.azure.ai.metricsadvisor.models.MetricSeriesDefinition; import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceClient; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.PagedIterable; import com.azure.core.http.rest.Response; import com.azure.core.util.Context; import java.time.OffsetDateTime; import java.util.List; /** * 
This class provides an asynchronous client that contains all the operations that apply to Azure Metrics Advisor. * * <p><strong>Instantiating an synchronous Metric Advisor Client</strong></p> * {@codesnippet com.azure.ai.metricsadvisor.MetricsAdvisorClient.instantiation} * * @see MetricsAdvisorClientBuilder */ @ServiceClient(builder = MetricsAdvisorClientBuilder.class) public class MetricsAdvisorClient { private final MetricsAdvisorAsyncClient client; /** * Create a {@link MetricsAdvisorClient client} that sends requests to the Metrics Advisor service's * endpoint. * Each service call goes through the {@link MetricsAdvisorAdministrationClientBuilder#pipeline http pipeline}. * * @param client The {@link MetricsAdvisorAsyncClient} that the * client routes its request through. */ MetricsAdvisorClient(MetricsAdvisorAsyncClient client) { this.client = client; } /** * List series (dimension combinations) from metric. * * <p><strong>Code sample</strong></p> * {@codesnippet com.azure.ai.metricsadvisor.MetricsAdvisorClient.listMetricSeriesDefinitions#String-ListMetricSeriesDefinitionOptions} * * @param metricId metric unique id. * @param options the additional filtering attributes that can be provided to query the series. * * @return A {@link PagedIterable} of the {@link MetricSeriesDefinition metric series definitions}. * @throws IllegalArgumentException thrown if {@code metricId} fail the UUID format validation. * @throws ErrorCodeException thrown if the request is rejected by server. * @throws NullPointerException thrown if the {@code metricId} or {@code options.activeSince} * is null. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<MetricSeriesDefinition> listMetricSeriesDefinitions( String metricId, ListMetricSeriesDefinitionOptions options) { return listMetricSeriesDefinitions(metricId, options, Context.NONE); } /** * List series (dimension combinations) from metric. 
* * <p><strong>Code sample</strong></p> * {@codesnippet com.azure.ai.metricsadvisor.MetricsAdvisorClient.listMetricSeriesDefinitions#String-ListMetricSeriesDefinitionOptions-Context} * * @param metricId metric unique id. * @param options the additional filtering attributes that can be provided to query the series. * @param context Additional context that is passed through the HTTP pipeline during the service call. * * @return A {@link PagedIterable} of the {@link MetricSeriesDefinition metric series definitions}. * @throws IllegalArgumentException thrown if {@code metricId} fail the UUID format validation. * @throws ErrorCodeException thrown if the request is rejected by server. * @throws NullPointerException thrown if the {@code metricId} or {@code options.activeSince} * is null. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<MetricSeriesDefinition> listMetricSeriesDefinitions( String metricId, ListMetricSeriesDefinitionOptions options, Context context) { return new PagedIterable<>(client.listMetricSeriesDefinitions(metricId, options, context == null ? Context.NONE : context)); } /** * Get time series data from metric. * * <p><strong>Code sample</strong></p> * {@codesnippet com.azure.ai.metricsadvisor.MetricsAdvisorClient.listMetricSeriesData#String-List-ListMetricSeriesDataOptions} * * @param metricId metric unique id. * @param seriesKeys the series key to filter. * <p>This enables additional filtering of dimension values being queried. * For example, let's say we've the dimensions 'category' and 'city', * so the api can query value of the dimension 'category', with series key as 'city=redmond'. * </p> * @param options query time series data condition. * * @return A {@link PagedIterable} of the {@link MetricSeriesData metric series data points}. * @throws IllegalArgumentException thrown if {@code metricId} fail the UUID format validation. 
* @throws NullPointerException thrown if the {@code metricId}, {@code options.startTime} or {@code options.endTime} * is null. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<MetricSeriesData> listMetricSeriesData(String metricId, List<DimensionKey> seriesKeys, ListMetricSeriesDataOptions options) { return listMetricSeriesData(metricId, seriesKeys, options, Context.NONE); } /** * Get time series data from metric. * * <p><strong>Code sample</strong></p> * {@codesnippet com.azure.ai.metricsadvisor.MetricsAdvisorClient.listMetricSeriesData#String-List-ListMetricSeriesDataOptions-Context} * * @param metricId metric unique id. * @param seriesKeys the series key to filter. * <p>This enables additional filtering of dimension values being queried. * For example, let's say we've the dimensions 'category' and 'city', * so the api can query value of the dimension 'category', with series key as 'city=redmond'. * </p> * @param options query time series data condition. * @param context Additional context that is passed through the Http pipeline during the service call. * * @return A {@link PagedIterable} of the {@link MetricSeriesData metric series data points}. * @throws IllegalArgumentException thrown if {@code metricId} fail the UUID format validation. * @throws NullPointerException thrown if the {@code metricId}, {@code options.startTime} or {@code options.endTime} * is null. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<MetricSeriesData> listMetricSeriesData(String metricId, List<DimensionKey> seriesKeys, ListMetricSeriesDataOptions options, Context context) { return new PagedIterable<>(client.listMetricSeriesData(metricId, seriesKeys, options, context == null ? Context.NONE : context)); } /** * List the enrichment status for a metric. 
* * <p><strong>Code sample</strong></p> * {@codesnippet com.azure.ai.metricsadvisor.MetricsAdvisorClient.listMetricEnrichmentStatus#String-ListMetricEnrichmentStatusOptions} * * @param metricId metric unique id. * @param options th e additional configurable options to specify when querying the result.. * * @return the list of enrichment status's for the specified metric. * @throws IllegalArgumentException thrown if {@code metricId} fail the UUID format validation. * @throws ErrorCodeException thrown if the request is rejected by server. * @throws NullPointerException thrown if {@code metricId}, {@code options.startTime} and {@code options.endTime} * is null. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<EnrichmentStatus> listMetricEnrichmentStatus( String metricId, ListMetricEnrichmentStatusOptions options) { return listMetricEnrichmentStatus(metricId, options, Context.NONE); } /** * List the enrichment status for a metric. * * <p><strong>Code sample</strong></p> * {@codesnippet com.azure.ai.metricsadvisor.MetricsAdvisorClient.listMetricEnrichmentStatus#String-ListMetricEnrichmentStatusOptions-Context} * * @param metricId metric unique id. * @param options th e additional configurable options to specify when querying the result.. * @param context Additional context that is passed through the Http pipeline during the service call. * * @return the list of enrichment status's for the specified metric. * @throws IllegalArgumentException thrown if {@code metricId} fail the UUID format validation. * @throws ErrorCodeException thrown if the request is rejected by server. * @throws NullPointerException thrown if {@code metricId}, {@code options.startTime} and {@code options.endTime} * is null. 
*/ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<EnrichmentStatus> listMetricEnrichmentStatus( String metricId, ListMetricEnrichmentStatusOptions options, Context context) { return new PagedIterable<>(client.listMetricEnrichmentStatus(metricId, options, context == null ? Context.NONE : context)); } /** * Given a list of time series keys, retrieve time series version enriched using * a detection configuration. * * <p><strong>Code sample</strong></p> * {@codesnippet com.azure.ai.metricsadvisor.MetricsAdvisorClient.listMetricEnrichedSeriesData#List-String-OffsetDateTime-OffsetDateTime} * * @param seriesKeys The time series key list, each key identifies a specific time series. * @param detectionConfigurationId The id of the configuration used to enrich the time series * identified by the keys in {@code seriesKeys}. * @param startTime The start time. * @param endTime The end time. * @return The enriched time series. * @throws IllegalArgumentException thrown if {@code detectionConfigurationId} fail the UUID format validation * or if {@code seriesKeys} is empty. * @throws NullPointerException thrown if the {@code detectionConfigurationId} * or {@code startTime} or {@code endTime} is null. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<MetricEnrichedSeriesData> listMetricEnrichedSeriesData(List<DimensionKey> seriesKeys, String detectionConfigurationId, OffsetDateTime startTime, OffsetDateTime endTime) { return listMetricEnrichedSeriesData(seriesKeys, detectionConfigurationId, startTime, endTime, Context.NONE); } /** * Given a list of time series keys, retrieve time series version enriched using * a detection configuration. * * <p><strong>Code sample</strong></p> * {@codesnippet com.azure.ai.metricsadvisor.MetricsAdvisorClient.listMetricEnrichedSeriesData#List-String-OffsetDateTime-OffsetDateTime-Context} * * @param seriesKeys The time series key list, each key identifies a specific time series. 
* @param detectionConfigurationId The id of the configuration used to enrich the time series * identified by the keys in {@code seriesKeys}. * @param startTime The start time. * @param endTime The end time. * @param context Additional context that is passed through the Http pipeline during the service call. * @return The enriched time series. * @throws IllegalArgumentException thrown if {@code detectionConfigurationId} fail the UUID format validation * or if {@code seriesKeys} is empty. * @throws NullPointerException thrown if the {@code detectionConfigurationId} * or {@code startTime} or {@code endTime} is null. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<MetricEnrichedSeriesData> listMetricEnrichedSeriesData( List<DimensionKey> seriesKeys, String detectionConfigurationId, OffsetDateTime startTime, OffsetDateTime endTime, Context context) { return new PagedIterable<>(client.listMetricEnrichedSeriesData(seriesKeys, detectionConfigurationId, startTime, endTime, context == null ? Context.NONE : context)); } /** * Fetch the anomalies identified by an anomaly detection configuration. * * <p><strong>Code sample</strong></p> * {@codesnippet com.azure.ai.metricsadvisor.MetricsAdvisorClient.listAnomaliesForDetectionConfiguration#String-ListAnomaliesDetectedOptions} * * @param detectionConfigurationId The anomaly detection configuration id. * @param options The additional parameters. * @return The anomalies. * @throws IllegalArgumentException thrown if {@code detectionConfigurationId} does not conform * to the UUID format specification * or {@code options.filter} is used to set severity but either min or max severity is missing. * @throws NullPointerException thrown if the {@code detectionConfigurationId} or {@code options} * or {@code options.startTime} or {@code options.endTime} is null. 
*/ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<Anomaly> listAnomaliesForDetectionConfiguration( String detectionConfigurationId, ListAnomaliesDetectedOptions options) { return listAnomaliesForDetectionConfiguration(detectionConfigurationId, options, Context.NONE); } /** * Fetch the anomalies identified by an anomaly detection configuration. * * <p><strong>Code sample</strong></p> * {@codesnippet com.azure.ai.metricsadvisor.MetricsAdvisorClient.listAnomaliesForDetectionConfiguration#String-ListAnomaliesDetectedOptions-Context} * * @param detectionConfigurationId The anomaly detection configuration id. * @param options The additional parameters. * @param context Additional context that is passed through the Http pipeline during the service call. * @return The anomalies. * @throws IllegalArgumentException thrown if {@code detectionConfigurationId} does not conform * to the UUID format specification * or {@code options.filter} is used to set severity but either min or max severity is missing. * @throws NullPointerException thrown if the {@code detectionConfigurationId} or {@code options} * or {@code options.startTime} or {@code options.endTime} is null. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<Anomaly> listAnomaliesForDetectionConfiguration( String detectionConfigurationId, ListAnomaliesDetectedOptions options, Context context) { return new PagedIterable<>(client.listAnomaliesForDetectionConfiguration(detectionConfigurationId, options, context == null ? Context.NONE : context)); } /** * Fetch the incidents identified by an anomaly detection configuration. * * <p><strong>Code sample</strong></p> * {@codesnippet com.azure.ai.metricsadvisor.MetricsAdvisorClient.listIncidentsForDetectionConfiguration#String-ListIncidentsDetectedOptions} * * @param detectionConfigurationId The anomaly detection configuration id. * @param options The additional parameters. * @return The incidents. 
* @throws IllegalArgumentException thrown if {@code detectionConfigurationId} does not conform * to the UUID format specification. * @throws NullPointerException thrown if the {@code detectionConfigurationId} or {@code options} * or {@code options.startTime} or {@code options.endTime} is null. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<Incident> listIncidentsForDetectionConfiguration( String detectionConfigurationId, ListIncidentsDetectedOptions options) { return listIncidentsForDetectionConfiguration(detectionConfigurationId, options, Context.NONE); } /** * Fetch the incidents identified by an anomaly detection configuration. * * <p><strong>Code sample</strong></p> * {@codesnippet com.azure.ai.metricsadvisor.MetricsAdvisorClient.listIncidentsForDetectionConfiguration#String-ListIncidentsDetectedOptions-Context} * * @param detectionConfigurationId The anomaly detection configuration id. * @param options The additional parameters. * @param context Additional context that is passed through the Http pipeline during the service call. * @return The incidents. * @throws IllegalArgumentException thrown if {@code detectionConfigurationId} does not conform * to the UUID format specification. * @throws NullPointerException thrown if the {@code detectionConfigurationId} or {@code options} * or {@code options.startTime} or {@code options.endTime} is null. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<Incident> listIncidentsForDetectionConfiguration( String detectionConfigurationId, ListIncidentsDetectedOptions options, Context context) { return new PagedIterable<>(client.listIncidentsForDetectionConfiguration(detectionConfigurationId, options, context == null ? Context.NONE : context)); } /** * List the root causes for an incident. 
* * <p><strong>Code sample</strong></p> * {@codesnippet com.azure.ai.metricsadvisor.MetricsAdvisorClient.listIncidentRootCauses#String-String} * * @param detectionConfigurationId anomaly detection configuration unique id. * @param incidentId the incident for which you want to query root causes for. * * @return the list of root causes for that incident. * @throws IllegalArgumentException thrown if {@code detectionConfigurationId} fail the UUID format validation. * @throws ErrorCodeException thrown if the request is rejected by server. * @throws NullPointerException thrown if the {@code detectionConfigurationId} or {@code incidentId} is null. **/ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<IncidentRootCause> listIncidentRootCauses( String detectionConfigurationId, String incidentId) { return new PagedIterable<>(client.listIncidentRootCauses(detectionConfigurationId, incidentId)); } /** * List the root causes for an incident. * * <p><strong>Code sample</strong></p> * {@codesnippet com.azure.ai.metricsadvisor.MetricsAdvisorClient.listIncidentRootCauses#String-String-Context} * * @param detectionConfigurationId anomaly detection configuration unique id. * @param incidentId the incident for which you want to query root causes for. * @param context Additional context that is passed through the Http pipeline during the service call. * * @return the list of root causes for that incident. * @throws IllegalArgumentException thrown if {@code detectionConfigurationId} fail the UUID format validation. * @throws ErrorCodeException thrown if the request is rejected by server. * @throws NullPointerException thrown if the {@code detectionConfigurationId} or {@code incidentId} is null. 
**/ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<IncidentRootCause> listIncidentRootCauses( String detectionConfigurationId, String incidentId, Context context) { return new PagedIterable<>(client.listIncidentRootCauses(detectionConfigurationId, incidentId, context)); } /** * List the root causes for an incident. * * <p><strong>Code sample</strong></p> * {@codesnippet com.azure.ai.metricsadvisor.MetricsAdvisorClient.listIncidentRootCauses#Incident} * * @param incident the incident for which you want to query root causes for. * * @return the list of root causes for that incident. * @throws IllegalArgumentException thrown if {@code detectionConfigurationId} fail the UUID format validation. * @throws ErrorCodeException thrown if the request is rejected by server. * @throws NullPointerException thrown if the {@code detectionConfigurationId} or {@code incidentId} is null. **/ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<IncidentRootCause> listIncidentRootCauses(Incident incident) { return new PagedIterable<>(client.listIncidentRootCauses(incident, Context.NONE)); } /** * Fetch the values of a dimension that have anomalies. * * <p><strong>Code sample</strong></p> * {@codesnippet com.azure.ai.metricsadvisor.MetricsAdvisorClient.listValuesOfDimensionWithAnomalies#String-String-ListValuesOfDimensionWithAnomaliesOptions} * * @param detectionConfigurationId Identifies the configuration used to detect the anomalies. * @param dimensionName The dimension name to retrieve the values for. * @param options The additional parameters. * * @return The dimension values with anomalies. * @throws IllegalArgumentException thrown if {@code detectionConfigurationId} does not conform * to the UUID format specification. * @throws NullPointerException thrown if the {@code detectionConfigurationId} or {@code dimensionName} * or {@code options} or {@code options.startTime} or {@code options.endTime} is null. 
*/ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<String> listValuesOfDimensionWithAnomalies( String detectionConfigurationId, String dimensionName, ListValuesOfDimensionWithAnomaliesOptions options) { return listValuesOfDimensionWithAnomalies(detectionConfigurationId, dimensionName, options, Context.NONE); } /** * Fetch the values of a dimension that have anomalies. * * <p><strong>Code sample</strong></p> * {@codesnippet com.azure.ai.metricsadvisor.MetricsAdvisorClient.listValuesOfDimensionWithAnomalies#String-String-ListValuesOfDimensionWithAnomaliesOptions-Context} * * @param detectionConfigurationId Identifies the configuration used to detect the anomalies. * @param dimensionName The dimension name to retrieve the values for. * @param options The additional parameters. * @param context Additional context that is passed through the Http pipeline during the service call. * * @return The dimension values with anomalies. * @throws IllegalArgumentException thrown if {@code detectionConfigurationId} does not conform * to the UUID format specification. * @throws NullPointerException thrown if the {@code detectionConfigurationId} or {@code dimensionName} * or {@code options} or {@code options.startTime} or {@code options.endTime} is null. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<String> listValuesOfDimensionWithAnomalies( String detectionConfigurationId, String dimensionName, ListValuesOfDimensionWithAnomaliesOptions options, Context context) { return new PagedIterable<>(client.listValuesOfDimensionWithAnomalies(detectionConfigurationId, dimensionName, options, context == null ? Context.NONE : context)); } /** * Fetch the alerts triggered by an anomaly alert configuration. * * <p><strong>Code sample</strong></p> * {@codesnippet com.azure.ai.metricsadvisor.MetricsAdvisorClient.listAlerts#String-ListAlertOptions} * * @param alertConfigurationId The anomaly alert configuration id. 
* @param options The additional parameters. * * @return The alerts. * @throws IllegalArgumentException thrown if {@code alertConfigurationId} does not conform * to the UUID format specification. * @throws NullPointerException thrown if the {@code alertConfigurationId} or {@code options} * or {@code options.startTime} or {@code options.endTime} is null. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<Alert> listAlerts( String alertConfigurationId, ListAlertOptions options) { return listAlerts(alertConfigurationId, options, Context.NONE); } /** * Fetch the alerts triggered by an anomaly alert configuration. * * <p><strong>Code sample</strong></p> * {@codesnippet com.azure.ai.metricsadvisor.MetricsAdvisorClient.listAlerts#String-ListAlertOptions-Context} * * @param alertConfigurationId The anomaly alert configuration id. * @param options The additional parameters. * @param context Additional context that is passed through the Http pipeline during the service call. * * @return The alerts. * @throws IllegalArgumentException thrown if {@code alertConfigurationId} does not conform * to the UUID format specification. * @throws NullPointerException thrown if the {@code alertConfigurationId} or {@code options} * or {@code options.startTime} or {@code options.endTime} is null. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<Alert> listAlerts( String alertConfigurationId, ListAlertOptions options, Context context) { return new PagedIterable<>(client.listAlerts(alertConfigurationId, options, context == null ? Context.NONE : context)); } /** * Fetch the anomalies in an alert. * * <p><strong>Code sample</strong></p> * {@codesnippet com.azure.ai.metricsadvisor.MetricsAdvisorClient.listAnomaliesForAlert#String-String-ListAnomaliesAlertedOptions} * * @param alertConfigurationId The anomaly alert configuration id. * @param alertId The alert id. * @return The anomalies. 
* @throws IllegalArgumentException thrown if {@code alertConfigurationId} or {@code alertId} does not * conform to the UUID format specification. * @throws NullPointerException thrown if the {@code alertConfigurationId} or {@code alertId} is null. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<Anomaly> listAnomaliesForAlert( String alertConfigurationId, String alertId) { return listAnomaliesForAlert(alertConfigurationId, alertId, null, Context.NONE); } /** * Fetch the anomalies in an alert. * * <p><strong>Code sample</strong></p> * {@codesnippet com.azure.ai.metricsadvisor.MetricsAdvisorClient.listAnomaliesForAlert#String-String-ListAnomaliesAlertedOptions-Context} * * @param alertConfigurationId The anomaly alert configuration id. * @param alertId The alert id. * @param options The additional parameters. * @param context Additional context that is passed through the Http pipeline during the service call. * * @return The anomalies. * @throws IllegalArgumentException thrown if {@code alertConfigurationId} or {@code alertId} does not * conform to the UUID format specification. * @throws NullPointerException thrown if the {@code alertConfigurationId} or {@code alertId} is null. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<Anomaly> listAnomaliesForAlert( String alertConfigurationId, String alertId, ListAnomaliesAlertedOptions options, Context context) { return new PagedIterable<>(client.listAnomaliesForAlert(alertConfigurationId, alertId, options, context == null ? Context.NONE : context)); } /** * Fetch the incidents in an alert. * * <p><strong>Code sample</strong></p> * {@codesnippet com.azure.ai.metricsadvisor.MetricsAdvisorClient.listIncidentsForAlert#String-String-ListIncidentsAlertedOptions} * * @param alertConfigurationId The anomaly alert configuration id. * @param alertId The alert id. * @param options The additional parameters. * * @return The incidents. 
* @throws IllegalArgumentException thrown if {@code alertConfigurationId} or {@code alertId} does not * conform to the UUID format specification. * @throws NullPointerException thrown if the {@code alertConfigurationId} or {@code alertId} is null. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<Incident> listIncidentsForAlert( String alertConfigurationId, String alertId, ListIncidentsAlertedOptions options) { return listIncidentsForAlert(alertConfigurationId, alertId, options, Context.NONE); } /** * Fetch the incidents in an alert. * * <p><strong>Code sample</strong></p> * {@codesnippet com.azure.ai.metricsadvisor.MetricsAdvisorClient.listIncidentsForAlert#String-String-ListIncidentsAlertedOptions-Context} * * @param alertConfigurationId The anomaly alert configuration id. * @param alertId The alert id. * @param options The additional parameters. * @param context Additional context that is passed through the Http pipeline during the service call. * * @return The incidents. * @throws IllegalArgumentException thrown if {@code alertConfigurationId} or {@code alertId} does not * conform to the UUID format specification. * @throws NullPointerException thrown if the {@code alertConfigurationId} or {@code alertId} is null. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<Incident> listIncidentsForAlert( String alertConfigurationId, String alertId, ListIncidentsAlertedOptions options, Context context) { return new PagedIterable<>(client.listIncidentsForAlert(alertConfigurationId, alertId, options, context == null ? Context.NONE : context)); } /** * Create a new metric feedback. * * <p><strong>Code sample</strong></p> * {@codesnippet com.azure.ai.metricsadvisor.MetricsAdvisorClient.createMetricFeedback#String-MetricFeedback} * * @param metricId the unique id for which the feedback needs to be submitted. * @param metricFeedback the actual metric feedback. * * @return the created {@link MetricFeedback metric feedback}. 
* @throws NullPointerException If {@code metricId}, {@code metricFeedback.dimensionFilter} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public MetricFeedback createMetricFeedback(String metricId, MetricFeedback metricFeedback) { return createMetricFeedbackWithResponse(metricId, metricFeedback, Context.NONE).getValue(); } /** * Create a new metric feedback. * * <p><strong>Code sample</strong></p> * {@codesnippet com.azure.ai.metricsadvisor.MetricsAdvisorClient.createMetricFeedbackWithResponse#String-MetricFeedback-Context} * * @param metricId the unique id for which the feedback needs to be submitted. * @param metricFeedback the actual metric feedback. * @param context Additional context that is passed through the HTTP pipeline during the service call. * * @return A {@link Response} containing the created {@link MetricFeedback metric feedback}. * @throws NullPointerException If {@code metricId}, {@code metricFeedback.dimensionFilter} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Response<MetricFeedback> createMetricFeedbackWithResponse(String metricId, MetricFeedback metricFeedback, Context context) { return client.createMetricFeedbackWithResponse(metricId, metricFeedback, context).block(); } /** * Get a metric feedback by its id. * * <p><strong>Code sample</strong></p> * {@codesnippet com.azure.ai.metricsadvisor.MetricsAdvisorClient.getMetricFeedback#String} * * @param feedbackId The metric feedback unique id. * * @return The metric feedback for the provided id. * @throws IllegalArgumentException If {@code feedbackId} does not conform to the UUID format specification. * @throws NullPointerException thrown if the {@code feedbackId} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public MetricFeedback getMetricFeedback(String feedbackId) { return getMetricFeedbackWithResponse(feedbackId, Context.NONE).getValue(); } /** * Get a metric feedback by its id. 
* * <p><strong>Code sample</strong></p> * {@codesnippet com.azure.ai.metricsadvisor.MetricsAdvisorClient.getMetricFeedbackWithResponse#String-Context} * * @param feedbackId The metric feedback unique id. * @param context Additional context that is passed through the HTTP pipeline during the service call. * * @return The metric feedback for the provided id. * @throws IllegalArgumentException If {@code feedbackId} does not conform to the UUID format specification. * @throws NullPointerException thrown if the {@code feedbackId} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Response<MetricFeedback> getMetricFeedbackWithResponse(String feedbackId, Context context) { return client.getMetricFeedbackWithResponse(feedbackId, context).block(); } /** * List information of all metric feedbacks on the metrics advisor account. * * <p><strong>Code sample</strong></p> * {@codesnippet com.azure.ai.metricsadvisor.MetricsAdvisorClient.listMetricFeedbacks#String} * * @param metricId the unique metric Id. * * @return A {@link PagedIterable} containing information of all the {@link MetricFeedback metric feedbacks} * in the account. * @throws IllegalArgumentException thrown if {@code metricId} fail the UUID format validation. * @throws ErrorCodeException thrown if the request is rejected by server. * @throws NullPointerException thrown if the {@code metricId} is null. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<MetricFeedback> listMetricFeedbacks( String metricId) { return listMetricFeedbacks(metricId, null, Context.NONE); } /** * List information of all metric feedbacks on the metrics advisor account. * * <p><strong>Code sample</strong></p> * {@codesnippet com.azure.ai.metricsadvisor.MetricsAdvisorClient.listMetricFeedbacks#String-ListMetricFeedbackOptions-Context} * * @param metricId the unique metric Id. * @param options The configurable {@link ListMetricFeedbackOptions options} to pass for filtering the output * result. 
* @param context Additional context that is passed through the HTTP pipeline during the service call. * * @return A {@link PagedIterable} containing information of all the {@link MetricFeedback metric feedbacks} * in the account. * @throws IllegalArgumentException thrown if {@code metricId} fail the UUID format validation. * @throws ErrorCodeException thrown if the request is rejected by server. * @throws NullPointerException thrown if the {@code metricId} is null. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<MetricFeedback> listMetricFeedbacks( String metricId, ListMetricFeedbackOptions options, Context context) { return new PagedIterable<>(client.listMetricFeedbacks(metricId, options, context == null ? Context.NONE : context)); } /** * List dimension values from certain metric. * * <p><strong>Code sample</strong></p> * {@codesnippet com.azure.ai.metricsadvisor.MetricsAdvisorClient.listMetricDimensionValues#String-String} * * @param metricId metric unique id. * @param dimensionName the query dimension name. * * @return the {@link PagedIterable} of the dimension values for that metric. * @throws IllegalArgumentException thrown if {@code metricId} fail the UUID format validation. * @throws ErrorCodeException thrown if the request is rejected by server. * @throws NullPointerException thrown if the {@code metricId} or {@code dimensionName} is null. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<String> listMetricDimensionValues( String metricId, String dimensionName) { return listMetricDimensionValues(metricId, dimensionName, null, Context.NONE); } /** * List dimension values from certain metric. * * <p><strong>Code sample</strong></p> * {@codesnippet com.azure.ai.metricsadvisor.MetricsAdvisorClient.listMetricDimensionValues#String-String-ListMetricDimensionValuesOptions-Context} * * @param metricId metric unique id. * @param dimensionName the query dimension name. 
* @param options the additional parameters to specify while querying. * @param context Additional context that is passed through the HTTP pipeline during the service call. * * @return the {@link PagedIterable} of the dimension values for that metric. * @throws IllegalArgumentException thrown if {@code metricId} fail the UUID format validation. * @throws ErrorCodeException thrown if the request is rejected by server. * @throws NullPointerException thrown if the {@code metricId} or {@code dimensionName} is null. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<String> listMetricDimensionValues( String metricId, String dimensionName, ListMetricDimensionValuesOptions options, Context context) { return new PagedIterable<>(client.listMetricDimensionValues(metricId, dimensionName, options, context == null ? Context.NONE : context)); } }
/* * ==================================================================== * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. 
* */ package org.apach3.http.impl.client; import org.apach3.http.HttpVersion; import org.apach3.http.annotation.ThreadSafe; import org.apach3.http.client.HttpClient; import org.apach3.http.client.protocol.RequestAddCookies; import org.apach3.http.client.protocol.RequestAuthCache; import org.apach3.http.client.protocol.RequestClientConnControl; import org.apach3.http.client.protocol.RequestDefaultHeaders; import org.apach3.http.client.protocol.RequestProxyAuthentication; import org.apach3.http.client.protocol.RequestTargetAuthentication; import org.apach3.http.client.protocol.ResponseProcessCookies; import org.apach3.http.conn.ClientConnectionManager; import org.apach3.http.params.CoreConnectionPNames; import org.apach3.http.params.CoreProtocolPNames; import org.apach3.http.params.HttpConnectionParams; import org.apach3.http.params.HttpParams; import org.apach3.http.params.HttpProtocolParams; import org.apach3.http.params.SyncBasicHttpParams; import org.apach3.http.protocol.BasicHttpProcessor; import org.apach3.http.protocol.HTTP; import org.apach3.http.protocol.RequestContent; import org.apach3.http.protocol.RequestExpectContinue; import org.apach3.http.protocol.RequestTargetHost; import org.apach3.http.protocol.RequestUserAgent; import org.apach3.http.util.VersionInfo; /** * Default implementation of {@link HttpClient} pre-configured for most common use scenarios. * <p> * Please see the Javadoc for {@link #createHttpProcessor()} for the details of the interceptors * that are set up by default. * <p> * Additional interceptors can be added as follows, but * take care not to add the same interceptor more than once. 
* <pre> * DefaultHttpClient httpclient = new DefaultHttpClient(); * httpclient.addRequestInterceptor(new RequestAcceptEncoding()); * httpclient.addResponseInterceptor(new ResponseContentEncoding()); * </pre> * <p> * This class sets up the following parameters if not explicitly set: * <ul> * <li>Version: HttpVersion.HTTP_1_1</li> * <li>ContentCharset: HTTP.DEFAULT_CONTENT_CHARSET</li> * <li>NoTcpDelay: true</li> * <li>SocketBufferSize: 8192</li> * <li>UserAgent: Apache-HttpClient/release (java 1.5)</li> * </ul> * <p> * The following parameters can be used to customize the behavior of this * class: * <ul> * <li>{@link org.apach3.http.params.CoreProtocolPNames#PROTOCOL_VERSION}</li> * <li>{@link org.apach3.http.params.CoreProtocolPNames#STRICT_TRANSFER_ENCODING}</li> * <li>{@link org.apach3.http.params.CoreProtocolPNames#HTTP_ELEMENT_CHARSET}</li> * <li>{@link org.apach3.http.params.CoreProtocolPNames#USE_EXPECT_CONTINUE}</li> * <li>{@link org.apach3.http.params.CoreProtocolPNames#WAIT_FOR_CONTINUE}</li> * <li>{@link org.apach3.http.params.CoreProtocolPNames#USER_AGENT}</li> * <li>{@link org.apach3.http.params.CoreConnectionPNames#TCP_NODELAY}</li> * <li>{@link org.apach3.http.params.CoreConnectionPNames#SO_TIMEOUT}</li> * <li>{@link org.apach3.http.params.CoreConnectionPNames#SO_LINGER}</li> * <li>{@link org.apach3.http.params.CoreConnectionPNames#SO_REUSEADDR}</li> * <li>{@link org.apach3.http.params.CoreConnectionPNames#SOCKET_BUFFER_SIZE}</li> * <li>{@link org.apach3.http.params.CoreConnectionPNames#CONNECTION_TIMEOUT}</li> * <li>{@link org.apach3.http.params.CoreConnectionPNames#MAX_LINE_LENGTH}</li> * <li>{@link org.apach3.http.params.CoreConnectionPNames#MAX_HEADER_COUNT}</li> * <li>{@link org.apach3.http.params.CoreConnectionPNames#STALE_CONNECTION_CHECK}</li> * <li>{@link org.apach3.http.conn.params.ConnRoutePNames#FORCED_ROUTE}</li> * <li>{@link org.apach3.http.conn.params.ConnRoutePNames#LOCAL_ADDRESS}</li> * <li>{@link 
org.apach3.http.conn.params.ConnRoutePNames#DEFAULT_PROXY}</li> * <li>{@link org.apach3.http.cookie.params.CookieSpecPNames#DATE_PATTERNS}</li> * <li>{@link org.apach3.http.cookie.params.CookieSpecPNames#SINGLE_COOKIE_HEADER}</li> * <li>{@link org.apach3.http.auth.params.AuthPNames#CREDENTIAL_CHARSET}</li> * <li>{@link org.apach3.http.client.params.ClientPNames#COOKIE_POLICY}</li> * <li>{@link org.apach3.http.client.params.ClientPNames#HANDLE_AUTHENTICATION}</li> * <li>{@link org.apach3.http.client.params.ClientPNames#HANDLE_REDIRECTS}</li> * <li>{@link org.apach3.http.client.params.ClientPNames#MAX_REDIRECTS}</li> * <li>{@link org.apach3.http.client.params.ClientPNames#ALLOW_CIRCULAR_REDIRECTS}</li> * <li>{@link org.apach3.http.client.params.ClientPNames#VIRTUAL_HOST}</li> * <li>{@link org.apach3.http.client.params.ClientPNames#DEFAULT_HOST}</li> * <li>{@link org.apach3.http.client.params.ClientPNames#DEFAULT_HEADERS}</li> * <li>{@link org.apach3.http.client.params.ClientPNames#CONN_MANAGER_TIMEOUT}</li> * </ul> * * @since 4.0 */ @ThreadSafe public class DefaultHttpClient extends AbstractHttpClient { /** * Creates a new HTTP client from parameters and a connection manager. * * @param params the parameters * @param conman the connection manager */ public DefaultHttpClient( final ClientConnectionManager conman, final HttpParams params) { super(conman, params); } /** * @since 4.1 */ public DefaultHttpClient( final ClientConnectionManager conman) { super(conman, null); } public DefaultHttpClient(final HttpParams params) { super(null, params); } public DefaultHttpClient() { super(null, null); } /** * Creates the default set of HttpParams by invoking {@link DefaultHttpClient#setDefaultHttpParams(HttpParams)} * * @return a new instance of {@link SyncBasicHttpParams} with the defaults applied to it. 
*/ @Override protected HttpParams createHttpParams() { HttpParams params = new SyncBasicHttpParams(); setDefaultHttpParams(params); return params; } /** * Saves the default set of HttpParams in the provided parameter. * These are: * <ul> * <li>{@link CoreProtocolPNames#PROTOCOL_VERSION}: 1.1</li> * <li>{@link CoreProtocolPNames#HTTP_CONTENT_CHARSET}: ISO-8859-1</li> * <li>{@link CoreConnectionPNames#TCP_NODELAY}: true</li> * <li>{@link CoreConnectionPNames#SOCKET_BUFFER_SIZE}: 8192</li> * <li>{@link CoreProtocolPNames#USER_AGENT}: Apache-HttpClient/<release> (java 1.5)</li> * </ul> */ public static void setDefaultHttpParams(HttpParams params) { HttpProtocolParams.setVersion(params, HttpVersion.HTTP_1_1); HttpProtocolParams.setContentCharset(params, HTTP.DEF_CONTENT_CHARSET.name()); HttpConnectionParams.setTcpNoDelay(params, true); HttpConnectionParams.setSocketBufferSize(params, 8192); // determine the release version from packaged version info final VersionInfo vi = VersionInfo.loadVersionInfo ("org.apach3.http.client", DefaultHttpClient.class.getClassLoader()); final String release = (vi != null) ? vi.getRelease() : VersionInfo.UNAVAILABLE; HttpProtocolParams.setUserAgent(params, "Apache-HttpClient/" + release + " (java 1.5)"); } /** * Create the processor with the following interceptors: * <ul> * <li>{@link RequestDefaultHeaders}</li> * <li>{@link RequestContent}</li> * <li>{@link RequestTargetHost}</li> * <li>{@link RequestClientConnControl}</li> * <li>{@link RequestUserAgent}</li> * <li>{@link RequestExpectContinue}</li> * <li>{@link RequestAddCookies}</li> * <li>{@link ResponseProcessCookies}</li> * <li>{@link RequestAuthCache}</li> * <li>{@link RequestTargetAuthentication}</li> * <li>{@link RequestProxyAuthentication}</li> * </ul> * <p> * @return the processor with the added interceptors. 
*/ @Override protected BasicHttpProcessor createHttpProcessor() { BasicHttpProcessor httpproc = new BasicHttpProcessor(); httpproc.addInterceptor(new RequestDefaultHeaders()); // Required protocol interceptors httpproc.addInterceptor(new RequestContent()); httpproc.addInterceptor(new RequestTargetHost()); // Recommended protocol interceptors httpproc.addInterceptor(new RequestClientConnControl()); httpproc.addInterceptor(new RequestUserAgent()); httpproc.addInterceptor(new RequestExpectContinue()); // HTTP state management interceptors httpproc.addInterceptor(new RequestAddCookies()); httpproc.addInterceptor(new ResponseProcessCookies()); // HTTP authentication interceptors httpproc.addInterceptor(new RequestAuthCache()); httpproc.addInterceptor(new RequestTargetAuthentication()); httpproc.addInterceptor(new RequestProxyAuthentication()); return httpproc; } }
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.artemis.tests.integration.jms; import org.apache.activemq.artemis.tests.unit.util.InVMNamingContext; import org.apache.activemq.artemis.core.registry.JndiBindingRegistry; import org.junit.Before; import org.junit.After; import org.junit.Test; import static java.util.concurrent.TimeUnit.SECONDS; import java.util.ArrayList; import java.util.Date; import java.util.concurrent.CountDownLatch; import javax.jms.Connection; import javax.jms.ConnectionFactory; import javax.jms.Destination; import javax.jms.ExceptionListener; import javax.jms.JMSException; import javax.jms.Message; import javax.jms.MessageConsumer; import javax.jms.MessageListener; import javax.jms.MessageProducer; import javax.jms.Queue; import javax.jms.Session; import javax.naming.Context; import org.junit.Assert; import org.apache.activemq.artemis.api.core.TransportConfiguration; import org.apache.activemq.artemis.core.config.Configuration; import org.apache.activemq.artemis.core.server.ActiveMQServer; import org.apache.activemq.artemis.jms.server.JMSServerManager; import org.apache.activemq.artemis.jms.server.config.ConnectionFactoryConfiguration; import 
org.apache.activemq.artemis.jms.server.config.JMSConfiguration;
import org.apache.activemq.artemis.jms.server.config.impl.ConnectionFactoryConfigurationImpl;
import org.apache.activemq.artemis.jms.server.config.impl.JMSConfigurationImpl;
import org.apache.activemq.artemis.jms.server.config.impl.JMSQueueConfigurationImpl;
import org.apache.activemq.artemis.jms.server.impl.JMSServerManagerImpl;
import org.apache.activemq.artemis.tests.integration.IntegrationTestLogger;
import org.apache.activemq.artemis.tests.util.ServiceTestBase;

/**
 * Verifies that a JMS client can manually reconnect to a single server after the
 * server is stopped and restarted: the registered {@link ExceptionListener} tears
 * down the dead connection and builds a new one, and message delivery resumes.
 */
public class ManualReconnectionToSingleServerTest extends ServiceTestBase {
   // Constants -----------------------------------------------------

   private static final IntegrationTestLogger log = IntegrationTestLogger.LOGGER;

   // Current consumer-side connection; replaced by connect() after a failure.
   private Connection connection;

   private MessageConsumer consumer;

   // Counted down when the ExceptionListener fires (server went away).
   private CountDownLatch exceptionLatch;

   // Counted down once the listener has finished re-establishing the connection.
   private CountDownLatch reconnectionLatch;

   // Counted down by Listener when all NUM messages have been consumed.
   private CountDownLatch allMessagesReceived;

   private JMSServerManager serverManager;

   private InVMNamingContext context;

   private static final String QUEUE_NAME = ManualReconnectionToSingleServerTest.class.getSimpleName() + ".queue";

   // Total number of messages produced; the server restart happens halfway through.
   private static final int NUM = 20;

   // On connection failure: record the exception, drop the old connection and
   // rebuild a fresh one, then signal that reconnection completed.
   private final ExceptionListener exceptionListener = new ExceptionListener() {
      public void onException(final JMSException e) {
         exceptionLatch.countDown();
         disconnect();
         connect();
         reconnectionLatch.countDown();
      }
   };

   private Listener listener;

   private ActiveMQServer server;

   @Test
   public void testExceptionListener() throws Exception {
      connect();

      ConnectionFactory cf = (ConnectionFactory)context.lookup("/cf");
      Destination dest = (Destination)context.lookup(QUEUE_NAME);
      Connection conn = cf.createConnection();
      Session sess = conn.createSession(false, Session.AUTO_ACKNOWLEDGE);
      MessageProducer prod = sess.createProducer(dest);

      for (int i = 0; i < NUM; i++) {
         Message message = sess.createTextMessage(new Date().toString());
         message.setIntProperty("counter", i + 1);
         prod.send(message);

         // Halfway through, bounce the server to trigger the consumer's
         // ExceptionListener, then rebuild the producer-side objects too.
         if (i == NUM / 2) {
            conn.close();
            serverManager.stop();
            Thread.sleep(5000);
            serverManager.start();
            cf = (ConnectionFactory)context.lookup("/cf");
            dest = (Destination)context.lookup(QUEUE_NAME);
            conn = cf.createConnection();
            sess = conn.createSession(false, Session.AUTO_ACKNOWLEDGE);
            prod = sess.createProducer(dest);
         }
      }

      conn.close();

      // The three latches encode the expected sequence: failure observed,
      // client reconnected, and every message eventually delivered.
      boolean gotException = exceptionLatch.await(10, SECONDS);
      Assert.assertTrue(gotException);

      boolean clientReconnected = reconnectionLatch.await(10, SECONDS);
      Assert.assertTrue("client did not reconnect after server was restarted", clientReconnected);

      boolean gotAllMessages = allMessagesReceived.await(10, SECONDS);
      Assert.assertTrue(gotAllMessages);

      connection.close();
   }

   // Package protected ---------------------------------------------

   // Protected -----------------------------------------------------

   @Override
   @Before
   public void setUp() throws Exception {
      super.setUp();

      context = new InVMNamingContext();

      Configuration conf = createBasicConfig()
         .addAcceptorConfiguration(new TransportConfiguration(NETTY_ACCEPTOR_FACTORY));
      server = createServer(false, conf);

      JMSConfiguration configuration = new JMSConfigurationImpl();
      serverManager = new JMSServerManagerImpl(server, configuration);
      serverManager.setRegistry(new JndiBindingRegistry(context));

      configuration.getQueueConfigurations().add(new JMSQueueConfigurationImpl().setName(QUEUE_NAME).setBindings(QUEUE_NAME));

      ArrayList<TransportConfiguration> configs = new ArrayList<TransportConfiguration>();
      configs.add(new TransportConfiguration(NETTY_CONNECTOR_FACTORY));
      // Infinite reconnect attempts so the client can outlive the server restart.
      ConnectionFactoryConfiguration cfConfig = new ConnectionFactoryConfigurationImpl()
         .setName("cf")
         .setConnectorNames(registerConnectors(server, configs))
         .setBindings("/cf")
         .setRetryInterval(1000)
         .setReconnectAttempts(-1);
      configuration.getConnectionFactoryConfigurations().add(cfConfig);
      serverManager.start();

      listener = new Listener();

      exceptionLatch = new CountDownLatch(1);
      reconnectionLatch = new CountDownLatch(1);
      allMessagesReceived = new CountDownLatch(1);
   }

   @Override
   @After
   public void tearDown() throws Exception {
      try {
         serverManager.stop();
         serverManager = null;
         if (connection != null) {
            connection.close();
         }
         connection = null;
      } finally {
         super.tearDown();
      }
   }

   // Private -------------------------------------------------------

   // Inner classes -------------------------------------------------

   // Closes the current connection (if any), detaching the exception listener
   // first so the close itself does not re-trigger reconnection.
   protected void disconnect() {
      ManualReconnectionToSingleServerTest.log.info("calling disconnect");
      if (connection == null) {
         ManualReconnectionToSingleServerTest.log.info("connection is null");
         return;
      }

      try {
         connection.setExceptionListener(null);
         ManualReconnectionToSingleServerTest.log.info("closing the connection");
         connection.close();
         connection = null;
         ManualReconnectionToSingleServerTest.log.info("connection closed");
      } catch (Exception e) {
         ManualReconnectionToSingleServerTest.log.info("** got exception");
         e.printStackTrace();
      }
   }

   // (Re)builds the consumer-side connection, retrying JNDI lookups until the
   // server is reachable again (up to retryLimit attempts, 100ms apart).
   protected void connect() {
      int retries = 0;
      final int retryLimit = 1000;
      try {
         if (context == null) {
            return;
         }
         Context initialContext = context;
         Queue queue;
         ConnectionFactory cf;
         while (true) {
            try {
               queue = (Queue)initialContext.lookup(QUEUE_NAME);
               cf = (ConnectionFactory)initialContext.lookup("/cf");
               break;
            } catch (Exception e) {
               if (retries++ > retryLimit)
                  throw e;
               // retry until server is up
               Thread.sleep(100);
            }
         }
         connection = cf.createConnection();
         connection.setExceptionListener(exceptionListener);
         Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
         consumer = session.createConsumer(queue);
         consumer.setMessageListener(listener);
         connection.start();
      } catch (Exception e) {
         // Best-effort cleanup: a failed (re)connect leaves no half-open connection.
         if (connection != null) {
            try {
               connection.close();
            } catch (JMSException e1) {
               e1.printStackTrace();
            }
         }
      }
   }

   // Counts consumed messages and releases allMessagesReceived once all NUM arrived.
   private class Listener implements MessageListener {
      private int count = 0;

      public void onMessage(final Message msg) {
         count++;
         try {
            msg.getIntProperty("counter");
         } catch (JMSException e) {
            e.printStackTrace();
         }
         if (count == NUM) {
            allMessagesReceived.countDown();
         }
      }
   }
}
/* * Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.spi.impl.operationservice.impl; import com.hazelcast.internal.util.ConcurrencyDetection; import com.hazelcast.internal.util.ThreadLocalRandomProvider; import com.hazelcast.logging.ILogger; import com.hazelcast.spi.impl.operationservice.BackupAwareOperation; import com.hazelcast.spi.impl.operationservice.UrgentSystemOperation; import com.hazelcast.spi.impl.sequence.CallIdFactory; import com.hazelcast.spi.impl.sequence.CallIdSequence; import com.hazelcast.spi.properties.HazelcastProperties; import java.util.Random; import java.util.concurrent.atomic.AtomicInteger; import static com.hazelcast.spi.properties.ClusterProperty.BACKPRESSURE_BACKOFF_TIMEOUT_MILLIS; import static com.hazelcast.spi.properties.ClusterProperty.BACKPRESSURE_ENABLED; import static com.hazelcast.spi.properties.ClusterProperty.BACKPRESSURE_MAX_CONCURRENT_INVOCATIONS_PER_PARTITION; import static com.hazelcast.spi.properties.ClusterProperty.BACKPRESSURE_SYNCWINDOW; import static com.hazelcast.spi.properties.ClusterProperty.OPERATION_BACKUP_TIMEOUT_MILLIS; import static com.hazelcast.spi.properties.ClusterProperty.PARTITION_COUNT; import static java.lang.Math.max; import static java.lang.Math.round; import static java.lang.String.format; import static java.util.concurrent.TimeUnit.MINUTES; /** * The BackpressureRegulator is responsible for regulating invocation 'pressure'. 
If it sees that the system * is getting overloaded, it will apply back pressure so the the system won't crash. * <p> * The BackpressureRegulator is responsible for regulating invocation pressure on the Hazelcast system to prevent it from * crashing on overload. Most Hazelcast invocations on Hazelcast are simple; you do (for example) a map.get and you wait for the * response (synchronous call) so you won't get more requests than you have threads. * <p> * But if there is no balance between the number of invocations and the number of threads, then it is very easy to produce * more invocations that the system can handle. To prevent the system crashing under overload, back pressure is applied * so that the invocation pressure is bound to a certain maximum and can't lead to the system crashing. * <p> * The BackpressureRegulator needs to be hooked into 2 parts: * <ol> * <li>when a new invocation is about to be made. If there are too many requests, then the invocation is delayed * until there is space or eventually a timeout happens and the {@link com.hazelcast.core.HazelcastOverloadException} * is thrown. * </li> * <li> * when asynchronous backups are made. In this case, we rely on periodically making the async backups sync. By * doing this, we force the invocation to wait for operation queues to drain and this prevents them from getting * overloaded. * </li> * </ol> */ class BackpressureRegulator { /** * The percentage above and below a certain sync-window we should randomize. 
*/ static final float RANGE = 0.25f; private final AtomicInteger syncCountdown = new AtomicInteger(); private final boolean enabled; private final boolean disabled; private final int syncWindow; private final int partitionCount; private final int maxConcurrentInvocations; private final int backoffTimeoutMs; BackpressureRegulator(HazelcastProperties properties, ILogger logger) { this.enabled = properties.getBoolean(BACKPRESSURE_ENABLED); this.disabled = !enabled; this.partitionCount = properties.getInteger(PARTITION_COUNT); this.syncWindow = getSyncWindow(properties); this.syncCountdown.set(syncWindow); this.maxConcurrentInvocations = getMaxConcurrentInvocations(properties); this.backoffTimeoutMs = getBackoffTimeoutMs(properties); if (enabled) { logger.info("Backpressure is enabled" + ", maxConcurrentInvocations:" + maxConcurrentInvocations + ", syncWindow: " + syncWindow); int backupTimeoutMillis = properties.getInteger(OPERATION_BACKUP_TIMEOUT_MILLIS); if (backupTimeoutMillis < MINUTES.toMillis(1)) { logger.warning( format("Back pressure is enabled, but '%s' is too small. 
", OPERATION_BACKUP_TIMEOUT_MILLIS.getName())); } } else { logger.info("Backpressure is disabled"); } } int syncCountDown() { return syncCountdown.get(); } private int getSyncWindow(HazelcastProperties props) { int syncWindow = props.getInteger(BACKPRESSURE_SYNCWINDOW); if (enabled && syncWindow <= 0) { throw new IllegalArgumentException("Can't have '" + BACKPRESSURE_SYNCWINDOW + "' with a value smaller than 1"); } return syncWindow; } private int getBackoffTimeoutMs(HazelcastProperties props) { int backoffTimeoutMs = (int) props.getMillis(BACKPRESSURE_BACKOFF_TIMEOUT_MILLIS); if (enabled && backoffTimeoutMs < 0) { throw new IllegalArgumentException("Can't have '" + BACKPRESSURE_BACKOFF_TIMEOUT_MILLIS + "' with a value smaller than 0"); } return backoffTimeoutMs; } private int getMaxConcurrentInvocations(HazelcastProperties props) { if (disabled) { return Integer.MAX_VALUE; } int invocationsPerPartition = props.getInteger(BACKPRESSURE_MAX_CONCURRENT_INVOCATIONS_PER_PARTITION); if (invocationsPerPartition < 1) { throw new IllegalArgumentException("Can't have '" + BACKPRESSURE_MAX_CONCURRENT_INVOCATIONS_PER_PARTITION + "' with a value smaller than 1"); } return (partitionCount + 1) * invocationsPerPartition; } /** * Checks if back-pressure is enabled. * <p> * This method is only used for testing. */ boolean isEnabled() { return enabled; } int getMaxConcurrentInvocations() { if (enabled) { return maxConcurrentInvocations; } else { return Integer.MAX_VALUE; } } CallIdSequence newCallIdSequence(ConcurrencyDetection concurrencyDetection) { return CallIdFactory.newCallIdSequence(maxConcurrentInvocations, backoffTimeoutMs, concurrencyDetection); } /** * Checks if a sync is forced for the given BackupAwareOperation. * <p> * Once and a while for every BackupAwareOperation with one or more async backups, these async backups are transformed * into a sync backup. 
* * @param backupAwareOp the BackupAwareOperation to check * @return {@code true} if a sync needs to be forced, {@code false} otherwise */ boolean isSyncForced(BackupAwareOperation backupAwareOp) { if (disabled) { return false; } // if there are no asynchronous backups, there is nothing to regulate. if (backupAwareOp.getAsyncBackupCount() == 0) { return false; } if (backupAwareOp instanceof UrgentSystemOperation) { return false; } for (; ; ) { int current = syncCountdown.decrementAndGet(); if (current > 0) { return false; } if (syncCountdown.compareAndSet(current, randomSyncDelay())) { return true; } } } private int randomSyncDelay() { if (syncWindow == 1) { return 1; } Random random = ThreadLocalRandomProvider.get(); int randomSyncWindow = round((1 - RANGE) * syncWindow + random.nextInt(round(2 * RANGE * syncWindow))); return max(1, randomSyncWindow); } }
package AST;

import java.util.HashSet;
import java.util.LinkedHashSet;
import java.io.File;
import java.util.*;
import beaver.*;
import java.util.ArrayList;
import java.util.zip.*;
import java.io.*;
import java.util.Stack;
import java.util.regex.Pattern;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.Transformer;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Element;
import org.w3c.dom.Document;
import java.util.HashMap;
import java.util.Map.Entry;
import javax.xml.transform.TransformerException;
import javax.xml.parsers.ParserConfigurationException;
import java.util.Collection;

/**
 * AST node for an access to a variable (local, parameter, field, or — in the GOP
 * extension — a context variable) by simple name.
 * <p>
 * NOTE(review): this class is machine-generated by JastAdd from .jrag/.jadd aspect
 * files (see the {@code @declaredat} tags below) — manual edits are presumably
 * overwritten on regeneration; confirm the build setup before editing.
 * The {@code *_computed}/{@code *_value}/{@code *_visited} field triples implement
 * JastAdd's memoized (and, for {@code isConstant}, circular) attribute evaluation —
 * their exact update order is part of the evaluation protocol.
 *
 * @ast node
 * @declaredat java.ast:16
 */
public class VarAccess extends Access implements Cloneable {
  /**
   * Resets all memoized attribute caches on this node.
   * @apilvl low-level
   */
  public void flushCache() {
    super.flushCache();
    isConstant_visited = -1;
    isConstant_computed = false;
    isConstant_initialized = false;
    isDAafter_Variable_values = null;
    decls_computed = false;
    decls_value = null;
    decl_computed = false;
    decl_value = null;
    isFieldAccess_computed = false;
    type_computed = false;
    type_value = null;
  }
  /**
   * @apilvl internal
   */
  public void flushCollectionCache() {
    super.flushCollectionCache();
  }
  /**
   * Clones this node with all attribute caches cleared on the copy.
   * @apilvl internal
   */
  @SuppressWarnings({"unchecked", "cast"})
  public VarAccess clone() throws CloneNotSupportedException {
    VarAccess node = (VarAccess)super.clone();
    node.isConstant_visited = -1;
    node.isConstant_computed = false;
    node.isConstant_initialized = false;
    node.isDAafter_Variable_values = null;
    node.decls_computed = false;
    node.decls_value = null;
    node.decl_computed = false;
    node.decl_value = null;
    node.isFieldAccess_computed = false;
    node.type_computed = false;
    node.type_value = null;
    node.in$Circle(false);
    node.is$Final(false);
    return node;
  }
  /**
   * Shallow copy: clones this node and its children array, but not the children themselves.
   * @apilvl internal
   */
  @SuppressWarnings({"unchecked", "cast"})
  public VarAccess copy() {
    try {
      VarAccess node = (VarAccess)clone();
      if(children != null)
        node.children = (ASTNode[])children.clone();
      return node;
    } catch (CloneNotSupportedException e) {
    }
    System.err.println("Error: Could not clone node of type " + getClass().getName() + "!");
    return null;
  }
  /**
   * Deep copy of the whole subtree rooted at this node.
   * @apilvl low-level
   */
  @SuppressWarnings({"unchecked", "cast"})
  public VarAccess fullCopy() {
    VarAccess res = (VarAccess)copy();
    for(int i = 0; i < getNumChildNoTransform(); i++) {
      ASTNode node = getChildNoTransform(i);
      if(node != null) node = node.fullCopy();
      res.setChild(node, i);
    }
    return res;
  }
  /**
   * Reports definite-assignment / definite-unassignment errors for this access
   * (JLS chapter 16): reads of unassigned locals/blank finals, and writes that
   * would assign a final more than once.
   * @ast method
   * @aspect DefiniteAssignment
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\DefiniteAssignment.jrag:94
   */
  public void definiteAssignment() {
    if(isSource()) {
      if(decl() instanceof VariableDeclaration) {
        VariableDeclaration v = (VariableDeclaration)decl();
        //System.err.println("Is " + v + " final? " + v.isFinal() + ", DAbefore: " + isDAbefore(v));
        if(v.isValue()) {
        }
        else if(v.isBlankFinal()) {
          //if(!isDAbefore(v) && !v.hasInit() && !v.getInit().isConstant())
          if(!isDAbefore(v))
            error("Final variable " + v.name() + " is not assigned before used");
        }
        else {
          //if(!v.hasInit() && !isDAbefore(v)) {
          if(!isDAbefore(v))
            error("Local variable " + v.name() + " in not assigned before used");
        }
      }
      else if(decl() instanceof FieldDeclaration && !isQualified()) {
        FieldDeclaration f = (FieldDeclaration)decl();
        //if(f.isFinal() && f.isInstanceVariable() && !isDAbefore(f)) {
        //if(f.isFinal() && !isDAbefore(f) && (!f.hasInit() || !f.getInit().isConstant())) {
        //if(f.isFinal() && (!f.hasInit() || !f.getInit().isConstant()) && !isDAbefore(f)) {
        if(f.isFinal() && !f.hasInit() && !isDAbefore(f)) {
          error("Final field " + f + " is not assigned before used");
        }
      }
    }
    if(isDest()) {
      Variable v = decl();
      // Blank final field
      if(v.isFinal() && v.isBlank() && !hostType().instanceOf(v.hostType()))
        error("The final variable is not a blank final in this context, so it may not be assigned.");
      else if(v.isFinal() && isQualified() && (!qualifier().isThisAccess() || ((Access)qualifier()).isQualified()))
        error("the blank final field " + v.name() + " may only be assigned by simple name");
      // local variable or parameter
      else if(v instanceof VariableDeclaration) {
        VariableDeclaration var = (VariableDeclaration)v;
        //System.out.println("### is variable");
        if(!var.isValue() && var.getParent().getParent().getParent() instanceof SwitchStmt && var.isFinal()) {
          if(!isDUbefore(var))
            error("Final variable " + var.name() + " may only be assigned once");
        }
        else if(var.isValue()) {
          if(var.hasInit() || !isDUbefore(var))
            error("Final variable " + var.name() + " may only be assigned once");
        }
        else if(var.isBlankFinal()) {
          if(var.hasInit() || !isDUbefore(var))
            error("Final variable " + var.name() + " may only be assigned once");
        }
        // NOTE(review): the following check has an empty body in the generated
        // source (the alternative condition is commented out) — dead code kept as-is.
        if(var.isFinal() && (var.hasInit() || !isDUbefore(var))) {
        //if(var.isFinal() && ((var.hasInit() && var.getInit().isConstant()) || !isDUbefore(var))) {
        }
      }
      // field
      else if(v instanceof FieldDeclaration) {
        FieldDeclaration f = (FieldDeclaration)v;
        if(f.isFinal()) {
          if(f.hasInit())
            error("initialized field " + f.name() + " can not be assigned");
          else {
            BodyDecl bodyDecl = enclosingBodyDecl();
            if(!(bodyDecl instanceof ConstructorDecl) && !(bodyDecl instanceof InstanceInitializer) && !(bodyDecl instanceof StaticInitializer) && !(bodyDecl instanceof FieldDeclaration))
              error("final field " + f.name() + " may only be assigned in constructors and initializers");
            else if(!isDUbefore(f))
              error("Final field " + f.name() + " may only be assigned once");
          }
        }
      }
      else if(v instanceof ParameterDeclaration) {
        ParameterDeclaration p = (ParameterDeclaration)v;
        // 8.4.1
        if(p.isFinal()) {
          error("Final parameter " + p.name() + " may not be assigned");
        }
      }
    }
  }
  /**
   * A write to the checked variable means it is not "definitely unassigned" everywhere.
   * @ast method
   * @aspect DA
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\DefiniteAssignment.jrag:458
   */
  protected boolean checkDUeverywhere(Variable v) {
    if(isDest() && decl() == v)
      return false;
    return super.checkDUeverywhere(v);
  }
  /**
   * GOP extension: records this access on its field's fieldToVar list when the host
   * type is a shadow class and the field has a same-named counterpart.
   * @ast method
   * @aspect ExternalVars
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\GOP.jrag:1747
   */
  public void registerVariables(){
    if(hostType().isShadowClassDecl()&&(decl() instanceof FieldDeclaration)&&((FieldDeclaration)decl()).hasSameField()){
      FieldDeclaration f=(FieldDeclaration)decl();
      if(!f.fieldToVar.contains(this))
        f.fieldToVar.add(this);
    }
  }
  /**
   * Walks up the AST to find the enclosing BodyDecl below type t (or below the
   * Program root); returns null if the found ancestor is not a BodyDecl.
   * @ast method
   * @aspect NameCheck
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\NameCheck.jrag:212
   */
  public BodyDecl closestBodyDecl(TypeDecl t) {
    ASTNode node = this;
    while(!(node.getParent().getParent() instanceof Program) && node.getParent().getParent() != t) {
      node = node.getParent();
    }
    if(node instanceof BodyDecl)
      return (BodyDecl)node;
    return null;
  }
  /**
   * Convenience constructor with source positions.
   * @ast method
   * @aspect NodeConstructors
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\NodeConstructors.jrag:38
   */
  public VarAccess(String name, int start, int end) {
    this(name);
    this.start = start;
    this.end = end;
  }
  /**
   * Pretty-prints this access as its simple name.
   * @ast method
   * @aspect PrettyPrint
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\PrettyPrint.jadd:580
   */
  public void toString(StringBuffer s) {
    s.append(name());
  }
  /**
   * Emits a store to the accessed variable (local slot, parameter slot, or field;
   * private fields accessed across class boundaries go through a generated accessor).
   * @ast method
   * @aspect CodeGeneration
   * @declaredat D:\zhh\JastAddJ\Java1.4Backend\CodeGeneration.jrag:717
   */
  public void refined_CodeGeneration_VarAccess_emitStore(CodeGeneration gen) {
    Variable v = decl();
    if(v instanceof VariableDeclaration) {
      VariableDeclaration decl = (VariableDeclaration)v;
      if(isDUbefore(v))
        gen.addLocalVariableEntryAtCurrentPC(decl.name(), decl.type().typeDescriptor(), decl.localNum(), decl.variableScopeEndLabel(gen));
      decl.type().emitStoreLocal(gen, decl.localNum());
    }
    else if(v instanceof ParameterDeclaration) {
      ParameterDeclaration decl = (ParameterDeclaration)v;
      decl.type().emitStoreLocal(gen, decl.localNum());
    }
    else if(v instanceof FieldDeclaration) {
      FieldDeclaration f = (FieldDeclaration)v;
      if(f.isPrivate() && !hostType().hasField(v.name()))
        f.createAccessorWrite(fieldQualifierType()).emitInvokeMethod(gen, fieldQualifierType());
      else
        f.emitStoreField(gen, fieldQualifierType());
    }
  }
  /**
   * @ast method
   * @aspect CreateBCode
   * @declaredat D:\zhh\JastAddJ\Java1.4Backend\CreateBCode.jrag:617
   */
  public void createAssignSimpleLoadDest(CodeGeneration gen) {
    createLoadQualifier(gen);
  }
  /**
   * Duplicates the assigned value on the operand stack so the assignment expression
   * itself yields a value (dup_x1 when a field receiver sits below it).
   * @ast method
   * @aspect CreateBCode
   * @declaredat D:\zhh\JastAddJ\Java1.4Backend\CreateBCode.jrag:631
   */
  public void createPushAssignmentResult(CodeGeneration gen) {
    if(hostType().needsAccessorFor(decl()))
      return;
    if(decl().isInstanceVariable())
      type().emitDup_x1(gen);
    else
      type().emitDup(gen);
  }
  /**
   * Loads the current value of the destination for a compound assignment.
   * @ast method
   * @aspect CreateBCode
   * @declaredat D:\zhh\JastAddJ\Java1.4Backend\CreateBCode.jrag:649
   */
  public void refined_CreateBCode_VarAccess_createAssignLoadDest(CodeGeneration gen) {
    createLoadQualifier(gen);
    Variable v = decl();
    if(v.isInstanceVariable())
      gen.emitDup();
    if(v instanceof VariableDeclaration) {
      VariableDeclaration decl = (VariableDeclaration)v;
      decl.type().emitLoadLocal(gen, decl.localNum());
    }
    else if(v instanceof ParameterDeclaration) {
      ParameterDeclaration decl = (ParameterDeclaration)v;
      decl.type().emitLoadLocal(gen, decl.localNum());
    }
    else if(v instanceof FieldDeclaration) {
      FieldDeclaration f = (FieldDeclaration)v;
      if(requiresAccessor())
        f.createAccessor(fieldQualifierType()).emitInvokeMethod(gen, fieldQualifierType());
      else
        f.emitLoadField(gen, fieldQualifierType());
    }
    // ==================GOP==================================
    else if(v instanceof ContextVarDeclaration) {
      ContextVarDeclaration c=(ContextVarDeclaration)v;
      c.emitLoadField(gen,fieldQualifierType());
    }
    //================END=====================================
  }
  /**
   * Emits a read of the accessed variable: locals/parameters by slot (through the
   * nested-class capture path when declared in an outer type), fields by getfield,
   * accessor, or inlined constant value.
   * @ast method
   * @aspect CreateBCode
   * @declaredat D:\zhh\JastAddJ\Java1.4Backend\CreateBCode.jrag:706
   */
  public void refined_CreateBCode_VarAccess_createBCode(CodeGeneration gen) {
    Variable v = decl();
    if(v instanceof VariableDeclaration) {
      VariableDeclaration decl = (VariableDeclaration)v;
      if(decl.hostType() == hostType())
        decl.type().emitLoadLocal(gen, decl.localNum());
      else
        emitLoadLocalInNestedClass(gen, decl);
    }
    else if(v instanceof ParameterDeclaration) {
      ParameterDeclaration decl = (ParameterDeclaration)v;
      if(decl.hostType() == hostType())
        decl.type().emitLoadLocal(gen, decl.localNum());
      else
        emitLoadLocalInNestedClass(gen, decl);
    }
    else if(v instanceof FieldDeclaration) {
      FieldDeclaration f = (FieldDeclaration)v;
      createLoadQualifier(gen);
      if(f.isConstant() && (f.type().isPrimitive() || f.type().isString())) {
        if(!f.isStatic())
          fieldQualifierType().emitPop(gen);
        f.constant().createBCode(gen);
      }
      else if(requiresAccessor())
        f.createAccessor(fieldQualifierType()).emitInvokeMethod(gen, fieldQualifierType());
      else
        f.emitLoadField(gen, fieldQualifierType());
    }
    // ===================GOP=========================================
    else if(v instanceof ContextVarDeclaration) {
      ContextVarDeclaration f=(ContextVarDeclaration)v;
      createLoadQualifier(gen);
      //if(requiresAccessor())
      //  f.createAccessor(fieldQualifierType()).emitInvokeMethod(gen, fieldQualifierType());
      //else
      f.emitLoadField(gen, fieldQualifierType());
    }
    //==================END===========================================
  }
  /**
   * Pushes the receiver for a field access (explicit qualifier or implicit this);
   * a qualifier of a static field is still evaluated for its side effects, then popped.
   * @ast method
   * @aspect CreateBCode
   * @declaredat D:\zhh\JastAddJ\Java1.4Backend\CreateBCode.jrag:773
   */
  protected void createLoadQualifier(CodeGeneration gen) {
    Variable v = decl();
    if(v instanceof FieldDeclaration) {
      FieldDeclaration f = (FieldDeclaration)v;
      if(hasPrevExpr()) {
        // load explicit qualifier
        prevExpr().createBCode(gen);
        // pop qualifier stack element for class variables
        // this qualifier must be computed to ensure side effects
        if(!prevExpr().isTypeAccess() && f.isClassVariable())
          prevExpr().type().emitPop(gen);
      }
      else if(f.isInstanceVariable()) {
        emitThis(gen, fieldQualifierType());
      }
    }
    // ==============================GOP===============================
    if(v instanceof ContextVarDeclaration){
      ContextVarDeclaration f=(ContextVarDeclaration)v;
      emitThis(gen, fieldQualifierType());
    }
  }
  /**
   * Finds the type that qualifies the field access: the qualifier's type, else the
   * innermost enclosing type declaring a same-named field, else the declaring type.
   * @ast method
   * @aspect InnerClasses
   * @declaredat D:\zhh\JastAddJ\Java1.4Backend\InnerClasses.jrag:54
   */
  private TypeDecl refined_InnerClasses_VarAccess_fieldQualifierType() {
    if(hasPrevExpr())
      return prevExpr().type();
    TypeDecl typeDecl = hostType();
    while(typeDecl != null && !typeDecl.hasField(name()))
      typeDecl = typeDecl.enclosingType();
    if(typeDecl != null)
      return typeDecl;
    return decl().hostType();
  }
  /**
   * Collects locals/parameters of typeDecl captured by this access (for inner-class
   * variable capture).
   * @ast method
   * @aspect InnerClasses
   * @declaredat D:\zhh\JastAddJ\Java1.4Backend\InnerClasses.jrag:261
   */
  public void collectEnclosingVariables(HashSet set, TypeDecl typeDecl) {
    Variable v = decl();
    if(!v.isInstanceVariable() && !v.isClassVariable() && v.hostType() == typeDecl)
      set.add(v);
    super.collectEnclosingVariables(set, typeDecl);
  }
  /**
   * Pre-generates field accessors (read and/or write) that this access will need.
   * @ast method
   * @aspect Transformations
   * @declaredat D:\zhh\JastAddJ\Java1.4Backend\Transformations.jrag:110
   */
  public void transformation() {
    Variable v = decl();
    if(v instanceof FieldDeclaration) {
      FieldDeclaration f = (FieldDeclaration)v;
      if(requiresAccessor()) {
        TypeDecl typeDecl = fieldQualifierType();
        if(isSource())
          f.createAccessor(typeDecl);
        if(isDest())
          f.createAccessorWrite(typeDecl);
      }
    }
    super.transformation();
  }
  /**
   * Warns on use of a deprecated field from another top-level type, unless suppressed.
   * @ast method
   * @aspect Annotations
   * @declaredat D:\zhh\JastAddJ\Java1.5Frontend\Annotations.jrag:344
   */
  public void checkModifiers() {
    if(decl() instanceof FieldDeclaration) {
      FieldDeclaration f = (FieldDeclaration)decl();
      if(f.isDeprecated() &&
        !withinDeprecatedAnnotation() &&
        hostType().topLevelType() != f.hostType().topLevelType() &&
        !withinSuppressWarnings("deprecation"))
          warning(f.name() + " in " + f.hostType().typeName() + " has been deprecated");
    }
  }
  /**
   * Enum restriction: non-constant static fields of the enum may not be referenced here.
   * @ast method
   * @aspect Enums
   * @declaredat D:\zhh\JastAddJ\Java1.5Frontend\Enums.jrag:466
   */
  protected void checkEnum(EnumDecl enumDecl) {
    super.checkEnum(enumDecl);
    if(decl().isStatic() && decl().hostType() == enumDecl && !isConstant())
      error("may not reference a static field of an enum type from here");
  }
  /**
   * @ast method
   * @declaredat java.ast:1
   */
  public VarAccess() {
    super();
  }
  /**
   * @ast method
   * @declaredat java.ast:7
   */
  public VarAccess(String p0) {
    setID(p0);
  }
  /**
   * @ast method
   * @declaredat java.ast:10
   */
  public VarAccess(beaver.Symbol p0) {
    setID(p0);
  }
  /**
   * @apilvl low-level
   * @ast method
   * @declaredat java.ast:16
   */
  protected int numChildren() {
    return 0;
  }
  /**
   * @apilvl internal
   * @ast method
   * @declaredat java.ast:22
   */
  public boolean mayHaveRewrite() {
    return true;
  }
  /**
   * Setter for lexeme ID
   * @apilvl high-level
   * @ast method
   * @declaredat java.ast:5
   */
  public void setID(String value) {
    tokenString_ID = value;
  }
  /**
   * @apilvl internal
   * @ast method
   * @declaredat java.ast:8
   */
  /**
   * @apilvl internal
   */
  protected String tokenString_ID;
  /**
   * @ast method
   * @declaredat java.ast:9
   */
  public int IDstart;
  /**
   * @ast method
   * @declaredat java.ast:10
   */
  public int IDend;
  /**
   * @ast method
   * @declaredat java.ast:11
   */
  public void setID(beaver.Symbol symbol) {
    if(symbol.value != null && !(symbol.value instanceof String))
      throw new UnsupportedOperationException("setID is only valid for String lexemes");
    tokenString_ID = (String)symbol.value;
    IDstart = symbol.getStart();
    IDend = symbol.getEnd();
  }
  /**
   * Getter for lexeme ID
   * @apilvl high-level
   * @ast method
   * @declaredat java.ast:22
   */
  public String getID() {
    return tokenString_ID != null ? tokenString_ID : "";
  }
  /**
   * Name-analysis checks: unresolved or ambiguous names, illegal access in explicit
   * constructor invocations (JLS 8.8.5.1), GOP inner-class capture rules, and
   * use-before-declaration in initializers (JLS 8.3.2.3).
   * @ast method
   * @aspect GOP
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\GOP.jrag:1320
   */
  public void nameCheck() {
    if(decls().isEmpty() && (!isQualified() || !qualifier().type().isUnknown() || qualifier().isPackageAccess()))
      error("no field named " + name());
    if(decls().size() > 1) {
      StringBuffer s = new StringBuffer();
      s.append("several fields named " + name());
      for(Iterator iter = decls().iterator(); iter.hasNext(); ) {
        Variable v = (Variable)iter.next();
        if(v.hostType()!=null)
          s.append("\n " + v.type().typeName() + "." + v.name() + " declared in " + v.hostType().typeName());
        else {
          s.append("\n " + v.type().typeName() + "." + v.name() + " declared in program publish " );
        }
      }
      error(s.toString());
    }
    // 8.8.5.1
    if(inExplicitConstructorInvocation() && !isQualified() && decl().isInstanceVariable() && hostType() == decl().hostType())
      error("instance variable " + name() + " may not be accessed in an explicit constructor invocation");
    Variable v = decl();
    if(!v.isFinal() && !v.isClassVariable() && !v.isInstanceVariable() && v.hostType()!=null && v.hostType() != hostType() && !v.hostType().name().equals(hostType().name()) && v.hostType().isGranuleDecl() )
      error("A parameter/variable used but not declared in an inner class must be declared final");
    // 8.3.2.3
    if((decl().isInstanceVariable() || decl().isClassVariable()) && !isQualified()) {
      if(hostType() != null && !hostType().declaredBeforeUse(decl(), this)) {
        if(inSameInitializer() && !simpleAssignment() && inDeclaringClass()) {
          BodyDecl b = closestBodyDecl(hostType());
          error("variable " + decl().name() + " is used in " + b + " before it is declared");
        }
      }
    }
  }
  /**
   * Generics refinement of emitStore: stores through the erased field.
   * @ast method
   * @aspect GenericsCodegen
   * @declaredat D:\zhh\JastAddJ\Java1.5Backend\GenericsCodegen.jrag:60
   */
  public void emitStore(CodeGeneration gen) {
    Variable v = decl();
    if(v instanceof FieldDeclaration) {
      FieldDeclaration f = (FieldDeclaration)v;
      f = f.erasedField();
      if(requiresAccessor())
        f.createAccessorWrite(fieldQualifierType()).emitInvokeMethod(gen, fieldQualifierType());
      else
        f.emitStoreField(gen, fieldQualifierType());
    }
    else
      refined_CodeGeneration_VarAccess_emitStore(gen);
  }
  /**
   * Generics refinement of createAssignLoadDest: loads through the erased field.
   * @ast method
   * @aspect GenericsCodegen
   * @declaredat D:\zhh\JastAddJ\Java1.5Backend\GenericsCodegen.jrag:73
   */
  public void createAssignLoadDest(CodeGeneration gen) {
    Variable v = decl();
    if(v instanceof FieldDeclaration) {
      createLoadQualifier(gen);
      if(v.isInstanceVariable())
        gen.emitDup();
      FieldDeclaration f = (FieldDeclaration)v;
      f = f.erasedField();
      if(requiresAccessor())
        f.createAccessor(fieldQualifierType()).emitInvokeMethod(gen, fieldQualifierType());
      else
        f.emitLoadField(gen, fieldQualifierType());
    }
    else
      refined_CreateBCode_VarAccess_createAssignLoadDest(gen);
  }
  /**
   * Generics refinement of createBCode: loads the erased field and inserts a
   * checkcast when erasure changed the field's type.
   * @ast method
   * @aspect GenericsCodegen
   * @declaredat D:\zhh\JastAddJ\Java1.5Backend\GenericsCodegen.jrag:90
   */
  public void createBCode(CodeGeneration gen) {
    Variable v = decl();
    if(v instanceof FieldDeclaration) {
      FieldDeclaration f = (FieldDeclaration)v;
      f = f.erasedField();
      createLoadQualifier(gen);
      if(f.isConstant() && (f.type().isPrimitive() || f.type().isString())) {
        if(!f.isStatic())
          fieldQualifierType().emitPop(gen);
        f.constant().createBCode(gen);
      }
      else if(requiresAccessor())
        f.createAccessor(fieldQualifierType()).emitInvokeMethod(gen, fieldQualifierType());
      else
        f.emitLoadField(gen, fieldQualifierType());
      if(f.type() != decl().type())
        gen.emitCheckCast(decl().type());
    }
    else
      refined_CreateBCode_VarAccess_createBCode(gen);
  }
  /**
   * @ast method
   * @aspect GenericsCodegen
   * @declaredat D:\zhh\JastAddJ\Java1.5Backend\GenericsCodegen.jrag:189
   */
  protected TypeDecl refined_GenericsCodegen_VarAccess_fieldQualifierType() {
    TypeDecl typeDecl = refined_InnerClasses_VarAccess_fieldQualifierType();
    return typeDecl == null ? null : typeDecl.erasure();
  }
  /**
   * @ast method
   * @aspect StaticImportsCodegen
   * @declaredat D:\zhh\JastAddJ\Java1.5Backend\StaticImportsCodegen.jrag:11
   */
  protected TypeDecl fieldQualifierType() {
    TypeDecl typeDecl = refined_GenericsCodegen_VarAccess_fieldQualifierType();
    if(typeDecl != null)
      return typeDecl;
    return decl().hostType();
  }
  /**
   * Constant value of this access (only meaningful when isConstant() holds).
   * @attribute syn
   * @aspect ConstantExpression
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\ConstantExpression.jrag:108
   */
  @SuppressWarnings({"unchecked", "cast"})
  public Constant constant() {
    ASTNode$State state = state();
    Constant constant_value = constant_compute();
    return constant_value;
  }
  /**
   * @apilvl internal
   */
  private Constant constant_compute() {
    return type().cast(decl().getInit().constant());
  }
  /**
   * @apilvl internal
   */
  protected int isConstant_visited = -1;
  /**
   * @apilvl internal
   */
  protected boolean isConstant_computed = false;
  /**
   * @apilvl internal
   */
  protected boolean isConstant_initialized = false;
  /**
   * @apilvl internal
   */
  protected boolean isConstant_value;
  /**
   * Circular attribute: iterates isConstant_compute() to a fixed point (JastAdd
   * circular-attribute protocol) — the bookkeeping order here must not be changed.
   * @attribute syn
   * @aspect ConstantExpression
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\ConstantExpression.jrag:500
   */
  @SuppressWarnings({"unchecked", "cast"})
  public boolean isConstant() {
    if(isConstant_computed) {
      return isConstant_value;
    }
    ASTNode$State state = state();
    if (!isConstant_initialized) {
      isConstant_initialized = true;
      isConstant_value = false;
    }
    if (!state.IN_CIRCLE) {
      state.IN_CIRCLE = true;
      int num = state.boundariesCrossed;
      boolean isFinal = this.is$Final();
      do {
        isConstant_visited = state.CIRCLE_INDEX;
        state.CHANGE = false;
        boolean new_isConstant_value = isConstant_compute();
        if (new_isConstant_value!=isConstant_value)
          state.CHANGE = true;
        isConstant_value = new_isConstant_value;
        state.CIRCLE_INDEX++;
      } while (state.CHANGE);
      if(isFinal && num == state().boundariesCrossed) {
        isConstant_computed = true;
      }
      else {
        state.RESET_CYCLE = true;
        isConstant_compute();
        state.RESET_CYCLE = false;
        isConstant_computed = false;
        isConstant_initialized = false;
      }
      state.IN_CIRCLE = false;
      return isConstant_value;
    }
    if(isConstant_visited != state.CIRCLE_INDEX) {
      isConstant_visited = state.CIRCLE_INDEX;
      if (state.RESET_CYCLE) {
        isConstant_computed = false;
        isConstant_initialized = false;
        isConstant_visited = -1;
        return isConstant_value;
      }
      boolean new_isConstant_value = isConstant_compute();
      if (new_isConstant_value!=isConstant_value)
        state.CHANGE = true;
      isConstant_value = new_isConstant_value;
      return isConstant_value;
    }
    return isConstant_value;
  }
  /**
   * @apilvl internal
   */
  private boolean isConstant_compute() {
    Variable v = decl();
    if(v instanceof FieldDeclaration) {
      FieldDeclaration f = (FieldDeclaration)v;
      return f.isConstant() && (!isQualified() || (isQualified() && qualifier().isTypeAccess()));
    }
    boolean result = v.isFinal() && v.hasInit() && v.getInit().isConstant() && (v.type().isPrimitive() || v.type().isString());
    return result && (!isQualified() || (isQualified() && qualifier().isTypeAccess()));
  }
  /**
   * @attribute syn
   * @aspect DefiniteAssignment
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\DefiniteAssignment.jrag:60
   */
  @SuppressWarnings({"unchecked", "cast"})
  public Variable varDecl() {
    ASTNode$State state = state();
    Variable varDecl_value = varDecl_compute();
    return varDecl_value;
  }
  /**
   * @apilvl internal
   */
  private Variable varDecl_compute() {
    return decl();
  }
  // Memo map for the parameterized isDAafter(Variable) attribute.
  protected java.util.Map isDAafter_Variable_values;
  /**
   * Definitely-assigned after this access: true if this access assigns v, or v was
   * already definitely assigned before it. Memoized per variable.
   * @attribute syn
   * @aspect DA
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\DefiniteAssignment.jrag:353
   */
  @SuppressWarnings({"unchecked", "cast"})
  public boolean isDAafter(Variable v) {
    Object _parameters = v;
    if(isDAafter_Variable_values == null) isDAafter_Variable_values = new java.util.HashMap(4);
    if(isDAafter_Variable_values.containsKey(_parameters)) {
      return ((Boolean)isDAafter_Variable_values.get(_parameters)).booleanValue();
    }
    ASTNode$State state = state();
    int num = state.boundariesCrossed;
    boolean isFinal = this.is$Final();
    boolean isDAafter_Variable_value = isDAafter_compute(v);
    if(isFinal && num == state().boundariesCrossed)
      isDAafter_Variable_values.put(_parameters, Boolean.valueOf(isDAafter_Variable_value));
    return isDAafter_Variable_value;
  }
  /**
   * @apilvl internal
   */
  private boolean isDAafter_compute(Variable v) {
    return (isDest() && decl() == v) || isDAbefore(v);
  }
  /**
   * Definitely-unassigned after this access: false if this access assigns v.
   * @attribute syn
   * @aspect DU
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\DefiniteAssignment.jrag:833
   */
  @SuppressWarnings({"unchecked", "cast"})
  public boolean isDUafter(Variable v) {
    ASTNode$State state = state();
    boolean isDUafter_Variable_value = isDUafter_compute(v);
    return isDUafter_Variable_value;
  }
  /**
   * @apilvl internal
   */
  private boolean isDUafter_compute(Variable v) {
    if(isDest() && decl() == v)
      return false;
    return isDUbefore(v);
  }
  /**
   * @attribute syn
   * @aspect DU
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\DefiniteAssignment.jrag:1208
   */
  @SuppressWarnings({"unchecked", "cast"})
  public boolean unassignedEverywhere(Variable v, TryStmt stmt) {
    ASTNode$State state = state();
    boolean unassignedEverywhere_Variable_TryStmt_value = unassignedEverywhere_compute(v, stmt);
    return unassignedEverywhere_Variable_TryStmt_value;
  }
  /**
   * @apilvl internal
   */
  private boolean unassignedEverywhere_compute(Variable v, TryStmt stmt) {
    if(isDest() && decl() == v && enclosingStmt().reachable()) {
      return false;
    }
    return super.unassignedEverywhere(v, stmt);
  }
  /**
   * @apilvl internal
   */
  protected boolean decls_computed = false;
  /**
   * @apilvl internal
   */
  protected SimpleSet decls_value;
  /**
   * Candidate declarations for this name (memoized).
   * @attribute syn
   * @aspect VariableScopePropagation
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\LookupVariable.jrag:305
   */
  @SuppressWarnings({"unchecked", "cast"})
  public SimpleSet decls() {
    if(decls_computed) {
      return decls_value;
    }
    ASTNode$State state = state();
    int num = state.boundariesCrossed;
    boolean isFinal = this.is$Final();
    decls_value = decls_compute();
    if(isFinal && num == state().boundariesCrossed)
      decls_computed = true;
    return decls_value;
  }
  /**
   * Filters out instance variables that are not accessible from a static context.
   * @apilvl internal
   */
  private SimpleSet decls_compute() {
    SimpleSet set=lookupVariable(name());
    if(set.size() == 1) {
      Variable v = (Variable)set.iterator().next();
      if(!isQualified() && inStaticContext()) {
        if(v.isInstanceVariable() && !hostType().memberFields(v.name()).isEmpty())
          return SimpleSet.emptySet;
      }
      else if(isQualified() && qualifier().staticContextQualifier()) {
        if(v.isInstanceVariable())
          return SimpleSet.emptySet;
      }
    }
    return set;
  }
  /**
   * @apilvl internal
   */
  protected boolean decl_computed = false;
  /**
   * @apilvl internal
   */
  protected Variable decl_value;
  /**
   * The single declaration this access binds to, or unknownField() when the name
   * is unresolved or ambiguous (memoized).
   * @attribute syn
   * @aspect VariableScopePropagation
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\LookupVariable.jrag:320
   */
  @SuppressWarnings({"unchecked", "cast"})
  public Variable decl() {
    if(decl_computed) {
      return decl_value;
    }
    ASTNode$State state = state();
    int num = state.boundariesCrossed;
    boolean isFinal = this.is$Final();
    decl_value = decl_compute();
    if(isFinal && num == state().boundariesCrossed)
      decl_computed = true;
    return decl_value;
  }
  /**
   * @apilvl internal
   */
  private Variable decl_compute() {
    SimpleSet decls = decls();
    if(decls.size() == 1)
      return (Variable)decls.iterator().next();
    return unknownField();
  }
  /**
   * True when this access and its declaration sit in the same kind of initializer
   * (matching staticness) of the declaring type.
   * @attribute syn
   * @aspect NameCheck
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\NameCheck.jrag:222
   */
  @SuppressWarnings({"unchecked", "cast"})
  public boolean inSameInitializer() {
    ASTNode$State state = state();
    boolean inSameInitializer_value = inSameInitializer_compute();
    return inSameInitializer_value;
  }
  /**
   * @apilvl internal
   */
  private boolean inSameInitializer_compute() {
    BodyDecl b = closestBodyDecl(decl().hostType());
    if(b == null)
      return false;
    if(b instanceof FieldDeclaration && ((FieldDeclaration)b).isStatic() == decl().isStatic())
      return true;
    if(b instanceof InstanceInitializer && !decl().isStatic())
      return true;
    if(b instanceof StaticInitializer && decl().isStatic())
      return true;
    return false;
  }
  /**
   * @attribute syn
   * @aspect NameCheck
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\NameCheck.jrag:234
   */
  @SuppressWarnings({"unchecked", "cast"})
  public boolean simpleAssignment() {
    ASTNode$State state = state();
    boolean simpleAssignment_value = simpleAssignment_compute();
    return simpleAssignment_value;
  }
  /**
   * @apilvl internal
   */
  private boolean simpleAssignment_compute() {
    return isDest() && getParent() instanceof AssignSimpleExpr;
  }
  /**
   * @attribute syn
   * @aspect NameCheck
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\NameCheck.jrag:236
   */
  @SuppressWarnings({"unchecked", "cast"})
  public boolean inDeclaringClass() {
    ASTNode$State state = state();
    boolean inDeclaringClass_value = inDeclaringClass_compute();
    return inDeclaringClass_value;
  }
  /**
   * @apilvl internal
   */
  private boolean inDeclaringClass_compute() {
    return hostType() == decl().hostType();
  }
  /**
   * @attribute syn
   * @aspect PrettyPrint
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\PrettyPrint.jadd:932
   */
  @SuppressWarnings({"unchecked", "cast"})
  public String dumpString() {
    ASTNode$State state = state();
    String dumpString_value = dumpString_compute();
    return dumpString_value;
  }
  /**
   * @apilvl internal
   */
  private String dumpString_compute() {
    return getClass().getName() + " [" + getID() + "]";
  }
  /**
   * @attribute syn
   * @aspect Names
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\QualifiedNames.jrag:17
   */
  @SuppressWarnings({"unchecked", "cast"})
  public String name() {
    ASTNode$State state = state();
    String name_value = name_compute();
    return name_value;
  }
  /**
   * @apilvl internal
   */
  private String name_compute() {
    return getID();
  }
  /**
   * @apilvl internal
   */
  protected boolean isFieldAccess_computed = false;
  /**
   * @apilvl internal
   */
  protected boolean isFieldAccess_value;
  /**
   * True when the bound declaration is a class or instance variable (memoized).
   * @attribute syn
   * @aspect AccessTypes
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\ResolveAmbiguousNames.jrag:24
   */
  @SuppressWarnings({"unchecked", "cast"})
  public boolean isFieldAccess() {
    if(isFieldAccess_computed) {
      return isFieldAccess_value;
    }
    ASTNode$State state = state();
    int num = state.boundariesCrossed;
    boolean isFinal = this.is$Final();
    isFieldAccess_value = isFieldAccess_compute();
    if(isFinal && num == state().boundariesCrossed)
      isFieldAccess_computed = true;
    return isFieldAccess_value;
  }
  /**
   * @apilvl internal
   */
  private boolean isFieldAccess_compute() {
    return decl().isClassVariable() || decl().isInstanceVariable();
  }
  /**
   * @attribute syn
   * @aspect SyntacticClassification
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\SyntacticClassification.jrag:114
   */
  @SuppressWarnings({"unchecked", "cast"})
  public NameType predNameType() {
    ASTNode$State state = state();
    NameType predNameType_value = predNameType_compute();
    return predNameType_value;
  }
  /**
   * @apilvl internal
   */
  private NameType predNameType_compute() {
    return NameType.AMBIGUOUS_NAME;
  }
  /**
   * Declared type of the accessed variable (memoized).
   * @attribute syn
   * @aspect TypeAnalysis
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\TypeAnalysis.jrag:291
   */
  @SuppressWarnings({"unchecked", "cast"})
  public TypeDecl type() {
    if(type_computed) {
      return type_value;
    }
    ASTNode$State state = state();
    int num = state.boundariesCrossed;
    boolean isFinal = this.is$Final();
    type_value = type_compute();
    if(isFinal && num == state().boundariesCrossed)
      type_computed = true;
    return type_value;
  }
  /**
   * @apilvl internal
   */
  private TypeDecl type_compute() {
    return decl().type();
  }
  /**
   * @attribute syn
   * @aspect TypeCheck
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\TypeCheck.jrag:17
   */
  @SuppressWarnings({"unchecked", "cast"})
  public boolean isVariable() {
    ASTNode$State state = state();
    boolean isVariable_value = isVariable_compute();
    return isVariable_value;
  }
  /**
   * @apilvl internal
   */
  private boolean isVariable_compute() {
    return true;
  }
  /**
   * @attribute syn
   * @aspect CreateBCode
   * @declaredat D:\zhh\JastAddJ\Java1.4Backend\CreateBCode.jrag:496
   */
  @SuppressWarnings({"unchecked", "cast"})
  public boolean isVarAccessWithAccessor() {
    ASTNode$State state = state();
    boolean isVarAccessWithAccessor_value = isVarAccessWithAccessor_compute();
    return isVarAccessWithAccessor_value;
  }
  /**
   * @apilvl internal
   */
  private boolean isVarAccessWithAccessor_compute() {
    return decl() instanceof FieldDeclaration && decl().isInstanceVariable() && requiresAccessor();
  }
  /**
   * True when the field is private or package-inaccessible protected and must be
   * read/written through a generated accessor method.
   * @attribute syn
   * @aspect InnerClasses
   * @declaredat D:\zhh\JastAddJ\Java1.4Backend\InnerClasses.jrag:523
   */
  @SuppressWarnings({"unchecked", "cast"})
  public boolean requiresAccessor() {
    ASTNode$State state = state();
    boolean requiresAccessor_value = requiresAccessor_compute();
    return requiresAccessor_value;
  }
  /**
   * @apilvl internal
   */
  private boolean requiresAccessor_compute() {
    Variable v = decl();
    if(!(v instanceof FieldDeclaration))
      return false;
    FieldDeclaration f = (FieldDeclaration)v;
    if(f.isPrivate() && !hostType().hasField(v.name()))
      return true;
    if(f.isProtected() && !f.hostPackage().equals(hostPackage()) && !hostType().hasField(v.name()))
      return true;
    return false;
  }
  /**
   * @attribute inh
   * @aspect TypeHierarchyCheck
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\TypeHierarchyCheck.jrag:158
   */
  @SuppressWarnings({"unchecked", "cast"})
  public boolean inExplicitConstructorInvocation() {
    ASTNode$State state = state();
    boolean inExplicitConstructorInvocation_value = getParent().Define_boolean_inExplicitConstructorInvocation(this, null);
    return inExplicitConstructorInvocation_value;
  }
  /**
   * GOP rewrite dispatch: context-variable accesses in granule/shadow classes are
   * rewritten into GopContext lookups by rewriteRule0().
   * @apilvl internal
   */
  public ASTNode rewriteTo() {
    // Declared in D:\zhh\JastAddJ\Java1.4Frontend\GOP.jrag at line 1460
    if(hostType().isGranuleDecl()&&(decl() instanceof ContextVarDeclaration)||hostType().isShadowClassDecl()&&(decl() instanceof ContextVarDeclaration)) {
      state().duringGOP++;
      ASTNode result = rewriteRule0();
      state().duringGOP--;
      return result;
    }
    return super.rewriteTo();
  }
  /**
   * Builds the replacement expression for a context-variable access:
   * GopContext.getContext(id, modifiers), wrapped in Boxed.parseXxx(...) when the
   * declared type is not java.lang.String.
   * NOTE(review): {@code List} here is the JastAdd {@code AST.List}, shadowing
   * {@code java.util.List} via the package.
   * @declaredat D:\zhh\JastAddJ\Java1.4Frontend\GOP.jrag:1460
   * @apilvl internal
   */
  private AbstractDot rewriteRule0() {
    {
      String methodname = new String("getContext");
      TypeAccess ta = new TypeAccess("GopContext");
      List<Expr> l =new List<Expr>();
      StringLiteral para1 = new StringLiteral(this.getID());
      l.add(para1);
      ContextVarDeclaration convar=(ContextVarDeclaration)decl();
      Modifiers modifiers=convar.getModifiersNoTransform().fullCopy();
      /*List lis=modifiers.getModifierList();
      boolean isPrivate=false;
      for(int j = 0; j < lis.getNumChild(); j++) {
        Modifier modifier=(Modifier)lis.getChild(j);
        if(modifier.getID().equals("private"))
          isPrivate=true;
      }
      if(isPrivate){
        String way=new String("getLocalIP");
        ParseName ipname= new ParseName("getLocalIP");
        List<Expr> lst=new List<Expr>();
        MethodAccess m=new MethodAccess(way,lst);
        AbstractDot d=new AbstractDot(ta,ipname);
        d.replaceLast(m);
        l.add(d);
      }
      else {*/
      StringLiteral para3=new StringLiteral(convar.getModifiers().toString());
      l.add(para3);
      //}
      ParseName pn = new ParseName("getContext");
      AbstractDot d = new AbstractDot(ta,pn);
      MethodAccess m = new MethodAccess(methodname,l);
      d.replaceLast(m);
      String typename=decl().type().typeName();
      if(typename.equals("java.lang.String"))
        return d;
      char typechar=Character.toUpperCase(typename.charAt(0));
      String descname=typechar+typename.substring(1);
      String conversion="parse"+descname;
      String classname=decl().type().boxed().typeName();
      int n=classname.lastIndexOf(".");
      String realpackage=classname.substring(0,n);
      String realclass=classname.substring(n+1);
      TypeAccess tta=new TypeAccess(realpackage,realclass);
      ParseName ppn=new ParseName(conversion);
      List<Expr> ll=new List<Expr>();
      ll.add(d);
      MethodAccess mm= new MethodAccess(conversion,ll);
      AbstractDot dd=new AbstractDot(tta,ppn);
      dd.replaceLast(mm);
      return dd;
    }
  }
}
/* * Copyright 2014 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.workbench.common.screens.explorer.backend.server; import static java.util.Collections.emptyList; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Set; import javax.inject.Inject; import javax.inject.Named; import org.guvnor.common.services.backend.file.LinkedDotFileFilter; import org.guvnor.common.services.backend.file.LinkedRegularFileFilter; import org.guvnor.common.services.project.model.Package; import org.guvnor.common.services.project.model.Project; import org.guvnor.common.services.shared.metadata.MetadataService; import org.guvnor.common.services.shared.metadata.model.Metadata; import org.guvnor.structure.organizationalunit.OrganizationalUnit; import org.guvnor.structure.repositories.Repository; import org.kie.workbench.common.screens.explorer.model.FolderItem; import org.kie.workbench.common.screens.explorer.model.FolderItemType; import org.kie.workbench.common.screens.explorer.model.FolderListing; import org.kie.workbench.common.screens.explorer.service.Option; import org.kie.workbench.common.screens.explorer.utils.Sorters; import org.kie.workbench.common.services.shared.project.KieProjectService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.uberfire.backend.server.UserServicesImpl; import org.uberfire.backend.server.VFSLockServiceImpl; import 
org.uberfire.backend.server.util.Paths; import org.uberfire.backend.vfs.Path; import org.uberfire.commons.async.DescriptiveRunnable; import org.uberfire.commons.async.SimpleAsyncExecutorService; import org.uberfire.io.IOService; import org.uberfire.java.nio.file.DirectoryStream; import org.uberfire.java.nio.file.Files; import com.google.common.collect.Lists; import com.thoughtworks.xstream.XStream; public class ExplorerServiceHelper { private static final Logger LOGGER = LoggerFactory.getLogger( ExplorerServiceHelper.class ); @Inject private KieProjectService projectService; @Inject private FolderListingResolver folderListingResolver; private LinkedDotFileFilter dotFileFilter = new LinkedDotFileFilter(); private LinkedRegularFileFilter regularFileFilter = new LinkedRegularFileFilter( dotFileFilter ); private XStream xs = new XStream(); @Inject @Named("ioStrategy") private IOService ioService; @Inject @Named("configIO") private IOService ioServiceConfig; @Inject private VFSLockServiceImpl lockService; @Inject private MetadataService metadataService; @Inject private UserServicesImpl userServices; public static FolderItem toFolderItem( final org.guvnor.common.services.project.model.Package pkg ) { if ( pkg == null ) { return null; } return new FolderItem( pkg, pkg.getRelativeCaption(), FolderItemType.FOLDER ); } public static FolderItem toFolderItem( final org.uberfire.java.nio.file.Path path ) { if ( Files.isRegularFile( path ) ) { final org.uberfire.backend.vfs.Path p = Paths.convert( path ); return new FolderItem( p, p.getFileName(), FolderItemType.FILE, false, Paths.readLockedBy( p ), new ArrayList<String>( ) ); } else if ( Files.isDirectory( path ) ) { final org.uberfire.backend.vfs.Path p = Paths.convert( path ); return new FolderItem( p, p.getFileName(), FolderItemType.FOLDER ); } return null; } public List<FolderItem> getPackageSegments( final Package _pkg ) { List<FolderItem> result = new ArrayList<FolderItem>(); Package pkg = _pkg; while ( pkg != null ) { 
final Package parent = projectService.resolveParentPackage( pkg ); if ( parent != null ) { result.add( toFolderItem( parent ) ); } pkg = parent; } return Lists.reverse( result ); } public FolderListing getFolderListing( FolderItem selectedItem, Project selectedProject, Package selectedPackage, Set<Option> options ) { return folderListingResolver.resolve( selectedItem, selectedProject, selectedPackage, this, options ); } public FolderListing getFolderListing( final Package pkg ) { return new FolderListing( toFolderItem( pkg ), getItems( pkg ), getPackageSegments( pkg ) ); } public FolderListing getFolderListing( final FolderItem item ) { FolderListing result = null; if ( item.getItem() instanceof Path ) { result = getFolderListing( (Path) item.getItem() ); } else if ( item.getItem() instanceof Package ) { result = getFolderListing( (Package) item.getItem() ); } return result; } public FolderListing getFolderListing( final Path path ) { //Get list of files and folders contained in the path final List<FolderItem> folderItems = new ArrayList<FolderItem>(); //Scan upwards until the path exists (as the current path could have been deleted) org.uberfire.java.nio.file.Path nioPath = Paths.convert( path ); while ( !Files.exists( nioPath ) ) { nioPath = nioPath.getParent(); } final Path basePath = Paths.convert( nioPath ); final DirectoryStream<org.uberfire.java.nio.file.Path> nioPaths = ioService.newDirectoryStream( nioPath, dotFileFilter ); for ( org.uberfire.java.nio.file.Path np : nioPaths ) { if ( Files.isRegularFile( np ) ) { final org.uberfire.backend.vfs.Path p = Paths.convert( np ); final String lockedBy = Paths.readLockedBy( p ); final FolderItem folderItem = new FolderItem( p, p.getFileName(), FolderItemType.FILE, false, lockedBy, metadataService.getMetadata( p ).getTags() ); folderItems.add( folderItem ); } else if ( Files.isDirectory( np ) ) { final org.uberfire.backend.vfs.Path p = Paths.convert( np ); boolean lockedItems = !lockService.retrieveLockInfos( 
Paths.convert( np ), true ).isEmpty(); final FolderItem folderItem = new FolderItem( p, p.getFileName(), FolderItemType.FOLDER, lockedItems, null, new ArrayList<String>( )); folderItems.add( folderItem ); } } Collections.sort( folderItems, Sorters.ITEM_SORTER ); return new FolderListing( toFolderItem( nioPath ), folderItems, getPathSegments( basePath ) ); } public List<FolderItem> getItems( final Package pkg ) { final List<FolderItem> folderItems = new ArrayList<FolderItem>(); if ( pkg == null ) { return emptyList(); } final Set<Package> childPackages = projectService.resolvePackages( pkg ); for ( final Package childPackage : childPackages ) { folderItems.add( toFolderItem( childPackage ) ); } folderItems.addAll( getItems( pkg.getPackageMainSrcPath() ) ); folderItems.addAll( getItems( pkg.getPackageTestSrcPath() ) ); folderItems.addAll( getItems( pkg.getPackageMainResourcesPath() ) ); folderItems.addAll( getItems( pkg.getPackageTestResourcesPath() ) ); Collections.sort( folderItems, Sorters.ITEM_SORTER ); return folderItems; } private List<FolderItem> getPathSegments( final Path path ) { org.uberfire.java.nio.file.Path nioSegmentPath = Paths.convert( path ).getParent(); //We're not interested in the terminal segment prior to root (i.e. 
the Project name) final int segmentCount = nioSegmentPath.getNameCount(); if ( segmentCount < 1 ) { return new ArrayList<FolderItem>(); } //Order from root to leaf (as we use getParent from the leaf we add them in reverse order) final FolderItem[] segments = new FolderItem[ segmentCount ]; for ( int idx = segmentCount; idx > 0; idx-- ) { segments[ idx - 1 ] = toFolderItem( nioSegmentPath ); nioSegmentPath = nioSegmentPath.getParent(); } return Arrays.asList( segments ); } private List<FolderItem> getItems( final Path packagePath ) { final List<FolderItem> folderItems = new ArrayList<FolderItem>(); final org.uberfire.java.nio.file.Path nioPackagePath = Paths.convert( packagePath ); if ( Files.exists( nioPackagePath ) ) { final DirectoryStream<org.uberfire.java.nio.file.Path> nioPaths = ioService.newDirectoryStream( nioPackagePath, regularFileFilter ); for ( org.uberfire.java.nio.file.Path nioPath : nioPaths ) { final org.uberfire.backend.vfs.Path path = Paths.convert( nioPath ); if ( Paths.isLock( path ) ) continue; final String lockedBy = Paths.readLockedBy( path ); final FolderItem folderItem = new FolderItem( path, path.getFileName(), FolderItemType.FILE, false, lockedBy, metadataService.getMetadata( path ).getTags() ); folderItems.add( folderItem ); } } return folderItems; } public void store( final OrganizationalUnit selectedOrganizationalUnit, final Repository selectedRepository, final Project selectedProject, final FolderListing folderListing, final Package selectedPackage, final Set<Option> options ) { final org.uberfire.java.nio.file.Path userNavPath = userServices.buildPath( "explorer", "user.nav" ); final org.uberfire.java.nio.file.Path lastUserNavPath = userServices.buildPath( "explorer", "last.user.nav" ); final OrganizationalUnit _selectedOrganizationalUnit = selectedOrganizationalUnit; final Repository _selectedRepository = selectedRepository; final Project _selectedProject = selectedProject; final FolderItem _selectedItem = folderListing.getItem(); 
final org.guvnor.common.services.project.model.Package _selectedPackage; if ( selectedPackage != null ) { _selectedPackage = selectedPackage; } else if ( folderListing.getItem().getItem() instanceof Package ) { _selectedPackage = (Package) folderListing.getItem().getItem(); } else { _selectedPackage = null; } SimpleAsyncExecutorService.getDefaultInstance().execute( new DescriptiveRunnable() { @Override public String getDescription() { return "Serialize Navigation State"; } @Override public void run() { try { store( userNavPath, lastUserNavPath, _selectedOrganizationalUnit, _selectedRepository, _selectedProject, _selectedPackage, _selectedItem, options ); } catch ( final Exception e ) { LOGGER.error( "Can't serialize user's state navigation", e ); } } } ); } public void store( final org.uberfire.java.nio.file.Path userNav, final org.uberfire.java.nio.file.Path lastUserNav, final OrganizationalUnit organizationalUnit, final Repository repository, final Project project, final Package pkg, final FolderItem item, final Set<Option> options ) { final UserExplorerData content; final UserExplorerData _content = loadUserContent( userNav ); if ( _content == null ) { content = new UserExplorerData(); } else { content = _content; } final UserExplorerLastData lastContent = new UserExplorerLastData(); if ( organizationalUnit != null ) { content.setOrganizationalUnit( organizationalUnit ); } if ( repository != null && organizationalUnit != null ) { content.addRepository( organizationalUnit, repository ); } if ( project != null && organizationalUnit != null && repository != null ) { content.addProject( organizationalUnit, repository, project ); } if ( item != null && organizationalUnit != null && repository != null && project != null ) { lastContent.setFolderItem( organizationalUnit, repository, project, item ); content.addFolderItem( organizationalUnit, repository, project, item ); } if ( pkg != null && organizationalUnit != null && repository != null && project != null ) { 
lastContent.setPackage( organizationalUnit, repository, project, pkg ); content.addPackage( organizationalUnit, repository, project, pkg ); } if ( options != null && !options.isEmpty() ) { lastContent.setOptions( options ); } if ( !content.isEmpty() ) { try { ioServiceConfig.startBatch( userNav.getFileSystem() ); ioServiceConfig.write( userNav, xs.toXML( content ) ); ioServiceConfig.write( lastUserNav, xs.toXML( lastContent ) ); } finally { ioServiceConfig.endBatch(); } } } public UserExplorerData loadUserContent( final org.uberfire.java.nio.file.Path path ) { try { if ( ioServiceConfig.exists( path ) ) { final String xml = ioServiceConfig.readAllString( path ); return (UserExplorerData) xs.fromXML( xml ); } } catch ( final Exception ignored ) { } return null; } public UserExplorerData loadUserContent() { final UserExplorerData userExplorerData = loadUserContent( userServices.buildPath( "explorer", "user.nav" ) ); if ( userExplorerData != null ) { return userExplorerData; } return new UserExplorerData(); } public UserExplorerLastData getLastContent() { try { final UserExplorerLastData lastData = getLastContent( userServices.buildPath( "explorer", "last.user.nav" ) ); if ( lastData != null ) { return lastData; } } catch ( final Exception ignored ) { } return new UserExplorerLastData(); } public UserExplorerLastData getLastContent( final org.uberfire.java.nio.file.Path path ) { try { if ( ioServiceConfig.exists( path ) ) { final String xml = ioServiceConfig.readAllString( path ); return (UserExplorerLastData) xs.fromXML( xml ); } } catch ( final Exception ignored ) { } return null; } }
package com.mrmq.poker.db.entity;
// Generated Oct 7, 2015 11:00:33 PM by Hibernate Tools 4.0.0

import java.math.BigDecimal;
import java.util.Date;

/**
 * PkGameHistory generated by hbm2java.
 *
 * Persistent record of a single poker game: participants, bet limits, totals,
 * lifecycle status and timestamps. Plain JavaBean mapped by Hibernate.
 */
public class PkGameHistory implements java.io.Serializable {

    /** Lifecycle states of a game history row, each backed by a DB integer code. */
    public enum PkGameHistoryStatus {
        INACTIVE(0),
        FINISHED(1),
        LOCKED(2),
        PLAYING(9);

        private final int value;

        PkGameHistoryStatus(int value) {
            this.value = value;
        }

        /** @return the integer code stored in the {@code status} column */
        public int getNumber() {
            return value;
        }
    }

    private static final long serialVersionUID = 2546762188603117887L;

    private Integer gameHistoryId;   // surrogate primary key, assigned by the DB
    private String gameId;
    private int creater;             // creating user's id (legacy spelling kept for mapping)
    private String players;
    private int joinPlayer;
    private int maxPlayer;
    private BigDecimal minBet;
    private BigDecimal maxBet;
    private BigDecimal totalBet;
    private String currency;
    private String comment;
    private int status;              // see PkGameHistoryStatus for the code meanings
    private Date startTime;
    private Date endTime;
    private Date updateDate;

    /** No-arg constructor required by Hibernate. */
    public PkGameHistory() {
    }

    /** Full constructor minus the optional comment. */
    public PkGameHistory(String gameId, int joinPlayer, int maxPlayer,
                         BigDecimal minBet, BigDecimal maxBet, BigDecimal totalBet,
                         String currency, int status,
                         Date startTime, Date endTime, Date updateDate) {
        this.gameId = gameId;
        this.joinPlayer = joinPlayer;
        this.maxPlayer = maxPlayer;
        this.minBet = minBet;
        this.maxBet = maxBet;
        this.totalBet = totalBet;
        this.currency = currency;
        this.status = status;
        this.startTime = startTime;
        this.endTime = endTime;
        this.updateDate = updateDate;
    }

    /** Full constructor including the optional comment. */
    public PkGameHistory(String gameId, int joinPlayer, int maxPlayer,
                         BigDecimal minBet, BigDecimal maxBet, BigDecimal totalBet,
                         String currency, String comment, int status,
                         Date startTime, Date endTime, Date updateDate) {
        this.gameId = gameId;
        this.joinPlayer = joinPlayer;
        this.maxPlayer = maxPlayer;
        this.minBet = minBet;
        this.maxBet = maxBet;
        this.totalBet = totalBet;
        this.currency = currency;
        this.comment = comment;
        this.status = status;
        this.startTime = startTime;
        this.endTime = endTime;
        this.updateDate = updateDate;
    }

    public Integer getGameHistoryId() {
        return gameHistoryId;
    }

    public void setGameHistoryId(Integer gameHistoryId) {
        this.gameHistoryId = gameHistoryId;
    }

    public String getGameId() {
        return gameId;
    }

    public void setGameId(String gameId) {
        this.gameId = gameId;
    }

    public int getJoinPlayer() {
        return joinPlayer;
    }

    public void setJoinPlayer(int joinPlayer) {
        this.joinPlayer = joinPlayer;
    }

    public int getMaxPlayer() {
        return maxPlayer;
    }

    public void setMaxPlayer(int maxPlayer) {
        this.maxPlayer = maxPlayer;
    }

    public BigDecimal getMinBet() {
        return minBet;
    }

    public void setMinBet(BigDecimal minBet) {
        this.minBet = minBet;
    }

    public BigDecimal getMaxBet() {
        return maxBet;
    }

    public void setMaxBet(BigDecimal maxBet) {
        this.maxBet = maxBet;
    }

    public BigDecimal getTotalBet() {
        return totalBet;
    }

    public void setTotalBet(BigDecimal totalBet) {
        this.totalBet = totalBet;
    }

    public String getCurrency() {
        return currency;
    }

    public void setCurrency(String currency) {
        this.currency = currency;
    }

    public String getComment() {
        return comment;
    }

    public void setComment(String comment) {
        this.comment = comment;
    }

    public int getStatus() {
        return status;
    }

    public void setStatus(int status) {
        this.status = status;
    }

    public Date getStartTime() {
        return startTime;
    }

    public void setStartTime(Date startTime) {
        this.startTime = startTime;
    }

    public Date getEndTime() {
        return endTime;
    }

    public void setEndTime(Date endTime) {
        this.endTime = endTime;
    }

    public Date getUpdateDate() {
        return updateDate;
    }

    public void setUpdateDate(Date updateDate) {
        this.updateDate = updateDate;
    }

    public int getCreater() {
        return creater;
    }

    public void setCreater(int creater) {
        this.creater = creater;
    }

    public String getPlayers() {
        return players;
    }

    public void setPlayers(String players) {
        this.players = players;
    }

    @Override
    public String toString() {
        // Output format kept byte-identical to the generated original.
        return "PkGameHistory [gameHistoryId=" + gameHistoryId + ", gameId="
                + gameId + ", creater=" + creater + ", players=" + players
                + ", joinPlayer=" + joinPlayer + ", maxPlayer=" + maxPlayer
                + ", minBet=" + minBet + ", maxBet=" + maxBet + ", totalBet="
                + totalBet + ", currency=" + currency + ", comment=" + comment
                + ", status=" + status + ", startTime=" + startTime
                + ", endTime=" + endTime + ", updateDate=" + updateDate + "]";
    }
}
/*
 * Copyright (C) 2014, United States Government, as represented by the
 * Administrator of the National Aeronautics and Space Administration.
 * All rights reserved.
 *
 * The Java Pathfinder core (jpf-core) platform is licensed under the
 * Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 *        http://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package gov.nasa.jpf.listener;

import gov.nasa.jpf.JPF;
import gov.nasa.jpf.ListenerAdapter;
import gov.nasa.jpf.search.DFSearch;
import gov.nasa.jpf.search.Search;
import gov.nasa.jpf.search.heuristic.BFSHeuristic;
import gov.nasa.jpf.vm.ElementInfo;
import gov.nasa.jpf.vm.VM;
import gov.nasa.jpf.vm.ThreadInfo;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * A listener that tracks the stack depth at which each lock was first acquired.
 * The per-lock depth is kept in {@code m_state}, and every mutation is recorded as
 * an undoable {@link Operation} so the map can be rolled back/forward as the Search
 * backtracks or restores states.
 *
 * Writing a test for this class is very difficult. Hence, a lot of asserts are added.
 */
public class LockedStackDepth extends ListenerAdapter {
  private static final Logger s_logger = JPF.getLogger(LockedStackDepth.class.getName());
  private static final Integer EMPTY[] = new Integer[0];
  // High bit distinguishes thread keys from lock keys in m_state; it also makes
  // thread keys negative (exploited by the key < 0 test below).
  private static final int THREAD_FLAG = 0x80000000;

  // State id -> the Operation chain that reproduces that state's m_state.
  private final HashMap<Integer, Operation> m_operations = new HashMap<Integer, Operation>();
  // Key (lock or flagged thread) -> stack depth at first acquisition.
  private final HashMap<Integer, Integer> m_state = new HashMap<Integer, Integer>();
  // Scratch maps used only inside switchTo(Operation); cleared on exit.
  private final HashMap<Operation, Integer> m_index = new HashMap<Operation, Integer>();
  private final ArrayList<Operation> m_apply = new ArrayList<Operation>();
  // Tail of the Operation chain describing the current m_state.
  private Operation m_current;

  /**
   * Returns the recorded stack depth for {@code lock}, or -1 when the lock has no
   * entry in the tracked state.
   */
  public int getLockedStackDepth(ElementInfo lock) {
    Integer result;
    int lockIndex;

    lockIndex = lock.getObjectRef();
    result = m_state.get(makeKey(lock));

    if (s_logger.isLoggable(Level.INFO))
      s_logger.info("Depth = " + result + " | Lock Index = " + lockIndex + " | Lock = " + lock);

    if (result == null)
      return(-1);

    assert result >= 0;

    return(result);
  }

  /**
   * Returns the locks whose recorded depth equals {@code thread}'s current stack
   * depth and which the thread still holds (i.e. locks acquired in the top frame).
   */
  public List<ElementInfo> getLockedInTopFrame(ThreadInfo thread) {
    ArrayList<ElementInfo> result;
    ElementInfo lock;
    int threadDepth;

    threadDepth = thread.getStackDepth();
    result = new ArrayList<ElementInfo>();

    for (Integer key : m_state.keySet()) {
      if (key < 0)                            // Negative keys carry THREAD_FLAG: thread entries, not locks.
        continue;

      if (threadDepth != m_state.get(key))
        continue;

      lock = thread.getElementInfo(key);

      if (lock == null)
        continue;

      if (!lock.isLockedBy(thread))
        continue;

      result.add(lock);
    }

    return(result);
  }

  /**
   * On lock acquisition: clears any pending thread entry (a wait that just
   * reacquired the lock) and records the acquisition depth for the lock. When no
   * thread entry existed, the depth is the thread's current stack depth.
   */
  @Override
  public void objectLocked(VM vm, ThreadInfo thread, ElementInfo ei) {
    ElementInfo lock;
    Integer depth;

    lock = ei;
    logStack(thread);

    // Removes the thread's entry (if any) and yields the depth it carried.
    depth = new Operation(thread, null).getOldDepth();

    if (depth == null)
      depth = thread.getStackDepth();

    assert thread.getLockCount() == 0;
    assert thread.getLockObject() == null;

    assert lock.isLockedBy(thread);

    if (m_state.containsKey(makeKey(lock)))   // So that a breakpoint on the next line will only get hit if the assert will trigger.
      assert !m_state.containsKey(makeKey(lock));

    assert !m_state.containsKey(makeKey(thread));
    assert depth >= 0;

    new Operation(lock, depth);               // Records the lock's acquisition depth (and links into the undo chain).
  }

  /**
   * On lock release: removes the lock's depth entry. If the thread released the
   * lock by entering wait(), the depth is parked under the thread's key so it can
   * be restored when the lock is reacquired in objectLocked().
   */
  @Override
  public void objectUnlocked(VM vm, ThreadInfo thread, ElementInfo ei) {
    ElementInfo lock;
    Integer depth;

    logStack(thread);

    lock = ei;
    depth = new Operation(lock, null).getOldDepth();  // Removes the lock entry and yields its depth.

    assert !m_state.containsKey(makeKey(lock));
    assert !m_state.containsKey(makeKey(thread));
    assert depth >= 0;

    if (thread.isWaiting()) {
      assert !lock.isLockedBy(thread);
      assert lock.getLockCount() == 0;
      assert thread.getLockCount() > 0;
      assert thread.getLockObject() == lock;

      new Operation(thread, depth);           // Park the depth under the thread key until reacquisition.
    } else {
      assert lock.isLockedBy(thread);
      assert lock.getLockCount() > 0;
      assert thread.getLockCount() == 0;
      assert thread.getLockObject() == null;
    }
  }

  /** Resets all tracked state at the start of a search. */
  @Override
  public void searchStarted(Search search) {
    m_operations.clear();
    m_state.clear();

    m_current = null;
  }

  /** Remembers the Operation chain that produced the newly advanced state. */
  @Override
  public void stateAdvanced(Search search) {
    Integer id;

    id = search.getStateId();

    if (!m_operations.containsKey(id))       // Don't overwrite the original chain of Operations to get to the same state. The original chain is more likely to be shorter.
      m_operations.put(id, m_current);

    if (s_logger.isLoggable(Level.FINE))
      s_logger.fine("State Advanced: " + id);

    logState();
  }

  /**
   * Drops the chain for a fully processed state — but only for search strategies
   * (DFSearch, BFSHeuristic) that are known never to revisit it.
   */
  @Override
  public void stateProcessed(Search search) {
    Integer id;

    if (!(search instanceof DFSearch))       // Can't remove from m_operations since Search could go back to the state.
      if (!(search instanceof BFSHeuristic))
        return;

    id = search.getStateId();
    m_operations.remove(id);                 // DFSearch won't ever revisit this state. It is safe to remove and allow for cleanup.

    if (s_logger.isLoggable(Level.FINE))
      s_logger.fine("State Processed: " + id);
  }

  @Override
  public void stateBacktracked(Search search) {
    switchTo(search);
  }

  @Override
  public void stateRestored(Search search) {
    switchTo(search);
  }

  /** Rolls m_state to the chain recorded for the search's current state id. */
  private void switchTo(Search search) {
    Operation next;
    Integer id;

    id = search.getStateId();
    next = m_operations.get(id);

    if (s_logger.isLoggable(Level.FINE))
      s_logger.fine("State Switching: " + id);

    assert (id <= 0) || (m_operations.containsKey(id));

    switchTo(next);
    m_current = next;

    logState();

    if (s_logger.isLoggable(Level.FINE))
      s_logger.fine("State Switched: " + id);
  }

  /**
   * Transforms m_state from the m_current chain to the {@code next} chain:
   * reverts operations back to the nearest common ancestor, then applies the
   * operations from that ancestor forward to {@code next}.
   */
  private void switchTo(Operation next) {
    Operation operation;
    Integer index;
    int i;

    for (operation = next; operation != null; operation = operation.getParent())  // Go through all of the operations leading back to the root.
    {
      m_index.put(operation, m_apply.size());  // Keep track of the index into m_apply where operation is found
      m_apply.add(operation);
    }

    index = null;

    for (operation = m_current; operation != null; operation = operation.getParent())  // Go through all of the operations leading back to the root.
    {
      index = m_index.get(operation);

      if (index != null)                       // If a common ancestor is found, stop going back.
        break;

      operation.revert();                      // Revert the operation since it isn't common to both states.
    }

    if (index == null)
      index = m_apply.size();                  // No common ancestor found. Must apply all of the operations.

    for (i = index; --i >= 0; )                // Apply all of the operations required to get back to the "next" state.
      m_apply.get(i).apply();

    m_index.clear();
    m_apply.clear();
  }

  /** Dumps the whole m_state map at FINER level, keys sorted, flags decoded. */
  private void logState() {
    StringBuilder message;
    String type;
    Integer key, keys[], depth;
    int i;

    if (!s_logger.isLoggable(Level.FINER))
      return;

    message = new StringBuilder();
    keys = m_state.keySet().toArray(EMPTY);
    Arrays.sort(keys);

    message.append("State | Size = ");
    message.append(keys.length);

    for (i = 0; i < keys.length; i++) {
      key = keys[i];
      depth = m_state.get(key);

      if ((key & THREAD_FLAG) != 0)
        type = "Thread";
      else
        type = "Lock";

      message.append('\n');
      message.append("Depth = ");
      message.append(depth);
      message.append(" | Key = ");
      message.append(key & ~THREAD_FLAG);
      message.append(" | ");
      message.append(type);
    }

    s_logger.finer(message.toString());
  }

  /** Dumps the thread's stack trace at FINEST level. */
  private void logStack(ThreadInfo thread) {
    if (!s_logger.isLoggable(Level.FINEST))
      return;

    s_logger.finest(thread.getStackTrace());
  }

  /** Lock key: the object reference (always non-negative, so >= 0 distinguishes it). */
  private static int makeKey(ElementInfo lock) {
    return(lock.getObjectRef());
  }

  /** Thread key: object reference with the high bit set (always negative). */
  private static int makeKey(ThreadInfo thread) {
    return(thread.getThreadObjectRef() ^ THREAD_FLAG);
  }

  /**
   * One undoable mutation of m_state. Constructing an Operation links it onto the
   * m_current chain and immediately applies it: a null newDepth removes the key,
   * a non-null newDepth inserts it. revert() restores the previous value.
   * Non-static by design — it mutates the enclosing listener's maps.
   */
  private class Operation {
    private final Operation m_parent;     // Previous operation in the chain (null at root).
    private final Integer m_key;          // Lock or flagged-thread key.
    private final Integer m_oldDepth;     // Value displaced from m_state (null if none).
    private final Integer m_newDepth;     // Value written to m_state (null = removal).

    public Operation(ElementInfo lock, Integer newDepth) {
      this(makeKey(lock), newDepth);
    }

    public Operation(ThreadInfo thread, Integer newDepth) {
      this(makeKey(thread), newDepth);
    }

    private Operation(Integer key, Integer newDepth) {
      // Link onto the current chain and make this the new tail.
      m_parent = m_current;
      m_current = this;

      m_key = key;
      m_newDepth = newDepth;
      m_oldDepth = m_state.get(key);

      apply();
    }

    public Operation getParent() {
      return(m_parent);
    }

    public Integer getOldDepth() {
      return(m_oldDepth);
    }

    public Integer getNewDepth() {
      return(m_newDepth);
    }

    public void apply() {
      change(m_newDepth);

      log("Apply ");
    }

    public void revert() {
      change(m_oldDepth);

      log("Revert");
    }

    // Writes depth into m_state under m_key; null removes the entry. The assert
    // enforces that an insert never silently overwrites an existing value.
    private void change(Integer depth) {
      Integer previous;

      if (depth == null)
        m_state.remove(m_key);
      else {
        previous = m_state.put(m_key, depth);
        assert previous == null;
      }
    }

    // FINE-level trace of this operation. NOTE(review): the padding strings below
    // ("Add ", the single-space cases) look collapsed by whitespace mangling of
    // this file — original JPF sources pad to aligned columns; confirm upstream.
    private void log(String header) {
      String message, subheader, depthStr, type;
      Integer depth;

      if (!s_logger.isLoggable(Level.FINE))
        return;

      if (m_newDepth != null) {
        subheader = "Add ";
        depth = m_newDepth;
      } else {
        subheader = "Remove";
        depth = m_oldDepth;
      }

      depthStr = String.valueOf(depth);

      switch (depthStr.length()) {
        case 1: depthStr = " " + depthStr; break;
        case 2: depthStr = " " + depthStr; break;
        case 3: depthStr = " " + depthStr; break;
        default: break;
      }

      if ((m_key & THREAD_FLAG) != 0)
        type = "Thread";
      else
        type = "Lock";

      message = header + " " + subheader + " | Depth = " + depthStr + " | Key = " + (m_key & ~THREAD_FLAG) + " | " + type;

      s_logger.fine(message);
    }
  }
}
/*
 * Copyright 2007 Yusuke Yamamoto
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package twitter4j.internal.json;

import twitter4j.GeoLocation;
import twitter4j.Place;
import twitter4j.ResponseList;
import twitter4j.TwitterException;
import twitter4j.conf.Configuration;
import twitter4j.internal.http.HttpResponse;
import twitter4j.internal.org.json.JSONArray;
import twitter4j.internal.org.json.JSONException;
import twitter4j.internal.org.json.JSONObject;

import java.util.Arrays;

import static twitter4j.internal.json.z_T4JInternalParseUtil.getRawString;
import static twitter4j.internal.json.z_T4JInternalParseUtil.getUnescapedString;

/**
 * JSON-backed implementation of {@link Place}: parses a Twitter "place" object
 * (identity, bounding box, geometry, containing places) into immutable fields.
 *
 * @author Yusuke Yamamoto - yusuke at mac.com
 * @since Twitter4J 2.1.1
 */
final class PlaceJSONImpl extends TwitterResponseImpl implements Place, java.io.Serializable {
    private String name;
    private String streetAddress;
    private String countryCode;
    private String id;
    private String country;
    private String placeType;
    private String url;
    private String fullName;
    private String boundingBoxType;
    private GeoLocation[][] boundingBoxCoordinates;
    private String geometryType;
    private GeoLocation[][] geometryCoordinates;
    private Place[] containedWithIn;
    private static final long serialVersionUID = -2873364341474633812L;

    /**
     * Builds a Place from an HTTP response body and optionally registers the
     * raw JSON in the thread-local DataObjectFactory map.
     */
    /*package*/ PlaceJSONImpl(HttpResponse res, Configuration conf) throws TwitterException {
        super(res);
        JSONObject json = res.asJSONObject();
        init(json);
        if (conf.isJSONStoreEnabled()) {
            DataObjectFactoryUtil.clearThreadLocalMap();
            DataObjectFactoryUtil.registerJSONObject(this, json);
        }
    }

    /** Builds a Place from an already-parsed JSON object plus its response. */
    PlaceJSONImpl(JSONObject json, HttpResponse res) throws TwitterException {
        super(res);
        init(json);
    }

    /** Builds a Place from a bare JSON object (used for nested contained_within). */
    PlaceJSONImpl(JSONObject json) throws TwitterException {
        super();
        init(json);
    }

    /* For serialization purposes only. */
    PlaceJSONImpl() {

    }

    /**
     * Populates all fields from the JSON payload. Any JSON access failure is
     * wrapped in a TwitterException that carries the offending payload text.
     */
    private void init(JSONObject json) throws TwitterException {
        try {
            name = getUnescapedString("name", json);
            streetAddress = getUnescapedString("street_address", json);
            countryCode = getRawString("country_code", json);
            id = getRawString("id", json);
            country = getRawString("country", json);
            // The API uses "place_type" in most payloads but "type" in some (e.g. similar_places).
            if (!json.isNull("place_type")) {
                placeType = getRawString("place_type", json);
            } else {
                placeType = getRawString("type", json);
            }
            url = getRawString("url", json);
            fullName = getRawString("full_name", json);
            if (!json.isNull("bounding_box")) {
                JSONObject boundingBoxJSON = json.getJSONObject("bounding_box");
                boundingBoxType = getRawString("type", boundingBoxJSON);
                JSONArray array = boundingBoxJSON.getJSONArray("coordinates");
                boundingBoxCoordinates = z_T4JInternalJSONImplFactory.coordinatesAsGeoLocationArray(array);
            } else {
                boundingBoxType = null;
                boundingBoxCoordinates = null;
            }

            if (!json.isNull("geometry")) {
                JSONObject geometryJSON = json.getJSONObject("geometry");
                geometryType = getRawString("type", geometryJSON);
                JSONArray array = geometryJSON.getJSONArray("coordinates");
                if (geometryType.equals("Point")) {
                    // A Point is normalized into a 1x1 coordinate grid.
                    geometryCoordinates = new GeoLocation[1][1];
                    geometryCoordinates[0][0] = new GeoLocation(array.getDouble(0), array.getDouble(1));
                } else if (geometryType.equals("Polygon")) {
                    geometryCoordinates = z_T4JInternalJSONImplFactory.coordinatesAsGeoLocationArray(array);
                } else {
                    // MultiPolygon currently unsupported.
                    geometryType = null;
                    geometryCoordinates = null;
                }
            } else {
                geometryType = null;
                geometryCoordinates = null;
            }

            if (!json.isNull("contained_within")) {
                JSONArray containedWithInJSON = json.getJSONArray("contained_within");
                containedWithIn = new Place[containedWithInJSON.length()];
                for (int i = 0; i < containedWithInJSON.length(); i++) {
                    containedWithIn[i] = new PlaceJSONImpl(containedWithInJSON.getJSONObject(i));
                }
            } else {
                containedWithIn = null;
            }
        } catch (JSONException jsone) {
            throw new TwitterException(jsone.getMessage() + ":" + json.toString(), jsone);
        }
    }

    /** Orders places by id; consistent with equals(), which also compares ids. */
    @Override
    public int compareTo(Place that) {
        return this.id.compareTo(that.getId());
    }

    /** Parses the reverse-geocode response shape: {"result":{"places":[...]}}. */
    /*package*/
    static ResponseList<Place> createPlaceList(HttpResponse res, Configuration conf) throws TwitterException {
        JSONObject json = null;
        try {
            json = res.asJSONObject();
            return createPlaceList(json.getJSONObject("result").getJSONArray("places"), res, conf);
        } catch (JSONException jsone) {
            throw new TwitterException(jsone.getMessage() + ":" + json.toString(), jsone);
        }
    }

    /**
     * Converts a JSON array of place objects into a ResponseList, registering
     * each element (and the list) with the DataObjectFactory when enabled.
     */
    /*package*/
    static ResponseList<Place> createPlaceList(JSONArray list, HttpResponse res
            , Configuration conf) throws TwitterException {
        if (conf.isJSONStoreEnabled()) {
            DataObjectFactoryUtil.clearThreadLocalMap();
        }
        try {
            int size = list.length();
            ResponseList<Place> places =
                    new ResponseListImpl<Place>(size, res);
            for (int i = 0; i < size; i++) {
                JSONObject json = list.getJSONObject(i);
                Place place = new PlaceJSONImpl(json);
                places.add(place);
                if (conf.isJSONStoreEnabled()) {
                    DataObjectFactoryUtil.registerJSONObject(place, json);
                }
            }
            if (conf.isJSONStoreEnabled()) {
                DataObjectFactoryUtil.registerJSONObject(places, list);
            }
            return places;
        } catch (JSONException jsone) {
            throw new TwitterException(jsone);
        } catch (TwitterException te) {
            throw te;
        }
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public String getStreetAddress() {
        return streetAddress;
    }

    @Override
    public String getCountryCode() {
        return countryCode;
    }

    @Override
    public String getId() {
        return id;
    }

    @Override
    public String getCountry() {
        return country;
    }

    @Override
    public String getPlaceType() {
        return placeType;
    }

    @Override
    public String getURL() {
        return url;
    }

    @Override
    public String getFullName() {
        return fullName;
    }

    @Override
    public String getBoundingBoxType() {
        return boundingBoxType;
    }

    @Override
    public GeoLocation[][] getBoundingBoxCoordinates() {
        return boundingBoxCoordinates;
    }

    @Override
    public String getGeometryType() {
        return geometryType;
    }

    @Override
    public GeoLocation[][] getGeometryCoordinates() {
        return geometryCoordinates;
    }

    @Override
    public Place[] getContainedWithIn() {
        return containedWithIn;
    }

    /** Equality is by place id against any Place implementation. */
    @Override
    public boolean equals(Object obj) {
        if (null == obj) {
            return false;
        }
        if (this == obj) {
            return true;
        }
        return obj instanceof Place && ((Place) obj).getId().equals(this.id);
    }

    @Override
    public int hashCode() {
        return id.hashCode();
    }

    @Override
    public String toString() {
        return "PlaceJSONImpl{" +
                "name='" + name + '\'' +
                ", streetAddress='" + streetAddress + '\'' +
                ", countryCode='" + countryCode + '\'' +
                ", id='" + id + '\'' +
                ", country='" + country + '\'' +
                ", placeType='" + placeType + '\'' +
                ", url='" + url + '\'' +
                ", fullName='" + fullName + '\'' +
                ", boundingBoxType='" + boundingBoxType + '\'' +
                ", boundingBoxCoordinates=" + (boundingBoxCoordinates == null ? null : Arrays.asList(boundingBoxCoordinates)) +
                ", geometryType='" + geometryType + '\'' +
                ", geometryCoordinates=" + (geometryCoordinates == null ? null : Arrays.asList(geometryCoordinates)) +
                ", containedWithIn=" + (containedWithIn == null ? null : Arrays.asList(containedWithIn)) +
                '}';
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.jms; import java.io.FileInputStream; import javax.jms.ConnectionFactory; import javax.xml.transform.Source; import javax.xml.transform.stream.StreamSource; import org.apache.activemq.ActiveMQConnectionFactory; import org.apache.camel.CamelContext; import org.apache.camel.Exchange; import org.apache.camel.Processor; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.component.mock.MockEndpoint; import org.apache.camel.converter.jaxp.StringSource; import org.apache.camel.test.junit4.CamelTestSupport; import org.junit.Test; import static org.apache.camel.component.jms.JmsComponent.jmsComponentAutoAcknowledge; /** * For unit testing with XML streams that can be troublesome with the StreamCache * * @version $Revision$ */ public class JmsXMLRouteTest extends CamelTestSupport { private static final String TEST_LONDON = "src/test/data/message1.xml"; private static final String TEST_TAMPA = "src/test/data/message2.xml"; @Test public void testLondonWithFileStreamAsObject() throws Exception { MockEndpoint mock = getMockEndpoint("mock:london"); mock.expectedMessageCount(1); mock.message(0).body(String.class).contains("James"); Source source = new 
StreamSource(new FileInputStream(TEST_LONDON)); assertNotNull(source); template.sendBody("direct:object", source); assertMockEndpointsSatisfied(); } @Test public void testLondonWithFileStreamAsBytes() throws Exception { MockEndpoint mock = getMockEndpoint("mock:london"); mock.expectedMessageCount(1); mock.message(0).body(String.class).contains("James"); Source source = new StreamSource(new FileInputStream(TEST_LONDON)); assertNotNull(source); template.sendBody("direct:bytes", source); assertMockEndpointsSatisfied(); } @Test public void testLondonWithFileStreamAsDefault() throws Exception { MockEndpoint mock = getMockEndpoint("mock:london"); mock.expectedMessageCount(1); mock.message(0).body(String.class).contains("James"); Source source = new StreamSource(new FileInputStream(TEST_LONDON)); assertNotNull(source); template.sendBody("direct:default", source); assertMockEndpointsSatisfied(); } @Test public void testTampaWithFileStreamAsObject() throws Exception { MockEndpoint mock = getMockEndpoint("mock:tampa"); mock.expectedMessageCount(1); mock.message(0).body(String.class).contains("Hiram"); Source source = new StreamSource(new FileInputStream(TEST_TAMPA)); assertNotNull(source); template.sendBody("direct:object", source); assertMockEndpointsSatisfied(); } @Test public void testTampaWithFileStreamAsBytes() throws Exception { MockEndpoint mock = getMockEndpoint("mock:tampa"); mock.expectedMessageCount(1); mock.message(0).body(String.class).contains("Hiram"); Source source = new StreamSource(new FileInputStream(TEST_TAMPA)); assertNotNull(source); template.sendBody("direct:bytes", source); assertMockEndpointsSatisfied(); } @Test public void testTampaWithFileStreamAsDefault() throws Exception { MockEndpoint mock = getMockEndpoint("mock:tampa"); mock.expectedMessageCount(1); mock.message(0).body(String.class).contains("Hiram"); Source source = new StreamSource(new FileInputStream(TEST_TAMPA)); assertNotNull(source); template.sendBody("direct:default", source); 
assertMockEndpointsSatisfied(); } @Test public void testLondonWithStringSourceAsObject() throws Exception { MockEndpoint mock = getMockEndpoint("mock:london"); mock.expectedMessageCount(1); mock.message(0).body(String.class).contains("James"); Source source = new StringSource("<?xml version=\"1.0\" encoding=\"UTF-8\"?>" + "<person user=\"james\">\n" + " <firstName>James</firstName>\n" + " <lastName>Strachan</lastName>\n" + " <city>London</city>\n" + "</person>"); assertNotNull(source); template.sendBody("direct:object", source); assertMockEndpointsSatisfied(); } @Test public void testLondonWithStringSourceAsBytes() throws Exception { MockEndpoint mock = getMockEndpoint("mock:london"); mock.expectedMessageCount(1); mock.message(0).body(String.class).contains("James"); Source source = new StringSource("<?xml version=\"1.0\" encoding=\"UTF-8\"?>" + "<person user=\"james\">\n" + " <firstName>James</firstName>\n" + " <lastName>Strachan</lastName>\n" + " <city>London</city>\n" + "</person>"); assertNotNull(source); template.sendBody("direct:bytes", source); assertMockEndpointsSatisfied(); } @Test public void testLondonWithStringSourceAsDefault() throws Exception { MockEndpoint mock = getMockEndpoint("mock:london"); mock.expectedMessageCount(1); mock.message(0).body(String.class).contains("James"); Source source = new StringSource("<?xml version=\"1.0\" encoding=\"UTF-8\"?>" + "<person user=\"james\">\n" + " <firstName>James</firstName>\n" + " <lastName>Strachan</lastName>\n" + " <city>London</city>\n" + "</person>"); assertNotNull(source); template.sendBody("direct:default", source); assertMockEndpointsSatisfied(); } protected CamelContext createCamelContext() throws Exception { CamelContext camelContext = super.createCamelContext(); ConnectionFactory connectionFactory = CamelJmsTestHelper.createConnectionFactory(); camelContext.addComponent("activemq", jmsComponentAutoAcknowledge(connectionFactory)); return camelContext; } @Override protected RouteBuilder 
createRouteBuilder() throws Exception { return new RouteBuilder() { @Override public void configure() throws Exception { // enable stream caching context.setStreamCaching(true); errorHandler(deadLetterChannel("mock:error").redeliveryDelay(0)); // no need to convert to String as JMS producer can handle XML streams now from("direct:object").to("activemq:queue:object?jmsMessageType=Object"); // no need to convert to String as JMS producer can handle XML streams now from("direct:bytes").to("activemq:queue:bytes?jmsMessageType=Bytes"); // no need to convert to String as JMS producer can handle XML streams now from("direct:default").to("activemq:queue:default"); from("activemq:queue:object") .process(new Processor() { public void process(Exchange exchange) throws Exception { Object body = exchange.getIn().getBody(); // should preserve the object as Source assertIsInstanceOf(Source.class, body); } }).to("seda:choice"); from("activemq:queue:bytes") .process(new Processor() { public void process(Exchange exchange) throws Exception { Object body = exchange.getIn().getBody(); // should be a byte array by default assertIsInstanceOf(byte[].class, body); } }).to("seda:choice"); from("activemq:queue:default") .to("seda:choice"); from("seda:choice") .choice() .when().xpath("/person/city = 'London'").to("mock:london") .when().xpath("/person/city = 'Tampa'").to("mock:tampa") .otherwise().to("mock:unknown") .end(); } }; } }
/*(c) Copyright 2012, VersionOne, Inc. All rights reserved. (c)*/ package com.versionone.integration.teamcity; import com.versionone.integration.ciCommon.V1Config; import jetbrains.buildServer.controllers.ActionErrors; import jetbrains.buildServer.controllers.BaseFormXmlController; import jetbrains.buildServer.controllers.FormUtil; import jetbrains.buildServer.controllers.PublicKeyUtil; import jetbrains.buildServer.controllers.RememberState; import jetbrains.buildServer.controllers.XmlResponseUtil; import jetbrains.buildServer.serverSide.ServerPaths; import jetbrains.buildServer.util.StringUtil; import jetbrains.buildServer.web.openapi.CustomTab; import jetbrains.buildServer.web.openapi.PagePlaces; import jetbrains.buildServer.web.openapi.PlaceId; import jetbrains.buildServer.web.openapi.PluginDescriptor; import jetbrains.buildServer.web.openapi.WebControllerManager; import org.jdom.Element; import org.jetbrains.annotations.NotNull; import org.springframework.web.servlet.ModelAndView; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.regex.Pattern; import java.util.regex.PatternSyntaxException; //import jetbrains.buildServer.controllers.admin.NotificatorSettingsController; public class V1SettingsController extends BaseFormXmlController implements CustomTab { public static final String PAGE_URL = "/plugins/TeamCityNotificator/editSettings.html"; private static final String SETTINGS_BEAN_KEY = "settingsBean"; private static final String FILE_NAME = "editSettings.jsp"; private static final String TAB_TITLE = "VersionOne Notifier"; private static final String TAB_ID = "VersionOneNotifier"; private PluginDescriptor descriptor; private FileConfig myV1NotificatorConfig; private V1Connector connector; private WebControllerManager webControllerManager; protected final PagePlaces 
myPagePlaces; private PlaceId myPlaceId; public V1SettingsController(V1Connector connector, PagePlaces places, WebControllerManager webControllerManager, PluginDescriptor descriptor, ServerPaths serverPaths) { //super(server); myV1NotificatorConfig = new FileConfig(serverPaths.getConfigDir()); this.descriptor = descriptor; this.connector = connector; this.webControllerManager = webControllerManager; this.myPagePlaces = places; this.myPlaceId = PlaceId.ADMIN_SERVER_CONFIGURATION_TAB; register(); } protected void register() { myPagePlaces.getPlaceById(myPlaceId).addExtension(this); webControllerManager.registerController(PAGE_URL, this); } protected ModelAndView doGet(HttpServletRequest request, HttpServletResponse response) { RememberState bean = createSettingsBean(); ModelAndView view = new ModelAndView(descriptor.getPluginResourcesPath() + FILE_NAME); view.getModel().put(SETTINGS_BEAN_KEY, bean); return view; } private SettingsBean createSettingsBean() { return new SettingsBean(myV1NotificatorConfig); } @Override protected void doPost(HttpServletRequest request, HttpServletResponse response, Element xmlResponse) { if (PublicKeyUtil.isPublicKeyExpired(request)) { PublicKeyUtil.writePublicKeyExpiredError(xmlResponse); return; } SettingsBean bean = getSettingsBean(request); FormUtil.bindFromRequest(request, bean); if (isStoreInSessionRequest(request)) { XmlResponseUtil.writeFormModifiedIfNeeded(xmlResponse, bean); return; } ActionErrors errors = validate(bean); if (!errors.hasNoErrors()) { writeErrors(xmlResponse, errors); return; } String testConnectionResult = testSettings(bean); if (isTestConnectionRequest(request)) { XmlResponseUtil.writeTestResult(xmlResponse, testConnectionResult); } else { if (testConnectionResult == null) { saveSettings(bean); FormUtil.removeFromSession(request.getSession(), bean.getClass()); writeRedirect(xmlResponse, (request.getContextPath() + "admin.html?item=" + getTabId())); } else { errors.addError("invalidConnection", 
testConnectionResult); writeErrors(xmlResponse, errors); } } } protected final boolean isStoreInSessionRequest(HttpServletRequest request) { return "storeInSession".equals(request.getParameter("submitSettings")); } protected final boolean isTestConnectionRequest(HttpServletRequest request) { return "testConnection".equals(request.getParameter("submitSettings")); } protected SettingsBean getSettingsBean(HttpServletRequest request) { final SettingsBean bean = createSettingsBean(); return FormUtil.getOrCreateForm(request, (Class<SettingsBean>) bean.getClass(), new FormUtil.FormCreator<SettingsBean>() { public SettingsBean createForm(HttpServletRequest request) { return bean; } }); } protected void saveSettings(SettingsBean bean) { copySettings(bean, myV1NotificatorConfig); myV1NotificatorConfig.save(); connector.disconnect(); } private static void copySettings(SettingsBean bean, V1Config target) { target.setUrl(bean.getUrl()); target.setUserName(bean.getUserName()); target.setPassword(bean.getPassword()); target.setReferenceField(bean.getReferenceField()); target.setPattern(Pattern.compile(bean.getPattern())); target.setFullyQualifiedBuildName(getBooleanByString(bean.getFullyQualifiedBuildName().toString())); target.setProxyUsed(getBooleanByString(bean.getProxyUsed().toString())); target.setProxyUri(bean.getProxyUri()); target.setProxyUsername(bean.getProxyUsername()); target.setProxyPassword(bean.getProxyPassword()); } private static Boolean getBooleanByString(String value) { try { return Boolean.parseBoolean(value); } catch (Exception ex) { return false; } } public ActionErrors validate(SettingsBean bean) { ActionErrors errors = new ActionErrors(); if (StringUtil.isEmptyOrSpaces(bean.getUrl())) { errors.addError("emptyUrl", "VersionOne Server URL is required."); } else try { new URL(bean.getUrl()); } catch (MalformedURLException e) { errors.addError("invalidUrl", "Invalid server URL format."); } if (StringUtil.isEmptyOrSpaces(bean.getUserName())) { 
errors.addError("emptyUserName", "User name is required."); } if (StringUtil.isEmptyOrSpaces(bean.getPassword())) { errors.addError("emptyPassword", "Password is required."); } if (StringUtil.isEmptyOrSpaces(bean.getReferenceField())) { errors.addError("emptyReferenceField", "Reference Field is required."); } if (StringUtil.isEmptyOrSpaces(bean.getPattern())) { errors.addError("emptyPattern", "Pattern Field is required."); } else { try { Pattern.compile(bean.getPattern()); } catch (PatternSyntaxException e) { errors.addError("invalidPattern", "Pattern must be valid regular expression"); } } if (bean.getProxyUsed() && StringUtil.isEmptyOrSpaces(bean.getProxyUri())) { errors.addError("onEmptyProxyUriError", "Proxy URI is required."); } else if (bean.getProxyUsed()) { try { new URL(bean.getProxyUri()); } catch (MalformedURLException e) { errors.addError("onInvalidProxyUriError", "Invalid proxy URI format."); } } return errors; } public String testSettings(SettingsBean bean) { V1Connector testConnector = createConnectorToVersionOne(bean); if (!testConnector.isConnectionValid()) { return "Connection not valid."; } if (!testConnector.isReferenceFieldValid()) { return "Connection is valid.\nReference field NOT valid."; } return null; } protected V1Connector createConnectorToVersionOne(SettingsBean bean) { final FileConfig testConfig = new FileConfig(bean); V1Connector testConnector = new V1Connector(); testConnector.setConnectionSettings(testConfig); return testConnector; } @NotNull public String getTabId() { return TAB_ID; } @NotNull public String getTabTitle() { return TAB_TITLE; } @NotNull public String getIncludeUrl() { return PAGE_URL; } @NotNull public String getPluginName() { return V1ServerListener.PLUGIN_NAME; } @NotNull public List<String> getCssPaths() { return new ArrayList<String>(); } @NotNull public List<String> getJsPaths() { return new ArrayList<String>(); } public boolean isAvailable(@NotNull final HttpServletRequest request) { return true; } public 
boolean isVisible() { return true; } public void fillModel(@NotNull Map<String, Object> model, @NotNull HttpServletRequest request) { } }
package org.apache.lucene.search.suggest;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.File;
import java.io.IOException;
import java.util.Comparator;

import org.apache.lucene.search.suggest.Sort.ByteSequencesReader;
import org.apache.lucene.search.suggest.Sort.ByteSequencesWriter;
import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.store.ByteArrayDataOutput;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;

/**
 * This wrapper buffers incoming elements and makes sure they are sorted based on given comparator.
 * <p>
 * Entries are spilled to two temp files (raw and sorted) via an external
 * sort; each on-disk record is laid out as:
 * {@code term bytes | [payload bytes | payload length (short)] | weight (long)}.
 * @lucene.experimental
 */
public class SortedInputIterator implements InputIterator {

  private final InputIterator source;
  private File tempInput;      // raw (unsorted) spill file
  private File tempSorted;     // externally-sorted spill file
  private final ByteSequencesReader reader;
  private final Comparator<BytesRef> comparator;
  private final boolean hasPayloads;
  private boolean done = false;

  private long weight;                          // weight of the current entry
  private final BytesRef scratch = new BytesRef();
  private BytesRef payload = new BytesRef();    // payload of the current entry

  /**
   * Creates a new sorted wrapper, using {@link
   * BytesRef#getUTF8SortedAsUnicodeComparator} for
   * sorting. */
  public SortedInputIterator(InputIterator source) throws IOException {
    this(source, BytesRef.getUTF8SortedAsUnicodeComparator());
  }

  /**
   * Creates a new sorted wrapper, sorting by BytesRef
   * (ascending) then cost (ascending).
   */
  public SortedInputIterator(InputIterator source, Comparator<BytesRef> comparator) throws IOException {
    this.hasPayloads = source.hasPayloads();
    this.source = source;
    this.comparator = comparator;
    // Drains the source and performs the external sort eagerly, in the ctor.
    this.reader = sort();
  }

  /**
   * Returns the next term in sorted order, updating {@link #weight} and (when
   * payloads are enabled) {@link #payload} as side effects; null at the end.
   * Temp files are cleaned up both on normal exhaustion and on failure.
   */
  @Override
  public BytesRef next() throws IOException {
    boolean success = false;
    if (done) {
      return null;
    }
    try {
      ByteArrayDataInput input = new ByteArrayDataInput();
      if (reader.read(scratch)) {
        // decode() strips the trailing weight from scratch; decodePayload()
        // then strips the payload + its length short.
        weight = decode(scratch, input);
        if (hasPayloads) {
          payload = decodePayload(scratch, input);
        }
        success = true;
        return scratch;
      }
      close();
      success = done = true;
      return null;
    } finally {
      if (!success) {
        // Any failure terminates iteration and releases the temp files.
        done = true;
        close();
      }
    }
  }

  @Override
  public long weight() {
    return weight;
  }

  @Override
  public BytesRef payload() {
    if (hasPayloads) {
      return payload;
    }
    return null;
  }

  @Override
  public boolean hasPayloads() {
    return hasPayloads;
  }

  @Override
  public Comparator<BytesRef> getComparator() {
    return tieBreakByCostComparator;
  }

  /** Sortes by BytesRef (ascending) then cost (ascending). */
  private final Comparator<BytesRef> tieBreakByCostComparator = new Comparator<BytesRef>() {

    private final BytesRef leftScratch = new BytesRef();
    private final BytesRef rightScratch = new BytesRef();
    private final ByteArrayDataInput input = new ByteArrayDataInput();

    @Override
    public int compare(BytesRef left, BytesRef right) {
      // Make shallow copy in case decode changes the BytesRef:
      leftScratch.bytes = left.bytes;
      leftScratch.offset = left.offset;
      leftScratch.length = left.length;
      rightScratch.bytes = right.bytes;
      rightScratch.offset = right.offset;
      rightScratch.length = right.length;
      long leftCost = decode(leftScratch, input);
      long rightCost = decode(rightScratch, input);
      if (hasPayloads) {
        // Strip the payloads too so the term comparison sees only term bytes.
        decodePayload(leftScratch, input);
        decodePayload(rightScratch, input);
      }
      int cmp = comparator.compare(leftScratch, rightScratch);
      if (cmp != 0) {
        return cmp;
      }
      // Equal terms: break the tie by ascending cost (weight).
      if (leftCost < rightCost) {
        return -1;
      } else if (leftCost > rightCost) {
        return 1;
      } else {
        return 0;
      }
    }
  };

  /**
   * Drains the source into a temp file, externally sorts it, and returns a
   * reader over the sorted file. On failure the writer and temp files are
   * released; on success only the writer is closed.
   */
  private Sort.ByteSequencesReader sort() throws IOException {
    String prefix = getClass().getSimpleName();
    File directory = Sort.defaultTempDir();
    tempInput = File.createTempFile(prefix, ".input", directory);
    tempSorted = File.createTempFile(prefix, ".sorted", directory);

    final Sort.ByteSequencesWriter writer = new Sort.ByteSequencesWriter(tempInput);
    boolean success = false;
    try {
      BytesRef spare;
      byte[] buffer = new byte[0];
      ByteArrayDataOutput output = new ByteArrayDataOutput(buffer);

      while ((spare = source.next()) != null) {
        encode(writer, output, buffer, spare, source.payload(), source.weight());
      }
      writer.close();
      new Sort(tieBreakByCostComparator).sort(tempInput, tempSorted);
      ByteSequencesReader reader = new Sort.ByteSequencesReader(tempSorted);
      success = true;
      return reader;
    } finally {
      if (success) {
        IOUtils.close(writer);
      } else {
        try {
          IOUtils.closeWhileHandlingException(writer);
        } finally {
          close();
        }
      }
    }
  }

  /** Closes the reader and deletes both temp files (best-effort). */
  private void close() throws IOException {
    IOUtils.close(reader);
    if (tempInput != null) {
      tempInput.delete();
    }
    if (tempSorted != null) {
      tempSorted.delete();
    }
  }

  /** encodes an entry (bytes+(payload)+weight) to the provided writer */
  protected void encode(ByteSequencesWriter writer, ByteArrayDataOutput output, byte[] buffer, BytesRef spare, BytesRef payload, long weight) throws IOException {
    int requiredLength = spare.length + 8 + ((hasPayloads) ? 2 + payload.length : 0);
    if (requiredLength >= buffer.length) {
      // NOTE(review): buffer is grown only locally; the caller's reference is
      // not updated, so a larger entry re-grows on every call — correctness is
      // unaffected (output/writer use the grown array), but this looks like an
      // unintended perf cost. TODO confirm against upstream.
      buffer = ArrayUtil.grow(buffer, requiredLength);
    }
    output.reset(buffer);
    output.writeBytes(spare.bytes, spare.offset, spare.length);
    if (hasPayloads) {
      // Payload bytes first, then their length, so the decoder can seek
      // backwards from the end of the record.
      output.writeBytes(payload.bytes, payload.offset, payload.length);
      output.writeShort((short) payload.length);
    }
    output.writeLong(weight);
    writer.write(buffer, 0, output.getPosition());
  }

  /** decodes the weight at the current position */
  protected long decode(BytesRef scratch, ByteArrayDataInput tmpInput) {
    tmpInput.reset(scratch.bytes);
    tmpInput.skipBytes(scratch.length - 8); // suggestion
    scratch.length -= 8; // long
    return tmpInput.readLong();
  }

  /** decodes the payload at the current position */
  protected BytesRef decodePayload(BytesRef scratch, ByteArrayDataInput tmpInput) {
    // Must be called after decode(): assumes the trailing weight has already
    // been stripped from scratch.
    tmpInput.reset(scratch.bytes);
    tmpInput.skipBytes(scratch.length - 2); // skip to payload size
    short payloadLength = tmpInput.readShort(); // read payload size
    tmpInput.setPosition(scratch.length - 2 - payloadLength); // setPosition to start of payload
    BytesRef payloadScratch = new BytesRef(payloadLength);
    tmpInput.readBytes(payloadScratch.bytes, 0, payloadLength); // read payload
    payloadScratch.length = payloadLength;
    scratch.length -= 2; // payload length info (short)
    scratch.length -= payloadLength; // payload
    return payloadScratch;
  }
}
package io.swagger.codegen.cmd; import io.airlift.airline.Command; import io.airlift.airline.Option; import io.swagger.codegen.ClientOptInput; import io.swagger.codegen.CodegenConstants; import io.swagger.codegen.DefaultGenerator; import io.swagger.codegen.config.CodegenConfigurator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static io.swagger.codegen.config.CodegenConfiguratorUtils.*; import static org.apache.commons.lang3.StringUtils.isNotEmpty; /** * User: lanwen * Date: 24.03.15 * Time: 20:22 */ @Command(name = "generate", description = "Generate code with chosen lang") public class Generate implements Runnable { public static final Logger LOG = LoggerFactory.getLogger(Generate.class); @Option(name = {"-v", "--verbose"}, description = "verbose mode") private Boolean verbose; @Option(name = {"-l", "--lang"}, title = "language", required = true, description = "client language to generate (maybe class name in classpath, required)") private String lang; @Option(name = {"-o", "--output"}, title = "output directory", description = "where to write the generated files (current dir by default)") private String output = ""; @Option(name = {"-i", "--input-spec"}, title = "spec file", required = true, description = "location of the swagger spec, as URL or file (required)") private String spec; @Option(name = {"-t", "--template-dir"}, title = "template directory", description = "folder containing the template files") private String templateDir; @Option(name = {"-a", "--auth"}, title = "authorization", description = "adds authorization headers when fetching the swagger definitions remotely. 
" + "Pass in a URL-encoded string of name:header with a comma separating multiple values") private String auth; @Option(name = {"-D"}, title = "system properties", description = "sets specified system properties in " + "the format of name=value,name=value") private String systemProperties; @Option(name = {"-c", "--config"}, title = "configuration file", description = "Path to json configuration file. " + "File content should be in a json format {\"optionKey\":\"optionValue\", \"optionKey1\":\"optionValue1\"...} " + "Supported options can be different for each language. Run config-help -l {lang} command for language specific config options.") private String configFile; @Option(name = {"-s", "--skip-overwrite"}, title = "skip overwrite", description = "specifies if the existing files should be " + "overwritten during the generation.") private Boolean skipOverwrite; @Option(name = {"--api-package"}, title = "api package", description = CodegenConstants.API_PACKAGE_DESC) private String apiPackage; @Option(name = {"--model-package"}, title = "model package", description = CodegenConstants.MODEL_PACKAGE_DESC) private String modelPackage; @Option(name = {"--model-name-prefix"}, title = "model name prefix", description = CodegenConstants.MODEL_NAME_PREFIX_DESC) private String modelNamePrefix; @Option(name = {"--model-name-suffix"}, title = "model name suffix", description = CodegenConstants.MODEL_NAME_SUFFIX_DESC) private String modelNameSuffix; @Option(name = {"--instantiation-types"}, title = "instantiation types", description = "sets instantiation type mappings in the format of type=instantiatedType,type=instantiatedType." + "For example (in Java): array=ArrayList,map=HashMap. 
In other words array types will get instantiated as ArrayList in generated code.") private String instantiationTypes; @Option(name = {"--type-mappings"}, title = "type mappings", description = "sets mappings between swagger spec types and generated code types " + "in the format of swaggerType=generatedType,swaggerType=generatedType. For example: array=List,map=Map,string=String") private String typeMappings; @Option(name = {"--additional-properties"}, title = "additional properties", description = "sets additional properties that can be referenced by the mustache templates in the format of name=value,name=value") private String additionalProperties; @Option(name = {"--language-specific-primitives"}, title = "language specific primitives", description = "specifies additional language specific primitive types in the format of type1,type2,type3,type3. For example: String,boolean,Boolean,Double") private String languageSpecificPrimitives; @Option(name = {"--import-mappings"}, title = "import mappings", description = "specifies mappings between a given class and the import that should be used for that class in the format of type=import,type=import") private String importMappings; @Option(name = {"--invoker-package"}, title = "invoker package", description = CodegenConstants.INVOKER_PACKAGE_DESC) private String invokerPackage; @Option(name = {"--group-id"}, title = "group id", description = CodegenConstants.GROUP_ID_DESC) private String groupId; @Option(name = {"--artifact-id"}, title = "artifact id", description = CodegenConstants.ARTIFACT_ID_DESC) private String artifactId; @Option(name = {"--artifact-version"}, title = "artifact version", description = CodegenConstants.ARTIFACT_VERSION_DESC) private String artifactVersion; @Option(name = {"--library"}, title = "library", description = CodegenConstants.LIBRARY_DESC) private String library; @Option(name = {"--git-user-id"}, title = "git user id", description = CodegenConstants.GIT_USER_ID_DESC) private String gitUserId; 
@Option(name = {"--git-repo-id"}, title = "git repo id", description = CodegenConstants.GIT_REPO_ID_DESC)
private String gitRepoId;

@Option(name = {"--release-note"}, title = "release note", description = CodegenConstants.RELEASE_NOTE_DESC)
private String releaseNote;

@Option(name = {"--http-user-agent"}, title = "http user agent", description = CodegenConstants.HTTP_USER_AGENT_DESC)
private String httpUserAgent;

/**
 * Entry point of the generate command.
 *
 * Builds a {@code CodegenConfigurator} from the optional config file, then
 * overlays every option the user explicitly supplied on the command line,
 * and finally runs the generator exactly once.
 */
@Override
public void run() {

    //attempt to read from config file
    CodegenConfigurator configurator = CodegenConfigurator.fromFile(configFile);

    //if a config file wasn't specified or we were unable to read it
    if (configurator == null) {
        //create a fresh configurator
        configurator = new CodegenConfigurator();
    }

    //now override with any specified parameters
    // Boolean flags are tri-state: null means "not given on the command line",
    // so the config-file value (or the configurator default) is left untouched.
    if (verbose != null) {
        configurator.setVerbose(verbose);
    }

    if (skipOverwrite != null) {
        configurator.setSkipOverwrite(skipOverwrite);
    }

    // String options: only a non-empty value overrides the configurator.
    if (isNotEmpty(spec)) {
        configurator.setInputSpec(spec);
    }

    if (isNotEmpty(lang)) {
        configurator.setLang(lang);
    }

    if (isNotEmpty(output)) {
        configurator.setOutputDir(output);
    }

    if (isNotEmpty(auth)) {
        configurator.setAuth(auth);
    }

    if (isNotEmpty(templateDir)) {
        configurator.setTemplateDir(templateDir);
    }

    if (isNotEmpty(apiPackage)) {
        configurator.setApiPackage(apiPackage);
    }

    if (isNotEmpty(modelPackage)) {
        configurator.setModelPackage(modelPackage);
    }

    if (isNotEmpty(modelNamePrefix)) {
        configurator.setModelNamePrefix(modelNamePrefix);
    }

    if (isNotEmpty(modelNameSuffix)) {
        configurator.setModelNameSuffix(modelNameSuffix);
    }

    if (isNotEmpty(invokerPackage)) {
        configurator.setInvokerPackage(invokerPackage);
    }

    if (isNotEmpty(groupId)) {
        configurator.setGroupId(groupId);
    }

    if (isNotEmpty(artifactId)) {
        configurator.setArtifactId(artifactId);
    }

    if (isNotEmpty(artifactVersion)) {
        configurator.setArtifactVersion(artifactVersion);
    }

    if (isNotEmpty(library)) {
        configurator.setLibrary(library);
    }

    if (isNotEmpty(gitUserId)) {
        configurator.setGitUserId(gitUserId);
    }

    if (isNotEmpty(gitRepoId)) {
        configurator.setGitRepoId(gitRepoId);
    }

    if (isNotEmpty(releaseNote)) {
        configurator.setReleaseNote(releaseNote);
    }

    if (isNotEmpty(httpUserAgent)) {
        configurator.setHttpUserAgent(httpUserAgent);
    }

    // key=value[,key=value...] style options are parsed and applied in bulk
    // by the shared helper methods.
    applySystemPropertiesKvp(systemProperties, configurator);
    applyInstantiationTypesKvp(instantiationTypes, configurator);
    applyImportMappingsKvp(importMappings, configurator);
    applyTypeMappingsKvp(typeMappings, configurator);
    applyAdditionalPropertiesKvp(additionalProperties, configurator);
    applyLanguageSpecificPrimitivesCsv(languageSpecificPrimitives, configurator);

    final ClientOptInput clientOptInput = configurator.toClientOptInput();
    new DefaultGenerator().opts(clientOptInput).generate();
}
}
/*
 * Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.internal.util.graph;

import com.hazelcast.internal.util.BiTuple;
import com.hazelcast.internal.util.RandomPicker;
import com.hazelcast.test.HazelcastParallelClassRunner;
import com.hazelcast.test.annotation.QuickTest;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.function.ToIntFunction;
import java.util.stream.IntStream;

import static java.util.stream.Collectors.toList;
import static org.junit.Assert.assertEquals;
import static org.junit.Assume.assumeFalse;

/**
 * Tests for {@code BronKerboschCliqueFinder}: builds fully connected graphs,
 * removes selected edges, and verifies the set of maximum cliques reported.
 */
@RunWith(HazelcastParallelClassRunner.class)
@Category(QuickTest.class)
public class BronKerboschCliqueFinderTest {

    @Test
    public void test2DisconnectedVerticesIn4vertexGraph() {
        test2DisconnectedVertices(4);
    }

    @Test
    public void test2DisconnectedVerticesIn50vertexGraph() {
        test2DisconnectedVertices(50);
    }

    @Test
    public void test2DisconnectedVerticesIn100vertexGraph() {
        test2DisconnectedVertices(100);
    }

    @Test
    public void test2DisconnectedVerticesIn250vertexGraph() {
        test2DisconnectedVertices(250);
    }

    /**
     * Fully connects {@code vertexCount} vertices, removes only the
     * n0-n1 edge, and expects exactly two max cliques: all vertices
     * minus n1, and all vertices minus n0.
     */
    private void test2DisconnectedVertices(int vertexCount) {
        List<String> vertices = IntStream.range(0, vertexCount).mapToObj(i -> "n" + i).collect(toList());
        Graph<String> graph = populateFullyConnectedGraph(vertices);
        graph.disconnect("n0", "n1");

        Collection<Set<String>> maxCliques = computeMaxCliques(graph);

        Set<Set<String>> expectedCliques = new HashSet<>();
        Set<String> expectedClique1 = new HashSet<>(vertices.subList(2, vertexCount));
        expectedClique1.add("n0");
        Set<String> expectedClique2 = new HashSet<>(vertices.subList(2, vertexCount));
        expectedClique2.add("n1");
        expectedCliques.add(expectedClique1);
        expectedCliques.add(expectedClique2);

        assertEquals(expectedCliques, new HashSet<>(maxCliques));
    }

    // Runs the clique finder with its default (unbounded) configuration.
    private Collection<Set<String>> computeMaxCliques(Graph<String> graph) {
        return new BronKerboschCliqueFinder<>(graph).computeMaxCliques();
    }

    @Test
    public void testSplitInto4VertexLeftCliqueAnd4VertexRightClique() {
        testFullSplitInto2Cliques(8, 4);
    }

    @Test
    public void testSplitInto8VertexLeftCliqueAnd5vertexRightClique() {
        testFullSplitInto2Cliques(8, 5);
    }

    @Test
    public void testSplitInto25VertexLeftCliqueAnd25VertexRightClique() {
        testFullSplitInto2Cliques(50, 25);
    }

    @Test
    public void testSplitInto15VertexLeftCliqueAnd35VertexRightClique() {
        testFullSplitInto2Cliques(50, 15);
    }

    @Test
    public void testSplitInto50VertexLeftCliqueAnd50VertexRightClique() {
        testFullSplitInto2Cliques(100, 50);
    }

    @Test
    public void testSplitInto75VertexLeftCliqueAnd25VertexRightClique() {
        testFullSplitInto2Cliques(100, 75);
    }

    @Test
    public void testSplitInto125VertexLeftCliqueAnd125VertexRightClique() {
        testFullSplitInto2Cliques(250, 125);
    }

    @Test
    public void testSplitInto100VertexLeftCliqueAnd150VertexRightClique() {
        testFullSplitInto2Cliques(250, 100);
    }

    /**
     * Cuts every edge between the first {@code leftCliqueSize} vertices and the
     * rest, producing two disjoint cliques. Only the larger one is a max clique
     * (both are, when they tie).
     */
    private void testFullSplitInto2Cliques(int vertexCount, int leftCliqueSize) {
        List<String> vertices = IntStream.range(0, vertexCount).mapToObj(i -> "n" + i).collect(toList());
        Graph<String> graph = populateFullyConnectedGraph(vertices);
        List<String> left = vertices.subList(0, leftCliqueSize);
        List<String> right = vertices.subList(leftCliqueSize, vertices.size());
        for (String v1 : left) {
            for (String v2 : right) {
                graph.disconnect(v1, v2);
            }
        }

        Collection<Set<String>> maxCliques = computeMaxCliques(graph);

        Set<Set<String>> expectedCliques = new HashSet<>();
        if (left.size() == right.size()) {
            expectedCliques.add(new HashSet<>(left));
            expectedCliques.add(new HashSet<>(right));
        } else if (left.size() < right.size()) {
            expectedCliques.add(new HashSet<>(right));
        } else {
            expectedCliques.add(new HashSet<>(left));
        }

        assertEquals(expectedCliques, new HashSet<>(maxCliques));
    }

    @Test
    public void test3VerticesDisconnectFrom2VerticesIn10VertexGraph() {
        testTwoDisconnectedSubgraphs(10, 3, 2);
    }

    @Test
    public void test3VerticesDisconnectFrom3VerticesIn10VertexGraph() {
        testTwoDisconnectedSubgraphs(10, 3, 3);
    }

    @Test
    public void test10VerticesDisconnectFrom10VerticesIn50VertexGraph() {
        testTwoDisconnectedSubgraphs(50, 10, 10);
    }

    @Test
    public void test15VerticesDisconnectFrom10VerticesIn50VertexGraph() {
        testTwoDisconnectedSubgraphs(50, 15, 10);
    }

    @Test
    public void test20VerticesDisconnectFrom20VerticesIn100VertexGraph() {
        testTwoDisconnectedSubgraphs(100, 20, 20);
    }

    @Test
    public void test30VerticesDisconnectFrom20VerticesIn100VertexGraph() {
        testTwoDisconnectedSubgraphs(100, 30, 20);
    }

    @Test
    public void test50VerticesDisconnectFrom50VerticesIn250VertexGraph() {
        testTwoDisconnectedSubgraphs(250, 50, 50);
    }

    @Test
    public void test100VerticesDisconnectFrom50VerticesIn250VertexGraph() {
        testTwoDisconnectedSubgraphs(250, 100, 50);
    }

    /**
     * Disconnects the first group from the second group while both stay
     * connected to the remainder. A max clique is the larger group plus the
     * remainder (two cliques when the groups tie in size).
     */
    private void testTwoDisconnectedSubgraphs(int vertexCount, int firstGroupSize, int secondGroupSize) {
        List<String> vertices = IntStream.range(0, vertexCount).mapToObj(i -> "n" + i).collect(toList());
        Graph<String> graph = populateFullyConnectedGraph(vertices);
        for (int i = 0; i < firstGroupSize; i++) {
            for (int j = firstGroupSize; j < firstGroupSize + secondGroupSize; j++) {
                graph.disconnect("n" + i, "n" + j);
            }
        }

        Collection<Set<String>> maxCliques = computeMaxCliques(graph);

        Set<Set<String>> expectedCliques = new HashSet<>();
        if (firstGroupSize == secondGroupSize) {
            Set<String> expectedClique1 = new HashSet<>(vertices.subList(0, firstGroupSize));
            Set<String> expectedClique2 = new HashSet<>(vertices.subList(firstGroupSize, firstGroupSize + secondGroupSize));
            expectedClique1.addAll(vertices.subList(firstGroupSize + secondGroupSize, vertices.size()));
            expectedClique2.addAll(vertices.subList(firstGroupSize + secondGroupSize, vertices.size()));
            expectedCliques.add(expectedClique1);
            expectedCliques.add(expectedClique2);
        } else if (firstGroupSize < secondGroupSize) {
            Set<String> expectedClique = new HashSet<>(vertices.subList(firstGroupSize, firstGroupSize + secondGroupSize));
            expectedClique.addAll(vertices.subList(firstGroupSize + secondGroupSize, vertices.size()));
            expectedCliques.add(expectedClique);
        } else {
            Set<String> expectedClique = new HashSet<>(vertices.subList(0, firstGroupSize));
            expectedClique.addAll(vertices.subList(firstGroupSize + secondGroupSize, vertices.size()));
            expectedCliques.add(expectedClique);
        }

        assertEquals(expectedCliques, new HashSet<>(maxCliques));
    }

    @Test
    public void test6DisconnectedSubgraphsInLargerGraph() {
        // 10 groups of increasing size (10..19 vertices); vertex names are "<group>_<index>".
        List<List<String>> groups = new ArrayList<>();
        for (int i = 0; i < 10; i++) {
            int j = i;
            int vertexCount = i + 10;
            List<String> vertices = IntStream.range(0, vertexCount).mapToObj(v -> j + "_" + v).collect(toList());
            groups.add(vertices);
        }

        Graph<String> graph = populateFullyConnectedGraph(groups.stream().flatMap(Collection::stream).collect(toList()));

        // Pair up groups (smallest with largest, etc.) and cut all edges between each pair.
        for (BiTuple<Integer, Integer> t : Arrays.asList(BiTuple.of(0, 9), BiTuple.of(1, 8), BiTuple.of(2, 7),
                BiTuple.of(3, 6), BiTuple.of(4, 5))) {
            for (String v1 : groups.get(t.element1)) {
                for (String v2 : groups.get(t.element2)) {
                    graph.disconnect(v1, v2);
                }
            }
        }

        // Time-bounded run; if the finder gives up within the budget the test is skipped, not failed.
        Collection<Set<String>> maxCliques = new BronKerboschCliqueFinder<>(graph, 30, TimeUnit.SECONDS).computeMaxCliques();
        assumeFalse(maxCliques.isEmpty());

        // The single max clique is the union of the larger half of the groups.
        Set<String> expectedClique = new HashSet<>();
        for (int i = groups.size() / 2; i < groups.size(); i++) {
            expectedClique.addAll(groups.get(i));
        }

        assertEquals(1, maxCliques.size());
        assertEquals(expectedClique, maxCliques.iterator().next());
    }

    @Test
    public void test6DisconnectedSubgraphsOfWholeGraph() {
        // 20 groups of increasing size (10..29 vertices).
        List<List<String>> groups = new ArrayList<>();
        for (int i = 0; i < 20; i++) {
            int j = i;
            int vertexCount = i + 10;
            List<String> vertices = IntStream.range(0, vertexCount).mapToObj(v -> j + "_" + v).collect(toList());
            groups.add(vertices);
        }

        Graph<String> graph = populateFullyConnectedGraph(groups.stream().flatMap(Collection::stream).collect(toList()));

        // Pick 6 distinct random group indices.
        List<Integer> groupIndices = new ArrayList<>();
        while (groupIndices.size() < 6) {
            int rackIndex = RandomPicker.getInt(groups.size());
            if (!groupIndices.contains(rackIndex)) {
                groupIndices.add(rackIndex);
            }
        }

        // Order the picked groups by descending size so the larger half survives in the max clique.
        groupIndices.sort(Comparator.comparingInt((ToIntFunction<Integer>) i -> groups.get(i).size()).reversed());

        for (String v1 : groups.get(groupIndices.get(0))) {
            for (String v2 : groups.get(groupIndices.get(5))) {
                graph.disconnect(v1, v2);
            }
        }

        for (String v1 : groups.get(groupIndices.get(1))) {
            for (String v2 : groups.get(groupIndices.get(4))) {
                graph.disconnect(v1, v2);
            }
        }

        for (String v1 : groups.get(groupIndices.get(2))) {
            for (String v2 : groups.get(groupIndices.get(3))) {
                graph.disconnect(v1, v2);
            }
        }

        // Time-bounded run; empty result means the budget expired -> skip the test.
        Collection<Set<String>> maxCliques = new BronKerboschCliqueFinder<>(graph, 30, TimeUnit.SECONDS).computeMaxCliques();
        assumeFalse(maxCliques.isEmpty());

        // Expected clique: everything except the smaller member of each disconnected pair.
        Set<String> expectedClique = new HashSet<>();
        for (int i = 0; i < groups.size(); i++) {
            if (i != groupIndices.get(3) && i != groupIndices.get(4) && i != groupIndices.get(5)) {
                expectedClique.addAll(groups.get(i));
            }
        }

        assertEquals(1, maxCliques.size());
        assertEquals(expectedClique, maxCliques.iterator().next());
    }

    // Connects every pair of vertices (including self-pairs, which Graph is
    // presumably tolerant of -- NOTE(review): confirm Graph.connect(v, v) is a no-op).
    private Graph<String> populateFullyConnectedGraph(List<String> vertices) {
        Graph<String> graph = new Graph<>();
        for (String v1 : vertices) {
            for (String v2 : vertices) {
                graph.connect(v1, v2);
            }
        }
        return graph;
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.ipc; import java.io.IOException; import java.net.ConnectException; import java.net.InetSocketAddress; import java.lang.reflect.Method; import junit.framework.TestCase; import java.util.Arrays; import org.apache.commons.logging.*; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.io.UTF8; import org.apache.hadoop.io.Writable; import org.apache.hadoop.metrics.MetricsRecord; import org.apache.hadoop.metrics.spi.NullContext; import org.apache.hadoop.metrics.util.MetricsTimeVaryingRate; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.authorize.AuthorizationException; import org.apache.hadoop.security.authorize.PolicyProvider; import org.apache.hadoop.security.authorize.Service; import org.apache.hadoop.security.authorize.ServiceAuthorizationManager; import org.apache.hadoop.security.AccessControlException; import static org.mockito.Mockito.*; /** Unit tests for RPC. 
 */
public class TestRPC extends TestCase {
  private static final String ADDRESS = "0.0.0.0";

  public static final Log LOG = LogFactory.getLog(TestRPC.class);

  private static Configuration conf = new Configuration();

  int datasize = 1024*100;   // size of the int[] exchanged per transaction
  int numThreads = 50;       // concurrent client threads in the multi-threaded test

  public TestRPC(String name) {
    super(name);
  }

  /** Protocol exercised by these tests; covers overloads, arrays and errors. */
  public interface TestProtocol extends VersionedProtocol {
    public static final long versionID = 1L;

    void ping() throws IOException;
    void slowPing(boolean shouldSlow) throws IOException;
    String echo(String value) throws IOException;
    String[] echo(String[] value) throws IOException;
    Writable echo(Writable value) throws IOException;
    int add(int v1, int v2) throws IOException;
    int add(int[] values) throws IOException;
    int error() throws IOException;
    void testServerGet() throws IOException;
    int[] exchange(int[] values) throws IOException;
  }

  /** Server-side implementation of {@link TestProtocol}. */
  public static class TestImpl implements TestProtocol {
    // number of fast pings received; a slow ping blocks until this reaches 2
    int fastPingCounter = 0;

    public long getProtocolVersion(String protocol, long clientVersion) {
      return TestProtocol.versionID;
    }

    public void ping() {}

    public synchronized void slowPing(boolean shouldSlow) {
      if (shouldSlow) {
        while (fastPingCounter < 2) {
          try {
            wait();   // slow response until two fast pings happened
          } catch (InterruptedException ignored) {}
        }
        fastPingCounter -= 2;
      } else {
        fastPingCounter++;
        notify();
      }
    }

    public String echo(String value) throws IOException {
      return value;
    }

    public String[] echo(String[] values) throws IOException {
      return values;
    }

    public Writable echo(Writable writable) {
      return writable;
    }

    public int add(int v1, int v2) {
      return v1 + v2;
    }

    public int add(int[] values) {
      int sum = 0;
      for (int i = 0; i < values.length; i++) {
        sum += values[i];
      }
      return sum;
    }

    public int error() throws IOException {
      throw new IOException("bobo");
    }

    public void testServerGet() throws IOException {
      // Server.get() must return the RPC server handling the current call.
      if (!(Server.get() instanceof RPC.Server)) {
        throw new IOException("Server.get() failed");
      }
    }

    public int[] exchange(int[] values) {
      for (int i = 0; i < values.length; i++) {
        values[i] = i;
      }
      return values;
    }
  }

  //
  // an object that does a bunch of transactions
  //
  static class Transactions implements Runnable {
    int datasize;
    TestProtocol proxy;

    Transactions(TestProtocol proxy, int datasize) {
      this.proxy = proxy;
      this.datasize = datasize;
    }

    // do two RPC that transfers data.
    public void run() {
      int[] indata = new int[datasize];
      int[] outdata = null;
      int val = 0;
      try {
        outdata = proxy.exchange(indata);
        val = proxy.add(1,2);
      } catch (IOException e) {
        assertTrue("Exception from RPC exchange() " + e, false);
      }
      assertEquals(indata.length, outdata.length);
      assertEquals(val, 3);
      for (int i = 0; i < outdata.length; i++) {
        assertEquals(outdata[i], i);
      }
    }
  }

  //
  // A class that does an RPC but does not read its response.
  //
  static class SlowRPC implements Runnable {
    private TestProtocol proxy;
    private volatile boolean done;   // volatile: polled from the test thread

    SlowRPC(TestProtocol proxy) {
      this.proxy = proxy;
      done = false;
    }

    boolean isDone() {
      return done;
    }

    public void run() {
      try {
        proxy.slowPing(true);   // this would hang until two fast pings happened
        done = true;
      } catch (IOException e) {
        assertTrue("SlowRPC ping exception " + e, false);
      }
    }
  }

  /**
   * Verifies that a server with two handlers can serve fast pings while one
   * handler is blocked in a slow ping, and that the slow ping completes after
   * exactly two fast pings.
   */
  public void testSlowRpc() throws Exception {
    System.out.println("Testing Slow RPC");
    // create a server with two handlers
    Server server = RPC.getServer(TestProtocol.class, new TestImpl(), ADDRESS, 0, 2, false, conf, null);
    TestProtocol proxy = null;

    try {
      server.start();

      InetSocketAddress addr = NetUtils.getConnectAddress(server);

      // create a client
      proxy = (TestProtocol)RPC.getProxy(TestProtocol.class, TestProtocol.versionID, addr, conf);

      SlowRPC slowrpc = new SlowRPC(proxy);
      Thread thread = new Thread(slowrpc, "SlowRPC");
      thread.start();   // send a slow RPC, which won't return until two fast pings
      assertTrue("Slow RPC should not have finished1.", !slowrpc.isDone());

      proxy.slowPing(false);   // first fast ping

      // verify that the first RPC is still stuck
      assertTrue("Slow RPC should not have finished2.", !slowrpc.isDone());

      proxy.slowPing(false);   // second fast ping

      // Now the slow ping should be able to be executed
      while (!slowrpc.isDone()) {
        System.out.println("Waiting for slow RPC to get done.");
        try {
          Thread.sleep(1000);
        } catch (InterruptedException e) {}
      }
    } finally {
      server.stop();
      if (proxy != null) {
        RPC.stopProxy(proxy);
      }
      System.out.println("Down slow rpc testing");
    }
  }

  /**
   * End-to-end coverage of every TestProtocol method, the server metrics,
   * multi-threaded large transfers, and RPC multi-calls. The expected metric
   * counts below depend on the exact sequence of calls made before them.
   */
  public void testCalls(Configuration conf) throws Exception {
    Server server = RPC.getServer(TestProtocol.class, new TestImpl(), ADDRESS, 0, conf);
    TestProtocol proxy = null;
    try {
      server.start();

      InetSocketAddress addr = NetUtils.getConnectAddress(server);
      proxy = (TestProtocol)RPC.getProxy(TestProtocol.class, TestProtocol.versionID, addr, conf);

      proxy.ping();

      String stringResult = proxy.echo("foo");
      assertEquals(stringResult, "foo");

      stringResult = proxy.echo((String)null);
      assertEquals(stringResult, null);

      // Check rpcMetrics
      server.rpcMetrics.doUpdates(new NullContext());

      // Number 4 includes getProtocolVersion()
      assertEquals(4, server.rpcMetrics.rpcProcessingTime.getPreviousIntervalNumOps());
      assertTrue(server.rpcMetrics.sentBytes.getPreviousIntervalValue() > 0);
      assertTrue(server.rpcMetrics.receivedBytes.getPreviousIntervalValue() > 0);

      // Number of calls to echo method should be 2
      server.rpcDetailedMetrics.doUpdates(new NullContext());
      MetricsTimeVaryingRate metrics =
          (MetricsTimeVaryingRate)server.rpcDetailedMetrics.registry.get("echo");
      assertEquals(2, metrics.getPreviousIntervalNumOps());

      // Number of calls to ping method should be 1
      metrics = (MetricsTimeVaryingRate)server.rpcDetailedMetrics.registry.get("ping");
      assertEquals(1, metrics.getPreviousIntervalNumOps());

      String[] stringResults = proxy.echo(new String[]{"foo","bar"});
      assertTrue(Arrays.equals(stringResults, new String[]{"foo","bar"}));

      stringResults = proxy.echo((String[])null);
      assertTrue(Arrays.equals(stringResults, null));

      UTF8 utf8Result = (UTF8)proxy.echo(new UTF8("hello world"));
      assertEquals(utf8Result, new UTF8("hello world"));

      utf8Result = (UTF8)proxy.echo((UTF8)null);
      assertEquals(utf8Result, null);

      int intResult = proxy.add(1, 2);
      assertEquals(intResult, 3);

      intResult = proxy.add(new int[] {1, 2});
      assertEquals(intResult, 3);

      // error() must surface the server-side IOException on the client
      boolean caught = false;
      try {
        proxy.error();
      } catch (IOException e) {
        LOG.debug("Caught " + e);
        caught = true;
      }
      assertTrue(caught);

      proxy.testServerGet();

      // create multiple threads and make them do large data transfers
      System.out.println("Starting multi-threaded RPC test...");
      server.setSocketSendBufSize(1024);
      Thread threadId[] = new Thread[numThreads];
      for (int i = 0; i < numThreads; i++) {
        Transactions trans = new Transactions(proxy, datasize);
        threadId[i] = new Thread(trans, "TransactionThread-" + i);
        threadId[i].start();
      }

      // wait for all transactions to get over
      System.out.println("Waiting for all threads to finish RPCs...");
      for (int i = 0; i < numThreads; i++) {
        try {
          threadId[i].join();
        } catch (InterruptedException e) {
          i--;      // retry
        }
      }

      // try some multi-calls
      Method echo = TestProtocol.class.getMethod("echo", new Class[] { String.class });
      String[] strings = (String[])RPC.call(echo, new String[][]{{"a"},{"b"}},
          new InetSocketAddress[] {addr, addr}, conf);
      assertTrue(Arrays.equals(strings, new String[]{"a","b"}));

      Method ping = TestProtocol.class.getMethod("ping", new Class[] {});
      Object[] voids = RPC.call(ping, new Object[][]{{},{}},
          new InetSocketAddress[] {addr, addr}, conf);
      assertEquals(voids, null);
    } finally {
      server.stop();
      if(proxy!=null) RPC.stopProxy(proxy);
    }
  }

  /** A proxy to a port with no server behind it must fail with ConnectException. */
  public void testStandaloneClient() throws IOException {
    try {
      RPC.waitForProxy(TestProtocol.class, TestProtocol.versionID,
          new InetSocketAddress(ADDRESS, 20), conf, 15000L);
      fail("We should not have reached here");
    } catch (ConnectException ioe) {
      //this is what we expected
    }
  }

  private static final String ACL_CONFIG = "test.protocol.acl";

  /** Maps the ACL_CONFIG key onto TestProtocol for the authorization tests. */
  private static class TestPolicyProvider extends PolicyProvider {

    @Override
    public Service[] getServices() {
      return new Service[] { new Service(ACL_CONFIG, TestProtocol.class) };
    }
  }

  /**
   * Starts an authorization-enabled server, pings it, and checks both the
   * expected outcome (success or AuthorizationException) and the
   * authorization/authentication metric counters.
   */
  private void doRPCs(Configuration conf, boolean expectFailure) throws Exception {
    ServiceAuthorizationManager.refresh(conf, new TestPolicyProvider());

    Server server = RPC.getServer(TestProtocol.class, new TestImpl(), ADDRESS, 0, 5, true, conf, null);

    TestProtocol proxy = null;

    server.start();
    InetSocketAddress addr = NetUtils.getConnectAddress(server);

    try {
      proxy = (TestProtocol)RPC.getProxy(TestProtocol.class, TestProtocol.versionID, addr, conf);
      proxy.ping();

      if (expectFailure) {
        fail("Expect RPC.getProxy to fail with AuthorizationException!");
      }
    } catch (RemoteException e) {
      if (expectFailure) {
        assertTrue(e.unwrapRemoteException() instanceof AuthorizationException);
      } else {
        throw e;
      }
    } finally {
      server.stop();
      if (proxy != null) {
        RPC.stopProxy(proxy);
      }
      if (expectFailure) {
        assertEquals("Wrong number of authorizationFailures ", 1,
            server.getRpcMetrics().authorizationFailures.getCurrentIntervalValue());
      } else {
        assertEquals("Wrong number of authorizationSuccesses ", 1,
            server.getRpcMetrics().authorizationSuccesses.getCurrentIntervalValue());
      }
      //since we don't have authentication turned ON, we should see
      // 0 for the authentication successes and 0 for failure
      assertEquals("Wrong number of authenticationFailures ", 0,
          server.getRpcMetrics().authenticationFailures.getCurrentIntervalValue());
      assertEquals("Wrong number of authenticationSuccesses ", 0,
          server.getRpcMetrics().authenticationSuccesses.getCurrentIntervalValue());
    }
  }

  /** Exercises success and failure ACLs, with 1 and then 2 reader threads. */
  public void testAuthorization() throws Exception {
    Configuration conf = new Configuration();
    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, true);

    // Expect to succeed
    conf.set(ACL_CONFIG, "*");
    doRPCs(conf, false);

    // Reset authorization to expect failure
    conf.set(ACL_CONFIG, "invalid invalid");
    doRPCs(conf, true);

    conf.setInt(CommonConfigurationKeys.IPC_SERVER_RPC_READ_THREADS_KEY, 2);

    // Expect to succeed
    conf.set(ACL_CONFIG, "*");
    doRPCs(conf, false);

    // Reset authorization to expect failure
    conf.set(ACL_CONFIG, "invalid invalid");
    doRPCs(conf, true);
  }

  /**
   * Switch off setting socketTimeout values on RPC sockets.
   * Verify that RPC calls still work ok.
   */
  public void testNoPings() throws Exception {
    Configuration conf = new Configuration();

    conf.setBoolean("ipc.client.ping", false);
    new TestRPC("testnoPings").testCalls(conf);

    conf.setInt(CommonConfigurationKeys.IPC_SERVER_RPC_READ_THREADS_KEY, 2);
    new TestRPC("testnoPings").testCalls(conf);
  }

  /**
   * Test stopping a non-registered proxy
   * @throws Exception
   */
  public void testStopNonRegisteredProxy() throws Exception {
    RPC.stopProxy(mock(TestProtocol.class));
  }

  /**
   * An insecure client against a security-enabled server must get a
   * RemoteException wrapping AccessControlException; checked for both a
   * single-reader and a multi-reader server.
   */
  public void testErrorMsgForInsecureClient() throws Exception {
    final Server server = RPC.getServer(TestProtocol.class, new TestImpl(), ADDRESS, 0, 5, true, conf, null);
    server.enableSecurity();
    server.start();
    boolean succeeded = false;
    final InetSocketAddress addr = NetUtils.getConnectAddress(server);
    TestProtocol proxy = null;
    try {
      proxy = (TestProtocol) RPC.getProxy(TestProtocol.class, TestProtocol.versionID, addr, conf);
    } catch (RemoteException e) {
      LOG.info("LOGGING MESSAGE: " + e.getLocalizedMessage());
      assertTrue(e.unwrapRemoteException() instanceof AccessControlException);
      succeeded = true;
    } finally {
      server.stop();
      if (proxy != null) {
        RPC.stopProxy(proxy);
      }
    }
    assertTrue(succeeded);

    conf.setInt(CommonConfigurationKeys.IPC_SERVER_RPC_READ_THREADS_KEY, 2);

    final Server multiServer = RPC.getServer(TestProtocol.class, new TestImpl(), ADDRESS, 0, 5, true, conf, null);
    multiServer.enableSecurity();
    multiServer.start();
    succeeded = false;
    final InetSocketAddress mulitServerAddr = NetUtils.getConnectAddress(multiServer);
    proxy = null;
    try {
      proxy = (TestProtocol) RPC.getProxy(TestProtocol.class, TestProtocol.versionID, mulitServerAddr, conf);
    } catch (RemoteException e) {
      LOG.info("LOGGING MESSAGE: " + e.getLocalizedMessage());
      assertTrue(e.unwrapRemoteException() instanceof AccessControlException);
      succeeded = true;
    } finally {
      multiServer.stop();
      if (proxy != null) {
        RPC.stopProxy(proxy);
      }
    }
    assertTrue(succeeded);
  }

  public static void main(String[] args) throws Exception {
    new TestRPC("test").testCalls(conf);
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.wicket.spring;

import org.apache.wicket.spring.test.ApplicationContextMock;
import org.apache.wicket.spring.test.SpringContextLocatorMock;
import org.apache.wicket.util.lang.WicketObjects;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

/**
 * Tests {@link SpringBeanLocator}
 *
 * @author ivaynberg
 *
 */
public class SpringBeanLocatorTest extends Assert
{
    // mock Spring application context the locator resolves beans from
    private ApplicationContextMock ctx;
    // locator handing out the mock context above
    private ISpringContextLocator ctxLocator;

    /**
     * Creates a fresh mock context and context locator before each test.
     */
    @Before
    public void before()
    {
        ctx = new ApplicationContextMock();
        ctxLocator = new SpringContextLocatorMock(ctx);
    }

    /**
     * tests lookup of beans by class only
     */
    @Test
    public void testLookupByClass()
    {
        Bean bean = new Bean();
        ctx.putBean("bean", bean);
        SpringBeanLocator locator = new SpringBeanLocator(Bean.class, ctxLocator);
        // identity check: the locator must hand back the exact registered instance
        assertTrue(locator.locateProxyTarget() == bean);
    }

    /**
     * tests if lookup by class is still working after deserialization
     */
    @Test
    public void testLookupByClassAfterDeserialization()
    {
        Bean bean = new Bean();
        ctx.putBean("bean", bean);
        // cloneObject serializes and deserializes the locator
        SpringBeanLocator locator = (SpringBeanLocator)WicketObjects.cloneObject(new SpringBeanLocator(
            Bean.class, ctxLocator));
        assertNotNull(locator.locateProxyTarget());
    }
/** * tests error if bean with class is not in the context */ @Test public void testLookupByClassNotFound() { SpringBeanLocator locator = new SpringBeanLocator(Bean.class, ctxLocator); try { locator.locateProxyTarget(); fail(); } catch (IllegalStateException e) { // noop } } /** * tests error when more then one bean of the same class found */ @Test public void testLookupByClassTooManyFound() { Bean bean = new Bean(); ctx.putBean("somebean", bean); ctx.putBean("somebean2", bean); SpringBeanLocator locator = new SpringBeanLocator(Bean.class, ctxLocator); try { locator.locateProxyTarget(); fail(); } catch (IllegalStateException e) { // noop } } /** * tests lookup by name */ @Test public void testLookupByName() { Bean bean = new Bean(); ctx.putBean("bean", bean); SpringBeanLocator locator = new SpringBeanLocator("bean", Bean.class, ctxLocator); assertTrue(locator.locateProxyTarget() == bean); } /** * tests lookup by name after locator has been deserialized */ @Test public void testLookupByNameAfterDeserialization() { Bean bean = new Bean(); ctx.putBean("bean", bean); SpringBeanLocator locator = (SpringBeanLocator)WicketObjects.cloneObject(new SpringBeanLocator( "bean", Bean.class, ctxLocator)); assertNotNull(locator.locateProxyTarget()); } /** * tests error if no bean with name found */ @Test public void testLookupByNameNotFound() { SpringBeanLocator locator = new SpringBeanLocator("bean", Bean.class, ctxLocator); try { locator.locateProxyTarget(); fail(); } catch (IllegalStateException e) { // noop } } /** * tests constructor argument checks */ @Test public void testConstructorArguments() { try { new SpringBeanLocator(null, ctxLocator); fail(); } catch (IllegalArgumentException e) { // noop } try { new SpringBeanLocator(Bean.class, null); fail(); } catch (IllegalArgumentException e) { // noop } } /** * tests error when context not found */ @Test public void testContextNotFound() { SpringContextLocatorMock ctxLocator = new SpringContextLocatorMock(null); 
SpringBeanLocator locator = new SpringBeanLocator(Bean.class, ctxLocator); try { locator.locateProxyTarget(); } catch (IllegalStateException e) { // noop } } /** * tests equals and hashcode contracts */ @Test public void testEqualsAndHashcode() { SpringBeanLocator a = new SpringBeanLocator("bean", SpringBeanLocator.class, ctxLocator); SpringBeanLocator aprime = new SpringBeanLocator("bean", SpringBeanLocator.class, ctxLocator); SpringBeanLocator b = new SpringBeanLocator("bean2", SpringBeanLocator.class, ctxLocator); SpringBeanLocator c = new SpringBeanLocator("bean", SpringBeanLocatorTest.class, ctxLocator); SpringBeanLocator d = new SpringBeanLocator(SpringBeanLocator.class, ctxLocator); SpringBeanLocator dprime = new SpringBeanLocator(SpringBeanLocator.class, ctxLocator); SpringBeanLocator e = new SpringBeanLocator(SpringBeanLocatorTest.class, ctxLocator); assertEquals(a, aprime); assertEquals(aprime, a); assertEquals(a.hashCode(), aprime.hashCode()); assertFalse(a.equals(b)); assertFalse(a.equals(c)); assertFalse(b.equals(c)); assertEquals(d, dprime); assertEquals(dprime, d); ctx.putBean("locator", a); // we need to register a Bean of type d.getClass() assertEquals(d.hashCode(), dprime.hashCode()); assertFalse(a.equals(d)); assertFalse(d.equals(e)); assertFalse(a.equals(ctxLocator)); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.phoenix.end2end;

import static org.apache.phoenix.util.TestUtil.B_VALUE;
import static org.apache.phoenix.util.TestUtil.C_VALUE;
import static org.apache.phoenix.util.TestUtil.E_VALUE;
import static org.apache.phoenix.util.TestUtil.ROW1;
import static org.apache.phoenix.util.TestUtil.ROW2;
import static org.apache.phoenix.util.TestUtil.ROW3;
import static org.apache.phoenix.util.TestUtil.ROW4;
import static org.apache.phoenix.util.TestUtil.ROW5;
import static org.apache.phoenix.util.TestUtil.ROW6;
import static org.apache.phoenix.util.TestUtil.ROW7;
import static org.apache.phoenix.util.TestUtil.ROW8;
import static org.apache.phoenix.util.TestUtil.ROW9;
import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import java.sql.Connection;
import java.sql.Date;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.Properties;
import java.util.Set;

import org.apache.phoenix.util.PropertiesUtil;
import org.junit.Test;
import org.junit.runners.Parameterized.Parameters;

/**
 * End-to-end tests for the SQL IN clause, run against every index
 * configuration supplied by {@link BaseQueryIT#allIndexes()}.
 * The fixture table, tenant id and sample rows (ROW1..ROW9) are set up by
 * the {@code BaseQueryIT} superclass.
 */
public class InQueryIT extends BaseQueryIT {

    public InQueryIT(String idxDdl, boolean columnEncoded) throws Exception {
        // keepDeletedCells (third argument) is always false for these tests
        super(idxDdl, columnEncoded, false);
    }

    @Parameters(name="InQueryIT_{index}") // name is used by failsafe as file name in reports
    public static Collection<Object> data() {
        return BaseQueryIT.allIndexes();
    }

    /**
     * An IN list on the trailing primary-key column should be answered with a
     * skip scan; row order is not asserted, only the set of results.
     */
    @Test
    public void testInListSkipScan() throws Exception {
        String query = "SELECT entity_id, b_string FROM " + tableName + " WHERE organization_id=? and entity_id IN (?,?)";
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        try {
            PreparedStatement statement = conn.prepareStatement(query);
            statement.setString(1, tenantId);
            statement.setString(2, ROW2);
            statement.setString(3, ROW4);
            ResultSet rs = statement.executeQuery();
            Set<String> expectedvals = new HashSet<String>();
            expectedvals.add(ROW2+"_"+C_VALUE);
            expectedvals.add(ROW4+"_"+B_VALUE);
            Set<String> vals = new HashSet<String>();
            assertTrue (rs.next());
            vals.add(rs.getString(1) + "_" + rs.getString(2));
            assertTrue (rs.next());
            vals.add(rs.getString(1) + "_" + rs.getString(2));
            assertFalse(rs.next());
            assertEquals(expectedvals, vals);
        } finally {
            conn.close();
        }
    }

    /**
     * IN over a DATE column combined with an additional filter; only ROW1
     * matches the fixture's date column with a_integer below 4.
     */
    @Test
    public void testDateInList() throws Exception {
        String query = "SELECT entity_id FROM " + tableName + " WHERE a_date IN (?,?) AND a_integer < 4";
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(url, props);
        try {
            PreparedStatement statement = conn.prepareStatement(query);
            statement.setDate(1, new Date(0)); // epoch - not present in fixture
            statement.setDate(2, date);        // the fixture date, matches ROW1
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            assertEquals(ROW1, rs.getString(1));
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    /** IN with literal integers on a non-PK column. */
    @Test
    public void testSimpleInListStatement() throws Exception {
        String query = "SELECT entity_id FROM " + tableName + " WHERE organization_id=? AND a_integer IN (2,4)";
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(url, props);
        try {
            PreparedStatement statement = conn.prepareStatement(query);
            statement.setString(1, tenantId);
            ResultSet rs = statement.executeQuery();
            assertValueEqualsResultSet(rs, Arrays.<Object>asList(ROW2, ROW4));
        } finally {
            conn.close();
        }
    }

    /** Row-value-constructor IN list covering only part of the row key. */
    @Test
    public void testPartiallyQualifiedRVCInList() throws Exception {
        String query = "SELECT entity_id FROM " + tableName + " WHERE (a_integer,a_string) IN ((2,'a'),(5,'b'))";
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(url, props);
        try {
            PreparedStatement statement = conn.prepareStatement(query);
            ResultSet rs = statement.executeQuery();
            assertValueEqualsResultSet(rs, Arrays.<Object>asList(ROW2, ROW5));
        } finally {
            conn.close();
        }
    }

    /**
     * Row-value-constructor IN list that fully qualifies the row key.
     * Uses :n-style bind parameters (the same bind may appear more than once).
     */
    @Test
    public void testFullyQualifiedRVCInList() throws Exception {
        String query = "SELECT entity_id FROM " + tableName + " WHERE (a_integer,a_string, organization_id,entity_id) IN ((2,'a',:1,:2),(5,'b',:1,:3))";
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(url, props);
        try {
            PreparedStatement statement = conn.prepareStatement(query);
            statement.setString(1, tenantId);
            statement.setString(2, ROW2);
            statement.setString(3, ROW5);
            ResultSet rs = statement.executeQuery();
            assertValueEqualsResultSet(rs, Arrays.<Object>asList(ROW2, ROW5));
        } finally {
            conn.close();
        }
    }

    /** A single-element IN list behaves like an equality predicate. */
    @Test
    public void testOneInListStatement() throws Exception {
        String query = "SELECT entity_id FROM " + tableName + " WHERE organization_id=? AND b_string IN (?)";
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(url, props);
        try {
            PreparedStatement statement = conn.prepareStatement(query);
            statement.setString(1, tenantId);
            statement.setString(2, E_VALUE);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            assertEquals(ROW3, rs.getString(1));
            assertTrue(rs.next());
            assertEquals(ROW6, rs.getString(1));
            assertTrue(rs.next());
            assertEquals(ROW9, rs.getString(1));
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    /**
     * IN list mixing an int literal with a bound long wider than
     * Integer.MAX_VALUE - exercises type coercion of the IN elements.
     */
    @Test
    public void testMixedTypeInListStatement() throws Exception {
        String query = "SELECT entity_id FROM " + tableName + " WHERE organization_id=? AND x_long IN (5, ?)";
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(url, props);
        try {
            PreparedStatement statement = conn.prepareStatement(query);
            statement.setString(1, tenantId);
            long l = Integer.MAX_VALUE + 1L; // deliberately exceeds int range
            statement.setLong(2, l);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            assertEquals(ROW7, rs.getString(1));
            assertTrue(rs.next());
            assertEquals(ROW9, rs.getString(1));
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    /** IN on a single row-key column - results come back in row-key order. */
    @Test
    public void testRowKeySingleIn() throws Exception {
        String query = "SELECT entity_id FROM " + tableName + " WHERE organization_id=? and entity_id IN (?,?,?)";
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        try {
            PreparedStatement statement = conn.prepareStatement(query);
            statement.setString(1, tenantId);
            statement.setString(2, ROW2);
            statement.setString(3, ROW6);
            statement.setString(4, ROW8);
            ResultSet rs = statement.executeQuery();
            assertTrue (rs.next());
            assertEquals(rs.getString(1), ROW2);
            assertTrue (rs.next());
            assertEquals(rs.getString(1), ROW6);
            assertTrue (rs.next());
            assertEquals(rs.getString(1), ROW8);
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    /**
     * Combines a row-key IN with a second IN on a non-key column; only rows
     * matching both lists (ROW6, ROW9) survive.
     */
    @Test
    public void testRowKeyMultiIn() throws Exception {
        String query = "SELECT entity_id FROM " + tableName + " WHERE organization_id=? and entity_id IN (?,?,?) and a_string IN (?,?)";
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        try {
            PreparedStatement statement = conn.prepareStatement(query);
            statement.setString(1, tenantId);
            statement.setString(2, ROW2);
            statement.setString(3, ROW6);
            statement.setString(4, ROW9);
            statement.setString(5, B_VALUE);
            statement.setString(6, C_VALUE);
            ResultSet rs = statement.executeQuery();
            assertTrue (rs.next());
            assertEquals(rs.getString(1), ROW6);
            assertTrue (rs.next());
            assertEquals(rs.getString(1), ROW9);
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.catalina.startup; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.net.URL; import java.util.Map; import java.util.Properties; import javax.servlet.ServletContext; import org.apache.catalina.Authenticator; import org.apache.catalina.Container; import org.apache.catalina.Context; import org.apache.catalina.Engine; import org.apache.catalina.Globals; import org.apache.catalina.Host; import org.apache.catalina.Lifecycle; import org.apache.catalina.LifecycleEvent; import org.apache.catalina.LifecycleListener; import org.apache.catalina.Pipeline; import org.apache.catalina.Valve; import org.apache.catalina.Wrapper; import org.apache.catalina.core.ContainerBase; import org.apache.catalina.core.StandardContext; import org.apache.catalina.core.StandardEngine; import org.apache.catalina.core.StandardHost; import org.apache.catalina.deploy.ErrorPage; import org.apache.catalina.deploy.FilterDef; import org.apache.catalina.deploy.FilterMap; import org.apache.catalina.deploy.LoginConfig; import org.apache.catalina.deploy.SecurityConstraint; import 
org.apache.catalina.util.StringManager;
import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;
import org.apache.tomcat.util.descriptor.DigesterFactory;
import org.apache.tomcat.util.descriptor.XmlErrorHandler;
import org.apache.tomcat.util.digester.Digester;
import org.apache.tomcat.util.digester.RuleSet;
import org.xml.sax.ErrorHandler;
import org.xml.sax.InputSource;
import org.xml.sax.SAXParseException;


/**
 * Startup event listener for a <b>Context</b> that configures the properties
 * of that Context, and the associated defined servlets.
 *
 * @author Craig R. McClanahan
 * @author Jean-Francois Arcand
 *
 */
public class ContextConfig implements LifecycleListener {

    protected static Log log = LogFactory.getLog( ContextConfig.class );


    /**
     * The string resources for this package.
     */
    protected static final StringManager sm =
        StringManager.getManager(Constants.Package);


    // ----------------------------------------------------- Instance Variables

    /**
     * Custom mappings of login methods to authenticators, installed via
     * setCustomAuthenticators(); consulted before the default
     * Authenticators.properties mapping.
     */
    protected Map customAuthenticators;


    /**
     * The set of Authenticators that we know how to configure.  The key is
     * the name of the implemented authentication method, and the value is
     * the fully qualified Java class name of the corresponding Valve.
     * Lazily loaded from Authenticators.properties; shared across instances.
     */
    protected static Properties authenticators = null;


    /**
     * The Context we are associated with.
     */
    protected Context context = null;


    /**
     * The default web application's context file location.
     */
    protected String defaultContextXml = null;


    /**
     * The default web application's deployment descriptor location.
     */
    protected String defaultWebXml = null;


    /**
     * Track any fatal errors during startup configuration processing.
     * Cleared at init(); checked in start() before marking the Context
     * as configured.
     */
    protected boolean ok = false;


    /**
     * Any parse error which occurred while parsing XML descriptors.
     * @deprecated Unused. Will be removed in Tomcat 7.0.x.
     */
    @Deprecated
    protected SAXParseException parseException = null;


    /**
     * Original docBase.
     */
    protected String originalDocBase = null;


    /**
     * Anti-locking docBase. It is a path to a copy of the web application
     * in the java.io.tmpdir directory. This path is always an absolute one.
     */
    private File antiLockingDocBase = null;


    /**
     * The <code>Digester</code> we will use to process web application
     * context files.
     */
    protected static Digester contextDigester = null;


    /**
     * The <code>Digester</code> we will use to process web application
     * deployment descriptor files.
     */
    protected Digester webDigester = null;

    protected WebRuleSet webRuleSet = null;

    /**
     * Attribute value used to turn on/off XML validation
     * @deprecated Unused. Will be removed in Tomcat 7.0.x.
     */
    @Deprecated
    protected static boolean xmlValidation = false;


    /**
     * Attribute value used to turn on/off XML namespace awareness.
     * @deprecated Unused. Will be removed in Tomcat 7.0.x.
     */
    @Deprecated
    protected static boolean xmlNamespaceAware = false;


    /**
     * Deployment count. Used to generate unique temp-dir names for
     * anti-resource-locking copies of webapps.
     */
    protected static long deploymentCount = 0L;


    // Placeholder LoginConfig installed when security constraints exist but
    // the application declared no <login-config>.
    protected static final LoginConfig DUMMY_LOGIN_CONFIG =
        new LoginConfig("NONE", null, null, null);


    // ------------------------------------------------------------- Properties

    /**
     * Return the location of the default deployment descriptor,
     * falling back to Constants.DefaultWebXml when unset.
     */
    public String getDefaultWebXml() {
        if( defaultWebXml == null ) {
            defaultWebXml=Constants.DefaultWebXml;
        }
        return (this.defaultWebXml);
    }


    /**
     * Set the location of the default deployment descriptor
     *
     * @param path Absolute/relative path to the default web.xml
     */
    public void setDefaultWebXml(String path) {
        this.defaultWebXml = path;
    }


    /**
     * Return the location of the default context file,
     * falling back to Constants.DefaultContextXml when unset.
     */
    public String getDefaultContextXml() {
        if( defaultContextXml == null ) {
            defaultContextXml=Constants.DefaultContextXml;
        }
        return (this.defaultContextXml);
    }


    /**
     * Set the location of the default context file
     *
     * @param path Absolute/relative path to the default context.xml
     */
    public void setDefaultContextXml(String path) {
        this.defaultContextXml = path;
    }


    /**
     * Sets custom mappings of login methods to authenticators.
     *
     * @param customAuthenticators Custom mappings of login methods to
     * authenticators
     */
    public void setCustomAuthenticators(Map customAuthenticators) {
        this.customAuthenticators = customAuthenticators;
    }


    // --------------------------------------------------------- Public Methods


    /**
     * Process events for an associated Context.
     *
     * @param event The lifecycle event that has occurred
     */
    public void lifecycleEvent(LifecycleEvent event) {

        // Identify the context we are associated with
        try {
            context = (Context) event.getLifecycle();
        } catch (ClassCastException e) {
            log.error(sm.getString("contextConfig.cce", event.getLifecycle()), e);
            return;
        }

        // Dispatch on the lifecycle event type
        if (event.getType().equals(Lifecycle.START_EVENT)) {
            start();
        } else if (event.getType().equals(StandardContext.BEFORE_START_EVENT)) {
            beforeStart();
        } else if (event.getType().equals(StandardContext.AFTER_START_EVENT)) {
            // Restore docBase for management tools
            if (originalDocBase != null) {
                context.setDocBase(originalDocBase);
            }
        } else if (event.getType().equals(Lifecycle.STOP_EVENT)) {
            stop();
        } else if (event.getType().equals(Lifecycle.INIT_EVENT)) {
            init();
        } else if (event.getType().equals(Lifecycle.DESTROY_EVENT)) {
            destroy();
        }

    }


    // -------------------------------------------------------- protected Methods


    /**
     * Process the application classes annotations, if they exist, adding
     * the elapsed time to the Context's recorded startup time.
     */
    protected void applicationAnnotationsConfig() {
        long t1=System.currentTimeMillis();
        WebAnnotationSet.loadApplicationAnnotations(context);
        long t2=System.currentTimeMillis();
        if (context instanceof StandardContext) {
            ((StandardContext) context).setStartupTime(t2-t1+
                    ((StandardContext) context).getStartupTime());
        }
    }


    /**
     * Process the application configuration file, if it exists.
*/ protected void applicationWebConfig() { String altDDName = null; // Open the application web.xml file, if it exists InputStream stream = null; ServletContext servletContext = context.getServletContext(); if (servletContext != null) { altDDName = (String)servletContext.getAttribute( Globals.ALT_DD_ATTR); if (altDDName != null) { try { stream = new FileInputStream(altDDName); } catch (FileNotFoundException e) { log.error(sm.getString("contextConfig.altDDNotFound", altDDName)); } } else { stream = servletContext.getResourceAsStream (Constants.ApplicationWebXml); } } if (stream == null) { if (log.isDebugEnabled()) { log.debug(sm.getString("contextConfig.applicationMissing") + " " + context); } return; } long t1=System.currentTimeMillis(); URL url=null; // Process the application web.xml file synchronized (webDigester) { try { if (altDDName != null) { url = new File(altDDName).toURL(); } else { url = servletContext.getResource( Constants.ApplicationWebXml); } if( url!=null ) { InputSource is = new InputSource(url.toExternalForm()); is.setByteStream(stream); if (context instanceof StandardContext) { ((StandardContext) context).setReplaceWelcomeFiles(true); } XmlErrorHandler handler = new XmlErrorHandler(); webDigester.push(context); webDigester.setErrorHandler(handler); if(log.isDebugEnabled()) { log.debug("Parsing application web.xml file at " + url.toExternalForm()); } webDigester.parse(is); if (handler.getWarnings().size() > 0 || handler.getErrors().size() > 0) { ok = false; handler.logFindings(log, is.getSystemId()); } } else { log.info("No web.xml, using defaults " + context ); } } catch (SAXParseException e) { log.error(sm.getString("contextConfig.applicationParse", url.toExternalForm()), e); log.error(sm.getString("contextConfig.applicationPosition", "" + e.getLineNumber(), "" + e.getColumnNumber())); ok = false; } catch (Exception e) { log.error(sm.getString("contextConfig.applicationParse", url.toExternalForm()), e); ok = false; } finally { 
webDigester.reset();
                try {
                    if (stream != null) {
                        stream.close();
                    }
                } catch (IOException e) {
                    log.error(sm.getString("contextConfig.applicationClose"), e);
                }
            }
        }
        // Release per-parse state held by the rule set so the digester can
        // be reused for the next application.
        webRuleSet.recycle();

        long t2=System.currentTimeMillis();
        if (context instanceof StandardContext) {
            ((StandardContext) context).setStartupTime(t2-t1);
        }
    }


    /**
     * Set up an Authenticator automatically if required, and one has not
     * already been configured.
     */
    protected synchronized void authenticatorConfig() {

        // Does this Context require an Authenticator? Only contexts with
        // security constraints need one.
        SecurityConstraint constraints[] = context.findConstraints();
        if ((constraints == null) || (constraints.length == 0))
            return;
        LoginConfig loginConfig = context.getLoginConfig();
        if (loginConfig == null) {
            // No <login-config> declared - install a placeholder so a
            // default authenticator can still be selected
            loginConfig = DUMMY_LOGIN_CONFIG;
            context.setLoginConfig(loginConfig);
        }

        // Has an authenticator been configured already?
        if (context instanceof Authenticator)
            return;
        if (context instanceof ContainerBase) {
            Pipeline pipeline = ((ContainerBase) context).getPipeline();
            if (pipeline != null) {
                // Check both the basic valve and every valve already in the
                // pipeline for an existing Authenticator
                Valve basic = pipeline.getBasic();
                if ((basic != null) && (basic instanceof Authenticator))
                    return;
                Valve valves[] = pipeline.getValves();
                for (int i = 0; i < valves.length; i++) {
                    if (valves[i] instanceof Authenticator)
                        return;
                }
            }
        } else {
            return;     // Cannot install a Valve even if it would be needed
        }

        // Has a Realm been configured for us to authenticate against?
        if (context.getRealm() == null) {
            log.error(sm.getString("contextConfig.missingRealm"));
            ok = false;
            return;
        }

        /*
         * First check to see if there is a custom mapping for the login
         * method. If so, use it. Otherwise, check if there is a mapping in
         * org/apache/catalina/startup/Authenticators.properties.
*/ Valve authenticator = null; if (customAuthenticators != null) { authenticator = (Valve) customAuthenticators.get(loginConfig.getAuthMethod()); } if (authenticator == null) { // Load our mapping properties if necessary if (authenticators == null) { try { InputStream is=this.getClass().getClassLoader().getResourceAsStream("org/apache/catalina/startup/Authenticators.properties"); if( is!=null ) { authenticators = new Properties(); authenticators.load(is); } else { log.error(sm.getString( "contextConfig.authenticatorResources")); ok=false; return; } } catch (IOException e) { log.error(sm.getString( "contextConfig.authenticatorResources"), e); ok = false; return; } } // Identify the class name of the Valve we should configure String authenticatorName = null; authenticatorName = authenticators.getProperty(loginConfig.getAuthMethod()); if (authenticatorName == null) { log.error(sm.getString("contextConfig.authenticatorMissing", loginConfig.getAuthMethod())); ok = false; return; } // Instantiate and install an Authenticator of the requested class try { Class authenticatorClass = Class.forName(authenticatorName); authenticator = (Valve) authenticatorClass.newInstance(); } catch (Throwable t) { log.error(sm.getString( "contextConfig.authenticatorInstantiate", authenticatorName), t); ok = false; } } if (authenticator != null && context instanceof ContainerBase) { Pipeline pipeline = ((ContainerBase) context).getPipeline(); if (pipeline != null) { ((ContainerBase) context).addValve(authenticator); if (log.isDebugEnabled()) { log.debug(sm.getString( "contextConfig.authenticatorConfigured", loginConfig.getAuthMethod())); } } } } /** * Create and return a Digester configured to process the * web application deployment descriptor (web.xml). 
*/
    protected void createWebXmlDigester(boolean namespaceAware,
            boolean validation) {

        boolean blockExternal = context.getXmlBlockExternal();
        webRuleSet = new WebRuleSet();
        webDigester = DigesterFactory.newDigester(validation,
                namespaceAware, webRuleSet, blockExternal);
        // Force the parser to be created now, so any parser configuration
        // problem surfaces here rather than during the first parse
        webDigester.getParser();
    }


    /**
     * Create (if necessary) and return a Digester configured to process the
     * context configuration descriptor for an application.
     */
    protected Digester createContextDigester() {
        Digester digester = new Digester();
        digester.setValidating(false);
        RuleSet contextRuleSet = new ContextRuleSet("", false);
        digester.addRuleSet(contextRuleSet);
        RuleSet namingRuleSet = new NamingRuleSet("Context/");
        digester.addRuleSet(namingRuleSet);
        return digester;
    }


    /**
     * Return the base directory: the owning Engine's base dir if available,
     * otherwise the catalina.base system property.
     */
    protected String getBaseDir() {
        Container engineC=context.getParent().getParent();
        if( engineC instanceof StandardEngine ) {
            return ((StandardEngine)engineC).getBaseDir();
        }
        return System.getProperty("catalina.base");
    }

    /**
     * Process the default configuration file, if it exists.
     * The default config must be read with the container loader - so
     * container servlets can be loaded
     */
    protected void defaultWebConfig() {
        long t1=System.currentTimeMillis();

        // Open the default web.xml file, if it exists
        if( defaultWebXml==null && context instanceof StandardContext ) {
            defaultWebXml=((StandardContext)context).getDefaultWebXml();
        }
        // set the default if we don't have any overrides
        if( defaultWebXml==null ) getDefaultWebXml();

        File file = new File(this.defaultWebXml);
        if (!file.isAbsolute()) {
            // Relative paths are resolved against the base directory
            file = new File(getBaseDir(), this.defaultWebXml);
        }

        InputStream stream = null;
        InputSource source = null;

        try {
            if ( !
file.exists() ) {
                // Use getResource and getResourceAsStream: the descriptor may
                // be packaged on the classpath rather than on disk
                stream = getClass().getClassLoader()
                    .getResourceAsStream(defaultWebXml);
                if( stream != null ) {
                    source = new InputSource
                            (getClass().getClassLoader()
                            .getResource(defaultWebXml).toString());
                }
                if( stream== null ) {
                    // maybe embedded
                    stream = getClass().getClassLoader()
                        .getResourceAsStream("web-embed.xml");
                    if( stream != null ) {
                        source = new InputSource
                        (getClass().getClassLoader()
                                .getResource("web-embed.xml").toString());
                    }
                }

                if( stream== null ) {
                    log.info("No default web.xml");
                }
            } else {
                source =
                    new InputSource("file://" + file.getAbsolutePath());
                stream = new FileInputStream(file);
                // Watch the default descriptor so the context reloads when
                // it changes
                context.addWatchedResource(file.getAbsolutePath());
            }
        } catch (Exception e) {
            log.error(sm.getString("contextConfig.defaultMissing")
                      + " " + defaultWebXml + " " + file , e);
        }

        if (stream != null) {
            processDefaultWebConfig(webDigester, stream, source);
            webRuleSet.recycle();
        }

        long t2=System.currentTimeMillis();
        if( (t2-t1) > 200 )
            log.debug("Processed default web.xml " + file + " "  + ( t2-t1));

        // Second pass: the per-host default descriptor, if any
        stream = null;
        source = null;

        String resourceName = getHostConfigPath(Constants.HostWebXml);
        file = new File(getConfigBase(), resourceName);

        try {
            if ( ! file.exists() ) {
                // Use getResource and getResourceAsStream
                stream = getClass().getClassLoader()
                    .getResourceAsStream(resourceName);
                if( stream != null ) {
                    source = new InputSource
                            (getClass().getClassLoader()
                            .getResource(resourceName).toString());
                }
            } else {
                source =
                    new InputSource("file://" + file.getAbsolutePath());
                stream = new FileInputStream(file);
            }
        } catch (Exception e) {
            log.error(sm.getString("contextConfig.defaultMissing")
                      + " " + resourceName + " " + file , e);
        }

        if (stream != null) {
            processDefaultWebConfig(webDigester, stream, source);
            webRuleSet.recycle();
        }

    }


    /**
     * Process a default web.xml.
*/
    protected void processDefaultWebConfig(Digester digester, InputStream stream,
            InputSource source) {

        if (log.isDebugEnabled())
            log.debug("Processing context [" + context.getName()
                    + "] web configuration resource " + source.getSystemId());

        // Process the default web.xml file. The digester may be shared, so
        // serialize access to it.
        synchronized (digester) {
            try {
                source.setByteStream(stream);

                if (context instanceof StandardContext)
                    ((StandardContext) context).setReplaceWelcomeFiles(true);
                // Parse with the container class loader so container servlets
                // declared in the default descriptor can be loaded
                digester.setClassLoader(this.getClass().getClassLoader());
                digester.setUseContextClassLoader(false);
                XmlErrorHandler handler = new XmlErrorHandler();
                digester.push(context);
                digester.setErrorHandler(handler);
                digester.parse(source);
                // Any recorded findings make the Context unavailable
                if (handler.getWarnings().size() > 0 ||
                        handler.getErrors().size() > 0) {
                    ok = false;
                    handler.logFindings(log, source.getSystemId());
                }
            } catch (SAXParseException e) {
                log.error(sm.getString("contextConfig.defaultParse"), e);
                log.error(sm.getString("contextConfig.defaultPosition",
                                 "" + e.getLineNumber(),
                                 "" + e.getColumnNumber()));
                ok = false;
            } catch (Exception e) {
                log.error(sm.getString("contextConfig.defaultParse"), e);
                ok = false;
            } finally {
                digester.reset();
                try {
                    if (stream != null) {
                        stream.close();
                    }
                } catch (IOException e) {
                    log.error(sm.getString("contextConfig.defaultClose"), e);
                }
            }
        }
    }


    /**
     * Process the default configuration file, if it exists.
*/
    protected void contextConfig() {

        // Open the default context.xml file, if it exists
        if( defaultContextXml==null && context instanceof StandardContext ) {
            defaultContextXml = ((StandardContext)context).getDefaultContextXml();
        }
        // set the default if we don't have any overrides
        if( defaultContextXml==null ) getDefaultContextXml();

        if (!context.getOverride()) {
            // Apply the global default context.xml, then the per-host one;
            // skipped entirely when the context sets override="true"
            File defaultContextFile = new File(defaultContextXml);
            if (!defaultContextFile.isAbsolute()) {
                defaultContextFile =new File(getBaseDir(), defaultContextXml);
            }
            processContextConfig(defaultContextFile.getParentFile(),
                    defaultContextFile.getName());
            processContextConfig(getConfigBase(),
                    getHostConfigPath(Constants.HostContextXml));
        }
        // Finally, the context's own configuration file
        if (context.getConfigFile() != null)
            processContextConfig(new File(context.getConfigFile()), null);

    }


    /**
     * Process a context.xml.
     */
    protected void processContextConfig(File baseDir, String resourceName) {

        if (log.isDebugEnabled())
            log.debug("Processing context [" + context.getName()
                    + "] configuration file " + baseDir + " " + resourceName);

        InputSource source = null;
        InputStream stream = null;

        File file = baseDir;
        if (resourceName != null) {
            file = new File(baseDir, resourceName);
        }

        try {
            if ( !file.exists() ) {
                if (resourceName != null) {
                    // Use getResource and getResourceAsStream: the file may
                    // be packaged on the classpath instead of on disk
                    stream = getClass().getClassLoader()
                        .getResourceAsStream(resourceName);
                    if( stream != null ) {
                        source = new InputSource
                            (getClass().getClassLoader()
                            .getResource(resourceName).toString());
                    }
                }
            } else {
                source =
                    new InputSource("file://" + file.getAbsolutePath());
                stream = new FileInputStream(file);
                // Add as watched resource so that cascade reload occurs if a default
                // config file is modified/added/removed
                context.addWatchedResource(file.getAbsolutePath());
            }
        } catch (Exception e) {
            log.error(sm.getString("contextConfig.contextMissing",
                      resourceName + " " + file) , e);
        }

        if (source == null)
            return;
        // contextDigester is a shared static - serialize access to it
        synchronized (contextDigester) {
            try {
                source.setByteStream(stream);
contextDigester.setClassLoader(this.getClass().getClassLoader());
                contextDigester.setUseContextClassLoader(false);
                XmlErrorHandler handler = new XmlErrorHandler();
                // Push the parent first so rules can navigate to the Host,
                // then the Context itself as the digester root object
                contextDigester.push(context.getParent());
                contextDigester.push(context);
                contextDigester.setErrorHandler(handler);
                contextDigester.parse(source);
                // Any recorded findings make the Context unavailable
                if (handler.getWarnings().size() > 0 ||
                        handler.getErrors().size() > 0) {
                    ok = false;
                    handler.logFindings(log, source.getSystemId());
                }
                if (log.isDebugEnabled())
                    log.debug("Successfully processed context [" + context.getName()
                            + "] configuration file " + baseDir + " " + resourceName);
            } catch (SAXParseException e) {
                log.error(sm.getString("contextConfig.contextParse",
                        context.getName()), e);
                log.error(sm.getString("contextConfig.defaultPosition",
                                 "" + e.getLineNumber(),
                                 "" + e.getColumnNumber()));
                ok = false;
            } catch (Exception e) {
                log.error(sm.getString("contextConfig.contextParse",
                        context.getName()), e);
                ok = false;
            } finally {
                contextDigester.reset();
                try {
                    if (stream != null) {
                        stream.close();
                    }
                } catch (IOException e) {
                    log.error(sm.getString("contextConfig.contextClose"), e);
                }
            }
        }
    }


    /**
     * Adjust docBase.
*/
    protected void fixDocBase()
        throws IOException {

        Host host = (Host) context.getParent();
        String appBase = host.getAppBase();

        boolean unpackWARs = true;
        if (host instanceof StandardHost) {
            // Unpack only when both the Host and the Context allow it
            unpackWARs = ((StandardHost) host).isUnpackWARs()
                && ((StandardContext) context).getUnpackWAR();
        }

        File canonicalAppBase = new File(appBase);
        if (canonicalAppBase.isAbsolute()) {
            canonicalAppBase = canonicalAppBase.getCanonicalFile();
        } else {
            canonicalAppBase =
                new File(System.getProperty("catalina.base"), appBase)
                .getCanonicalFile();
        }

        String docBase = context.getDocBase();
        if (docBase == null) {
            // Trying to guess the docBase according to the path
            String path = context.getPath();
            if (path == null) {
                return;
            }
            if (path.equals("")) {
                docBase = "ROOT";
            } else {
                // Directory names use '#' where the context path uses '/'
                if (path.startsWith("/")) {
                    docBase = path.substring(1).replace('/', '#');
                } else {
                    docBase = path.replace('/', '#');
                }
            }
        }

        File file = new File(docBase);
        if (!file.isAbsolute()) {
            docBase = (new File(canonicalAppBase, docBase)).getPath();
        } else {
            docBase = file.getCanonicalPath();
        }
        file = new File(docBase);
        String origDocBase = docBase;

        String pathName = context.getPath();
        if (pathName.equals("")) {
            pathName = "ROOT";
        } else {
            // Context path must start with '/'
            pathName = pathName.substring(1).replace('/', '#');
        }
        if (docBase.toLowerCase().endsWith(".war") && !file.isDirectory() && unpackWARs) {
            // Expand the WAR into the appBase and use the expanded directory
            URL war = new URL("jar:" + (new File(docBase)).toURI().toURL() + "!/");
            docBase = ExpandWar.expand(host, war, pathName);
            file = new File(docBase);
            docBase = file.getCanonicalPath();
            if (context instanceof StandardContext) {
                ((StandardContext) context).setOriginalDocBase(origDocBase);
            }
        } else if (docBase.toLowerCase().endsWith(".war") &&
                !file.isDirectory() && !unpackWARs) {
            // Serve directly from the WAR; just validate its contents
            URL war =
                new URL("jar:" + (new File (docBase)).toURI().toURL() + "!/");
            ExpandWar.validate(host, war, pathName);
        } else {
            File docDir = new File(docBase);
            if (!docDir.exists()) {
                // Directory missing - fall back to a sibling WAR if present
                File warFile = new File(docBase + ".war");
                if (warFile.exists()) {
                    URL war
= new URL("jar:" + warFile.toURI().toURL() + "!/");
                    if (unpackWARs) {
                        docBase = ExpandWar.expand(host, war, pathName);
                        file = new File(docBase);
                        docBase = file.getCanonicalPath();
                    } else {
                        docBase = warFile.getCanonicalPath();
                        ExpandWar.validate(host, war, pathName);
                    }
                }
                if (context instanceof StandardContext) {
                    ((StandardContext) context).setOriginalDocBase(origDocBase);
                }
            }
        }

        // Store the docBase relative to the appBase when it lives inside it,
        // always using '/' as the separator
        if (docBase.startsWith(canonicalAppBase.getPath() + File.separatorChar)) {
            docBase = docBase.substring(canonicalAppBase.getPath().length());
            docBase = docBase.replace(File.separatorChar, '/');
            if (docBase.startsWith("/")) {
                docBase = docBase.substring(1);
            }
        } else {
            docBase = docBase.replace(File.separatorChar, '/');
        }

        context.setDocBase(docBase);

    }


    /**
     * When anti-resource-locking is enabled, copy the web application to a
     * unique directory under java.io.tmpdir and point the docBase there so
     * the original files are never locked by the running Context.
     */
    protected void antiLocking()
        throws IOException {

        if ((context instanceof StandardContext)
            && ((StandardContext) context).getAntiResourceLocking()) {

            Host host = (Host) context.getParent();
            String appBase = host.getAppBase();
            String docBase = context.getDocBase();
            if (docBase == null)
                return;
            // Remember the real docBase so it can be restored after start
            originalDocBase = docBase;
            File docBaseFile = new File(docBase);
            if (!docBaseFile.isAbsolute()) {
                File file = new File(appBase);
                if (!file.isAbsolute()) {
                    file = new File(System.getProperty("catalina.base"),
                            appBase);
                }
                docBaseFile = new File(file, docBase);
            }

            String path = context.getPath();
            if (path == null) {
                return;
            }
            if (path.equals("")) {
                docBase = "ROOT";
            } else {
                if (path.startsWith("/")) {
                    docBase = path.substring(1).replace('/','#');
                } else {
                    docBase = path.replace('/','#');
                }
            }

            // deploymentCount makes the temp name unique across redeploys
            if (originalDocBase.toLowerCase().endsWith(".war")) {
                antiLockingDocBase = new File(
                        System.getProperty("java.io.tmpdir"),
                        deploymentCount++ + "-" + docBase + ".war");
            } else {
                antiLockingDocBase = new File(
                        System.getProperty("java.io.tmpdir"),
                        deploymentCount++ + "-" + docBase);
            }
            antiLockingDocBase = antiLockingDocBase.getAbsoluteFile();

            if (log.isDebugEnabled())
                log.debug("Anti locking context[" + context.getPath()
                        + "] setting docBase to " +
antiLockingDocBase.getPath());

            // Cleanup just in case an old deployment is lying around
            ExpandWar.delete(antiLockingDocBase);
            if (ExpandWar.copy(docBaseFile, antiLockingDocBase)) {
                // Only switch the docBase if the copy succeeded
                context.setDocBase(antiLockingDocBase.getPath());
            }

        }

    }


    /**
     * Process a "init" event for this Context: set up the digesters,
     * apply context.xml configuration and normalize the docBase.
     */
    protected void init() {
        // Called from StandardContext.init()

        if (contextDigester == null){
            contextDigester = createContextDigester();
            // Force parser creation so configuration errors surface early
            contextDigester.getParser();
        }

        if (log.isDebugEnabled())
            log.debug(sm.getString("contextConfig.init"));
        // Not configured until start() completes successfully
        context.setConfigured(false);
        ok = true;

        contextConfig();

        try {
            fixDocBase();
        } catch (IOException e) {
            log.error(sm.getString(
                    "contextConfig.fixDocBase", context.getPath()), e);
        }

        createWebXmlDigester(context.getXmlNamespaceAware(),
                context.getXmlValidation());
    }


    /**
     * Process a "before start" event for this Context: apply the
     * anti-resource-locking copy if enabled.
     */
    protected synchronized void beforeStart() {
        try {
            antiLocking();
        } catch (IOException e) {
            log.error(sm.getString("contextConfig.antiLocking"), e);
        }
    }


    /**
     * Process a "start" event for this Context.
*/ protected synchronized void start() { // Called from StandardContext.start() if (log.isDebugEnabled()) log.debug(sm.getString("contextConfig.start")); // Process the default and application web.xml files defaultWebConfig(); applicationWebConfig(); if (!context.getIgnoreAnnotations()) { applicationAnnotationsConfig(); } if (ok) { validateSecurityRoles(); } // Configure an authenticator if we need one if (ok) authenticatorConfig(); // Dump the contents of this pipeline if requested if ((log.isDebugEnabled()) && (context instanceof ContainerBase)) { log.debug("Pipeline Configuration:"); Pipeline pipeline = ((ContainerBase) context).getPipeline(); Valve valves[] = null; if (pipeline != null) valves = pipeline.getValves(); if (valves != null) { for (int i = 0; i < valves.length; i++) { log.debug(" " + valves[i].getInfo()); } } log.debug("======================"); } // Make our application available if no problems were encountered if (ok) context.setConfigured(true); else { log.error(sm.getString("contextConfig.unavailable")); context.setConfigured(false); } } /** * Process a "stop" event for this Context. 
*/
protected synchronized void stop() {

    if (log.isDebugEnabled())
        log.debug(sm.getString("contextConfig.stop"));

    int i;

    // Removing children
    Container[] children = context.findChildren();
    for (i = 0; i < children.length; i++) {
        context.removeChild(children[i]);
    }

    // Removing application parameters
    /*
    ApplicationParameter[] applicationParameters =
        context.findApplicationParameters();
    for (i = 0; i < applicationParameters.length; i++) {
        context.removeApplicationParameter
            (applicationParameters[i].getName());
    }
    */

    // Removing security constraints
    SecurityConstraint[] securityConstraints = context.findConstraints();
    for (i = 0; i < securityConstraints.length; i++) {
        context.removeConstraint(securityConstraints[i]);
    }

    // Removing Ejbs
    /*
    ContextEjb[] contextEjbs = context.findEjbs();
    for (i = 0; i < contextEjbs.length; i++) {
        context.removeEjb(contextEjbs[i].getName());
    }
    */

    // Removing environments
    /*
    ContextEnvironment[] contextEnvironments = context.findEnvironments();
    for (i = 0; i < contextEnvironments.length; i++) {
        context.removeEnvironment(contextEnvironments[i].getName());
    }
    */

    // Removing errors pages
    ErrorPage[] errorPages = context.findErrorPages();
    for (i = 0; i < errorPages.length; i++) {
        context.removeErrorPage(errorPages[i]);
    }

    // Removing filter defs
    FilterDef[] filterDefs = context.findFilterDefs();
    for (i = 0; i < filterDefs.length; i++) {
        context.removeFilterDef(filterDefs[i]);
    }

    // Removing filter maps
    FilterMap[] filterMaps = context.findFilterMaps();
    for (i = 0; i < filterMaps.length; i++) {
        context.removeFilterMap(filterMaps[i]);
    }

    // Removing local ejbs
    /*
    ContextLocalEjb[] contextLocalEjbs = context.findLocalEjbs();
    for (i = 0; i < contextLocalEjbs.length; i++) {
        context.removeLocalEjb(contextLocalEjbs[i].getName());
    }
    */

    // Removing Mime mappings
    String[] mimeMappings = context.findMimeMappings();
    for (i = 0; i < mimeMappings.length; i++) {
        context.removeMimeMapping(mimeMappings[i]);
    }

    // Removing parameters
    String[] parameters = context.findParameters();
    for (i = 0; i < parameters.length; i++) {
        context.removeParameter(parameters[i]);
    }

    // Removing resource env refs
    /*
    String[] resourceEnvRefs = context.findResourceEnvRefs();
    for (i = 0; i < resourceEnvRefs.length; i++) {
        context.removeResourceEnvRef(resourceEnvRefs[i]);
    }
    */

    // Removing resource links
    /*
    ContextResourceLink[] contextResourceLinks =
        context.findResourceLinks();
    for (i = 0; i < contextResourceLinks.length; i++) {
        context.removeResourceLink(contextResourceLinks[i].getName());
    }
    */

    // Removing resources
    /*
    ContextResource[] contextResources = context.findResources();
    for (i = 0; i < contextResources.length; i++) {
        context.removeResource(contextResources[i].getName());
    }
    */

    // Removing security role
    String[] securityRoles = context.findSecurityRoles();
    for (i = 0; i < securityRoles.length; i++) {
        context.removeSecurityRole(securityRoles[i]);
    }

    // Removing servlet mappings
    String[] servletMappings = context.findServletMappings();
    for (i = 0; i < servletMappings.length; i++) {
        context.removeServletMapping(servletMappings[i]);
    }

    // FIXME : Removing status pages

    // Removing taglibs
    String[] taglibs = context.findTaglibs();
    for (i = 0; i < taglibs.length; i++) {
        context.removeTaglib(taglibs[i]);
    }

    // Removing welcome files
    String[] welcomeFiles = context.findWelcomeFiles();
    for (i = 0; i < welcomeFiles.length; i++) {
        context.removeWelcomeFile(welcomeFiles[i]);
    }

    // Removing wrapper lifecycles
    String[] wrapperLifecycles = context.findWrapperLifecycles();
    for (i = 0; i < wrapperLifecycles.length; i++) {
        context.removeWrapperLifecycle(wrapperLifecycles[i]);
    }

    // Removing wrapper listeners
    String[] wrapperListeners = context.findWrapperListeners();
    for (i = 0; i < wrapperListeners.length; i++) {
        context.removeWrapperListener(wrapperListeners[i]);
    }

    // Remove (partially) folders and files created by antiLocking
    if (antiLockingDocBase != null) {
        // No need to log failure - it is expected in this case
        ExpandWar.delete(antiLockingDocBase, false);
    }

    // Reset the flag so the context can be configured again on restart
    ok = true;
}

/**
 * Process a "destroy" event for this Context: delete the context's work
 * directory, if any.
 */
protected synchronized void destroy() {
    // Called from StandardContext.destroy()
    if (log.isDebugEnabled())
        log.debug(sm.getString("contextConfig.destroy"));

    // Changed to getWorkPath per Bugzilla 35819.
    String workDir = ((StandardContext) context).getWorkPath();
    if (workDir != null)
        ExpandWar.delete(new File(workDir));
}

/**
 * Validate the usage of security role names in the web application
 * deployment descriptor. If any problems are found, issue warning
 * messages (for backwards compatibility) and add the missing roles.
 * (To make these problems fatal instead, simply set the <code>ok</code>
 * instance variable to <code>false</code> as well).
 */
protected void validateSecurityRoles() {

    // Check role names used in <security-constraint> elements
    SecurityConstraint constraints[] = context.findConstraints();
    for (int i = 0; i < constraints.length; i++) {
        String roles[] = constraints[i].findAuthRoles();
        for (int j = 0; j < roles.length; j++) {
            // "*" means all roles and is always valid
            if (!"*".equals(roles[j]) &&
                !context.findSecurityRole(roles[j])) {
                log.warn(sm.getString("contextConfig.role.auth", roles[j]));
                context.addSecurityRole(roles[j]);
            }
        }
    }

    // Check role names used in <servlet> elements
    Container wrappers[] = context.findChildren();
    for (int i = 0; i < wrappers.length; i++) {
        Wrapper wrapper = (Wrapper) wrappers[i];
        // <run-as> role must be declared
        String runAs = wrapper.getRunAs();
        if ((runAs != null) && !context.findSecurityRole(runAs)) {
            log.warn(sm.getString("contextConfig.role.runas", runAs));
            context.addSecurityRole(runAs);
        }
        // Every <security-role-ref> link must be declared
        String names[] = wrapper.findSecurityReferences();
        for (int j = 0; j < names.length; j++) {
            String link = wrapper.findSecurityReference(names[j]);
            if ((link != null) && !context.findSecurityRole(link)) {
                log.warn(sm.getString("contextConfig.role.link", link));
                context.addSecurityRole(link);
            }
        }
    }

}

/**
 * Get config base.
*/
protected File getConfigBase() {
    // Configuration lives in $catalina.base/conf; null if it does not exist
    File configBase =
        new File(System.getProperty("catalina.base"), "conf");
    if (!configBase.exists()) {
        return null;
    } else {
        return configBase;
    }
}

/**
 * Build a host-specific configuration path of the form
 * {@code engineName/hostName/resourceName} by walking up the container
 * hierarchy from this context (segments are omitted when no Engine/Host
 * parent is found).
 *
 * @param resourceName the resource file name to append
 * @return the relative configuration path
 */
protected String getHostConfigPath(String resourceName) {
    StringBuffer result = new StringBuffer();
    Container container = context;
    Container host = null;
    Container engine = null;
    // Walk up the parent chain noting the nearest Host and Engine
    while (container != null) {
        if (container instanceof Host)
            host = container;
        if (container instanceof Engine)
            engine = container;
        container = container.getParent();
    }
    if (engine != null) {
        result.append(engine.getName()).append('/');
    }
    if (host != null) {
        result.append(host.getName()).append('/');
    }
    result.append(resourceName);
    return result.toString();
}


/**
 * SAX error handler that records the last parse exception in the
 * enclosing class's {@code parseException} field (all severities are
 * treated the same).
 *
 * @deprecated  Unused. Use {@link XmlErrorHandler}. Will be removed in
 *              Tomcat 7.0.x
 */
@Deprecated
protected class ContextErrorHandler implements ErrorHandler {

    public void error(SAXParseException exception) {
        parseException = exception;
    }

    public void fatalError(SAXParseException exception) {
        parseException = exception;
    }

    public void warning(SAXParseException exception) {
        parseException = exception;
    }

}

}
/**
 * Copyright (C) 2010-14 diirt developers. See COPYRIGHT.TXT
 * All rights reserved. Use is subject to license terms. See LICENSE.TXT
 */
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.diirt.service.pva.rpcservice.rpcclient;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.Enumeration;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Pool properties
 * @author Filip Hanik (tomcat connection pool)
 * @author dkumar (modified for a rpc client pool implementation)
 */
class PoolProperties implements PoolConfiguration {

    /**
     * Logger
     */
    private final static Logger log = Logger.getLogger(PoolProperties.class.getName());

    // Counts pools created; shared across instances
    protected static AtomicInteger poolCounter = new AtomicInteger(0);

    // Defaults; maxIdle/minIdle intentionally track maxActive/initialSize
    protected int initialSize = 10;
    protected int maxActive = DEFAULT_MAX_ACTIVE;
    protected int maxIdle = maxActive;
    protected int minIdle = initialSize;
    protected int maxWait = 30000;
    protected boolean testOnBorrow = false;
    protected boolean testOnReturn = false;
    protected boolean testWhileIdle = false;
    protected int timeBetweenEvictionRunsMillis = 5000;
    protected int numTestsPerEvictionRun; // defaults to 0 (no setter provided)
    protected int minEvictableIdleTimeMillis = 60000;
    protected boolean removeAbandoned = false;
    protected int removeAbandonedTimeout = 60;
    protected boolean logAbandoned = false;
    protected long validationInterval = 30000;
    protected boolean testOnConnect = false;
    protected int abandonWhenPercentageFull = 0;
    protected long maxAge = 0;
    protected boolean useLock = false;
    protected int suspectTimeout = 0;

    /**
     * Create pool properties from text.
     *
     * @param textProperties properties in java properties format
     * @return pool configuration
     * @throws IOException if the text cannot be parsed as properties
     */
    static PoolProperties createFromText(String textProperties) throws IOException {
        final Properties properties = new Properties();
        // Use the charset constant: no checked-exception charset lookup by name
        try (InputStream stream =
                new ByteArrayInputStream(textProperties.getBytes(StandardCharsets.UTF_8))) {
            properties.load(stream);
        }
        return createFromProperties(properties);
    }

    /**
     * Map each known property key onto the matching setter; unknown keys are
     * logged at WARNING and ignored.
     */
    private static PoolProperties createFromProperties(Properties properties) {
        PoolProperties poolProperties = new PoolProperties();
        Enumeration<Object> keys = properties.keys();
        while (keys.hasMoreElements()) {
            String key = (String) keys.nextElement();
            String keyValue = (String) properties.get(key);
            switch (key) {
                case "abandonWhenPercentageFull":
                    poolProperties.setAbandonWhenPercentageFull(Integer.parseInt(keyValue));
                    break;
                case "initialSize":
                    poolProperties.setInitialSize(Integer.parseInt(keyValue));
                    break;
                case "logAbandoned":
                    poolProperties.setLogAbandoned(Boolean.parseBoolean(keyValue));
                    break;
                case "maxActive":
                    poolProperties.setMaxActive(Integer.parseInt(keyValue));
                    break;
                case "maxIdle":
                    poolProperties.setMaxIdle(Integer.parseInt(keyValue));
                    break;
                case "minIdle":
                    poolProperties.setMinIdle(Integer.parseInt(keyValue));
                    break;
                case "maxWait":
                    poolProperties.setMaxWait(Integer.parseInt(keyValue));
                    break;
                case "minEvictableIdleTimeMillis":
                    poolProperties.setMinEvictableIdleTimeMillis(Integer.parseInt(keyValue));
                    break;
                case "removeAbandoned":
                    poolProperties.setRemoveAbandoned(Boolean.parseBoolean(keyValue));
                    break;
                case "removeAbandonedTimeout":
                    poolProperties.setRemoveAbandonedTimeout(Integer.parseInt(keyValue));
                    break;
                case "testOnBorrow":
                    poolProperties.setTestOnBorrow(Boolean.parseBoolean(keyValue));
                    break;
                case "testOnReturn":
                    poolProperties.setTestOnReturn(Boolean.parseBoolean(keyValue));
                    break;
                case "testWhileIdle":
                    poolProperties.setTestWhileIdle(Boolean.parseBoolean(keyValue));
                    break;
                case "timeBetweenEvictionRunsMillis":
                    poolProperties.setTimeBetweenEvictionRunsMillis(Integer.parseInt(keyValue));
                    break;
                case "validationInterval":
                    // long-valued property: parse as long, not int
                    poolProperties.setValidationInterval(Long.parseLong(keyValue));
                    break;
                case "maxAge":
                    // long-valued property: parse as long, not int
                    poolProperties.setMaxAge(Long.parseLong(keyValue));
                    break;
                case "useLock":
                    poolProperties.setUseLock(Boolean.parseBoolean(keyValue));
                    break;
                case "suspectTimeout":
                    poolProperties.setSuspectTimeout(Integer.parseInt(keyValue));
                    break;
                default:
                    log.log(Level.WARNING, "property " + key + " not supported");
                    break;
            }
        }
        return poolProperties;
    }

    /**
     * {@inheritDoc}
     * Values outside [0, 100] are clamped into range.
     */
    public void setAbandonWhenPercentageFull(int percentage) {
        if (percentage < 0)
            abandonWhenPercentageFull = 0;
        else if (percentage > 100)
            abandonWhenPercentageFull = 100;
        else
            abandonWhenPercentageFull = percentage;
    }

    /** {@inheritDoc} */
    public int getAbandonWhenPercentageFull() { return abandonWhenPercentageFull; }

    /** {@inheritDoc} */
    public int getInitialSize() { return initialSize; }

    /** {@inheritDoc} */
    public boolean isLogAbandoned() { return logAbandoned; }

    /** {@inheritDoc} */
    public int getMaxActive() { return maxActive; }

    /** {@inheritDoc} */
    public int getMaxIdle() { return maxIdle; }

    /** {@inheritDoc} */
    public int getMaxWait() { return maxWait; }

    /** {@inheritDoc} */
    public int getMinEvictableIdleTimeMillis() { return minEvictableIdleTimeMillis; }

    /** {@inheritDoc} */
    public int getMinIdle() { return minIdle; }

    /** {@inheritDoc} */
    public int getNumTestsPerEvictionRun() { return numTestsPerEvictionRun; }

    /** {@inheritDoc} */
    public boolean isRemoveAbandoned() { return removeAbandoned; }

    /** {@inheritDoc} */
    public int getRemoveAbandonedTimeout() { return removeAbandonedTimeout; }

    /** {@inheritDoc} */
    public boolean isTestOnBorrow() { return testOnBorrow; }

    /** {@inheritDoc} */
    public boolean isTestOnReturn() { return testOnReturn; }

    /** {@inheritDoc} */
    public boolean isTestWhileIdle() { return testWhileIdle; }

    /** {@inheritDoc} */
    public int getTimeBetweenEvictionRunsMillis() { return timeBetweenEvictionRunsMillis; }

    /** {@inheritDoc} */
    public long getValidationInterval() { return validationInterval; }

    /** {@inheritDoc} */
    public boolean isTestOnConnect() { return testOnConnect; }

    /** {@inheritDoc} */
    public void setInitialSize(int initialSize) { this.initialSize = initialSize; }

    /** {@inheritDoc} */
    public void setLogAbandoned(boolean logAbandoned) { this.logAbandoned = logAbandoned; }

    /** {@inheritDoc} */
    public void setMaxActive(int maxActive) { this.maxActive = maxActive; }

    /** {@inheritDoc} */
    public void setMaxIdle(int maxIdle) { this.maxIdle = maxIdle; }

    /** {@inheritDoc} */
    public void setMaxWait(int maxWait) { this.maxWait = maxWait; }

    /** {@inheritDoc} */
    public void setMinEvictableIdleTimeMillis(int minEvictableIdleTimeMillis) {
        this.minEvictableIdleTimeMillis = minEvictableIdleTimeMillis;
    }

    /** {@inheritDoc} */
    public void setMinIdle(int minIdle) { this.minIdle = minIdle; }

    /** {@inheritDoc} */
    public void setRemoveAbandoned(boolean removeAbandoned) {
        this.removeAbandoned = removeAbandoned;
    }

    /** {@inheritDoc} */
    public void setRemoveAbandonedTimeout(int removeAbandonedTimeout) {
        this.removeAbandonedTimeout = removeAbandonedTimeout;
    }

    /** {@inheritDoc} */
    public void setTestOnBorrow(boolean testOnBorrow) { this.testOnBorrow = testOnBorrow; }

    /** {@inheritDoc} */
    public void setTestWhileIdle(boolean testWhileIdle) { this.testWhileIdle = testWhileIdle; }

    /** {@inheritDoc} */
    public void setTestOnReturn(boolean testOnReturn) { this.testOnReturn = testOnReturn; }

    /** {@inheritDoc} */
    public void setTimeBetweenEvictionRunsMillis(int timeBetweenEvictionRunsMillis) {
        this.timeBetweenEvictionRunsMillis = timeBetweenEvictionRunsMillis;
    }

    /** {@inheritDoc} */
    public void setValidationInterval(long validationInterval) {
        this.validationInterval = validationInterval;
    }

    /** {@inheritDoc} */
    public int getSuspectTimeout() { return this.suspectTimeout; }

    /** {@inheritDoc} */
    public void setSuspectTimeout(int seconds) { this.suspectTimeout = seconds; }

    /**
     * {@inheritDoc}
     * The sweeper runs only when eviction is scheduled
     * (timeBetweenEvictionRunsMillis &gt; 0) AND at least one sweep task is
     * enabled: abandoned removal, suspect reporting, idle testing, or idle
     * eviction.
     */
    public boolean isPoolSweeperEnabled() {
        boolean timer = getTimeBetweenEvictionRunsMillis() > 0;
        boolean result = timer && (isRemoveAbandoned() && getRemoveAbandonedTimeout() > 0);
        result = result || (timer && getSuspectTimeout() > 0);
        result = result || (timer && isTestWhileIdle());
        result = result || (timer && getMinEvictableIdleTimeMillis() > 0);
        return result;
    }

    /** {@inheritDoc} */
    public long getMaxAge() { return maxAge; }

    /** {@inheritDoc} */
    public void setMaxAge(long maxAge) { this.maxAge = maxAge; }

    /** {@inheritDoc} */
    public boolean getUseLock() { return useLock; }

    /** {@inheritDoc} */
    public void setUseLock(boolean useLock) { this.useLock = useLock; }

    @Override
    public String toString() {
        return "PoolProperties{" +
                "initialSize=" + initialSize +
                ", maxActive=" + maxActive +
                ", maxIdle=" + maxIdle +
                ", minIdle=" + minIdle +
                ", maxWait=" + maxWait +
                ", testOnBorrow=" + testOnBorrow +
                ", testOnReturn=" + testOnReturn +
                ", testWhileIdle=" + testWhileIdle +
                ", timeBetweenEvictionRunsMillis=" + timeBetweenEvictionRunsMillis +
                ", numTestsPerEvictionRun=" + numTestsPerEvictionRun +
                ", minEvictableIdleTimeMillis=" + minEvictableIdleTimeMillis +
                ", removeAbandoned=" + removeAbandoned +
                ", removeAbandonedTimeout=" + removeAbandonedTimeout +
                ", logAbandoned=" + logAbandoned +
                ", validationInterval=" + validationInterval +
                ", testOnConnect=" + testOnConnect +
                ", abandonWhenPercentageFull=" + abandonWhenPercentageFull +
                ", maxAge=" + maxAge +
                ", useLock=" + useLock +
                ", suspectTimeout=" + suspectTimeout +
                '}';
    }
}
/* * Copyright (c) 2015 Spotify AB. * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.spotify.heroic.metric; import com.google.common.collect.ConcurrentHashMultiset; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.spotify.heroic.QueryOptions; import com.spotify.heroic.aggregation.AggregationInstance; import com.spotify.heroic.aggregation.AggregationOutput; import com.spotify.heroic.aggregation.AggregationResult; import com.spotify.heroic.aggregation.AggregationSession; import com.spotify.heroic.aggregation.BucketStrategy; import com.spotify.heroic.aggregation.RetainQuotaWatcher; import com.spotify.heroic.async.AsyncObservable; import com.spotify.heroic.common.DateRange; import com.spotify.heroic.common.Feature; import com.spotify.heroic.common.Features; import com.spotify.heroic.common.GroupSet; import com.spotify.heroic.common.Groups; import com.spotify.heroic.common.Histogram; import com.spotify.heroic.common.OptionalLimit; import com.spotify.heroic.common.QuotaViolationException; import com.spotify.heroic.common.SelectedGroup; import com.spotify.heroic.common.Series; import com.spotify.heroic.common.Statistics; import 
com.spotify.heroic.metadata.FindSeries; import com.spotify.heroic.metadata.MetadataBackend; import com.spotify.heroic.metadata.MetadataManager; import com.spotify.heroic.querylogging.QueryContext; import com.spotify.heroic.querylogging.QueryLogger; import com.spotify.heroic.querylogging.QueryLoggerFactory; import com.spotify.heroic.statistics.DataInMemoryReporter; import com.spotify.heroic.statistics.MetricBackendReporter; import eu.toolchain.async.AsyncFramework; import eu.toolchain.async.AsyncFuture; import eu.toolchain.async.LazyTransform; import eu.toolchain.async.StreamCollector; import java.util.ArrayList; import java.util.List; import java.util.Optional; import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.LongAdder; import java.util.function.Consumer; import java.util.function.Function; import javax.inject.Inject; import javax.inject.Named; import lombok.RequiredArgsConstructor; import lombok.ToString; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.NotImplementedException; @Slf4j @ToString(of = {}) @MetricScope public class LocalMetricManager implements MetricManager { private static final QueryTrace.Identifier QUERY = QueryTrace.identifier(LocalMetricManager.class, "query"); private static final QueryTrace.Identifier FETCH = QueryTrace.identifier(LocalMetricManager.class, "fetch"); private final OptionalLimit groupLimit; private final OptionalLimit seriesLimit; private final OptionalLimit aggregationLimit; private final OptionalLimit dataLimit; private final int fetchParallelism; private final boolean failOnLimits; private final AsyncFramework async; private final GroupSet<MetricBackend> groupSet; private final MetadataManager metadata; private final MetricBackendReporter reporter; private final QueryLogger queryLogger; /** * @param groupLimit The maximum amount of groups this manager will allow to be generated. 
* @param seriesLimit The maximum amount of series in total an entire query may use. * @param aggregationLimit The maximum number of (estimated) data points a single aggregation * may produce. * @param dataLimit The maximum number of samples a single query is allowed to fetch. * @param fetchParallelism How many fetches that are allowed to be performed in parallel. */ @Inject public LocalMetricManager( @Named("groupLimit") final OptionalLimit groupLimit, @Named("seriesLimit") final OptionalLimit seriesLimit, @Named("aggregationLimit") final OptionalLimit aggregationLimit, @Named("dataLimit") final OptionalLimit dataLimit, @Named("fetchParallelism") final int fetchParallelism, @Named("failOnLimits") final boolean failOnLimits, final AsyncFramework async, final GroupSet<MetricBackend> groupSet, final MetadataManager metadata, final MetricBackendReporter reporter, final QueryLoggerFactory queryLoggerFactory ) { this.groupLimit = groupLimit; this.seriesLimit = seriesLimit; this.aggregationLimit = aggregationLimit; this.dataLimit = dataLimit; this.fetchParallelism = fetchParallelism; this.failOnLimits = failOnLimits; this.async = async; this.groupSet = groupSet; this.metadata = metadata; this.reporter = reporter; this.queryLogger = queryLoggerFactory.create("LocalMetricManager"); } @Override public GroupSet<MetricBackend> groupSet() { return groupSet; } @Override public MetricBackendGroup useOptionalGroup(final Optional<String> group) { return new Group(groupSet.useOptionalGroup(group), metadata.useDefaultGroup()); } @ToString private class Group extends AbstractMetricBackend implements MetricBackendGroup { private final SelectedGroup<MetricBackend> backends; private final MetadataBackend metadata; public Group(final SelectedGroup<MetricBackend> backends, final MetadataBackend metadata) { super(async); this.backends = backends; this.metadata = metadata; } @Override public Groups groups() { return backends.groups(); } @Override public boolean isEmpty() { return 
backends.isEmpty(); } private class Transform implements LazyTransform<FindSeries, FullQuery> { private final AggregationInstance aggregation; private final boolean failOnLimits; private final OptionalLimit seriesLimit; private final OptionalLimit groupLimit; private final QueryTrace.NamedWatch namedWatch; private final QuotaWatcher quotaWatcher; private final BucketStrategy bucketStrategy; private final DateRange range; private final QueryOptions options; private final DataInMemoryReporter dataInMemoryReporter; private final MetricType source; private final boolean slicedFetch; private Transform( final FullQuery.Request request, final boolean failOnLimits, final OptionalLimit seriesLimit, final OptionalLimit groupLimit, final QuotaWatcher quotaWatcher, final DataInMemoryReporter dataInMemoryReporter ) { this.aggregation = request.getAggregation(); this.range = request.getRange(); this.options = request.getOptions(); this.source = request.getSource(); this.failOnLimits = failOnLimits; this.seriesLimit = seriesLimit; this.groupLimit = groupLimit; this.namedWatch = QueryTrace.watch(QUERY); this.quotaWatcher = quotaWatcher; this.dataInMemoryReporter = dataInMemoryReporter; final Features features = request.getFeatures(); this.slicedFetch = features.hasFeature(Feature.SLICED_DATA_FETCH); this.bucketStrategy = options .getBucketStrategy() .orElseGet( () -> features.withFeature(Feature.END_BUCKET, () -> BucketStrategy.END, () -> BucketStrategy.START)); } @Override public AsyncFuture<FullQuery> transform(final FindSeries result) throws Exception { final ResultLimits limits; if (result.isLimited()) { if (failOnLimits) { final RequestError error = QueryError.fromMessage( "The number of series requested is more than the allowed limit of " + seriesLimit); return async.resolved(FullQuery.limitsError(namedWatch.end(), error, ResultLimits.of(ResultLimit.SERIES))); } limits = ResultLimits.of(ResultLimit.SERIES); } else { limits = ResultLimits.of(); } /* if empty, there are not 
time series on this shard */ if (result.isEmpty()) { return async.resolved(FullQuery.empty(namedWatch.end(), limits)); } final AggregationSession session; try { session = aggregation.session(range, quotaWatcher, bucketStrategy); } catch (QuotaViolationException e) { return async.resolved(FullQuery.limitsError(namedWatch.end(), QueryError.fromMessage(String.format( "aggregation needs to retain more data then what is allowed: %d", aggregationLimit.asLong().get())), ResultLimits.of(ResultLimit.AGGREGATION))); } /* setup collector */ final ResultCollector collector; if (options.tracing().isEnabled(Tracing.DETAILED)) { // tracing enabled, keeps track of each individual FetchData trace. collector = new ResultCollector(quotaWatcher, dataInMemoryReporter, aggregation, session, limits, groupLimit, failOnLimits) { final ConcurrentLinkedQueue<QueryTrace> traces = new ConcurrentLinkedQueue<>(); @Override public void resolved(final FetchData.Result result) throws Exception { traces.add(result.getTrace()); super.resolved(result); } @Override public QueryTrace buildTrace() { return namedWatch.end(ImmutableList.copyOf(traces)); } }; } else { // very limited tracing, does not collected each individual FetchData trace. 
collector = new ResultCollector(quotaWatcher, dataInMemoryReporter, aggregation, session, limits, groupLimit, failOnLimits) { @Override public QueryTrace buildTrace() { return namedWatch.end(); } }; } final List<Callable<AsyncFuture<FetchData.Result>>> fetches = new ArrayList<>(); /* setup fetches */ accept(metricBackend -> { for (final Series series : result.getSeries()) { if (slicedFetch) { fetches.add(() -> metricBackend.fetch( new FetchData.Request(source, series, range, options), quotaWatcher, mc -> collector.acceptMetricsCollection(series, mc))); } else { fetches.add(() -> metricBackend .fetch(new FetchData.Request(source, series, range, options), quotaWatcher) .directTransform(fetchData -> { fetchData.getGroups().forEach(group -> { collector.acceptMetricsCollection(series, group); }); return fetchData.getResult(); })); } } }); return async.eventuallyCollect(fetches, collector, fetchParallelism); } } @Override public AsyncFuture<FullQuery> query(final FullQuery.Request request) { final QueryOptions options = request.getOptions(); final QueryContext queryContext = request.getContext(); queryLogger.logIncomingRequestAtNode(queryContext, request); final DataInMemoryReporter dataInMemoryReporter = reporter.newDataInMemoryReporter(); final QuotaWatcher quotaWatcher = new QuotaWatcher( options.getDataLimit().orElse(dataLimit).asLong().orElse(Long.MAX_VALUE), options .getAggregationLimit() .orElse(aggregationLimit) .asLong() .orElse(Long.MAX_VALUE), dataInMemoryReporter); final OptionalLimit seriesLimit = options.getSeriesLimit().orElse(LocalMetricManager.this.seriesLimit); final boolean failOnLimits = options.getFailOnLimits().orElse(LocalMetricManager.this.failOnLimits); final OptionalLimit groupLimit = options.getGroupLimit().orElse(LocalMetricManager.this.groupLimit); // Transform that takes the result from ES metadata lookup to fetch from backend final LazyTransform<FindSeries, FullQuery> transform = new Transform(request, failOnLimits, seriesLimit, groupLimit, 
quotaWatcher, dataInMemoryReporter); return metadata .findSeries( new FindSeries.Request(request.getFilter(), request.getRange(), seriesLimit)) .onDone(reporter.reportFindSeries()) .lazyTransform(transform) .directTransform(fullQuery -> { queryLogger.logOutgoingResponseAtNode(queryContext, fullQuery); return fullQuery; }) .onDone(reporter.reportQueryMetrics()); } @Override public Statistics getStatistics() { Statistics result = Statistics.empty(); for (final Statistics s : map(MetricBackend::getStatistics)) { result = result.merge(s); } return result; } @Override public AsyncFuture<FetchData> fetch( final FetchData.Request request, final FetchQuotaWatcher watcher ) { final List<AsyncFuture<FetchData>> callbacks = map(b -> b.fetch(request, watcher)); return async.collect(callbacks, FetchData.collect(FETCH)); } @Override public AsyncFuture<FetchData.Result> fetch( final FetchData.Request request, final FetchQuotaWatcher watcher, final Consumer<MetricCollection> metricsConsumer ) { final List<AsyncFuture<FetchData.Result>> callbacks = map(b -> b.fetch(request, watcher, metricsConsumer)); return async.collect(callbacks, FetchData.collectResult(FETCH)); } @Override public AsyncFuture<WriteMetric> write(final WriteMetric.Request write) { return async.collect(map(b -> b.write(write)), WriteMetric.reduce()); } @Override public AsyncObservable<BackendKeySet> streamKeys( final BackendKeyFilter filter, final QueryOptions options ) { return AsyncObservable.chain(map(b -> b.streamKeys(filter, options))); } @Override public boolean isReady() { for (final MetricBackend backend : backends) { if (!backend.isReady()) { return false; } } return true; } @Override public Iterable<BackendEntry> listEntries() { throw new NotImplementedException("not supported"); } @Override public AsyncFuture<Void> configure() { return async.collectAndDiscard(map(MetricBackend::configure)); } @Override public AsyncFuture<List<String>> serializeKeyToHex(BackendKey key) { return async .collect(map(b -> 
// NOTE(review): this fragment is the tail of a method whose opening (and the
// enclosing backend-group class header) lies before this chunk; kept verbatim.
b.serializeKeyToHex(key)))
        .directTransform(result -> ImmutableList.copyOf(Iterables.concat(result)));
    }

    // Asks every delegate backend to deserialize the hex key and concatenates all results.
    @Override
    public AsyncFuture<List<BackendKey>> deserializeKeyFromHex(String key) {
        return async
            .collect(map(b -> b.deserializeKeyFromHex(key)))
            .directTransform(result -> ImmutableList.copyOf(Iterables.concat(result)));
    }

    // Deletes the key on every delegate; the individual results are discarded.
    @Override
    public AsyncFuture<Void> deleteKey(BackendKey key, QueryOptions options) {
        return async.collectAndDiscard(map(b -> b.deleteKey(key, options)));
    }

    // Sums the per-backend counts for the given key.
    @Override
    public AsyncFuture<Long> countKey(BackendKey key, QueryOptions options) {
        return async.collect(map(b -> b.countKey(key, options))).directTransform(result -> {
            long count = 0;

            for (final long c : result) {
                count += c;
            }

            return count;
        });
    }

    // Fetches the row from every delegate and merges the per-backend data into a
    // single sorted MetricCollection.
    @Override
    public AsyncFuture<MetricCollection> fetchRow(final BackendKey key) {
        final List<AsyncFuture<MetricCollection>> callbacks = map(b -> b.fetchRow(key));

        return async.collect(callbacks, results -> {
            final List<List<? extends Metric>> collections = new ArrayList<>();

            for (final MetricCollection result : results) {
                collections.add(result.getData());
            }

            return MetricCollection.mergeSorted(key.getType(), collections);
        });
    }

    // Streams the row from all delegates as one chained observable.
    @Override
    public AsyncObservable<MetricCollection> streamRow(final BackendKey key) {
        return AsyncObservable.chain(map(b -> b.streamRow(key)));
    }

    // Applies the given side-effecting operation to every delegate backend.
    private void accept(final Consumer<MetricBackend> op) {
        backends.stream().forEach(op::accept);
    }

    // Maps every delegate backend through {@code op}, collecting into an immutable list.
    private <T> List<T> map(final Function<MetricBackend, T> op) {
        return ImmutableList.copyOf(backends.stream().map(op).iterator());
    }
}

/**
 * Stream collector that folds individual {@link FetchData.Result}s into a single
 * {@link FullQuery}, aggregating metrics through an {@link AggregationSession} and
 * enforcing result limits/quotas along the way.
 *
 * <p>Thread-safety: resolved/failed callbacks may run concurrently, hence the
 * concurrent queues and the ConcurrentHashMultiset for density samples.
 */
@RequiredArgsConstructor
private abstract static class ResultCollector
    implements StreamCollector<FetchData.Result, FullQuery> {
    private static final String ROWS_ACCESSED = "rowsAccessed";

    // Failures and per-request errors gathered concurrently from callbacks.
    final ConcurrentLinkedQueue<Throwable> errors = new ConcurrentLinkedQueue<>();
    final ConcurrentLinkedQueue<RequestError> requestErrors = new ConcurrentLinkedQueue<>();

    final QuotaWatcher watcher;
    final DataInMemoryReporter dataInMemoryReporter;
    final AggregationInstance aggregation;
    final AggregationSession session;
    final ResultLimits limits;
    final OptionalLimit groupLimit;
    final boolean failOnLimits;

    // Multiset of scaled row-density samples (metrics per mega-second); written by
    // many threads, only read in getRowDensityHistogram() at the end.
    private final ConcurrentHashMultiset<Long> rowDensityData = ConcurrentHashMultiset.create();

    // Records any per-request errors carried by a successfully fetched result.
    @Override
    public void resolved(final FetchData.Result result) throws Exception {
        requestErrors.addAll(result.getErrors());
    }

    // Feeds one series' metrics into the aggregation session and samples row density.
    void acceptMetricsCollection(final Series series, final MetricCollection g) {
        g.updateAggregation(session, series.getTags(), ImmutableSet.of(series));
        dataInMemoryReporter.reportDataNoLongerNeeded(g.size());
        g.getAverageDistanceBetweenMetrics().ifPresent(msBetweenSamples -> {
            final double metricsPerSecond = 1000.0 / msBetweenSamples;
            dataInMemoryReporter.reportRowDensity(metricsPerSecond);
            // Scale to an integral value so it can be stored in the multiset.
            final long metricsPerMegaSecond = (long) (metricsPerSecond * 1_000_000);
            rowDensityData.add(metricsPerMegaSecond);
        });
    }

    @Override
    public void failed(final Throwable cause) throws Exception {
        errors.add(cause);
    }

    @Override
    public void cancelled() throws Exception {
    }

    // Subclasses supply the trace describing how this query was executed.
    public abstract QueryTrace buildTrace();

    /**
     * Finalizes the collection: reports completion, folds quota violations and
     * group limits into the result, and builds the FullQuery from the
     * aggregation session.
     */
    @Override
    public FullQuery end(int resolved, int failed, int cancelled) throws Exception {
        final QueryTrace trace = buildTrace();
        final ImmutableList.Builder<RequestError> errorsBuilder = ImmutableList.builder();
        errorsBuilder.addAll(requestErrors);

        // Signal that we're done processing this
        dataInMemoryReporter.reportOperationEnded();

        final ImmutableSet.Builder<ResultLimit> limitsBuilder =
            ImmutableSet.<ResultLimit>builder().addAll(this.limits.getLimits());

        if (watcher.isRetainQuotaViolated()) {
            limitsBuilder.add(ResultLimit.AGGREGATION);
        }

        if (watcher.isReadQuotaViolated()) {
            limitsBuilder.add(ResultLimit.QUOTA);
        }

        // Quota violation: short-circuit with an empty result carrying the limits.
        if (watcher.isReadQuotaViolated() || watcher.isRetainQuotaViolated()) {
            final Optional<Histogram> dataDensity = Optional.of(getRowDensityHistogram());
            errorsBuilder.add(QueryError.fromMessage(
                checkIssues(failed, cancelled).orElse("Query exceeded quota")));
            return new FullQuery(trace, errorsBuilder.build(), ImmutableList.of(),
                Statistics.empty(), new ResultLimits(limitsBuilder.build()), dataDensity);
        }

        // Any failed/cancelled fetch without a quota violation escalates to an
        // exception, with the individual causes attached as suppressed throwables.
        checkIssues(failed, cancelled).map(RuntimeException::new).ifPresent(e -> {
            for (final Throwable t : errors) {
                e.addSuppressed(t);
            }

            throw e;
        });

        final AggregationResult result = session.result();
        final Optional<Histogram> dataDensity = Optional.of(getRowDensityHistogram());
        final Statistics baseStatistics = Statistics.of(ROWS_ACCESSED, watcher.getRowsAccessed());

        final List<ResultGroup> groups = new ArrayList<>();

        for (final AggregationOutput group : result.getResult()) {
            // Group-limit hit: either fail hard or truncate and flag the limit.
            if (groupLimit.isGreaterOrEqual(groups.size())) {
                if (failOnLimits) {
                    errorsBuilder.add(QueryError.fromMessage(
                        "The number of result groups is more than the allowed limit of " +
                            groupLimit));
                    return new FullQuery(trace, errorsBuilder.build(), ImmutableList.of(),
                        baseStatistics,
                        new ResultLimits(limitsBuilder.add(ResultLimit.GROUP).build()),
                        dataDensity);
                }

                limitsBuilder.add(ResultLimit.GROUP);
                break;
            }

            groups.add(new ResultGroup(group.getKey(), group.getSeries(), group.getMetrics(),
                aggregation.cadence()));
        }

        return new FullQuery(trace, errorsBuilder.build(), groups,
            baseStatistics.merge(result.getStatistics()), new ResultLimits(limitsBuilder.build()),
            dataDensity);
    }

    // Produces a human-readable summary when any fetch failed or was cancelled.
    private Optional<String> checkIssues(final int failed, final int cancelled) {
        if (failed > 0 || cancelled > 0) {
            return Optional.of(
                "Some fetches failed (" + failed + ") or were cancelled (" + cancelled + ")");
        }

        return Optional.empty();
    }

    public Histogram getRowDensityHistogram() {
        /* The data is gathered in an efficient ConcurrentHashMultiset, to allow for multiple
         * threads writing with minimum blocking. Reading the data only happens at the end of
         * the watched operation, so here we build the histogram. */
        final Histogram.Builder builder = Histogram.builder();
        for (final Long value : rowDensityData.elementSet()) {
            builder.add(value);
        }
        return builder.build();
    }
}

/**
 * Tracks read/retain byte quotas for a single fetch operation.  Counters are
 * atomic so many fetch threads can report concurrently; once either quota is
 * exceeded, further reads throw {@link QuotaViolationException}.
 */
@RequiredArgsConstructor
private static class QuotaWatcher implements FetchQuotaWatcher, RetainQuotaWatcher {
    private final long dataLimit;
    private final long retainLimit;
    private final DataInMemoryReporter dataInMemoryReporter;

    private final AtomicLong read = new AtomicLong();
    private final AtomicLong retained = new AtomicLong();
    private final LongAdder rowsAccessed = new LongAdder();

    @Override
    public void readData(long n) {
        read.addAndGet(n);
        throwIfViolated();
        // Must be called after throwIfViolated above, since that one might throw an exception.
        dataInMemoryReporter.reportDataHasBeenRead(n);
    }

    @Override
    public void retainData(final long n) {
        retained.addAndGet(n);
        throwIfViolated();
    }

    @Override
    public boolean mayReadData() {
        return !isReadQuotaViolated() && !isRetainQuotaViolated();
    }

    @Override
    public boolean mayRetainMoreData() {
        return mayReadData();
    }

    @Override
    public int getReadDataQuota() {
        return getLeft(dataLimit, read.get());
    }

    @Override
    public void accessedRows(final long n) {
        dataInMemoryReporter.reportRowsAccessed(n);
        rowsAccessed.add(n);
    }

    public long getRowsAccessed() {
        return rowsAccessed.longValue();
    }

    @Override
    public int getRetainQuota() {
        return getLeft(retainLimit, retained.get());
    }

    private void throwIfViolated() {
        if (isReadQuotaViolated() || isRetainQuotaViolated()) {
            throw new QuotaViolationException();
        }
    }

    boolean isReadQuotaViolated() {
        return read.get() >= dataLimit;
    }

    boolean isRetainQuotaViolated() {
        return retained.get() >= retainLimit;
    }

    // Remaining quota clamped to [0, Integer.MAX_VALUE]; a limit that does not
    // fit in an int is considered a configuration error.
    private static int getLeft(long limit, long current) {
        final long left = limit - current;

        if (left < 0) {
            return 0;
        }

        if (left > Integer.MAX_VALUE) {
            throw new IllegalStateException("quota too large");
        }

        return (int) left;
    }
}
}
package org.apache.maven.model.interpolation;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import org.apache.maven.model.InputLocation;
import org.apache.maven.model.InputSource;
import org.apache.maven.model.Model;
import org.apache.maven.model.building.DefaultModelBuildingRequest;
import org.apache.maven.model.building.ModelBuildingRequest;
import org.apache.maven.model.building.SimpleProblemCollector;
import org.junit.Test;

import java.io.File;
import java.lang.reflect.Field;
import java.util.*;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Future;
import java.util.concurrent.FutureTask;

import static org.hamcrest.CoreMatchers.anyOf;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;

/**
 * StringSearchModelInterpolatorTest - not in use
 *
 * @author jdcasey
 * @author Benjamin Bentmann
 * @deprecated replaced by StringVisitorModelInterpolator (MNG-6697)
 */
public class StringSearchModelInterpolatorTest
    extends AbstractModelInterpolatorTest
{
    @Override
    public void setUp()
    {
        super.setUp();
        interpolator = new StringSearchModelInterpolator();
    }

    protected ModelInterpolator createInterpolator()
    {
        return this.interpolator;
    }

    @Test
    public void testInterpolateStringArray()
    {
        Model model = new Model();

        Properties p = new Properties();
        p.setProperty( "key", "value" );
        p.setProperty( "key2", "value2" );

        String[] values = { "${key}", "${key2}" };

        StringSearchModelInterpolator interpolator = (StringSearchModelInterpolator) createInterpolator();

        ModelBuildingRequest config = createModelBuildingRequest( p );

        final SimpleProblemCollector collector = new SimpleProblemCollector();
        interpolator.interpolateObject( values, model, new File( "." ), config, collector );
        assertProblemFree( collector );

        assertEquals( "value", values[0] );
        assertEquals( "value2", values[1] );
    }

    // Wraps the given system properties in a fresh model building request.
    private ModelBuildingRequest createModelBuildingRequest( Properties p )
    {
        ModelBuildingRequest config = new DefaultModelBuildingRequest();
        config.setSystemProperties( p );
        return config;
    }

    @Test
    public void testInterpolateObjectWithStringArrayField()
    {
        Model model = new Model();

        Properties p = new Properties();
        p.setProperty( "key", "value" );
        p.setProperty( "key2", "value2" );

        String[] values = { "${key}", "${key2}" };

        ObjectWithStringArrayField obj = new ObjectWithStringArrayField( values );

        StringSearchModelInterpolator interpolator = (StringSearchModelInterpolator) createInterpolator();

        ModelBuildingRequest config = createModelBuildingRequest( p );

        final SimpleProblemCollector collector = new SimpleProblemCollector();
        interpolator.interpolateObject( obj, model, new File( "." ), config, collector );
        assertProblemFree( collector );

        assertEquals( "value", obj.values[0] );
        assertEquals( "value2", obj.values[1] );
    }

    @Test
    public void testInterpolateObjectWithStringListField()
    {
        Model model = new Model();

        Properties p = new Properties();
        p.setProperty( "key", "value" );
        p.setProperty( "key2", "value2" );

        List<String> values = new ArrayList<>();
        values.add( "${key}" );
        values.add( "${key2}" );

        ObjectWithListField obj = new ObjectWithListField( values );

        StringSearchModelInterpolator interpolator = (StringSearchModelInterpolator) createInterpolator();

        ModelBuildingRequest config = createModelBuildingRequest( p );

        final SimpleProblemCollector collector = new SimpleProblemCollector();
        interpolator.interpolateObject( obj, model, new File( "." ), config, collector );
        assertProblemFree( collector );

        assertEquals( "value", obj.values.get( 0 ) );
        assertEquals( "value2", obj.values.get( 1 ) );
    }

    @Test
    public void testInterpolateObjectWithStringListFieldAndOneLiteralValue()
    {
        Model model = new Model();

        Properties p = new Properties();
        p.setProperty( "key", "value" );
        p.setProperty( "key2", "value2" );

        List<String> values = new ArrayList<>();
        values.add( "key" );
        values.add( "${key2}" );

        ObjectWithListField obj = new ObjectWithListField( values );

        StringSearchModelInterpolator interpolator = (StringSearchModelInterpolator) createInterpolator();

        ModelBuildingRequest config = createModelBuildingRequest( p );

        final SimpleProblemCollector collector = new SimpleProblemCollector();
        interpolator.interpolateObject( obj, model, new File( "." ), config, collector );
        assertProblemFree( collector );

        assertEquals( "key", obj.values.get( 0 ) );
        assertEquals( "value2", obj.values.get( 1 ) );
    }

    @Test
    public void testInterpolateObjectWithUnmodifiableStringListField()
    {
        Model model = new Model();

        Properties p = new Properties();
        p.setProperty( "key", "value" );
        p.setProperty( "key2", "value2" );

        // Unmodifiable collections must be left untouched by the interpolator.
        List<String> values = Collections.unmodifiableList( Collections.singletonList( "${key}" ) );

        ObjectWithListField obj = new ObjectWithListField( values );

        StringSearchModelInterpolator interpolator = (StringSearchModelInterpolator) createInterpolator();

        ModelBuildingRequest config = createModelBuildingRequest( p );

        final SimpleProblemCollector collector = new SimpleProblemCollector();
        interpolator.interpolateObject( obj, model, new File( "." ), config, collector );
        assertProblemFree( collector );

        assertEquals( "${key}", obj.values.get( 0 ) );
    }

    @Test
    public void testInterpolateObjectWithStringArrayListField()
    {
        Model model = new Model();

        Properties p = new Properties();
        p.setProperty( "key", "value" );
        p.setProperty( "key2", "value2" );
        p.setProperty( "key3", "value3" );
        p.setProperty( "key4", "value4" );

        List<String[]> values = new ArrayList<>();
        values.add( new String[] { "${key}", "${key2}" } );
        values.add( new String[] { "${key3}", "${key4}" } );

        ObjectWithListField obj = new ObjectWithListField( values );

        StringSearchModelInterpolator interpolator = (StringSearchModelInterpolator) createInterpolator();

        ModelBuildingRequest config = createModelBuildingRequest( p );

        final SimpleProblemCollector collector = new SimpleProblemCollector();
        interpolator.interpolateObject( obj, model, new File( "." ), config, collector );
        assertProblemFree( collector );

        assertEquals( "value", ( (String[]) obj.values.get( 0 ) )[0] );
        assertEquals( "value2", ( (String[]) obj.values.get( 0 ) )[1] );
        assertEquals( "value3", ( (String[]) obj.values.get( 1 ) )[0] );
        assertEquals( "value4", ( (String[]) obj.values.get( 1 ) )[1] );
    }

    @Test
    public void testInterpolateObjectWithStringToStringMapField()
    {
        Model model = new Model();

        Properties p = new Properties();
        p.setProperty( "key", "value" );
        p.setProperty( "key2", "value2" );

        Map<String, String> values = new HashMap<>();
        values.put( "key", "${key}" );
        values.put( "key2", "${key2}" );

        ObjectWithMapField obj = new ObjectWithMapField( values );

        StringSearchModelInterpolator interpolator = (StringSearchModelInterpolator) createInterpolator();

        ModelBuildingRequest config = createModelBuildingRequest( p );

        final SimpleProblemCollector collector = new SimpleProblemCollector();
        interpolator.interpolateObject( obj, model, new File( "." ), config, collector );
        assertProblemFree( collector );

        assertEquals( "value", obj.values.get( "key" ) );
        assertEquals( "value2", obj.values.get( "key2" ) );
    }

    @Test
    public void testInterpolateObjectWithStringToStringMapFieldAndOneLiteralValue()
    {
        Model model = new Model();

        Properties p = new Properties();
        p.setProperty( "key", "value" );
        p.setProperty( "key2", "value2" );

        Map<String, String> values = new HashMap<>();
        values.put( "key", "val" );
        values.put( "key2", "${key2}" );

        ObjectWithMapField obj = new ObjectWithMapField( values );

        StringSearchModelInterpolator interpolator = (StringSearchModelInterpolator) createInterpolator();

        ModelBuildingRequest config = createModelBuildingRequest( p );

        final SimpleProblemCollector collector = new SimpleProblemCollector();
        interpolator.interpolateObject( obj, model, new File( "." ), config, collector );
        assertProblemFree( collector );

        assertEquals( "val", obj.values.get( "key" ) );
        assertEquals( "value2", obj.values.get( "key2" ) );
    }

    @Test
    public void testInterpolateObjectWithUnmodifiableStringToStringMapField()
    {
        Model model = new Model();

        Properties p = new Properties();
        p.setProperty( "key", "value" );
        p.setProperty( "key2", "value2" );

        // Unmodifiable maps must be left untouched by the interpolator.
        Map<String, String> values = Collections.unmodifiableMap( Collections.singletonMap( "key", "${key}" ) );

        ObjectWithMapField obj = new ObjectWithMapField( values );

        StringSearchModelInterpolator interpolator = (StringSearchModelInterpolator) createInterpolator();

        ModelBuildingRequest config = createModelBuildingRequest( p );

        final SimpleProblemCollector collector = new SimpleProblemCollector();
        interpolator.interpolateObject( obj, model, new File( "." ), config, collector );
        assertProblemFree( collector );

        assertEquals( "${key}", obj.values.get( "key" ) );
    }

    @Test
    public void testInterpolateObjectWithStringToStringArrayMapField()
    {
        Model model = new Model();

        Properties p = new Properties();
        p.setProperty( "key", "value" );
        p.setProperty( "key2", "value2" );
        p.setProperty( "key3", "value3" );
        p.setProperty( "key4", "value4" );

        Map<String, String[]> values = new HashMap<>();
        values.put( "key", new String[] { "${key}", "${key2}" } );
        values.put( "key2", new String[] { "${key3}", "${key4}" } );

        ObjectWithMapField obj = new ObjectWithMapField( values );

        StringSearchModelInterpolator interpolator = (StringSearchModelInterpolator) createInterpolator();

        ModelBuildingRequest config = createModelBuildingRequest( p );

        final SimpleProblemCollector collector = new SimpleProblemCollector();
        interpolator.interpolateObject( obj, model, new File( "." ), config, collector );
        assertProblemFree( collector );

        assertEquals( "value", ( (String[]) obj.values.get( "key" ) )[0] );
        assertEquals( "value2", ( (String[]) obj.values.get( "key" ) )[1] );
        assertEquals( "value3", ( (String[]) obj.values.get( "key2" ) )[0] );
        assertEquals( "value4", ( (String[]) obj.values.get( "key2" ) )[1] );
    }

    @Test
    public void testInterpolateObjectWithPomFile()
        throws Exception
    {
        Model model = new Model();
        model.setPomFile( new File( System.getProperty( "user.dir" ), "pom.xml" ) );
        File baseDir = model.getProjectDirectory();

        Properties p = new Properties();

        Map<String, String> values = new HashMap<>();
        values.put( "key", "${project.basedir}" + File.separator + "target" );

        ObjectWithMapField obj = new ObjectWithMapField( values );

        StringSearchModelInterpolator interpolator = (StringSearchModelInterpolator) createInterpolator();

        ModelBuildingRequest config = createModelBuildingRequest( p );

        SimpleProblemCollector collector = new SimpleProblemCollector();
        interpolator.interpolateObject( obj, model, new File( "." ), config, collector );
        assertProblemFree( collector );

        assertThat( baseDir.getAbsolutePath(), is( System.getProperty( "user.dir" ) ) );
        assertThat( obj.values.size(), is( 1 ) );
        assertThat( (String) obj.values.get( "key" ),
                    is( anyOf( is( System.getProperty( "user.dir" ) + File.separator + "target" ),
                               // TODO why MVN adds dot /./ in paths???
                               is( System.getProperty( "user.dir" ) + File.separator + '.' + File.separator
                                   + "target" ) ) ) );
    }

    @Test
    public void testNotInterpolateObjectWithFile()
        throws Exception
    {
        Model model = new Model();

        File baseDir = new File( System.getProperty( "user.dir" ) );

        Properties p = new Properties();

        ObjectWithNotInterpolatedFile obj = new ObjectWithNotInterpolatedFile( baseDir );

        StringSearchModelInterpolator interpolator = (StringSearchModelInterpolator) createInterpolator();

        ModelBuildingRequest config = createModelBuildingRequest( p );

        SimpleProblemCollector collector = new SimpleProblemCollector();
        interpolator.interpolateObject( obj, model, new File( "." ), config, collector );
        assertProblemFree( collector );

        // File-typed fields must be skipped entirely; the reflective field cache
        // should record zero interpolatable fields for both types.
        Map<Class<?>, ?> cache = getCachedEntries();

        Object objCacheItem = cache.get( Object.class );
        Object fileCacheItem = cache.get( File.class );

        assertNotNull( objCacheItem );
        assertNotNull( fileCacheItem );

        assertThat( readFieldsArray( objCacheItem ).length, is( 0 ) );
        assertThat( readFieldsArray( fileCacheItem ).length, is( 0 ) );
    }

    // Reads the private "fields" array of a cache entry via reflection.
    private static Object[] readFieldsArray( Object o )
        throws NoSuchFieldException, IllegalAccessException
    {
        assertNotNull( o );

        Field field = o.getClass().getDeclaredField( "fields" );
        field.setAccessible( true );
        return (Object[]) field.get( o );
    }

    // Reads the interpolator's static CACHED_ENTRIES map via reflection.
    private static Map<Class<?>, ?> getCachedEntries()
        throws NoSuchFieldException, IllegalAccessException
    {
        Field field = StringSearchModelInterpolator.class.getDeclaredField( "CACHED_ENTRIES" );
        field.setAccessible( true );
        //noinspection unchecked
        return (Map<Class<?>, ?>) field.get( null );
    }

    @Test
    public void testNotInterpolateFile()
        throws Exception
    {
        Model model = new Model();

        File baseDir = new File( System.getProperty( "user.dir" ) );

        Properties p = new Properties();

        StringSearchModelInterpolator interpolator = (StringSearchModelInterpolator) createInterpolator();

        ModelBuildingRequest config = createModelBuildingRequest( p );

        SimpleProblemCollector collector = new SimpleProblemCollector();
        interpolator.interpolateObject( baseDir, model, new File( "." ), config, collector );
        assertProblemFree( collector );

        Map<Class<?>, ?> cache = getCachedEntries();

        Object fileCacheItem = cache.get( File.class );

        assertNotNull( fileCacheItem );

        assertThat( readFieldsArray( fileCacheItem ).length, is( 0 ) );
    }

    @Test
    public void testConcurrentInterpolation()
        throws Exception
    {
        final Model model = new Model();

        final Properties p = new Properties();
        p.setProperty( "key", "value" );
        p.setProperty( "key2", "value2" );
        p.setProperty( "key3", "value3" );
        p.setProperty( "key4", "value4" );
        p.setProperty( "key5", "value5" );

        final StringSearchModelInterpolator interpolator = (StringSearchModelInterpolator) createInterpolator();

        int numItems = 100;
        final CountDownLatch countDownLatch = new CountDownLatch( 1 );

        List<Future<SimpleProblemCollector>> futures = new ArrayList<>();
        for ( int i = 0; i < numItems; i++ )
        {
            Callable<SimpleProblemCollector> future = () ->
            {
                final ObjectWithMixedProtection obj = getValueList();
                final ModelBuildingRequest config = createModelBuildingRequest( p );

                // All workers block here so interpolation starts simultaneously.
                countDownLatch.await();
                final SimpleProblemCollector collector = new SimpleProblemCollector();
                interpolator.interpolateObject( obj, model, new File( "." ), config, collector );
                return collector;
            };
            FutureTask<SimpleProblemCollector> task = new FutureTask<>( future );
            futures.add( task );
            new Thread( task ).start();
        }
        countDownLatch.countDown(); // Start all the threads
        for ( Future<SimpleProblemCollector> result : futures )
        {
            SimpleProblemCollector problemCollector = result.get(); // ArrayIndexOutOfBoundsException are typical indication of threading issues
            assertProblemFree( problemCollector );
        }
    }

    /**
     * Builds the fixture for the concurrency test with all three list fields
     * (private, protected and package-visible) populated.
     *
     * FIX: the original copy-pasted code added the String[] entries to
     * {@code values} three times over, leaving {@code values2} and
     * {@code values3} permanently empty — so the protected and
     * package-visible fields were never actually interpolated by the test.
     */
    private ObjectWithMixedProtection getValueList()
    {
        List<String[]> values = new ArrayList<>();

        values.add( new String[] { "${key}", "${key2}" } );
        values.add( new String[] { "${key3}", "${key4}" } );

        List<String> values2 = new ArrayList<>();
        values2.add( "${key}" );
        values2.add( "${key2}" );

        List<String> values3 = new ArrayList<>();
        values3.add( "${key3}" );
        values3.add( "${key4}" );

        return new ObjectWithMixedProtection( values, values2, values3, "${key5}" );
    }

    private static final class ObjectWithStringArrayField
    {
        private final String[] values;

        public ObjectWithStringArrayField( String[] values )
        {
            this.values = values;
        }
    }

    private static final class ObjectWithListField
    {
        private final List<?> values;

        public ObjectWithListField( List<?> values )
        {
            this.values = values;
        }
    }

    private static final class ObjectWithMapField
    {
        private final Map<?, ?> values;

        public ObjectWithMapField( Map<?, ?> values )
        {
            this.values = values;
        }
    }

    private static final class ObjectWithNotInterpolatedFile
    {
        private final File f;

        ObjectWithNotInterpolatedFile( File f )
        {
            this.f = f;
        }
    }

    @SuppressWarnings( "unused" )
    private static final class ObjectWithMixedProtection
    {
        private List<?> values1;

        protected List<?> values2;

        List<?> values3;

        private String fooBar;

        private ObjectWithMixedProtection( List<?> values1, List<?> values2, List<?> values3 )
        {
            this.values1 = values1;
            this.values2 = values2;
            this.values3 = values3;
        }

        private ObjectWithMixedProtection( List<?> values1, List<?> values2, List<?> values3, String fooBar )
        {
            this.values1 = values1;
            this.values2 = values2;
            this.values3 = values3;
            this.fooBar = fooBar;
        }

        public String getFooBar()
        {
            return fooBar;
        }
    }

    @Test
    public void testFinalFieldsExcludedFromInterpolation()
    {
        Properties props = new Properties();
        props.setProperty( "expression", "value" );
        DefaultModelBuildingRequest request = new DefaultModelBuildingRequest();
        request.setUserProperties( props );

        SimpleProblemCollector problems = new SimpleProblemCollector();
        StringSearchModelInterpolator interpolator = new StringSearchModelInterpolator();
        interpolator.interpolateObject( new ClassWithFinalField(), new Model(), null, request, problems );

        assertProblemFree( problems );
    }

    static class ClassWithFinalField
    {
        public static final String CONSTANT = "${expression}";
    }

    @Test
    public void locationTrackerShouldBeExcludedFromInterpolation()
    {
        Properties props = new Properties();
        props.setProperty( "expression", "value" );
        DefaultModelBuildingRequest request = new DefaultModelBuildingRequest();
        request.setUserProperties( props );

        InputSource source = new InputSource();
        source.setLocation( "${expression}" );
        source.setModelId( "${expression}" );
        Model model = new Model();
        model.setLocation( "", new InputLocation( 1, 1, source ) );

        SimpleProblemCollector problems = new SimpleProblemCollector();
        StringSearchModelInterpolator interpolator = new StringSearchModelInterpolator();
        interpolator.interpolateObject( model, model, null, request, problems );

        assertProblemFree( problems );
        assertEquals( "${expression}", source.getLocation() );
        assertEquals( "${expression}", source.getModelId() );
    }
}
/*
 * Copyright 2016 Red Hat, Inc. and/or its affiliates
 * and other contributors as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.keycloak.services.resources.admin;

import org.jboss.resteasy.annotations.cache.NoCache;
import org.jboss.resteasy.spi.NotFoundException;
import org.jboss.resteasy.spi.ResteasyProviderFactory;
import org.keycloak.common.constants.KerberosConstants;
import org.keycloak.events.admin.OperationType;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.RealmModel;
import org.keycloak.models.UserFederationProvider;
import org.keycloak.models.UserFederationProviderFactory;
import org.keycloak.models.UserFederationProviderModel;
import org.keycloak.models.utils.KeycloakModelUtils;
import org.keycloak.models.utils.ModelToRepresentation;
import org.keycloak.provider.ConfiguredProvider;
import org.keycloak.provider.ProviderConfigProperty;
import org.keycloak.provider.ProviderFactory;
import org.keycloak.representations.idm.ConfigPropertyRepresentation;
import org.keycloak.representations.idm.CredentialRepresentation;
import org.keycloak.representations.idm.UserFederationProviderFactoryRepresentation;
import org.keycloak.representations.idm.UserFederationProviderRepresentation;
import org.keycloak.services.ServicesLogger;
import org.keycloak.services.managers.UsersSyncManager;
import org.keycloak.timer.TimerProvider;
import org.keycloak.utils.CredentialHelper;

import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;

/**
 * Base resource for managing users
 *
 * @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
 * @version $Revision: 1 $
 */
public class UserFederationProvidersResource {
    protected static final ServicesLogger logger = ServicesLogger.ROOT_LOGGER;

    protected RealmModel realm;

    // Per-request authorization helper; initialized for the USER resource below.
    protected RealmAuth auth;

    // Emits admin audit events for create/update operations.
    protected AdminEventBuilder adminEvent;

    @Context
    protected UriInfo uriInfo;

    @Context
    protected KeycloakSession session;

    public UserFederationProvidersResource(RealmModel realm, RealmAuth auth, AdminEventBuilder adminEvent) {
        this.auth = auth;
        this.realm = realm;
        this.adminEvent = adminEvent;

        auth.init(RealmAuth.Resource.USER);
    }

    /**
     * Automatically add "kerberos" to required realm credentials if it's supported by saved provider
     *
     * @param session active Keycloak session used to register the credential
     * @param realm realm whose credentials may be extended
     * @param model the just-saved federation provider configuration
     * @return true if kerberos credentials were added
     */
    public static boolean checkKerberosCredential(KeycloakSession session, RealmModel realm, UserFederationProviderModel model) {
        String allowKerberosCfg = model.getConfig().get(KerberosConstants.ALLOW_KERBEROS_AUTHENTICATION);
        if (Boolean.valueOf(allowKerberosCfg)) {
            CredentialHelper.setAlternativeCredential(session, CredentialRepresentation.KERBEROS, realm);
            return true;
        }

        return false;
    }

    /**
     * Get available provider factories
     *
     * Returns a list of available provider factories.
     *
     * @return
     */
    @GET
    @NoCache
    @Path("providers")
    @Produces(MediaType.APPLICATION_JSON)
    public List<UserFederationProviderFactoryRepresentation> getProviders() {
        auth.requireView();

        List<UserFederationProviderFactoryRepresentation> providers = new LinkedList<UserFederationProviderFactoryRepresentation>();
        for (ProviderFactory factory : session.getKeycloakSessionFactory().getProviderFactories(UserFederationProvider.class)) {
            UserFederationProviderFactoryRepresentation rep = new UserFederationProviderFactoryRepresentation();
            rep.setId(factory.getId());
            rep.setOptions(((UserFederationProviderFactory)factory).getConfigurationOptions());
            providers.add(rep);
        }
        return providers;
    }

    /**
     * Get factory with given id
     *
     * @return
     */
    @GET
    @NoCache
    @Path("providers/{id}")
    @Produces(MediaType.APPLICATION_JSON)
    public UserFederationProviderFactoryRepresentation getProvider(@PathParam("id") String id) {
        auth.requireView();

        for (ProviderFactory factory : session.getKeycloakSessionFactory().getProviderFactories(UserFederationProvider.class)) {
            if (!factory.getId().equals(id)) {
                continue;
            }

            // Factories that expose configuration metadata get the richer
            // description subtype (help text + typed properties).
            if (factory instanceof ConfiguredProvider) {

                UserFederationProviderFactoryDescription rep = new UserFederationProviderFactoryDescription();
                rep.setId(factory.getId());

                ConfiguredProvider cp = (ConfiguredProvider) factory;
                rep.setHelpText(cp.getHelpText());
                rep.setProperties(toConfigPropertyRepresentationList(cp.getConfigProperties()));

                return rep;
            }

            UserFederationProviderFactoryRepresentation rep = new UserFederationProviderFactoryRepresentation();
            rep.setId(factory.getId());
            rep.setOptions(((UserFederationProviderFactory) factory).getConfigurationOptions());

            return rep;
        }

        throw new NotFoundException("Could not find provider");
    }

    /**
     * Create a provider
     *
     * @param rep
     * @return
     */
    @POST
    @Path("instances")
    @Consumes(MediaType.APPLICATION_JSON)
    public Response createProviderInstance(UserFederationProviderRepresentation rep) {
        auth.requireManage();

        // Normalize a blank display name to null so the model stores no name.
        String displayName = rep.getDisplayName();
        if (displayName != null && displayName.trim().equals("")) {
            displayName = null;
        }
        UserFederationProviderModel model = realm.addUserFederationProvider(rep.getProviderName(), rep.getConfig(), rep.getPriority(), displayName,
                rep.getFullSyncPeriod(), rep.getChangedSyncPeriod(), rep.getLastSync());
        new UsersSyncManager().refreshPeriodicSyncForProvider(session.getKeycloakSessionFactory(), session.getProvider(TimerProvider.class), model, realm.getId());
        boolean kerberosCredsAdded = checkKerberosCredential(session, realm, model);
        if (kerberosCredsAdded) {
            logger.addedKerberosToRealmCredentials();
        }

        adminEvent.operation(OperationType.CREATE).resourcePath(uriInfo).representation(rep).success();

        return Response.created(uriInfo.getAbsolutePathBuilder().path(model.getId()).build()).build();
    }

    /**
     * Get configured providers
     *
     * @return
     */
    @GET
    @Path("instances")
    @Produces(MediaType.APPLICATION_JSON)
    @NoCache
    public List<UserFederationProviderRepresentation> getUserFederationInstances() {
        // NOTE(review): this is a read-only GET but requires manage permission,
        // unlike getProviders() which only requires view — confirm intentional.
        auth.requireManage();

        List<UserFederationProviderRepresentation> reps = new LinkedList<UserFederationProviderRepresentation>();
        for (UserFederationProviderModel model : realm.getUserFederationProviders()) {
            UserFederationProviderRepresentation rep = ModelToRepresentation.toRepresentation(model);
            reps.add(rep);
        }
        return reps;
    }

    // Sub-resource locator: routes instances/{id}/* to a per-provider resource.
    @Path("instances/{id}")
    public UserFederationProviderResource getUserFederationInstance(@PathParam("id") String id) {
        this.auth.requireView();

        UserFederationProviderModel model = KeycloakModelUtils.findUserFederationProviderById(id, realm);
        if (model == null) {
            throw new NotFoundException("Could not find federation provider");
        }

        UserFederationProviderResource instanceResource = new UserFederationProviderResource(session, realm, this.auth, model, adminEvent);
        ResteasyProviderFactory.getInstance().injectProperties(instanceResource);
        return instanceResource;
    }

    // Converts one provider config property into its REST representation.
    private ConfigPropertyRepresentation toConfigPropertyRepresentation(ProviderConfigProperty prop) {

        ConfigPropertyRepresentation propRep = new ConfigPropertyRepresentation();
        propRep.setName(prop.getName());
        propRep.setLabel(prop.getLabel());
        propRep.setType(prop.getType());
        propRep.setDefaultValue(prop.getDefaultValue());
        propRep.setHelpText(prop.getHelpText());
        return propRep;
    }

    // Converts a list of provider config properties into REST representations.
    private List<ConfigPropertyRepresentation> toConfigPropertyRepresentationList(List<ProviderConfigProperty> props) {

        List<ConfigPropertyRepresentation> reps = new ArrayList<>(props.size());
        for(ProviderConfigProperty prop : props){
            reps.add(toConfigPropertyRepresentation(prop));
        }
        return reps;
    }

    /**
     * Extended factory representation carrying help text and typed
     * configuration properties, returned for ConfiguredProvider factories.
     */
    public static class UserFederationProviderFactoryDescription extends UserFederationProviderFactoryRepresentation {

        protected String name;

        protected String helpText;

        protected List<ConfigPropertyRepresentation> properties;

        public String getName() {
            return name;
        }

        public void setName(String name) {
            this.name = name;
        }

        public String getHelpText() {
            return helpText;
        }

        public void setHelpText(String helpText) {
            this.helpText = helpText;
        }

        public List<ConfigPropertyRepresentation> getProperties() {
            return properties;
        }

        public void setProperties(List<ConfigPropertyRepresentation> properties) {
            this.properties = properties;
        }
    }
}
/*
 * Copyright 2006 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.javascript.jscomp;

import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback;
import com.google.javascript.rhino.IR;
import com.google.javascript.rhino.JSDocInfo;
import com.google.javascript.rhino.JSDocInfoBuilder;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;

/**
 * Checks for non side effecting statements such as
 * <pre>
 * var s = "this string is "
 *         "continued on the next line but you forgot the +";
 * x == foo();  // should that be '='?
 * foo();;  // probably just a stray-semicolon. Doesn't hurt to check though
 * </pre>
 * and generates warnings.
 */
final class CheckSideEffects extends AbstractPostOrderCallback
    implements HotSwapCompilerPass {

  // Warning template; {0} is filled with a case-specific explanation.
  static final DiagnosticType USELESS_CODE_ERROR = DiagnosticType.warning(
      "JSC_USELESS_CODE",
      "Suspicious code. {0}");

  // Name of the synthesized extern function used to "protect" side-effect-free
  // expressions from dead-code elimination until StripProtection runs.
  static final String PROTECTOR_FN = "JSCOMPILER_PRESERVE";

  private final CheckLevel level;

  // Nodes flagged as useless; wrapped in PROTECTOR_FN calls by protectSideEffects().
  private final List<Node> problemNodes = new ArrayList<>();

  // Qualified names of extern functions annotated @nosideeffects (value unused,
  // a LinkedHashMap is used purely as an insertion-ordered set of keys).
  private final LinkedHashMap<String, String> noSideEffectExterns = new LinkedHashMap<>();

  private final AbstractCompiler compiler;

  private final boolean protectSideEffectFreeCode;

  CheckSideEffects(AbstractCompiler compiler, CheckLevel level,
      boolean protectSideEffectFreeCode) {
    this.compiler = compiler;
    this.level = level;
    this.protectSideEffectFreeCode = protectSideEffectFreeCode;
  }

  @Override
  public void process(Node externs, Node root) {
    // First collect @nosideeffects externs, then scan the sources.
    NodeTraversal.traverseEs6(compiler, externs, new GetNoSideEffectExterns());
    NodeTraversal.traverseEs6(compiler, root, this);

    // Code with hidden side-effect code is common, for example
    // accessing "el.offsetWidth" forces a reflow in browsers, to allow this
    // will still allowing local dead code removal in general,
    // protect the "side-effect free" code in the source.
    //
    // This also includes function calls such as with document.createElement
    if (protectSideEffectFreeCode) {
      protectSideEffects();
    }
  }

  @Override
  public void hotSwapScript(Node scriptRoot, Node originalRoot) {
    NodeTraversal.traverseEs6(compiler, scriptRoot, this);
  }

  @Override
  public void visit(NodeTraversal t, Node n, Node parent) {
    // VOID nodes appear when there are extra semicolons at the BLOCK level.
    // I've been unable to think of any cases where this indicates a bug,
    // and apparently some people like keeping these semicolons around,
    // so we'll allow it.
    if (n.isEmpty() ||
        n.isComma()) {
      return;
    }

    if (parent == null) {
      return;
    }

    // Do not try to remove a block or an expr result. We already handle
    // these cases when we visit the child, and the peephole passes will
    // fix up the tree in more clever ways when these are removed.
    if (n.isExprResult() || n.isBlock()) {
      return;
    }

    // This no-op statement was there so that JSDoc information could
    // be attached to the name. This check should not complain about it.
    if (n.isQualifiedName() && n.getJSDocInfo() != null) {
      return;
    }

    boolean isResultUsed = NodeUtil.isExpressionResultUsed(n);
    boolean isSimpleOp = NodeUtil.isSimpleOperator(n);
    if (!isResultUsed) {
      if (isSimpleOp || !NodeUtil.mayHaveSideEffects(n, t.getCompiler())) {
        // Pure expression whose value is discarded — warn with a message
        // tailored to the most likely mistake.
        String msg = "This code lacks side-effects. Is there a bug?";
        if (n.isString() || n.isTemplateLit()) {
          msg = "Is there a missing '+' on the previous line?";
        } else if (isSimpleOp) {
          msg = "The result of the '" + Token.name(n.getType()).toLowerCase() +
              "' operator is not being used.";
        }

        t.getCompiler().report(
            t.makeError(n, level, USELESS_CODE_ERROR, msg));
        // TODO(johnlenz): determine if it is necessary to
        // try to protect side-effect free statements as well.
        if (!NodeUtil.isStatement(n)) {
          problemNodes.add(n);
        }
      } else if (n.isCall() && (n.getFirstChild().isGetProp()
          || n.getFirstChild().isName() || n.getFirstChild().isString())) {
        // A call whose callee may be a @nosideeffects extern: only warn when
        // the name resolves to the extern and is not shadowed by a source def.
        String qname = n.getFirstChild().getQualifiedName();

        // The name should not be defined in src scopes - only externs
        boolean isDefinedInSrc = false;
        if (qname != null) {
          if (n.getFirstChild().isGetProp()) {
            // For a.b.c() only the root name "a" can be shadowed in source.
            Node rootNameNode = NodeUtil.getRootOfQualifiedName(n.getFirstChild());
            isDefinedInSrc = rootNameNode != null && rootNameNode.isName()
                && t.getScope().getVar(rootNameNode.getString()) != null;
          } else {
            isDefinedInSrc = t.getScope().getVar(qname) != null;
          }
        }

        if (qname != null && noSideEffectExterns.containsKey(qname) && !isDefinedInSrc) {
          problemNodes.add(n);
          String msg = "The result of the extern function call '" + qname
              + "' is not being used.";
          t.getCompiler().report(
              t.makeError(n, level, USELESS_CODE_ERROR, msg));
        }
      }
    }
  }

  /**
   * Protect side-effect free nodes by making them parameters
   * to a extern function call. This call will be removed
   * after all the optimizations passes have run.
   */
  private void protectSideEffects() {
    if (!problemNodes.isEmpty()) {
      addExtern();
      for (Node n : problemNodes) {
        // Rewrite <expr> into JSCOMPILER_PRESERVE(<expr>), keeping source refs.
        Node name = IR.name(PROTECTOR_FN).srcref(n);
        name.putBooleanProp(Node.IS_CONSTANT_NAME, true);
        Node replacement = IR.call(name).srcref(n);
        replacement.putBooleanProp(Node.FREE_CALL, true);
        n.getParent().replaceChild(n, replacement);
        replacement.addChildToBack(n);
      }
      compiler.reportCodeChange();
    }
  }

  /** Declares the PROTECTOR_FN name in the synthesized externs file. */
  private void addExtern() {
    Node name = IR.name(PROTECTOR_FN);
    name.putBooleanProp(Node.IS_CONSTANT_NAME, true);
    Node var = IR.var(name);
    // Add "@noalias" so we can strip the method when AliasExternals is enabled.
    JSDocInfoBuilder builder = new JSDocInfoBuilder(false);
    builder.recordNoAlias();
    var.setJSDocInfo(builder.build());
    CompilerInput input = compiler.getSynthesizedExternsInput();
    name.setStaticSourceFile(input.getSourceFile());
    var.setStaticSourceFile(input.getSourceFile());
    input.getAstRoot(compiler).addChildrenToBack(var);
    compiler.reportCodeChange();
  }

  /**
   * Remove side-effect sync functions.
   * Unwraps every JSCOMPILER_PRESERVE(expr) call back to the bare expr.
   */
  static class StripProtection extends AbstractPostOrderCallback implements CompilerPass {

    private final AbstractCompiler compiler;

    StripProtection(AbstractCompiler compiler) {
      this.compiler = compiler;
    }

    @Override
    public void process(Node externs, Node root) {
      NodeTraversal.traverseEs6(compiler, root, this);
    }

    @Override
    public void visit(NodeTraversal t, Node n, Node parent) {
      if (n.isCall()) {
        Node target = n.getFirstChild();
        // TODO(johnlenz): add this to the coding convention
        // so we can remove goog.reflect.sinkValue as well.
        if (target.isName() && target.getString().equals(PROTECTOR_FN)) {
          Node expr = n.getLastChild();
          n.detachChildren();
          parent.replaceChild(n, expr);
        }
      }
    }
  }

  /**
   * Get fully qualified function names which are marked
   * with @nosideeffects
   *
   * TODO(ChadKillingsworth) Add support for object literals
   */
  private class GetNoSideEffectExterns extends AbstractPostOrderCallback {
    @Override
    public void visit(NodeTraversal t, Node n, Node parent) {
      if (n.isFunction()) {
        String name = NodeUtil.getFunctionName(n);
        JSDocInfo jsDoc = NodeUtil.getBestJSDocInfo(n);
        if (jsDoc != null && jsDoc.isNoSideEffects()) {
          // Map used as an ordered set; the value is intentionally null.
          noSideEffectExterns.put(name, null);
        }
      }
    }
  }
}
/*! ******************************************************************************
 *
 * Pentaho Data Integration
 *
 * Copyright (C) 2002-2017 by Pentaho : http://www.pentaho.com
 *
 *******************************************************************************
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ******************************************************************************/

package org.pentaho.di.core.util;

import java.text.DateFormat;
import java.text.DateFormatSymbols;
import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.text.NumberFormat;
import java.text.SimpleDateFormat;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.pentaho.di.core.Const;
import org.pentaho.di.core.exception.KettleValueException;
import org.pentaho.di.core.row.RowMetaInterface;

/**
 * A collection of utilities to manipulate strings.
 *
 * @author wdeclerc
 */
public class StringUtil {
  public static final String UNIX_OPEN = "${";

  public static final String UNIX_CLOSE = "}";

  public static final String WINDOWS_OPEN = "%%";

  public static final String WINDOWS_CLOSE = "%%";

  public static final String HEX_OPEN = "$[";

  public static final String HEX_CLOSE = "]";

  public static final String FIELD_OPEN = "?{";

  public static final String FIELD_CLOSE = "}";

  public static final String CRLF = "\r\n";

  public static final String INDENTCHARS = "                    ";

  public static final String EMPTY_STRING = "";

  public static final String[] SYSTEM_PROPERTIES = new String[] {
    "java.version", "java.vendor", "java.vendor.url", "java.home", "java.vm.specification.version",
    "java.vm.specification.vendor", "java.vm.specification.name", "java.vm.version", "java.vm.vendor",
    "java.vm.name", "java.specification.version", "java.specification.vendor", "java.specification.name",
    "java.class.version", "java.class.path", "java.library.path", "java.io.tmpdir", "java.compiler",
    "java.ext.dirs",

    "os.name", "os.arch", "os.version",

    "file.separator", "path.separator", "line.separator",

    "user.name", "user.home", "user.dir", "user.country", "user.language", "user.timezone",

    "org.apache.commons.logging.Log", "org.apache.commons.logging.simplelog.log.org.apache.http",
    "org.apache.commons.logging.simplelog.showdatetime", "org.eclipse.swt.browser.XULRunnerInitialized",
    "org.eclipse.swt.browser.XULRunnerPath", "sun.arch.data.model", "sun.boot.class.path",
    "sun.boot.library.path", "sun.cpu.endian", "sun.cpu.isalist", "sun.io.unicode.encoding",
    "sun.java.launcher", "sun.jnu.encoding", "sun.management.compiler", "sun.os.patch.level", };

  /**
   * Substitutes variables in <code>aString</code>. Variable names are delimited by open and close strings. The values
   * are retrieved from the given map.
   *
   * @param aString
   *          the string on which to apply the substitution.
   * @param variablesValues
   *          a map containing the variable values. The keys are the variable names, the values are the variable
   *          values.
   * @param open
   *          the open delimiter for variables.
   * @param close
   *          the close delimiter for variables.
   * @return the string with the substitution applied.
   */
  public static String substitute( String aString, Map<String, String> variablesValues, String open, String close ) {
    return substitute( aString, variablesValues, open, close, 0 );
  }

  /**
   * Substitutes variables in <code>aString</code>. Variable names are delimited by open and close strings. The values
   * are retrieved from the given map. Variable values that themselves contain variables are expanded recursively,
   * up to a depth of 50.
   *
   * @param aString
   *          the string on which to apply the substitution.
   * @param variablesValues
   *          a map containing the variable values. The keys are the variable names, the values are the variable
   *          values.
   * @param open
   *          the open delimiter for variables.
   * @param close
   *          the close delimiter for variables.
   * @param recursion
   *          the number of recursion (internal counter to avoid endless loops)
   * @return the string with the substitution applied.
   * @throws RuntimeException
   *           if recursive substitution exceeds 50 levels (self-referencing variables).
   */
  public static String substitute( String aString, Map<String, String> variablesValues, String open, String close,
    int recursion ) {
    if ( aString == null ) {
      return null;
    }

    StringBuilder buffer = new StringBuilder();

    String rest = aString;

    // search for opening string
    int i = rest.indexOf( open );
    while ( i > -1 ) {
      int j = rest.indexOf( close, i + open.length() ); // search for closing string
      if ( j > -1 ) {
        String varName = rest.substring( i + open.length(), j );
        Object value = variablesValues.get( varName );
        if ( value == null ) {
          // Unknown variable: leave the token in place verbatim.
          value = open + varName + close;
        } else {
          // check for another variable inside this value
          int another = ( (String) value ).indexOf( open ); // check here first for speed
          if ( another > -1 ) { // for safety: avoid recursive endless loops with stack overflow
            if ( recursion > 50 ) {
              throw new RuntimeException( "Endless loop detected for substitution of variable: " + (String) value );
            }
            value = substitute( (String) value, variablesValues, open, close, ++recursion );
          }
        }
        buffer.append( rest.substring( 0, i ) );
        buffer.append( value );
        rest = rest.substring( j + close.length() );
      } else {
        // no closing tag found; end the search
        buffer.append( rest );
        rest = "";
      }
      // keep searching
      i = rest.indexOf( open );
    }
    buffer.append( rest );
    return buffer.toString();
  }

  /**
   * Substitutes hex values in <code>aString</code> and convert them to operating system char equivalents in the return
   * string. Format is $[01] or $[6F,FF,00,1F] Example:
   * "This is a hex encoded six digits number 123456 in this string: $[31,32,33,34,35,36]"
   *
   * @param aString
   *          the string on which to apply the substitution.
   * @return the string with the substitution applied.
   */
  public static String substituteHex( String aString ) {
    if ( aString == null ) {
      return null;
    }

    StringBuilder buffer = new StringBuilder();

    String rest = aString;

    // search for opening string
    int i = rest.indexOf( HEX_OPEN );
    while ( i > -1 ) {
      int j = rest.indexOf( HEX_CLOSE, i + HEX_OPEN.length() ); // search for closing string
      if ( j > -1 ) {
        buffer.append( rest.substring( 0, i ) );
        String hexString = rest.substring( i + HEX_OPEN.length(), j );
        String[] hexStringArray = hexString.split( "," );
        int hexInt;
        byte[] hexByte = new byte[1];
        for ( int pos = 0; pos < hexStringArray.length; pos++ ) {
          try {
            hexInt = Integer.parseInt( hexStringArray[pos], 16 );
          } catch ( NumberFormatException e ) {
            hexInt = 0; // in case we get an invalid hex value, ignore: we can not log here
          }
          hexByte[0] = (byte) hexInt;
          // NOTE(review): uses the platform default charset to turn the byte into a
          // char — intentional "operating system char equivalents" behavior.
          buffer.append( new String( hexByte ) );
        }
        rest = rest.substring( j + HEX_CLOSE.length() );
      } else {
        // no closing tag found; end the search
        buffer.append( rest );
        rest = "";
      }
      // keep searching
      i = rest.indexOf( HEX_OPEN );
    }
    buffer.append( rest );
    return buffer.toString();
  }

  /**
   * Substitutes variables in <code>aString</code> with the environment values in the system properties.
   * Applies Windows-style (%%var%%), Unix-style (${var}) and hex ($[..]) substitution in that order.
   *
   * @param aString
   *          the string on which to apply the substitution.
   * @param systemProperties
   *          the system properties to use
   * @return the string with the substitution applied.
   */
  public static final synchronized String environmentSubstitute( String aString,
    Map<String, String> systemProperties ) {
    // Work on a private snapshot so concurrent modifications of the caller's map
    // cannot affect the substitution. (The synchronized block on the local map is
    // redundant — the method itself is synchronized — but kept for compatibility.)
    Map<String, String> sysMap = new HashMap<String, String>();
    synchronized ( sysMap ) {
      sysMap.putAll( Collections.synchronizedMap( systemProperties ) );
      aString = substituteWindows( aString, sysMap );
      aString = substituteUnix( aString, sysMap );
      aString = substituteHex( aString );
      return aString;
    }
  }

  /**
   * Substitutes variables in <code>aString</code>. Variables are of the form "${&lt;variable name&gt;}", following the
   * Unix scripting convention. The values are retrieved from the given map.
   *
   * @param aString
   *          the string on which to apply the substitution.
   * @param variables
   *          a map containing the variable values. The keys are the variable names, the values are the variable
   *          values.
   * @return the string with the substitution applied.
   */
  public static String substituteUnix( String aString, Map<String, String> variables ) {
    return substitute( aString, variables, UNIX_OPEN, UNIX_CLOSE );
  }

  /**
   * Substitutes variables in <code>aString</code>. Variables are of the form "%%&lt;variable name&gt;%%", following
   * the Windows convention. The values are retrieved from the given map.
   *
   * @param aString
   *          the string on which to apply the substitution.
   * @param variables
   *          a map containing the variable values. The keys are the variable names, the values are the variable
   *          values.
   * @return the string with the substitution applied.
   */
  public static String substituteWindows( String aString, Map<String, String> variables ) {
    return substitute( aString, variables, WINDOWS_OPEN, WINDOWS_CLOSE );
  }

  /**
   * Substitutes field values in <code>aString</code>. Field values are of the form "?{&lt;field name&gt;}". The values
   * are retrieved from the specified row. Please note that the getString() method is used to convert to a String, for
   * all values in the row.
   *
   * @param aString
   *          the string on which to apply the substitution.
   * @param rowMeta
   *          The row metadata to use.
   * @param rowData
   *          The row data to use
   *
   * @return the string with the substitution applied.
   * @throws KettleValueException
   *           In case there is a String conversion error
   */
  public static String substituteField( String aString, RowMetaInterface rowMeta, Object[] rowData )
    throws KettleValueException {
    Map<String, String> variables = new HashMap<String, String>();
    for ( int i = 0; i < rowMeta.size(); i++ ) {
      variables.put( rowMeta.getValueMeta( i ).getName(), rowMeta.getString( rowData, i ) );
    }
    return substitute( aString, variables, FIELD_OPEN, FIELD_CLOSE );
  }

  /**
   * Search the string and report back on the variables used
   *
   * @param aString
   *          The string to search
   * @param open
   *          the open or "start of variable" characters ${ or %%
   * @param close
   *          the close or "end of variable" characters } or %%
   * @param list
   *          the list of variables to add to
   * @param includeSystemVariables
   *          also check for system variables.
   */
  private static void getUsedVariables( String aString, String open, String close, List<String> list,
    boolean includeSystemVariables ) {
    if ( aString == null ) {
      return;
    }

    int p = 0;
    while ( p < aString.length() ) {
      // OK, we found something... : start of Unix variable
      if ( aString.substring( p ).startsWith( open ) ) {
        // See if it's closed...
        int from = p + open.length();
        int to = aString.indexOf( close, from + 1 );

        if ( to >= 0 ) {
          String variable = aString.substring( from, to );

          if ( Const.indexOfString( variable, list ) < 0 ) {
            // Either we include the system variables (all)
            // Or the variable is not a system variable
            // Or it's a system variable but the value has not been set (and we offer the user the option to set it)
            //
            if ( includeSystemVariables || !isSystemVariable( variable ) || System.getProperty( variable ) == null ) {
              list.add( variable );
            }
          }
          // OK, continue
          p = to + close.length();
        }
      }
      p++;
    }
  }

  /** @return true if the given name is one of the well-known JVM/OS system properties. */
  public static boolean isSystemVariable( String aString ) {
    return Const.indexOfString( aString, SYSTEM_PROPERTIES ) >= 0;
  }

  /** Collects the Unix-style and Windows-style variable names used in the string into the given list. */
  public static void getUsedVariables( String aString, List<String> list, boolean includeSystemVariables ) {
    getUsedVariables( aString, UNIX_OPEN, UNIX_CLOSE, list, includeSystemVariables );
    getUsedVariables( aString, WINDOWS_OPEN, WINDOWS_CLOSE, list, includeSystemVariables );
  }

  /**
   * Generates a pseudo-random lowercase-letter string of the given length, with optional
   * prefix/postfix, optionally upper-cased. Not cryptographically secure (uses Math.random()).
   */
  public static final String generateRandomString( int length, String prefix, String postfix, boolean uppercase ) {
    StringBuilder buffer = new StringBuilder();

    if ( !Utils.isEmpty( prefix ) ) {
      buffer.append( prefix );
    }

    for ( int i = 0; i < length; i++ ) {
      int c = 'a' + (int) ( Math.random() * 26 );
      buffer.append( (char) c );
    }
    if ( !Utils.isEmpty( postfix ) ) {
      buffer.append( postfix );
    }
    if ( uppercase ) {
      return buffer.toString().toUpperCase();
    }

    return buffer.toString();
  }

  /** Capitalizes the first character of the string; returns "" for null/blank input. */
  public static String initCap( String st ) {
    if ( st == null || st.trim().length() == 0 ) {
      return "";
    }

    if ( st.substring( 0, 1 ).equals( st.substring( 0, 1 ).toUpperCase() ) ) {
      // Already initially capitalized.
      return st;
    } else {
      // Capitalize first character
      return st.substring( 0, 1 ).toUpperCase() + st.substring( 1 );
    }
  }

  /**
   * Parses a string to a double using the given format pattern and symbols.
   *
   * @param pattern  DecimalFormat pattern (may be empty)
   * @param decimal  decimal separator (first char used, may be empty)
   * @param grouping grouping separator (first char used, may be empty)
   * @param currency currency symbol (may be empty)
   * @param value    the string to parse
   * @return the parsed double value
   * @throws KettleValueException if the value cannot be parsed with the given format
   */
  public static double str2num( String pattern, String decimal, String grouping, String currency,
    String value ) throws KettleValueException {
    // 0 : pattern
    // 1 : Decimal separator
    // 2 : Grouping separator
    // 3 : Currency symbol

    NumberFormat nf = NumberFormat.getInstance();
    DecimalFormat df = (DecimalFormat) nf;
    DecimalFormatSymbols dfs = new DecimalFormatSymbols();

    if ( !Utils.isEmpty( pattern ) ) {
      df.applyPattern( pattern );
    }
    if ( !Utils.isEmpty( decimal ) ) {
      dfs.setDecimalSeparator( decimal.charAt( 0 ) );
    }
    if ( !Utils.isEmpty( grouping ) ) {
      dfs.setGroupingSeparator( grouping.charAt( 0 ) );
    }
    if ( !Utils.isEmpty( currency ) ) {
      dfs.setCurrencySymbol( currency );
    }
    try {
      df.setDecimalFormatSymbols( dfs );
      return df.parse( value ).doubleValue();
    } catch ( Exception e ) {
      String message = "Couldn't convert string to number " + e.toString();
      if ( !isEmpty( pattern ) ) {
        message += " pattern=" + pattern;
      }
      if ( !isEmpty( decimal ) ) {
        message += " decimal=" + decimal;
      }
      if ( !isEmpty( grouping ) ) {
        message += " grouping=" + grouping.charAt( 0 );
      }
      if ( !isEmpty( currency ) ) {
        message += " currency=" + currency;
      }
      throw new KettleValueException( message );
    }
  }

  /**
   * Check if the string supplied is empty. A String is empty when it is null or when the length is 0
   *
   * @param string
   *          The string to check
   * @return true if the string supplied is empty
   */
  public static final boolean isEmpty( String string ) {
    return string == null || string.length() == 0;
  }

  /**
   * Check if the StringBuilder supplied is empty. A StringBuilder is empty when it is null or when the length is 0
   *
   * @param string
   *          The StringBuilder to check
   * @return true if the StringBuilder supplied is empty
   */
  public static final boolean isEmpty( StringBuilder string ) {
    return string == null || string.length() == 0;
  }

  /**
   * Parses a date string with the given pattern and (optional) local pattern chars.
   *
   * @param arg0 the SimpleDateFormat pattern to apply (may be null)
   * @param arg1 local pattern characters for the DateFormatSymbols (may be null)
   * @param val  the string to parse
   * @return the parsed Date
   * @throws KettleValueException if the string cannot be parsed
   */
  public static Date str2dat( String arg0, String arg1, String val ) throws KettleValueException {
    SimpleDateFormat df = new SimpleDateFormat();

    DateFormatSymbols dfs = new DateFormatSymbols();
    if ( arg1 != null ) {
      dfs.setLocalPatternChars( arg1 );
    }
    if ( arg0 != null ) {
      df.applyPattern( arg0 );
    }

    try {
      return df.parse( val );
    } catch ( Exception e ) {
      throw new KettleValueException( "TO_DATE Couldn't convert String to Date " + e.toString() );
    }
  }

  /**
   * Returns an indentation string of the requested number of spaces.
   * Note: indentLevel must not exceed the length of {@link #INDENTCHARS}.
   */
  public static String getIndent( int indentLevel ) {
    return INDENTCHARS.substring( 0, indentLevel );
  }

  /**
   * Giving back a date/time string in the format following the rule from the most to the least significant
   *
   * @param date
   *          the date to convert
   * @return a string in the form yyyddMM_hhmmss
   */
  public static String getFormattedDateTime( Date date ) {
    return getFormattedDateTime( date, false );
  }

  /**
   * Giving back a date/time string in the format following the rule from the most to the least significant
   *
   * @param date
   *          the date to convert
   * @param milliseconds
   *          true when milliseconds should be added
   * @return a string in the form yyyddMM_hhmmssSSS (milliseconds will be optional)
   */
  public static String getFormattedDateTime( Date date, boolean milliseconds ) {
    DateFormat dateFormat = null;
    if ( milliseconds ) {
      dateFormat = new SimpleDateFormat( Const.GENERALIZED_DATE_TIME_FORMAT_MILLIS );
    } else {
      dateFormat = new SimpleDateFormat( Const.GENERALIZED_DATE_TIME_FORMAT );
    }
    return dateFormat.format( date );
  }

  /**
   * Giving back the actual time as a date/time string in the format following the rule from the most to the least
   * significant
   *
   * @return a string in the form yyyddMM_hhmmss
   */
  public static String getFormattedDateTimeNow() {
    return getFormattedDateTime( new Date(), false );
  }

  /**
   * Giving back the actual time as a date/time string in the format following the rule from the most to the least
   * significant
   *
   * @param milliseconds
   *          true when milliseconds should be added
   * @return a string in the form yyyddMM_hhmmssSSS (milliseconds will be optional)
   */
  public static String getFormattedDateTimeNow( boolean milliseconds ) {
    return getFormattedDateTime( new Date(), milliseconds );
  }

  /** @return true if the string parses as an int. */
  public static boolean IsInteger( String str ) {
    try {
      Integer.parseInt( str );
    } catch ( NumberFormatException e ) {
      return false;
    }
    return true;
  }

  /** @return true if the string parses as a double. */
  public static boolean IsNumber( String str ) {
    try {
      Double.valueOf( str ).doubleValue();
    } catch ( Exception e ) {
      return false;
    }
    return true;
  }

  /**
   * Checks whether the string parses as a date using the default "yy-MM-dd" mask.
   *
   * @param str the string to check
   * @return true if the string parses as a date
   */
  public static boolean IsDate( String str ) {
    // BUG FIX: this previously called IsDate( "yy-mm-dd" ) — i.e. the SAME
    // one-argument overload with the mask literal as the value — causing
    // unbounded recursion and a StackOverflowError on every call. Delegate to
    // the two-argument overload with the actual input instead.
    return IsDate( str, "yy-MM-dd" );
  }

  /**
   * Checks whether the string parses as a date using the given SimpleDateFormat mask.
   *
   * @param str  the string to check
   * @param mask the SimpleDateFormat pattern to parse with
   * @return true if the string parses as a date with the given mask
   */
  public static boolean IsDate( String str, String mask ) {
    // TODO: What about other dates? Maybe something for a CRQ
    try {
      // BUG FIX: the mask parameter was previously ignored and a hard-coded
      // "yy-mm-dd" pattern was used (lower-case 'mm' means minutes, not
      // months, in SimpleDateFormat). Honor the caller-supplied mask.
      SimpleDateFormat fdate = new SimpleDateFormat( mask );
      fdate.parse( str );
    } catch ( Exception e ) {
      return false;
    }
    return true;
  }

  /**
   * remove specification from variable
   *
   * @param variable
   *          the variable to look for, with the $ or % variable specification.
   * @return the variable name
   */
  public static final String getVariableName( String variable ) {
    variable = variable.trim();
    if ( variable.startsWith( UNIX_OPEN ) || variable.startsWith( WINDOWS_OPEN ) || variable.startsWith( HEX_OPEN ) ) {
      variable = variable.substring( 2, variable.length() );
    }
    if ( variable.endsWith( UNIX_CLOSE ) || variable.endsWith( HEX_CLOSE ) ) {
      variable = variable.substring( 0, variable.length() - 1 );
    }
    if ( variable.endsWith( WINDOWS_CLOSE ) ) {
      variable = variable.substring( 0, variable.length() - 2 );
    }

    return variable;
  }

  /**
   * @param variable
   *          the variable to look for, with the $ or % variable specification.
   * @return true if the input is a variable, false otherwise
   */
  public static boolean isVariable( String variable ) {
    if ( variable == null ) {
      return false;
    }

    variable = variable.trim();

    return variable.startsWith( UNIX_OPEN ) && variable.endsWith( UNIX_CLOSE )
      || variable.startsWith( WINDOWS_OPEN ) && variable.endsWith( WINDOWS_CLOSE )
      || variable.startsWith( HEX_OPEN ) && variable.endsWith( HEX_CLOSE );
  }
}
/*
 * Copyright 2004-2006 Stefan Reuter
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package org.asteriskjava.fastagi.internal;

import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

import java.util.List;

import org.asteriskjava.fastagi.AgiChannel;
import org.asteriskjava.fastagi.InvalidCommandSyntaxException;
import org.asteriskjava.fastagi.InvalidOrUnknownCommandException;
import org.asteriskjava.fastagi.command.NoopCommand;
import org.asteriskjava.fastagi.reply.AgiReply;
import org.junit.Before;
import org.junit.Test;

/**
 * Unit tests for AgiChannelImpl.sendCommand(): verifies normal command
 * round-trips and that error status codes from the AGI reply are translated
 * into the appropriate exception types. Reader/writer are EasyMock mocks.
 */
public class AgiChannelImplTest {
    private AgiWriter agiWriter;
    private AgiReader agiReader;
    private AgiChannel agiChannel;

    @Before
    public void setUp() {
        this.agiWriter = createMock(AgiWriter.class);
        this.agiReader = createMock(AgiReader.class);
        // First constructor arg (the AgiRequest) is not needed for these tests.
        this.agiChannel = new AgiChannelImpl(null, agiWriter, agiReader);
    }

    /** A successful reply must be returned unchanged from sendCommand(). */
    @Test
    public void testSendCommand() throws Exception {
        SimpleAgiReply reply;
        NoopCommand command;

        reply = new SimpleAgiReply();
        reply.setStatus(AgiReply.SC_SUCCESS);
        reply.setResult("0");

        command = new NoopCommand();

        agiWriter.sendCommand(command);
        expect(agiReader.readReply()).andReturn(reply);

        replay(agiWriter);
        replay(agiReader);

        assertEquals(reply, agiChannel.sendCommand(command));

        verify(agiWriter);
        verify(agiReader);
    }

    /** Status 510 must be mapped to InvalidOrUnknownCommandException. */
    @Test
    public void testSendCommandWithInvalidOrUnknownCommandResponse() throws Exception {
        SimpleAgiReply reply;
        NoopCommand command;

        reply = new SimpleAgiReply();
        reply.setStatus(AgiReply.SC_INVALID_OR_UNKNOWN_COMMAND);
        reply.setResult("0");

        command = new NoopCommand();

        agiWriter.sendCommand(command);
        expect(agiReader.readReply()).andReturn(reply);

        replay(agiWriter);
        replay(agiReader);

        try {
            agiChannel.sendCommand(command);
            fail("must throw InvalidOrUnknownCommandException");
        } catch (InvalidOrUnknownCommandException e) {
            assertEquals("Incorrect message", "Invalid or unknown command: NOOP", e.getMessage());
        }

        verify(agiWriter);
        verify(agiReader);
    }

    /** Status 520 must be mapped to InvalidCommandSyntaxException carrying synopsis and usage. */
    @Test
    public void testSendCommandWithInvalidCommandSyntaxResponse() throws Exception {
        SimpleAgiReply reply;
        NoopCommand command;

        reply = new SimpleAgiReply();
        reply.setStatus(AgiReply.SC_INVALID_COMMAND_SYNTAX);
        reply.setSynopsis("NOOP Synopsis");
        reply.setUsage("NOOP Usage");
        reply.setResult("0");

        command = new NoopCommand();

        agiWriter.sendCommand(command);
        expect(agiReader.readReply()).andReturn(reply);

        replay(agiWriter);
        replay(agiReader);

        try {
            agiChannel.sendCommand(command);
            fail("must throw InvalidCommandSyntaxException");
        } catch (InvalidCommandSyntaxException e) {
            assertEquals("Incorrect message", "Invalid command syntax: NOOP Synopsis", e.getMessage());
            assertEquals("Incorrect sysnopsis", "NOOP Synopsis", e.getSynopsis());
            assertEquals("Incorrect usage", "NOOP Usage", e.getUsage());
        }

        verify(agiWriter);
        verify(agiReader);
    }

    /**
     * Minimal hand-rolled AgiReply stub: only the members these tests exercise
     * (status, result, synopsis, usage) are implemented; everything else throws
     * UnsupportedOperationException so an unexpected access fails loudly.
     */
    public class SimpleAgiReply implements AgiReply {
        private static final long serialVersionUID = 1L;
        private int status;
        private String result;
        private String synopsis;
        private String usage;

        public String getFirstLine() {
            throw new UnsupportedOperationException();
        }

        public void setUsage(String usage) {
            this.usage = usage;
        }

        public void setSynopsis(String synopsis) {
            this.synopsis = synopsis;
        }

        public void setResult(String result) {
            this.result = result;
        }

        public void setStatus(int status) {
            this.status = status;
        }

        public List<String> getLines() {
            throw new UnsupportedOperationException();
        }

        public int getResultCode() {
            throw new UnsupportedOperationException();
        }

        public char getResultCodeAsChar() {
            throw new UnsupportedOperationException();
        }

        public String getResult() {
            return result;
        }

        public int getStatus() {
            return status;
        }

        public String getAttribute(String name) {
            throw new UnsupportedOperationException();
        }

        public String getExtra() {
            throw new UnsupportedOperationException();
        }

        public String getSynopsis() {
            return synopsis;
        }

        public String getUsage() {
            return usage;
        }
    }
}
package de.hpi.is.idd.datasets;

import org.apache.commons.lang3.StringUtils;
// NOTE(review): scala.Serializable is unusual for a Java class — presumably
// required so instances can be shipped through a Spark/Scala pipeline; confirm
// before replacing with java.io.Serializable.
import scala.Serializable;

import java.text.Normalizer;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.stream.Collectors;

/**
 * Dataset utilities for the CD (audio disc) duplicate-detection dataset:
 * record model, parsing from a generic attribute map, and per-attribute
 * similarity weighting. The class continues beyond this chunk.
 */
public class CDUtility extends de.hpi.is.idd.interfaces.DatasetUtils implements Serializable {

    private static final long serialVersionUID = -7666942644356996786L;

    /** Plain mutable holder for one CD record's attributes. */
    static class CDRecord {

        private String artist;
        private String category;
        private String cdExtra;
        private String genre;
        private String id;
        private String title;
        private List<String> tracks;
        // Boxed Short so that "year missing" can be represented as null.
        private Short year;

        public CDRecord() {
        }

        public CDRecord(String id, String artist, String title, String category, String genre, String cdExtra,
                short year, List<String> tracks) {
            this.id = id;
            this.artist = artist;
            this.title = title;
            this.category = category;
            this.genre = genre;
            this.cdExtra = cdExtra;
            this.year = year;
            this.tracks = tracks;
        }

        public String getArtist() {
            return artist;
        }

        public String getCategory() {
            return category;
        }

        public String getCdExtra() {
            return cdExtra;
        }

        public String getGenre() {
            return genre;
        }

        public String getId() {
            return id;
        }

        public String getTitle() {
            return title;
        }

        public List<String> getTracks() {
            return tracks;
        }

        public Short getYear() {
            return year;
        }

        public void setArtist(String artist) {
            this.artist = artist;
        }

        public void setCategory(String category) {
            this.category = category;
        }

        public void setCdExtra(String cdExtra) {
            this.cdExtra = cdExtra;
        }

        public void setGenre(String genre) {
            this.genre = genre;
        }

        public void setId(String id) {
            this.id = id;
        }

        public void setTitle(String title) {
            this.title = title;
        }

        public void setTracks(List<String> tracks) {
            this.tracks = tracks;
        }

        public void setYear(short year) {
            this.year = year;
        }

        @Override
        public String toString() {
            return id;
        }
    }

    /** Builds a CDRecord from a generic attribute-name → value map. */
    private static class CDRecordParser {

        @SuppressWarnings("unchecked")
        public static CDRecord parse(Map<String, Object> record) {
            CDRecord cd = new CDRecord();
            cd.setId((String) record.get(ID));
            cd.setArtist((String) record.get(ARTIST_NAME));
            cd.setTitle((String) record.get(TITLE_NAME));
            cd.setCategory((String) record.get(CATEGORY_NAME));
            Object year = record.get(YEAR_NAME);
            // Year is optional; only set when present (setter takes a primitive short).
            if (year != null) {
                cd.setYear((Short) year);
            }
            cd.setGenre((String) record.get(GENRE_NAME));
            cd.setCdExtra((String) record.get(CDEXTRA_NAME));
            cd.setTracks((List<String>) record.get(TRACKS_NAME));
            return cd;
        }
    }

    /**
     * The comparable attributes of a CD record, each with a relative weight
     * used when combining per-attribute similarities (default weight 1;
     * CDEXTRA is ignored with weight 0).
     */
    public enum Attribute {
        ARTIST(ARTIST_NAME, 5), CATEGORY(CATEGORY_NAME), CDEXTRA(CDEXTRA_NAME, 0), GENRE(GENRE_NAME), TITLE(
                TITLE_NAME, 4), TRACKS(TRACKS_NAME, 3), YEAR(YEAR_NAME);

        // Sum of all weights, for normalizing a weighted similarity average.
        private static final double TOTAL_WEIGHT = Arrays.stream(Attribute.values()).mapToDouble(Attribute::weight)
                .sum();

        private final double weight;
        public final String name;

        Attribute(String name) {
            this(name, 1);
        }

        Attribute(String name, double weight) {
            this.name = name;
            this.weight = weight;
        }

        public double weight() {
            return weight;
        }

        public String getName() {
            return name;
        }

        /** Reverse lookup from the dataset attribute/column name. */
        public static Attribute getForName(String attribute) {
            switch (attribute) {
                case ARTIST_NAME:
                    return ARTIST;
                case TITLE_NAME:
                    return TITLE;
                case CATEGORY_NAME:
                    return CATEGORY;
                case GENRE_NAME:
                    return GENRE;
                case YEAR_NAME:
                    return YEAR;
                case CDEXTRA_NAME:
                    return CDEXTRA;
                case TRACKS_NAME:
                    return TRACKS;
                default:
                    throw new IllegalArgumentException("Unknown attribute: " + attribute);
            }
        }
    }

    // Dataset column names.
    private static final String ARTIST_NAME = "artist";
    private static final String CATEGORY_NAME = "category";
    private static final String YEAR_NAME = "year";
    private static final String TITLE_NAME = "dtitle";
    private static final String CDEXTRA_NAME = "cdextra";
    private static final String TRACKS_NAME = "tracks";
    private static final String GENRE_NAME = "genre";
    private static final String ID = "id";
    // Regex used to split multi-valued fields (escaped pipe). [sic: "SEPERATOR"]
    private static final String SEPERATOR = "\\|";
    // Similarity threshold used by the duplicate decision / as neutral value.
    private static final double THRESHOLD = 0.7;
private static int getNthDigit(int number, int n) { return (int) (Math.abs(number) / Math.pow(10, n) % 10); } private static int getNumberOfDigits(int number) { return (int) (Math.log10(Math.abs(number)) + 1); } public Map<String, Double> getSimilarityOfRecords(CDRecord firstRecord, CDRecord secondRecord) { Map<String, Double> similarityMap = new HashMap<>(); similarityMap.put(ARTIST_NAME, compareAttributeValue(ARTIST_NAME, firstRecord.getArtist(), secondRecord.getArtist())); similarityMap.put(TITLE_NAME, compareAttributeValue(TITLE_NAME, firstRecord.getTitle(), secondRecord.getTitle())); similarityMap.put(CATEGORY_NAME, compareAttributeValue(CATEGORY_NAME, firstRecord.getCategory(), secondRecord.getCategory())); similarityMap.put(GENRE_NAME, compareAttributeValue(GENRE_NAME, firstRecord.getGenre(), secondRecord.getGenre())); similarityMap.put(YEAR_NAME, compareAttributeValue(YEAR_NAME, firstRecord.getYear(), secondRecord.getYear())); similarityMap.put(CDEXTRA_NAME, compareAttributeValue(CDEXTRA_NAME, firstRecord.getCdExtra(), secondRecord.getCdExtra())); similarityMap.put(TRACKS_NAME, compareAttributeValue(TRACKS_NAME, firstRecord.getTracks(), secondRecord.getTracks())); return similarityMap; } private static double getSimilarityOfTracks(List<String> firstTracklist, List<String> secondTracklist) { HashSet<String> set = new HashSet<>(firstTracklist); set.retainAll(secondTracklist); int shared = set.size(); Set<String> mergedTrackset = new HashSet<>(); mergedTrackset.addAll(firstTracklist); mergedTrackset.addAll(secondTracklist); if (mergedTrackset.isEmpty()) { return 1.0; } return (double) shared / mergedTrackset.size(); } private static double levenshteinDistance(String a, String b) { if (a.isEmpty() || b.isEmpty()) { return THRESHOLD; } return 1.0 - (double) StringUtils.getLevenshteinDistance(a.toLowerCase(), b.toLowerCase()) / Math.max(a.length(), b.length()); } private static String normalize(String s) { s = s.toLowerCase(); s = Normalizer.normalize(s, 
Normalizer.Form.NFD); s = s.replaceAll("\\p{M}", ""); s = s.trim(); return s; } public double similarity(CDRecord firstRecord, CDRecord secondRecord) { Map<String, Double> similarityMap = getSimilarityOfRecords(firstRecord, secondRecord); return calculateAttributeSimilarity(similarityMap); } private static String trimNumbers(String s) { s = s.replaceAll("^\\d+\\s+", ""); return s; } private static Double yearDistance(Short year, Short year2) { if (year == null || year2 == null) { return THRESHOLD; } int diff = 0; int max = 0; int n = Math.max(CDUtility.getNumberOfDigits(year), CDUtility.getNumberOfDigits(year2)); for (int i = 0; i < n; i++) { int weight = i + 1; max += weight * 9; diff += weight * Math.abs(CDUtility.getNthDigit(year, i) - CDUtility.getNthDigit(year2, i)); } return 1 - (double) diff / max; } public CDUtility() { datasetThreshold = THRESHOLD; } /** * Brute-force results: * * <ul> * <li>Recall: 0.8304</li> * <li>Precision: 0.8451</li> * <li>F-Measure: 0.8377</li> * </ul> */ @Override public Double calculateSimilarity(Map<String, Object> firstRecord, Map<String, Object> secondRecord, Map<String, String> parameters) { return similarity(CDRecordParser.parse(firstRecord), CDRecordParser.parse(secondRecord)); } @Override public Map<String, Object> parseRecord(Map<String, String> value) { Map<String, Object> record = new HashMap<>(); record.put(ID, value.get(ID)); record.put(ARTIST_NAME, value.get(ARTIST_NAME)); record.put(TITLE_NAME, value.get(TITLE_NAME)); record.put(CATEGORY_NAME, value.get(CATEGORY_NAME)); String year = value.get(YEAR_NAME); if (!year.isEmpty()) { record.put(YEAR_NAME, Short.parseShort(year)); } record.put(GENRE_NAME, value.get(GENRE_NAME)); record.put(CDEXTRA_NAME, value.get(CDEXTRA_NAME)); record.put(TRACKS_NAME, Arrays.asList(value.get(TRACKS_NAME).split(SEPERATOR)).stream() .map(CDUtility::trimNumbers).map(CDUtility::normalize).collect(Collectors.toList())); return record; } @SuppressWarnings("unchecked") @Override public Double 
compareAttributeValue(String attribute, Object value1, Object value2) { switch (attribute) { case ARTIST_NAME: return levenshteinDistance((String) value1, (String) value2); case TITLE_NAME: return levenshteinDistance((String) value1, (String) value2); case CATEGORY_NAME: return levenshteinDistance((String) value1, (String) value2); case GENRE_NAME: return levenshteinDistance((String) value1, (String) value2); case YEAR_NAME: return yearDistance((Short) value1, (Short) value2); case CDEXTRA_NAME: return levenshteinDistance((String) value1, (String) value2); case TRACKS_NAME: return getSimilarityOfTracks((List<String>) value1, (List<String>) value2); default: throw new IllegalArgumentException("Unknown attribute: " + attribute); } } @Override public Double calculateAttributeSimilarity(Map<String, Double> similarities) { double result = 0.0; for (Entry<String, Double> entry : similarities.entrySet()) { result += Attribute.getForName(entry.getKey()).weight() * entry.getValue(); } return result / Attribute.TOTAL_WEIGHT; } }
package org.apache.lucene.benchmark.byTask;

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Locale;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.benchmark.byTask.feeds.DocMaker;
import org.apache.lucene.benchmark.byTask.feeds.FacetSource;
import org.apache.lucene.benchmark.byTask.feeds.QueryMaker;
import org.apache.lucene.benchmark.byTask.stats.Points;
import org.apache.lucene.benchmark.byTask.tasks.PerfTask;
import org.apache.lucene.benchmark.byTask.tasks.ReadTask;
import org.apache.lucene.benchmark.byTask.tasks.SearchTask;
import org.apache.lucene.benchmark.byTask.utils.Config;
import org.apache.lucene.benchmark.byTask.utils.FileUtils;
import org.apache.lucene.benchmark.byTask.tasks.NewAnalyzerTask;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.IOUtils;

/**
 * Data maintained by a performance test run.
 * <p>
 * Data includes:
 * <ul>
 *  <li>Configuration.
 *  <li>Directory, Writer, Reader.
 *  <li>Taxonomy Directory, Writer, Reader.
 *  <li>DocMaker, FacetSource and a few instances of QueryMaker.
 *  <li>Analyzer.
 *  <li>Statistics data which updated during the run.
 * </ul>
 * Config properties:
 * <ul>
 *  <li><b>work.dir</b>=&lt;path to root of docs and index dirs| Default: work&gt;
 *  <li><b>analyzer</b>=&lt;class name for analyzer| Default: StandardAnalyzer&gt;
 *  <li><b>doc.maker</b>=&lt;class name for doc-maker| Default: DocMaker&gt;
 *  <li><b>facet.source</b>=&lt;class name for facet-source| Default: RandomFacetSource&gt;
 *  <li><b>query.maker</b>=&lt;class name for query-maker| Default: SimpleQueryMaker&gt;
 *  <li><b>log.queries</b>=&lt;whether queries should be printed| Default: false&gt;
 *  <li><b>directory</b>=&lt;type of directory to use for the index| Default: RAMDirectory&gt;
 *  <li><b>taxonomy.directory</b>=&lt;type of directory for taxonomy index| Default: RAMDirectory&gt;
 * </ul>
 */
public class PerfRunData implements Closeable {

  private Points points;

  // objects used during performance test run
  // directory, analyzer, docMaker - created at startup.
  // reader, writer, searcher - maintained by basic tasks.
  private Directory directory;
  private Analyzer analyzer;
  private DocMaker docMaker;
  private FacetSource facetSource;
  private Locale locale;

  // taxonomy index: separate directory plus its own writer/reader pair.
  private Directory taxonomyDir;
  private TaxonomyWriter taxonomyWriter;
  private TaxonomyReader taxonomyReader;

  // we use separate (identical) instances for each "read" task type, so each can iterate the queries separately.
  private HashMap<Class<? extends ReadTask>,QueryMaker> readTaskQueryMaker;
  private Class<? extends QueryMaker> qmkrClass;

  private IndexReader indexReader;
  private IndexSearcher indexSearcher;
  private IndexWriter indexWriter;
  private Config config;
  private long startTimeMillis;

  // objects registered by tasks via setPerfObject(); Closeables among them are closed in close().
  private final HashMap<String, Object> perfObjects = new HashMap<String, Object>();

  // constructor
  public PerfRunData (Config config) throws Exception {
    this.config = config;
    // analyzer (default is standard analyzer)
    analyzer = NewAnalyzerTask.createAnalyzer(config.get("analyzer",
        "org.apache.lucene.analysis.standard.StandardAnalyzer"));
    // doc maker
    docMaker = Class.forName(config.get("doc.maker",
        "org.apache.lucene.benchmark.byTask.feeds.DocMaker")).asSubclass(DocMaker.class).newInstance();
    docMaker.setConfig(config);
    // facet source
    facetSource = Class.forName(config.get("facet.source",
        "org.apache.lucene.benchmark.byTask.feeds.RandomFacetSource")).asSubclass(FacetSource.class).newInstance();
    facetSource.setConfig(config);
    // query makers: instances are created lazily per read-task class in getQueryMaker().
    readTaskQueryMaker = new HashMap<Class<? extends ReadTask>,QueryMaker>();
    qmkrClass = Class.forName(config.get("query.maker","org.apache.lucene.benchmark.byTask.feeds.SimpleQueryMaker")).asSubclass(QueryMaker.class);
    // index stuff
    reinit(false);
    // statistic points
    points = new Points(config);
    if (Boolean.valueOf(config.get("log.queries","false")).booleanValue()) {
      System.out.println("------------> queries:");
      System.out.println(getQueryMaker(new SearchTask(this)).printQueries());
    }
  }

  public void close() throws IOException {
    // Writers are closed before readers, readers before their directories.
    IOUtils.close(indexWriter, indexReader, indexSearcher, directory,
                  taxonomyWriter, taxonomyReader, taxonomyDir, docMaker, facetSource);

    // close all perf objects that are closeable.
    ArrayList<Closeable> perfObjectsToClose = new ArrayList<Closeable>();
    for (Object obj : perfObjects.values()) {
      if (obj instanceof Closeable) {
        perfObjectsToClose.add((Closeable) obj);
      }
    }
    IOUtils.close(perfObjectsToClose);
  }

  // clean old stuff, reopen
  public void reinit(boolean eraseIndex) throws Exception {

    // cleanup index
    IOUtils.close(indexWriter, indexReader, directory);
    indexWriter = null;
    indexReader = null;

    IOUtils.close(taxonomyWriter, taxonomyReader, taxonomyDir);
    taxonomyWriter = null;
    taxonomyReader = null;

    // directory (default is ram-dir).
    directory = createDirectory(eraseIndex, "index", "directory");
    taxonomyDir = createDirectory(eraseIndex, "taxo", "taxonomy.directory");

    // inputs
    resetInputs();

    // release unused stuff
    System.runFinalization();
    System.gc();

    // Re-init clock
    setStartTimeMillis();
  }

  // Creates either an FSDirectory under work.dir/<dirName> or a fresh RAMDirectory,
  // depending on the value of config property <dirParam>.
  private Directory createDirectory(boolean eraseIndex, String dirName, String dirParam) throws IOException {
    if ("FSDirectory".equals(config.get(dirParam,"RAMDirectory"))) {
      File workDir = new File(config.get("work.dir","work"));
      File indexDir = new File(workDir,dirName);
      if (eraseIndex && indexDir.exists()) {
        FileUtils.fullyDelete(indexDir);
      }
      indexDir.mkdirs();
      return FSDirectory.open(indexDir);
    }
    return new RAMDirectory();
  }

  /** Returns an object that was previously set by {@link #setPerfObject(String, Object)}. */
  public synchronized Object getPerfObject(String key) {
    return perfObjects.get(key);
  }

  /**
   * Sets an object that is required by {@link PerfTask}s, keyed by the given
   * {@code key}. If the object implements {@link Closeable}, it will be closed
   * by {@link #close()}.
   */
  public synchronized void setPerfObject(String key, Object obj) {
    perfObjects.put(key, obj);
  }

  // Resets the run clock to "now" and returns the new start time.
  public long setStartTimeMillis() {
    startTimeMillis = System.currentTimeMillis();
    return startTimeMillis;
  }

  /**
   * @return Start time in milliseconds
   */
  public long getStartTimeMillis() {
    return startTimeMillis;
  }

  /**
   * @return Returns the points.
   */
  public Points getPoints() {
    return points;
  }

  /**
   * @return Returns the directory.
   */
  public Directory getDirectory() {
    return directory;
  }

  /**
   * @param directory The directory to set.
   */
  public void setDirectory(Directory directory) {
    this.directory = directory;
  }

  /**
   * @return Returns the taxonomy directory
   */
  public Directory getTaxonomyDir() {
    return taxonomyDir;
  }

  /**
   * Set the taxonomy reader. Takes ownership of that taxonomy reader, that is,
   * internally performs taxoReader.incRef() (If caller no longer needs that
   * reader it should decRef()/close() it after calling this method, otherwise,
   * the reader will remain open).
   * @param taxoReader The taxonomy reader to set.
   */
  public synchronized void setTaxonomyReader(TaxonomyReader taxoReader) throws IOException {
    if (taxoReader == this.taxonomyReader) {
      return;
    }
    // Release the previously held reader before taking a reference on the new one.
    if (taxonomyReader != null) {
      taxonomyReader.decRef();
    }

    if (taxoReader != null) {
      taxoReader.incRef();
    }
    this.taxonomyReader = taxoReader;
  }

  /**
   * @return Returns the taxonomyReader.  NOTE: this returns a
   * reference.  You must call TaxonomyReader.decRef() when
   * you're done.
   */
  public synchronized TaxonomyReader getTaxonomyReader() {
    if (taxonomyReader != null) {
      taxonomyReader.incRef();
    }
    return taxonomyReader;
  }

  /**
   * @param taxoWriter The taxonomy writer to set.
   */
  public void setTaxonomyWriter(TaxonomyWriter taxoWriter) {
    this.taxonomyWriter = taxoWriter;
  }

  public TaxonomyWriter getTaxonomyWriter() {
    return taxonomyWriter;
  }

  /**
   * @return Returns the indexReader.  NOTE: this returns a
   * reference.  You must call IndexReader.decRef() when
   * you're done.
   */
  public synchronized IndexReader getIndexReader() {
    if (indexReader != null) {
      indexReader.incRef();
    }
    return indexReader;
  }

  /**
   * @return Returns the indexSearcher.  NOTE: this returns
   * a reference to the underlying IndexReader.  You must
   * call IndexReader.decRef() when you're done.
   */
  public synchronized IndexSearcher getIndexSearcher() {
    // incRef the reader (not the searcher): the caller's decRef contract is on the reader.
    if (indexReader != null) {
      indexReader.incRef();
    }
    return indexSearcher;
  }

  /**
   * Set the index reader. Takes ownership of that index reader, that is,
   * internally performs indexReader.incRef() (If caller no longer needs that
   * reader it should decRef()/close() it after calling this method, otherwise,
   * the reader will remain open).
   * @param indexReader The indexReader to set.
   */
  public synchronized void setIndexReader(IndexReader indexReader) throws IOException {
    if (indexReader == this.indexReader) {
      return;
    }

    if (this.indexReader != null) {
      // Release current IR
      this.indexReader.decRef();
    }

    this.indexReader = indexReader;
    if (indexReader != null) {
      // Hold reference to new IR
      indexReader.incRef();
      indexSearcher = new IndexSearcher(indexReader);
    } else {
      indexSearcher = null;
    }
  }

  /**
   * @return Returns the indexWriter.
   */
  public IndexWriter getIndexWriter() {
    return indexWriter;
  }

  /**
   * @param indexWriter The indexWriter to set.
   */
  public void setIndexWriter(IndexWriter indexWriter) {
    this.indexWriter = indexWriter;
  }

  /**
   * @return Returns the analyzer.
   */
  public Analyzer getAnalyzer() {
    return analyzer;
  }

  public void setAnalyzer(Analyzer analyzer) {
    this.analyzer = analyzer;
  }

  /** Returns the docMaker. */
  public DocMaker getDocMaker() {
    return docMaker;
  }

  /** Returns the facet source. */
  public FacetSource getFacetSource() {
    return facetSource;
  }

  /**
   * @return the locale
   */
  public Locale getLocale() {
    return locale;
  }

  /**
   * @param locale the locale to set
   */
  public void setLocale(Locale locale) {
    this.locale = locale;
  }

  /**
   * @return Returns the config.
   */
  public Config getConfig() {
    return config;
  }

  public void resetInputs() throws IOException {
    docMaker.resetInputs();
    facetSource.resetInputs();
    for (final QueryMaker queryMaker : readTaskQueryMaker.values()) {
      queryMaker.resetInputs();
    }
  }

  /**
   * @return Returns the queryMaker by read task type (class)
   */
  synchronized public QueryMaker getQueryMaker(ReadTask readTask) {
    // mapping the query maker by task class allows extending/adding new search/read tasks
    // without needing to modify this class.
    Class<? extends ReadTask> readTaskClass = readTask.getClass();
    QueryMaker qm = readTaskQueryMaker.get(readTaskClass);
    if (qm == null) {
      try {
        qm = qmkrClass.newInstance();
        qm.setConfig(config);
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
      readTaskQueryMaker.put(readTaskClass,qm);
    }
    return qm;
  }
}
package com.mantralabsglobal.cashin.social;

import android.content.Context;
import android.net.Uri;
import android.util.Log;

import com.google.gson.Gson;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
import com.mantralabsglobal.cashin.R;
import com.mantralabsglobal.cashin.service.FacebookService;

import org.json.JSONArray;
import org.json.JSONObject;
import org.scribe.builder.ServiceBuilder;
import org.scribe.builder.api.FacebookApi;
import org.scribe.builder.api.LinkedInApi;
import org.scribe.model.Token;
import org.scribe.oauth.OAuthService;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// NOTE(review): LinkedInApi, JSONArray/JSONObject, ParseException, SimpleDateFormat and
// Arrays appear unused in the active code (some are only referenced by the commented-out
// block below) — candidates for cleanup once confirmed.

/**
 * Facebook OAuth integration: builds the scribe OAuth service, extracts the
 * verifier code from the OAuth callback URL, and maps the Graph API "/me"
 * JSON response onto a {@link FacebookService.FacebookProfile}.
 *
 * Created by pk on 7/4/2015.
 */
public class Facebook extends SocialBase<FacebookService.FacebookProfile> {

    // Graph API "/me" endpoint with the exact profile fields this class consumes.
    private static final String PROTECTED_RESOURCE_URL =
            "https://graph.facebook.com/me?fields=location,first_name,last_name,birthday,hometown,relationship_status,work";

    final static String CALLBACK = "http://www.mantralabsglobal.com/oauth_callback/";

    // Facebook (OAuth2) needs no request token; scribe expects null here.
    private static final Token EMPTY_TOKEN = null;

    @Override
    public OAuthService getOAuthService(Context context) {
        return new ServiceBuilder()
                .provider(FacebookApi.class)
                .scope("public_profile,user_birthday,user_hometown,user_location,user_relationship_details,user_work_history")
                .apiKey(context.getResources().getString(R.string.facebook_app_id))
                .apiSecret(context.getResources().getString(R.string.facebook_secret))
                .callback(CALLBACK)
                .build();
    }

    @Override
    public Token getRequestToken(OAuthService service) {
        return EMPTY_TOKEN;
    }

    @Override
    public String getCallBackUrl() {
        return CALLBACK;
    }

    /** Extracts the OAuth2 "code" query parameter from the redirect URL. */
    @Override
    public String getVerifierCode(String callbackUrl) {
        Uri uri = Uri.parse(callbackUrl);
        String verifier = uri.getQueryParameter("code");
        return verifier;
    }

    @Override
    protected String getProfileUrl() {
        return PROTECTED_RESOURCE_URL;
    }

    /** Deserializes the Graph API JSON body via Gson and converts it to the service profile. */
    @Override
    protected FacebookService.FacebookProfile getProfileFromResponse(String responseBody) {
        Gson gson = new Gson();
        FacebookUserProfile facebookUserProfile = gson.fromJson(responseBody, FacebookUserProfile.class);
        Log.d("facebook data", responseBody);
        return convertToFacebookFacebookProfile(facebookUserProfile);
    }

    /**
     * Maps the Gson-deserialized Graph payload onto FacebookService.FacebookProfile.
     * Returns null when the input is null; optional fields are copied only when present.
     */
    private FacebookService.FacebookProfile convertToFacebookFacebookProfile(FacebookUserProfile facebookUserProfile) {
        FacebookService.FacebookProfile facebookProfile = null;
        if (facebookUserProfile != null) {
            facebookProfile = new FacebookService.FacebookProfile();
            if (facebookUserProfile.getLocation() != null)
                facebookProfile.setCity(facebookUserProfile.getLocation().getName());
            facebookProfile.setConnectedAs(facebookUserProfile.getFirstName() + " " + facebookUserProfile.getLastName());
            if (facebookUserProfile.getBirthday() != null) {
                // changeDateFormat is inherited from SocialBase — presumably normalises the
                // Graph API date string; confirm the expected input/output formats there.
                String formattedDate = changeDateFormat(facebookUserProfile.getBirthday());
                facebookProfile.setDob(formattedDate);
            }
            if (facebookUserProfile.getHometown() != null)
                facebookProfile.setHometown(facebookUserProfile.getHometown().getName());
            //Not available
            //facebookProfile.setRelationshipStatus(facebookUserProfile.getRelationshipStatus());
            if (facebookUserProfile.getWork() != null && facebookUserProfile.getWork().size() > 0)
                facebookProfile.setWorkspace(facebookUserProfile.getWork().get(0).getEmployer().getName());
        }
        return facebookProfile;
    }

    /*public static void getFacebookProfile(AccessToken accessToken, final SocialBase.SocialListener<FacebookService.FacebookProfile> listener) {
        GraphRequest.newMeRequest(accessToken, new GraphRequest.GraphJSONObjectCallback() {
            @Override
            public void onCompleted(JSONObject jsonObject, GraphResponse graphResponse) {
                FacebookService.FacebookProfile facebookProfile = new FacebookService.FacebookProfile();
                facebookProfile.setDob(jsonObject.optString("birthday"));
                facebookProfile.setConnectedAs(jsonObject.optString("name"));
                JSONArray workArray = jsonObject.optJSONArray("work");
                if(workArray != null && workArray.length()>0) {
                    JSONObject workEx = workArray.optJSONObject(0);
                    if(workEx != null) {
                        JSONObject employer = workEx.optJSONObject("employer");
                        facebookProfile.setWorkspace(employer.optString("name"));
                    }
                }
                JSONObject jsonLocation = jsonObject.optJSONObject("location");
                if(jsonLocation != null) {
                    facebookProfile.setCity(jsonLocation.optString("name"));
                }
                JSONObject jsonHomeTown = jsonObject.optJSONObject("home_town");
                if(jsonHomeTown != null) {
                    facebookProfile.setHometown(jsonHomeTown.optString("name"));
                }
                facebookProfile.setRelationshipStatus(jsonObject.optString("relationship_status"));
                listener.onSuccess(facebookProfile);
            }
        }).executeAsync();
    }*/

    // The POJOs below mirror the Graph API JSON; Gson binds them by field name and
    // @SerializedName, so the field names must not be changed.
    // NOTE(review): they are non-static inner classes — making them static would avoid
    // carrying a hidden reference to the enclosing Facebook instance; verify Gson usage first.

    /** Graph API "employer" object inside a work entry. */
    public class Employer {

        @Expose
        private String id;
        @Expose
        private String name;

        /** @return The id */
        public String getId() {
            return id;
        }

        /** @param id The id */
        public void setId(String id) {
            this.id = id;
        }

        /** @return The name */
        public String getName() {
            return name;
        }

        /** @param name The name */
        public void setName(String name) {
            this.name = name;
        }
    }

    /** Top-level Graph API "/me" response object. */
    public class FacebookUserProfile {

        @Expose
        private String id;
        @Expose
        private String birthday;
        @Expose
        private String email;
        @SerializedName("first_name")
        @Expose
        private String firstName;
        @Expose
        private String gender;
        @Expose
        private Hometown hometown;
        @SerializedName("last_name")
        @Expose
        private String lastName;
        @Expose
        private String link;
        @Expose
        private Location location;
        @Expose
        private String locale;
        @Expose
        private String name;
        @Expose
        private Double timezone;
        @SerializedName("updated_time")
        @Expose
        private String updatedTime;
        @Expose
        private Boolean verified;
        @Expose
        private List<Work> work = new ArrayList<Work>();

        /** @return The id */
        public String getId() {
            return id;
        }

        /** @param id The id */
        public void setId(String id) {
            this.id = id;
        }

        /** @return The birthday */
        public String getBirthday() {
            return birthday;
        }

        /** @param birthday The birthday */
        public void setBirthday(String birthday) {
            this.birthday = birthday;
        }

        /** @return The email */
        public String getEmail() {
            return email;
        }

        /** @param email The email */
        public void setEmail(String email) {
            this.email = email;
        }

        /** @return The firstName */
        public String getFirstName() {
            return firstName;
        }

        /** @param firstName The first_name */
        public void setFirstName(String firstName) {
            this.firstName = firstName;
        }

        /** @return The gender */
        public String getGender() {
            return gender;
        }

        /** @param gender The gender */
        public void setGender(String gender) {
            this.gender = gender;
        }

        /** @return The hometown */
        public Hometown getHometown() {
            return hometown;
        }

        /** @param hometown The hometown */
        public void setHometown(Hometown hometown) {
            this.hometown = hometown;
        }

        /** @return The lastName */
        public String getLastName() {
            return lastName;
        }

        /** @param lastName The last_name */
        public void setLastName(String lastName) {
            this.lastName = lastName;
        }

        /** @return The link */
        public String getLink() {
            return link;
        }

        /** @param link The link */
        public void setLink(String link) {
            this.link = link;
        }

        /** @return The location */
        public Location getLocation() {
            return location;
        }

        /** @param location The location */
        public void setLocation(Location location) {
            this.location = location;
        }

        /** @return The locale */
        public String getLocale() {
            return locale;
        }

        /** @param locale The locale */
        public void setLocale(String locale) {
            this.locale = locale;
        }

        /** @return The name */
        public String getName() {
            return name;
        }

        /** @param name The name */
        public void setName(String name) {
            this.name = name;
        }

        /** @return The timezone */
        public Double getTimezone() {
            return timezone;
        }

        /** @param timezone The timezone */
        public void setTimezone(Double timezone) {
            this.timezone = timezone;
        }

        /** @return The updatedTime */
        public String getUpdatedTime() {
            return updatedTime;
        }

        /** @param updatedTime The updated_time */
        public void setUpdatedTime(String updatedTime) {
            this.updatedTime = updatedTime;
        }

        /** @return The verified */
        public Boolean getVerified() {
            return verified;
        }

        /** @param verified The verified */
        public void setVerified(Boolean verified) {
            this.verified = verified;
        }

        /** @return The work */
        public List<Work> getWork() {
            return work;
        }

        /** @param work The work */
        public void setWork(List<Work> work) {
            this.work = work;
        }
    }

    /** Graph API "hometown" object. */
    public class Hometown {

        @Expose
        private String id;
        @Expose
        private String name;

        /** @return The id */
        public String getId() {
            return id;
        }

        /** @param id The id */
        public void setId(String id) {
            this.id = id;
        }

        /** @return The name */
        public String getName() {
            return name;
        }

        /** @param name The name */
        public void setName(String name) {
            this.name = name;
        }
    }

    /** Graph API "location" object (top-level profile location). */
    public class Location {

        @Expose
        private String id;
        @Expose
        private String name;

        /** @return The id */
        public String getId() {
            return id;
        }

        /** @param id The id */
        public void setId(String id) {
            this.id = id;
        }

        /** @return The name */
        public String getName() {
            return name;
        }

        /** @param name The name */
        public void setName(String name) {
            this.name = name;
        }
    }

    /** Graph API location object nested inside a work entry (distinct type, same shape as Location). */
    public class Location_ {

        @Expose
        private String id;
        @Expose
        private String name;

        /** @return The id */
        public String getId() {
            return id;
        }

        /** @param id The id */
        public void setId(String id) {
            this.id = id;
        }

        /** @return The name */
        public String getName() {
            return name;
        }

        /** @param name The name */
        public void setName(String name) {
            this.name = name;
        }
    }

    /** Graph API "position" object inside a work entry. */
    public class Position {

        @Expose
        private String id;
        @Expose
        private String name;

        /** @return The id */
        public String getId() {
            return id;
        }

        /** @param id The id */
        public void setId(String id) {
            this.id = id;
        }

        /** @return The name */
        public String getName() {
            return name;
        }

        /** @param name The name */
        public void setName(String name) {
            this.name = name;
        }
    }

    /** One entry of the Graph API "work" array (employment history). */
    public class Work {

        @Expose
        private Employer employer;
        @Expose
        private Location_ location;
        @Expose
        private Position position;
        @SerializedName("start_date")
        @Expose
        private String startDate;
        @SerializedName("end_date")
        @Expose
        private String endDate;

        /** @return The employer */
        public Employer getEmployer() {
            return employer;
        }

        /** @param employer The employer */
        public void setEmployer(Employer employer) {
            this.employer = employer;
        }

        /** @return The location */
        public Location_ getLocation() {
            return location;
        }

        /** @param location The location */
        public void setLocation(Location_ location) {
            this.location = location;
        }

        /** @return The position */
        public Position getPosition() {
            return position;
        }

        /** @param position The position */
        public void setPosition(Position position) {
            this.position = position;
        }

        /** @return The startDate */
        public String getStartDate() {
            return startDate;
        }

        /** @param startDate The start_date */
        public void setStartDate(String startDate) {
            this.startDate = startDate;
        }

        /** @return The endDate */
        public String getEndDate() {
            return endDate;
        }

        /** @param endDate The end_date */
        public void setEndDate(String endDate) {
            this.endDate = endDate;
        }
    }
}
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.comprehend.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * Describes the level of confidence that Amazon Comprehend has in the accuracy of its detection of sentiments. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/comprehend-2017-11-27/SentimentScore" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class SentimentScore implements Serializable, Cloneable, StructuredPojo { /** * <p> * The level of confidence that Amazon Comprehend has in the accuracy of its detection of the <code>POSITIVE</code> * sentiment. * </p> */ private Float positive; /** * <p> * The level of confidence that Amazon Comprehend has in the accuracy of its detection of the <code>NEGATIVE</code> * sentiment. * </p> */ private Float negative; /** * <p> * The level of confidence that Amazon Comprehend has in the accuracy of its detection of the <code>NEUTRAL</code> * sentiment. * </p> */ private Float neutral; /** * <p> * The level of confidence that Amazon Comprehend has in the accuracy of its detection of the <code>MIXED</code> * sentiment. * </p> */ private Float mixed; /** * <p> * The level of confidence that Amazon Comprehend has in the accuracy of its detection of the <code>POSITIVE</code> * sentiment. 
* </p> * * @param positive * The level of confidence that Amazon Comprehend has in the accuracy of its detection of the * <code>POSITIVE</code> sentiment. */ public void setPositive(Float positive) { this.positive = positive; } /** * <p> * The level of confidence that Amazon Comprehend has in the accuracy of its detection of the <code>POSITIVE</code> * sentiment. * </p> * * @return The level of confidence that Amazon Comprehend has in the accuracy of its detection of the * <code>POSITIVE</code> sentiment. */ public Float getPositive() { return this.positive; } /** * <p> * The level of confidence that Amazon Comprehend has in the accuracy of its detection of the <code>POSITIVE</code> * sentiment. * </p> * * @param positive * The level of confidence that Amazon Comprehend has in the accuracy of its detection of the * <code>POSITIVE</code> sentiment. * @return Returns a reference to this object so that method calls can be chained together. */ public SentimentScore withPositive(Float positive) { setPositive(positive); return this; } /** * <p> * The level of confidence that Amazon Comprehend has in the accuracy of its detection of the <code>NEGATIVE</code> * sentiment. * </p> * * @param negative * The level of confidence that Amazon Comprehend has in the accuracy of its detection of the * <code>NEGATIVE</code> sentiment. */ public void setNegative(Float negative) { this.negative = negative; } /** * <p> * The level of confidence that Amazon Comprehend has in the accuracy of its detection of the <code>NEGATIVE</code> * sentiment. * </p> * * @return The level of confidence that Amazon Comprehend has in the accuracy of its detection of the * <code>NEGATIVE</code> sentiment. */ public Float getNegative() { return this.negative; } /** * <p> * The level of confidence that Amazon Comprehend has in the accuracy of its detection of the <code>NEGATIVE</code> * sentiment. 
* </p> * * @param negative * The level of confidence that Amazon Comprehend has in the accuracy of its detection of the * <code>NEGATIVE</code> sentiment. * @return Returns a reference to this object so that method calls can be chained together. */ public SentimentScore withNegative(Float negative) { setNegative(negative); return this; } /** * <p> * The level of confidence that Amazon Comprehend has in the accuracy of its detection of the <code>NEUTRAL</code> * sentiment. * </p> * * @param neutral * The level of confidence that Amazon Comprehend has in the accuracy of its detection of the * <code>NEUTRAL</code> sentiment. */ public void setNeutral(Float neutral) { this.neutral = neutral; } /** * <p> * The level of confidence that Amazon Comprehend has in the accuracy of its detection of the <code>NEUTRAL</code> * sentiment. * </p> * * @return The level of confidence that Amazon Comprehend has in the accuracy of its detection of the * <code>NEUTRAL</code> sentiment. */ public Float getNeutral() { return this.neutral; } /** * <p> * The level of confidence that Amazon Comprehend has in the accuracy of its detection of the <code>NEUTRAL</code> * sentiment. * </p> * * @param neutral * The level of confidence that Amazon Comprehend has in the accuracy of its detection of the * <code>NEUTRAL</code> sentiment. * @return Returns a reference to this object so that method calls can be chained together. */ public SentimentScore withNeutral(Float neutral) { setNeutral(neutral); return this; } /** * <p> * The level of confidence that Amazon Comprehend has in the accuracy of its detection of the <code>MIXED</code> * sentiment. * </p> * * @param mixed * The level of confidence that Amazon Comprehend has in the accuracy of its detection of the * <code>MIXED</code> sentiment. */ public void setMixed(Float mixed) { this.mixed = mixed; } /** * <p> * The level of confidence that Amazon Comprehend has in the accuracy of its detection of the <code>MIXED</code> * sentiment. 
* </p> * * @return The level of confidence that Amazon Comprehend has in the accuracy of its detection of the * <code>MIXED</code> sentiment. */ public Float getMixed() { return this.mixed; } /** * <p> * The level of confidence that Amazon Comprehend has in the accuracy of its detection of the <code>MIXED</code> * sentiment. * </p> * * @param mixed * The level of confidence that Amazon Comprehend has in the accuracy of its detection of the * <code>MIXED</code> sentiment. * @return Returns a reference to this object so that method calls can be chained together. */ public SentimentScore withMixed(Float mixed) { setMixed(mixed); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getPositive() != null) sb.append("Positive: ").append(getPositive()).append(","); if (getNegative() != null) sb.append("Negative: ").append(getNegative()).append(","); if (getNeutral() != null) sb.append("Neutral: ").append(getNeutral()).append(","); if (getMixed() != null) sb.append("Mixed: ").append(getMixed()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof SentimentScore == false) return false; SentimentScore other = (SentimentScore) obj; if (other.getPositive() == null ^ this.getPositive() == null) return false; if (other.getPositive() != null && other.getPositive().equals(this.getPositive()) == false) return false; if (other.getNegative() == null ^ this.getNegative() == null) return false; if (other.getNegative() != null && other.getNegative().equals(this.getNegative()) == false) return false; if (other.getNeutral() == null ^ this.getNeutral() == 
null) return false; if (other.getNeutral() != null && other.getNeutral().equals(this.getNeutral()) == false) return false; if (other.getMixed() == null ^ this.getMixed() == null) return false; if (other.getMixed() != null && other.getMixed().equals(this.getMixed()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getPositive() == null) ? 0 : getPositive().hashCode()); hashCode = prime * hashCode + ((getNegative() == null) ? 0 : getNegative().hashCode()); hashCode = prime * hashCode + ((getNeutral() == null) ? 0 : getNeutral().hashCode()); hashCode = prime * hashCode + ((getMixed() == null) ? 0 : getMixed().hashCode()); return hashCode; } @Override public SentimentScore clone() { try { return (SentimentScore) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.comprehend.model.transform.SentimentScoreMarshaller.getInstance().marshall(this, protocolMarshaller); } }
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.search.suggest.completion.context;

import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;

import static org.apache.lucene.spatial.util.GeoHashUtils.addNeighbors;
import static org.apache.lucene.spatial.util.GeoHashUtils.stringEncode;

/**
 * A {@link ContextMapping} that uses a geo location/area as a
 * criteria.
 * The suggestions can be boosted and/or filtered depending on
 * whether it falls within an area, represented by a query geo hash
 * with a specified precision.
 *
 * {@link GeoQueryContext} defines the options for constructing
 * a unit of query context for this context type.
 */
public class GeoContextMapping extends ContextMapping<GeoQueryContext> {

    // Mapping-level configuration keys (used in the field mapping definition).
    public static final String FIELD_PRECISION = "precision";
    public static final String FIELD_FIELDNAME = "path";
    public static final int DEFAULT_PRECISION = 6;

    // Query-context-level keys (used when a GeoQueryContext is supplied at query time).
    static final String CONTEXT_VALUE = "context";
    static final String CONTEXT_BOOST = "boost";
    static final String CONTEXT_PRECISION = "precision";
    static final String CONTEXT_NEIGHBOURS = "neighbours";

    // Geohash length used when indexing context values (1..12 in geohash terms).
    private final int precision;
    // Optional name of a geo_point field to pull context values from; may be null.
    private final String fieldName;

    private GeoContextMapping(String name, String fieldName, int precision) {
        super(Type.GEO, name);
        this.precision = precision;
        this.fieldName = fieldName;
    }

    public String getFieldName() {
        return fieldName;
    }

    public int getPrecision() {
        return precision;
    }

    /**
     * Builds a {@link GeoContextMapping} from a parsed mapping configuration.
     * Consumes (removes) the keys it understands so callers can detect leftovers.
     */
    protected static GeoContextMapping load(String name, Map<String, Object> config) {
        final GeoContextMapping.Builder builder = new GeoContextMapping.Builder(name);
        if (config != null) {
            final Object configPrecision = config.get(FIELD_PRECISION);
            if (configPrecision != null) {
                // NOTE: an Integer is taken as a geohash level directly (Builder#precision(int)),
                // while Long/Double/Float unbox/widen to double and are interpreted as a
                // distance in meters (Builder#precision(double)); a String is parsed as a
                // distance with units. This asymmetry is intentional per the builder overloads.
                if (configPrecision instanceof Integer) {
                    builder.precision((Integer) configPrecision);
                } else if (configPrecision instanceof Long) {
                    builder.precision((Long) configPrecision);
                } else if (configPrecision instanceof Double) {
                    builder.precision((Double) configPrecision);
                } else if (configPrecision instanceof Float) {
                    builder.precision((Float) configPrecision);
                } else {
                    builder.precision(configPrecision.toString());
                }
                config.remove(FIELD_PRECISION);
            }
            final Object fieldName = config.get(FIELD_FIELDNAME);
            if (fieldName != null) {
                builder.field(fieldName.toString());
                config.remove(FIELD_FIELDNAME);
            }
        }
        return builder.build();
    }

    /** Serializes this mapping's configuration back into the mapping XContent. */
    @Override
    protected XContentBuilder toInnerXContent(XContentBuilder builder, Params params) throws IOException {
        builder.field(FIELD_PRECISION, precision);
        if (fieldName != null) {
            builder.field(FIELD_FIELDNAME, fieldName);
        }
        return builder;
    }

    /**
     * Parse a set of {@link CharSequence} contexts at index-time.
     * Acceptable formats:
     *
     * <ul>
     *     <li>Array: <pre>[<i>&lt;GEO POINT&gt;</i>, ..]</pre></li>
     *     <li>String/Object/Array: <pre>&quot;GEO POINT&quot;</pre></li>
     * </ul>
     *
     * see {@link GeoUtils#parseGeoPoint(String, GeoPoint)} for GEO POINT
     */
    @Override
    public Set<CharSequence> parseContext(ParseContext parseContext, XContentParser parser) throws IOException, ElasticsearchParseException {
        if (fieldName != null) {
            // When a "path" is configured it must reference a geo_point field.
            FieldMapper mapper = parseContext.docMapper().mappers().getMapper(fieldName);
            if (!(mapper instanceof GeoPointFieldMapper)) {
                throw new ElasticsearchParseException("referenced field must be mapped to geo_point");
            }
        }
        final Set<CharSequence> contexts = new HashSet<>();
        Token token = parser.currentToken();
        if (token == Token.START_ARRAY) {
            token = parser.nextToken();
            // Test if value is a single point in <code>[lon, lat]</code> format
            if (token == Token.VALUE_NUMBER) {
                double lon = parser.doubleValue();
                if (parser.nextToken() == Token.VALUE_NUMBER) {
                    double lat = parser.doubleValue();
                    if (parser.nextToken() == Token.END_ARRAY) {
                        contexts.add(stringEncode(lon, lat, precision));
                    } else {
                        throw new ElasticsearchParseException("only two values [lon, lat] expected");
                    }
                } else {
                    throw new ElasticsearchParseException("latitude must be a numeric value");
                }
            } else {
                // Otherwise treat the array as a list of GEO POINT values.
                while (token != Token.END_ARRAY) {
                    GeoPoint point = GeoUtils.parseGeoPoint(parser);
                    contexts.add(stringEncode(point.getLon(), point.getLat(), precision));
                    token = parser.nextToken();
                }
            }
        } else if (token == Token.VALUE_STRING) {
            // A bare string is taken as a geohash and truncated to the mapping precision.
            final String geoHash = parser.text();
            final CharSequence truncatedGeoHash = geoHash.subSequence(0, Math.min(geoHash.length(), precision));
            contexts.add(truncatedGeoHash);
        } else {
            // or a single location GeoPoint
            GeoPoint point = GeoUtils.parseGeoPoint(parser);
            contexts.add(stringEncode(point.getLon(), point.getLat(), precision));
        }
        return contexts;
    }

    /**
     * Extracts geohash contexts from the already-parsed document, reading the
     * configured {@link #fieldName} (if any). All geohashes are truncated to at
     * most this mapping's precision before being returned.
     */
    @Override
    public Set<CharSequence> parseContext(Document document) {
        final Set<CharSequence> geohashes = new HashSet<>();

        if (fieldName != null) {
            IndexableField[] fields = document.getFields(fieldName);
            GeoPoint spare = new GeoPoint();
            if (fields.length == 0) {
                // No combined field present: fall back to the split ".lon"/".lat" sub-fields.
                IndexableField[] lonFields = document.getFields(fieldName + ".lon");
                IndexableField[] latFields = document.getFields(fieldName + ".lat");
                if (lonFields.length > 0 && latFields.length > 0) {
                    for (int i = 0; i < lonFields.length; i++) {
                        IndexableField lonField = lonFields[i];
                        IndexableField latField = latFields[i];
                        assert lonField.fieldType().docValuesType() == latField.fieldType().docValuesType();
                        // we write doc values fields differently: one field for all values, so we need to only care about indexed fields
                        if (lonField.fieldType().docValuesType() == DocValuesType.NONE) {
                            spare.reset(latField.numericValue().doubleValue(), lonField.numericValue().doubleValue());
                            geohashes.add(stringEncode(spare.getLon(), spare.getLat(), precision));
                        }
                    }
                }
            } else {
                for (IndexableField field : fields) {
                    if (field instanceof StringField) {
                        // Stored as "lat,lon" (or geohash) string.
                        spare.resetFromString(field.stringValue());
                    } else {
                        // Stored as a numeric index hash encoded in the string value.
                        spare.resetFromIndexHash(Long.parseLong(field.stringValue()));
                    }
                    geohashes.add(spare.geohash());
                }
            }
        }

        Set<CharSequence> locations = new HashSet<>();
        for (CharSequence geohash : geohashes) {
            int precision = Math.min(this.precision, geohash.length());
            CharSequence truncatedGeohash = geohash.subSequence(0, precision);
            locations.add(truncatedGeohash);
        }
        return locations;
    }

    @Override
    protected GeoQueryContext fromXContent(XContentParser parser) throws IOException {
        return GeoQueryContext.fromXContent(parser);
    }

    /**
     * Parse a list of {@link GeoQueryContext}
     * using <code>parser</code>. A QueryContexts accepts one of the following forms:
     *
     * <ul>
     *     <li>Object: GeoQueryContext</li>
     *     <li>String: GeoQueryContext value with boost=1 precision=PRECISION neighbours=[PRECISION]</li>
     *     <li>Array: <pre>[GeoQueryContext, ..]</pre></li>
     * </ul>
     *
     *  A GeoQueryContext has one of the following forms:
     *  <ul>
     *     <li>Object:
     *     <ul>
     *         <li><pre>GEO POINT</pre></li>
     *         <li><pre>{&quot;lat&quot;: <i>&lt;double&gt;</i>, &quot;lon&quot;: <i>&lt;double&gt;</i>, &quot;precision&quot;: <i>&lt;int&gt;</i>, &quot;neighbours&quot;: <i>&lt;[int, ..]&gt;</i>}</pre></li>
     *         <li><pre>{&quot;context&quot;: <i>&lt;string&gt;</i>, &quot;boost&quot;: <i>&lt;int&gt;</i>, &quot;precision&quot;: <i>&lt;int&gt;</i>, &quot;neighbours&quot;: <i>&lt;[int, ..]&gt;</i>}</pre></li>
     *         <li><pre>{&quot;context&quot;: <i>&lt;GEO POINT&gt;</i>, &quot;boost&quot;: <i>&lt;int&gt;</i>, &quot;precision&quot;: <i>&lt;int&gt;</i>, &quot;neighbours&quot;: <i>&lt;[int, ..]&gt;</i>}</pre></li>
     *     </ul>
     *     <li>String: <pre>GEO POINT</pre></li>
     *  </ul>
     *  see {@link GeoUtils#parseGeoPoint(String, GeoPoint)} for GEO POINT
     */
    @Override
    public List<InternalQueryContext> toInternalQueryContexts(List<GeoQueryContext> queryContexts) {
        List<InternalQueryContext> internalQueryContextList = new ArrayList<>();
        for (GeoQueryContext queryContext : queryContexts) {
            // Query precision can never exceed the indexed precision.
            int minPrecision = Math.min(this.precision, queryContext.getPrecision());
            GeoPoint point = queryContext.getGeoPoint();
            final Collection<String> locations = new HashSet<>();
            String geoHash = stringEncode(point.getLon(), point.getLat(), minPrecision);
            locations.add(geoHash);
            if (queryContext.getNeighbours().isEmpty() && geoHash.length() == this.precision) {
                // Default behaviour: include the immediate neighbours of a full-precision hash.
                addNeighbors(geoHash, locations);
            } else if (queryContext.getNeighbours().isEmpty() == false) {
                // Explicit neighbour precisions: add neighbours of the hash truncated to each
                // requested precision (only precisions shorter than the hash itself apply).
                queryContext.getNeighbours().stream()
                    .filter(neighbourPrecision -> neighbourPrecision < geoHash.length())
                    .forEach(neighbourPrecision -> {
                        String truncatedGeoHash = geoHash.substring(0, neighbourPrecision);
                        locations.add(truncatedGeoHash);
                        addNeighbors(truncatedGeoHash, locations);
                    });
            }
            internalQueryContextList.addAll(
                locations.stream()
                    .map(location -> new InternalQueryContext(location, queryContext.getBoost(), location.length() < this.precision))
                    .collect(Collectors.toList()));
        }
        return internalQueryContextList;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        if (!super.equals(o)) return false;
        GeoContextMapping that = (GeoContextMapping) o;
        if (precision != that.precision) return false;
        return !(fieldName != null ? !fieldName.equals(that.fieldName) : that.fieldName != null);
    }

    @Override
    public int hashCode() {
        return Objects.hash(super.hashCode(), precision, fieldName);
    }

    /** Builder for {@link GeoContextMapping}; precision may be given as a geohash level or a distance. */
    public static class Builder extends ContextBuilder<GeoContextMapping> {

        private int precision = DEFAULT_PRECISION;
        private String fieldName = null;

        public Builder(String name) {
            super(name);
        }

        /**
         * Set the precision used to make suggestions.
         *
         * @param precision
         *            precision as distance with {@link DistanceUnit}.
         *            Default: meters
         * @return this
         */
        public Builder precision(String precision) {
            return precision(DistanceUnit.parse(precision, DistanceUnit.METERS, DistanceUnit.METERS));
        }

        /**
         * Set the precision used to make suggestions.
         *
         * @param precision
         *            precision value
         * @param unit
         *            {@link DistanceUnit} to use
         * @return this
         */
        public Builder precision(double precision, DistanceUnit unit) {
            return precision(unit.toMeters(precision));
        }

        /**
         * Set the precision used to make suggestions.
         *
         * @param meters
         *            precision as distance in meters
         * @return this
         */
        public Builder precision(double meters) {
            int level = GeoUtils.geoHashLevelsForPrecision(meters);
            // Ceiling precision: we might return more results
            if (GeoUtils.geoHashCellSize(level) < meters) {
                level = Math.max(1, level - 1);
            }
            return precision(level);
        }

        /**
         * Set the precision used to make suggestions.
         *
         * @param level
         *            maximum length of geohashes
         * @return this
         */
        public Builder precision(int level) {
            this.precision = level;
            return this;
        }

        /**
         * Set the name of the field containing a geolocation to use
         * @param fieldName name of the field
         * @return this
         */
        public Builder field(String fieldName) {
            this.fieldName = fieldName;
            return this;
        }

        @Override
        public GeoContextMapping build() {
            return new GeoContextMapping(name, fieldName, precision);
        }
    }
}
/** * Modified MIT License * * Copyright 2016 OneSignal * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * 1. The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * 2. All copies of substantial portions of the Software may only be used in connection * with services provided by OneSignal. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package com.plugin.gcm; import android.app.Activity; import android.content.Context; import android.os.Bundle; import android.util.Log; import org.apache.cordova.CallbackContext; import org.apache.cordova.CordovaInterface; import org.apache.cordova.CordovaPlugin; import org.apache.cordova.PluginResult; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import java.util.Iterator; import java.util.ArrayList; import java.util.Collection; import com.onesignal.OneSignal; import com.onesignal.OSNotification; import com.onesignal.OSNotificationOpenResult; import com.onesignal.OneSignal.NotificationOpenedHandler; import com.onesignal.OneSignal.NotificationReceivedHandler; import com.onesignal.OneSignal.GetTagsHandler; import com.onesignal.OneSignal.IdsAvailableHandler; import com.onesignal.OneSignal.PostNotificationResponseHandler; public class OneSignalPush extends CordovaPlugin { public static final String TAG = "OneSignalPush"; public static final String SET_NOTIFICATION_RECEIVED_HANDLER = "setNotificationReceivedHandler"; public static final String SET_NOTIFICATION_OPENED_HANDLER = "setNotificationOpenedHandler"; public static final String INIT = "init"; public static final String GET_TAGS = "getTags"; public static final String GET_IDS = "getIds"; public static final String DELETE_TAGS = "deleteTags"; public static final String SEND_TAGS = "sendTags"; public static final String REGISTER_FOR_PUSH_NOTIFICATIONS = "registerForPushNotifications"; public static final String ENABLE_VIBRATE = "enableVibrate"; public static final String ENABLE_SOUND = "enableSound"; public static final String SET_SUBSCRIPTION = "setSubscription"; public static final String POST_NOTIFICATION = "postNotification"; public static final String PROMPT_LOCATION = "promptLocation"; public static final String SYNC_HASHED_EMAIL = "syncHashedEmail"; public static final String SET_LOG_LEVEL = "setLogLevel"; public static final String CLEAR_ONESIGNAL_NOTIFICATIONS = 
"clearOneSignalNotifications"; private static CallbackContext notifReceivedCallbackContext; private static CallbackContext notifOpenedCallbackContext; // This is to prevent an issue where if two Javascript calls are made to OneSignal expecting a callback then only one would fire. private static void callbackSuccess(CallbackContext callbackContext, JSONObject jsonObject) { if (jsonObject == null) // in case there are no data jsonObject = new JSONObject(); PluginResult pluginResult = new PluginResult(PluginResult.Status.OK, jsonObject); pluginResult.setKeepCallback(true); callbackContext.sendPluginResult(pluginResult); } private static void callbackError(CallbackContext callbackContext, JSONObject jsonObject) { if (jsonObject == null) // in case there are no data jsonObject = new JSONObject(); PluginResult pluginResult = new PluginResult(PluginResult.Status.ERROR, jsonObject); pluginResult.setKeepCallback(true); callbackContext.sendPluginResult(pluginResult); } private static void callbackError(CallbackContext callbackContext, String str) { PluginResult pluginResult = new PluginResult(PluginResult.Status.ERROR, str); pluginResult.setKeepCallback(true); callbackContext.sendPluginResult(pluginResult); } @Override public boolean execute(String action, JSONArray data, CallbackContext callbackContext) { boolean result = false; if(SET_NOTIFICATION_RECEIVED_HANDLER.equals(action)) { notifReceivedCallbackContext = callbackContext; result = true; } else if(SET_NOTIFICATION_OPENED_HANDLER.equals(action)) { notifOpenedCallbackContext = callbackContext; result = true; } else if (INIT.equals(action)) { try { String appId = data.getString(0); String googleProjectNumber = data.getString(1); OneSignal.sdkType = "cordova"; OneSignal.init(this.cordova.getActivity(), googleProjectNumber, appId, new CordovaNotificationOpenedHandler(notifOpenedCallbackContext), new CordovaNotificationReceivedHandler(notifReceivedCallbackContext) ); // data.getJSONObject(2) is for iOS settings. 
int displayOption = data.getInt(3); OneSignal.setInFocusDisplaying(displayOption); result = true; } catch (JSONException e) { Log.e(TAG, "execute: Got JSON Exception " + e.getMessage()); result = false; } } else if (GET_TAGS.equals(action)) { final CallbackContext jsTagsAvailableCallBack = callbackContext; OneSignal.getTags(new GetTagsHandler() { @Override public void tagsAvailable(JSONObject tags) { callbackSuccess(jsTagsAvailableCallBack, tags); } }); result = true; } else if (GET_IDS.equals(action)) { final CallbackContext jsIdsAvailableCallBack = callbackContext; OneSignal.idsAvailable(new IdsAvailableHandler() { @Override public void idsAvailable(String userId, String registrationId) { JSONObject jsonIds = new JSONObject(); try { jsonIds.put("userId", userId); if (registrationId != null) jsonIds.put("pushToken", registrationId); else jsonIds.put("pushToken", ""); callbackSuccess(jsIdsAvailableCallBack, jsonIds); } catch (Throwable t) { t.printStackTrace(); } } }); result = true; } else if (SEND_TAGS.equals(action)) { try { OneSignal.sendTags(data.getJSONObject(0)); } catch (Throwable t) { t.printStackTrace(); } result = true; } else if (DELETE_TAGS.equals(action)) { try { Collection<String> list = new ArrayList<String>(); for (int i = 0; i < data.length(); i++) list.add(data.get(i).toString()); OneSignal.deleteTags(list); result = true; } catch (Throwable t) { t.printStackTrace(); } } else if (REGISTER_FOR_PUSH_NOTIFICATIONS.equals(action)) { // Does not apply to Android. 
result = true; } else if (ENABLE_VIBRATE.equals(action)) { try { OneSignal.enableVibrate(data.getBoolean(0)); result = true; } catch (Throwable t) { t.printStackTrace(); } } else if (ENABLE_SOUND.equals(action)) { try { OneSignal.enableSound(data.getBoolean(0)); result = true; } catch (Throwable t) { t.printStackTrace(); } } else if (SET_SUBSCRIPTION.equals(action)) { try { OneSignal.setSubscription(data.getBoolean(0)); result = true; } catch (Throwable t) { t.printStackTrace(); } } else if (POST_NOTIFICATION.equals(action)) { try { JSONObject jo = data.getJSONObject(0); final CallbackContext jsPostNotificationCallBack = callbackContext; OneSignal.postNotification(jo, new PostNotificationResponseHandler() { @Override public void onSuccess(JSONObject response) { callbackSuccess(jsPostNotificationCallBack, response); } @Override public void onFailure(JSONObject response) { callbackError(jsPostNotificationCallBack, response); } }); result = true; } catch (Throwable t) { t.printStackTrace(); } } else if (PROMPT_LOCATION.equals(action)) OneSignal.promptLocation(); else if (SYNC_HASHED_EMAIL.equals(action)) { try { OneSignal.syncHashedEmail(data.getString(0)); } catch(Throwable t) { t.printStackTrace(); } } else if (SET_LOG_LEVEL.equals(action)) { try { JSONObject jo = data.getJSONObject(0); OneSignal.setLogLevel(jo.getInt("logLevel"), jo.getInt("visualLevel")); } catch(Throwable t) { t.printStackTrace(); } } else if (CLEAR_ONESIGNAL_NOTIFICATIONS.equals(action)) { try { OneSignal.clearOneSignalNotifications(); } catch(Throwable t) { t.printStackTrace(); } } else { result = false; Log.e(TAG, "Invalid action : " + action); callbackError(callbackContext, "Invalid action : " + action); } return result; } private class CordovaNotificationReceivedHandler implements NotificationReceivedHandler { private CallbackContext jsNotificationReceivedCallBack; public CordovaNotificationReceivedHandler(CallbackContext inCallbackContext) { jsNotificationReceivedCallBack = 
inCallbackContext; } @Override public void notificationReceived(OSNotification notification) { try { callbackSuccess(jsNotificationReceivedCallBack, new JSONObject(notification.stringify())); } catch (Throwable t) { t.printStackTrace(); } } } private class CordovaNotificationOpenedHandler implements NotificationOpenedHandler { private CallbackContext jsNotificationOpenedCallBack; public CordovaNotificationOpenedHandler(CallbackContext inCallbackContext) { jsNotificationOpenedCallBack = inCallbackContext; } @Override public void notificationOpened(OSNotificationOpenResult result) { try { callbackSuccess(jsNotificationOpenedCallBack, new JSONObject(result.stringify())); } catch (Throwable t) { t.printStackTrace(); } } } @Override public void onDestroy() { OneSignal.removeNotificationOpenedHandler(); OneSignal.removeNotificationReceivedHandler(); } }
/* * Copyright 2014 NAVER Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.navercorp.pinpoint.rpc.client; import com.navercorp.pinpoint.rpc.PinpointSocketException; import com.navercorp.pinpoint.rpc.TestByteUtils; import com.navercorp.pinpoint.rpc.util.PinpointRPCTestUtils; import com.navercorp.pinpoint.test.server.TestPinpointServerAcceptor; import com.navercorp.pinpoint.test.server.TestServerMessageListenerFactory; import com.navercorp.pinpoint.test.utils.TestAwaitTaskUtils; import com.navercorp.pinpoint.test.utils.TestAwaitUtils; import org.jboss.netty.channel.ChannelFuture; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.util.SocketUtils; import java.io.IOException; import java.net.ConnectException; import java.net.InetSocketAddress; /** * @author emeroad */ public class PinpointClientFactoryTest { private Logger logger = LoggerFactory.getLogger(this.getClass()); private static DefaultPinpointClientFactory clientFactory; @BeforeClass public static void setUp() throws IOException { clientFactory = new DefaultPinpointClientFactory(); clientFactory.setPingDelay(100); } @AfterClass public static void tearDown() { if (clientFactory != null) { clientFactory.release(); } } @Test public void connectFail() { try { int availableTcpPort = SocketUtils.findAvailableTcpPort(47000); clientFactory.connect("127.0.0.1", 
availableTcpPort); Assert.fail(); } catch (PinpointSocketException e) { Assert.assertTrue(ConnectException.class.isInstance(e.getCause())); } } @Test public void reconnectFail() throws InterruptedException { // confirm simplified error message when api called. int availableTcpPort = SocketUtils.findAvailableTcpPort(47000); InetSocketAddress remoteAddress = new InetSocketAddress("127.0.0.1", availableTcpPort); ChannelFuture reconnect = clientFactory.reconnect(remoteAddress); reconnect.await(); Assert.assertFalse(reconnect.isSuccess()); Assert.assertTrue(ConnectException.class.isInstance(reconnect.getCause())); Thread.sleep(1000); } @Test public void connect() throws IOException, InterruptedException { TestPinpointServerAcceptor testPinpointServerAcceptor = new TestPinpointServerAcceptor(); int bindPort = testPinpointServerAcceptor.bind(); try { PinpointClient client = clientFactory.connect("127.0.0.1", bindPort); PinpointRPCTestUtils.close(client); } finally { testPinpointServerAcceptor.close(); } } @Test public void pingInternal() throws IOException, InterruptedException { TestServerMessageListenerFactory testServerMessageListenerFactory = new TestServerMessageListenerFactory(TestServerMessageListenerFactory.HandshakeType.DUPLEX, true); final TestServerMessageListenerFactory.TestServerMessageListener serverMessageListener = testServerMessageListenerFactory.create(); TestPinpointServerAcceptor testPinpointServerAcceptor = new TestPinpointServerAcceptor(testServerMessageListenerFactory); int bindPort = testPinpointServerAcceptor.bind(); try { PinpointClient client = clientFactory.connect("127.0.0.1", bindPort); boolean await = TestAwaitUtils.await(new TestAwaitTaskUtils() { @Override public boolean checkCompleted() { return serverMessageListener.hasReceivedPing(); } }, 100, 3000); Assert.assertTrue(await); PinpointRPCTestUtils.close(client); } finally { testPinpointServerAcceptor.close(); } } @Test public void ping() throws IOException, InterruptedException { 
TestPinpointServerAcceptor testPinpointServerAcceptor = new TestPinpointServerAcceptor(); int bindPort = testPinpointServerAcceptor.bind(); try { PinpointClient client = clientFactory.connect("127.0.0.1", bindPort); client.sendPing(); PinpointRPCTestUtils.close(client); } finally { testPinpointServerAcceptor.close(); } } @Test public void pingAndRequestResponse() throws IOException, InterruptedException { TestPinpointServerAcceptor testPinpointServerAcceptor = new TestPinpointServerAcceptor(new TestServerMessageListenerFactory(TestServerMessageListenerFactory.HandshakeType.DUPLEX)); int bindPort = testPinpointServerAcceptor.bind(); try { PinpointClient client = clientFactory.connect("127.0.0.1", bindPort); byte[] randomByte = TestByteUtils.createRandomByte(10); byte[] response = PinpointRPCTestUtils.request(client, randomByte); Assert.assertArrayEquals(randomByte, response); PinpointRPCTestUtils.close(client); } finally { testPinpointServerAcceptor.close(); } } @Test public void sendSync() throws IOException, InterruptedException { TestPinpointServerAcceptor testPinpointServerAcceptor = new TestPinpointServerAcceptor(); int bindPort = testPinpointServerAcceptor.bind(); try { PinpointClient client = clientFactory.connect("127.0.0.1", bindPort); logger.debug("send1"); client.send(new byte[20]); logger.debug("send2"); client.sendSync(new byte[20]); PinpointRPCTestUtils.close(client); } finally { testPinpointServerAcceptor.close(); } } @Test public void requestAndResponse() throws IOException, InterruptedException { TestPinpointServerAcceptor testPinpointServerAcceptor = new TestPinpointServerAcceptor(new TestServerMessageListenerFactory(TestServerMessageListenerFactory.HandshakeType.DUPLEX)); int bindPort = testPinpointServerAcceptor.bind(); try { PinpointClient client = clientFactory.connect("127.0.0.1", bindPort); byte[] randomByte = TestByteUtils.createRandomByte(20); byte[] response = PinpointRPCTestUtils.request(client, randomByte); 
Assert.assertArrayEquals(randomByte, response); PinpointRPCTestUtils.close(client); } finally { testPinpointServerAcceptor.close(); } } @Test public void connectTimeout() { int timeout = 1000; PinpointClientFactory pinpointClientFactory = null; try { pinpointClientFactory = new DefaultPinpointClientFactory(); pinpointClientFactory.setConnectTimeout(timeout); int connectTimeout = pinpointClientFactory.getConnectTimeout(); Assert.assertEquals(timeout, connectTimeout); } finally { pinpointClientFactory.release(); } } }
package org.jbpm.services.task.persistence;

import java.lang.reflect.Constructor;
import java.util.Collection;

import org.drools.core.command.CommandService;
import org.drools.core.command.Interceptor;
import org.drools.core.command.impl.AbstractInterceptor;
import org.drools.persistence.OrderedTransactionSynchronization;
import org.drools.persistence.TransactionManager;
import org.drools.persistence.TransactionManagerHelper;
import org.drools.persistence.jta.JtaTransactionManager;
import org.kie.api.command.Command;
import org.kie.api.runtime.Environment;
import org.kie.api.runtime.EnvironmentName;
import org.kie.api.task.UserGroupCallback;
import org.kie.api.task.model.Task;
import org.kie.internal.command.Context;
import org.kie.internal.command.World;
import org.kie.internal.task.api.TaskContext;
import org.kie.internal.task.api.TaskPersistenceContext;
import org.kie.internal.task.api.TaskPersistenceContextManager;
import org.kie.internal.task.api.model.InternalPeopleAssignments;
import org.kie.internal.task.api.model.InternalTask;
import org.kie.internal.task.exception.TaskException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Command-service interceptor that wraps every task command execution in a
 * transaction and a command-scoped entity manager.
 * <p>
 * On construction it resolves (or creates) a {@link TransactionManager} and a
 * {@link TaskPersistenceContextManager} from the supplied {@link Environment}:
 * an already-registered pair is reused, a Spring transaction manager is
 * adapted reflectively, and otherwise a JTA manager is created.
 * <p>
 * After a successful command, {@link #postInit(Object)} eagerly initializes
 * lazy collections on any returned {@link Task} (unless disabled via the
 * {@code jbpm.ht.eager.disabled} system property) so the result is usable
 * after the persistence context closes.
 */
public class TaskTransactionInterceptor extends AbstractInterceptor {

    private static final Logger logger = LoggerFactory.getLogger(TaskTransactionInterceptor.class);

    /** Base class all Spring transaction managers extend; matched by name to avoid a hard Spring dependency. */
    private static final String SPRING_TM_CLASSNAME = "org.springframework.transaction.support.AbstractPlatformTransactionManager";

    private CommandService commandService;
    private TransactionManager txm;
    private TaskPersistenceContextManager tpm;
    /** When true (system property jbpm.ht.eager.disabled), skip eager initialization in postInit. */
    private boolean eagerDisabled = false;

    public TaskTransactionInterceptor(Environment environment) {
        this.eagerDisabled = Boolean.getBoolean("jbpm.ht.eager.disabled");
        initTransactionManager(environment);
    }

    /**
     * Executes the command inside a transaction. If this interceptor started
     * the transaction it also commits (or rolls back) it; otherwise commit is
     * left to the outer owner (e.g. the process engine).
     *
     * @throws TaskException      rethrown so callers can treat it as a business
     *                            exception; marked non-recoverable when this
     *                            interceptor owned (and rolled back) the tx
     * @throws RuntimeException   any other failure, after rollback
     */
    @Override
    public synchronized <T> T execute(Command<T> command) {
        boolean transactionOwner = false;
        T result = null;
        try {
            transactionOwner = txm.begin();
            tpm.beginCommandScopedEntityManager();
            // Ensure the command-scoped entity manager is closed on tx completion.
            TransactionManagerHelper.registerTransactionSyncInContainer(this.txm, new TaskSynchronizationImpl( this ));
            result = executeNext((Command<T>) command);
            // Eagerly touch lazy collections before the persistence context goes away.
            postInit(result);
            txm.commit( transactionOwner );
            return result;
        } catch (TaskException e) {
            // Allow TaskException to be handled as a business exception on the caller
            // side when the transaction is owned by another component (process engine).
            if (transactionOwner) {
                rollbackTransaction( e, transactionOwner );
                e.setRecoverable(false);
                throw e;
            } else {
                throw e;
            }
        } catch ( RuntimeException re ) {
            rollbackTransaction( re, transactionOwner );
            throw re;
        } catch ( Exception t1 ) {
            rollbackTransaction( t1, transactionOwner );
            throw new RuntimeException( "Wrapped exception see cause", t1 );
        }
    }

    /**
     * Rolls back the current transaction (or marks it rollback-only when not
     * the owner). A rollback failure is escalated as a RuntimeException.
     */
    private void rollbackTransaction(Exception t1, boolean transactionOwner) {
        try {
            logger.warn("Could not commit session", t1);
            txm.rollback(transactionOwner);
        } catch (Exception t2) {
            logger.error("Could not rollback", t2);
            throw new RuntimeException("Could not commit session or rollback", t2);
        }
    }

    /**
     * Pushes an interceptor onto the front of the chain; this transaction
     * interceptor always remains the innermost element.
     */
    public void addInterceptor(Interceptor interceptor) {
        interceptor.setNext( this.commandService == null ? this : this.commandService );
        this.commandService = interceptor;
    }

    /**
     * Returns a minimal {@link TaskContext} backed by the current persistence
     * context; get/set of named values delegate to transaction-scoped
     * resources on the transaction manager. Most other accessors are
     * intentionally no-ops returning null.
     */
    @Override
    public Context getContext() {
        final TaskPersistenceContext persistenceContext = tpm.getPersistenceContext();
        persistenceContext.joinTransaction();
        return new TaskContext() {

            @Override
            public void set(String identifier, Object value) {
                txm.putResource(identifier, value);
            }

            @Override
            public void remove(String identifier) {
            }

            @Override
            public String getName() {
                return null;
            }

            @Override
            public World getContextManager() {
                return null;
            }

            @Override
            public Object get(String identifier) {
                return txm.getResource(identifier);
            }

            @Override
            public void setPersistenceContext(TaskPersistenceContext context) {
            }

            @Override
            public TaskPersistenceContext getPersistenceContext() {
                return persistenceContext;
            }

            @Override
            public UserGroupCallback getUserGroupCallback() {
                return null;
            }
        };
    }

    /**
     * Resolves the transaction manager and persistence context manager from
     * the environment, creating and registering them when absent. Spring
     * support is loaded reflectively so the Spring jars remain optional.
     */
    public void initTransactionManager(Environment env) {
        Object tm = env.get( EnvironmentName.TRANSACTION_MANAGER );
        if ( env.get( EnvironmentName.TASK_PERSISTENCE_CONTEXT_MANAGER ) != null &&
             env.get( EnvironmentName.TRANSACTION_MANAGER ) != null ) {
            // Both already configured - reuse them as-is.
            this.txm = (TransactionManager) tm;
            this.tpm = (TaskPersistenceContextManager) env.get( EnvironmentName.TASK_PERSISTENCE_CONTEXT_MANAGER );
        } else {
            if ( tm != null && isSpringTransactionManager(tm.getClass()) ) {
                try {
                    logger.debug( "Instantiating KieSpringTransactionManager" );
                    Class< ? > cls = Class.forName( "org.kie.spring.persistence.KieSpringTransactionManager" );
                    // NOTE(review): getConstructors()[0] assumes a single (or
                    // deterministically ordered) public constructor - confirm
                    // against the kie-spring classes.
                    Constructor< ? > con = cls.getConstructors()[0];
                    this.txm = (TransactionManager) con.newInstance( tm );
                    env.set( EnvironmentName.TRANSACTION_MANAGER, this.txm );
                    cls = Class.forName( "org.kie.spring.persistence.KieSpringTaskJpaManager" );
                    con = cls.getConstructors()[0];
                    this.tpm = (TaskPersistenceContextManager) con.newInstance( new Object[]{env} );
                } catch ( Exception e ) {
                    logger.warn( "Could not instantiate DroolsSpringTransactionManager" );
                    throw new RuntimeException( "Could not instantiate org.kie.container.spring.beans.persistence.DroolsSpringTransactionManager", e );
                }
            } else {
                logger.debug( "Instantiating JtaTransactionManager" );
                this.txm = new JtaTransactionManager( env.get( EnvironmentName.TRANSACTION ),
                                                      env.get( EnvironmentName.TRANSACTION_SYNCHRONIZATION_REGISTRY ),
                                                      tm );
                env.set( EnvironmentName.TRANSACTION_MANAGER, this.txm );
                try {
                    this.tpm = new JPATaskPersistenceContextManager( env );
                } catch ( Exception e ) {
                    throw new RuntimeException( "Error creating JPATaskPersistenceContextManager", e );
                }
            }
            // Register the resolved managers so other components share them.
            env.set( EnvironmentName.TASK_PERSISTENCE_CONTEXT_MANAGER, this.tpm );
            env.set( EnvironmentName.TRANSACTION_MANAGER, this.txm );
        }
    }

    /**
     * Returns true when the class (or any of its ancestors) is Spring's
     * AbstractPlatformTransactionManager, matched by fully-qualified name.
     */
    public boolean isSpringTransactionManager( Class<?> clazz ) {
        if ( SPRING_TM_CLASSNAME.equals(clazz.getName()) ) {
            return true;
        }
        // Walk up the hierarchy looking for the Spring base class.
        if (clazz.getSuperclass() != null) {
            return isSpringTransactionManager(clazz.getSuperclass());
        }
        return false;
    }

    /**
     * Eagerly initializes lazy collections on a returned Task (or sizes a
     * returned Collection) while the persistence context is still open, so
     * callers can use the result after the transaction completes. Skipped for
     * tasks when eager loading is disabled. (The original redundant null
     * check after instanceof has been removed - instanceof implies non-null.)
     */
    private void postInit(Object result) {
        if (result instanceof Task) {
            Task task = (Task) result;
            if (!eagerDisabled) {
                task.getNames().size();
                task.getDescriptions().size();
                task.getSubjects().size();
                task.getPeopleAssignments().getBusinessAdministrators().size();
                task.getPeopleAssignments().getPotentialOwners().size();
                ((InternalPeopleAssignments) task.getPeopleAssignments()).getRecipients().size();
                ((InternalPeopleAssignments) task.getPeopleAssignments()).getExcludedOwners().size();
                ((InternalPeopleAssignments) task.getPeopleAssignments()).getTaskStakeholders().size();
                task.getTaskData().getAttachments().size();
                task.getTaskData().getComments().size();
                ((InternalTask) task).getDeadlines().getStartDeadlines().size();
                ((InternalTask) task).getDeadlines().getEndDeadlines().size();
            }
        } else if (result instanceof Collection<?>) {
            ((Collection<?>) result).size();
        }
    }

    /**
     * Transaction synchronization that closes the command-scoped entity
     * manager once the transaction completes (commit or rollback).
     */
    private static class TaskSynchronizationImpl extends OrderedTransactionSynchronization {

        TaskTransactionInterceptor service;

        public TaskSynchronizationImpl(TaskTransactionInterceptor service) {
            super(1, "TaskService-"+service.toString());
            this.service = service;
        }

        @Override
        public void afterCompletion(int status) {
            this.service.tpm.endCommandScopedEntityManager();
        }

        @Override
        public void beforeCompletion() {
            // not used
        }
    }
}
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.embraceplus.ble;

import java.io.IOException;
import java.math.BigInteger;
import java.util.List;
import java.util.UUID;

import android.app.Notification;
import android.app.PendingIntent;
import android.app.Service;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothGatt;
import android.bluetooth.BluetoothGattCallback;
import android.bluetooth.BluetoothGattCharacteristic;
import android.bluetooth.BluetoothGattService;
import android.bluetooth.BluetoothManager;
import android.bluetooth.BluetoothProfile;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Binder;
import android.os.Handler;
import android.os.IBinder;
import android.os.Message;
import android.util.Log;

import com.embraceplus.app.R;
import com.embraceplus.app.SearchActivity;
import com.embraceplus.ble.utils.Constants;
import com.embraceplus.ble.utils.GlobalHandlerUtil;
import com.embraceplus.ble.utils.ServiceManager;
import com.embraceplus.model.EmbraceMsg;
import com.embraceplus.store.ExCommandManager;
import com.embraceplus.utils.EmbraceBatteryUtil;
import com.embraceplus.utils.PhoneBatteryListenerUtil;
import com.embraceplus.utils.Optional;

/**
 * Service for managing connection and data communication with a GATT server
 * hosted on a given Bluetooth LE device.
 *
 * Connection state, RSSI monitoring, out-of-range notification, and battery
 * reporting are coordinated through the GATT callback below and the
 * singleton ServiceManager / PhoneBatteryListenerUtil helpers.
 */
public class BluetoothLeService extends Service {
	private final static String TAG = BluetoothLeService.class.getSimpleName();

	private BluetoothManager mBluetoothManager;
	private BluetoothAdapter mBluetoothAdapter;
	// Active GATT connection; null when closed.
	private BluetoothGatt mBluetoothGatt;
	// private String mBluetoothDeviceAddress;
	private Handler myHandler = new Handler();
	// Target handler for battery-level messages; set by the UI layer.
	private Handler embraceHandler;
	// True while the peripheral is considered connected and usable.
	private boolean embraceConnected = false;
	// Most recent RSSI reading, exposed globally (written from the GATT callback).
	public static int currentRssi = 0;

	// public final static String ACTION_GATT_SERVICES_RSSIR =
	// "com.example.bluetooth.le.RSSI_R";

	// Implements callback methods for GATT events that the app cares about. For
	// example,
	// connection change and services discovered.
	private final BluetoothGattCallback mGattCallback = new BluetoothGattCallback() {

		/**
		 * Records the latest RSSI. When the reading drops below the configured
		 * minimum, fires the "out of range" effect command once per episode;
		 * the sent-flag is reset as soon as the signal recovers.
		 */
		@Override
		public void onReadRemoteRssi(BluetoothGatt gatt, int rssi, int status) {
			currentRssi = rssi;
			// BluetoothLeService.this.sendBroadcast(new
			// Intent(ACTION_GATT_SERVICES_RSSIR));
			if (rssi < Constants.BLEMinValue) {
				// final Optional<EmbraceMsg> msg = DbBuilder.getInstant()
				// .getExCommandByNotification(
				// Constants.notification_type_OUTOFSERVICE);
				final Optional<EmbraceMsg> msg = ExCommandManager.getInstance()
						.getExCommandByNotification(Constants.notification_type_OUTOFSERVICE);
				if (msg.notEmpty()) {
					// Only notify once until the signal comes back in range.
					if (!ServiceManager.getInstant().isRingOutofRangeMsgSended()) {
						if (ServiceManager.getInstant().getBluetoothService() != null) {
							ServiceManager
									.getInstant()
									.getBluetoothService()
									.writeEffectCommand(
											msg.get().getFXCommand());
						}
						ServiceManager.getInstant().setRingOutofRangeMsgSended(
								true);
					}
				}
			} else if (rssi >= Constants.BLEMinValue) {
				// Signal recovered: allow the next out-of-range notification.
				ServiceManager.getInstant().setRingOutofRangeMsgSended(false);
			}
		}

		/**
		 * Tracks connect/disconnect transitions. On a clean connect
		 * (newState == STATE_CONNECTED, status == GATT_SUCCESS) it kicks off
		 * service discovery and stops any auto-reconnect loop; on disconnect
		 * (or the Android-specific status 133 error) it restarts the
		 * auto-reconnect loop unless the user disconnected deliberately.
		 */
		@Override
		public void onConnectionStateChange(BluetoothGatt gatt, int status,
				int newState) {
			// NOTE(review): 2 == BluetoothProfile.STATE_CONNECTED and
			// 0 == BluetoothGatt.GATT_SUCCESS; the named constants would be clearer.
			if (newState == 2 && 0 == status) {
				// We can't rely on this event, so we'll move most of our status
				// switching code to the setIsConnected function
				if (gatt != null)
					gatt.discoverServices();
				ServiceManager.getInstant().setConnectStateChangeByUser(false);
				ServiceManager.getInstant().setThisWakeupEmbraceLowPowerMsgSended(
						false);
				BluetoothLeService.this.setIsConnected(true);
				ServiceManager.getInstant().stopAutoConnectThread();
			} else if (newState == BluetoothProfile.STATE_DISCONNECTED
					|| (newState == 2 && status == 133)) {
				// Status 133 is a common low-level GATT error treated here as
				// a failed connection.
				//disconnect();
				if(newState == BluetoothProfile.STATE_DISCONNECTED){
					BluetoothLeService.this.setIsConnected(false);
				}
				if (!ServiceManager.getInstant().isConnectStateChangeByUser()) {
					// AutoConnectUtil.getInstant().startAutoConnectThread();
					ServiceManager.getInstant().startAutoConnectThread();
				}
			}
		}

		/**
		 * Once services are discovered, starts the periodic RSSI and battery
		 * listeners.
		 */
		@Override
		public void onServicesDiscovered(BluetoothGatt gatt, int status) {
			System.out.println("::onServicesDiscovered()"); // TODO(review): use Log, not System.out
			PhoneBatteryListenerUtil.getInstant().registPhoneBatteryListener();
			PhoneBatteryListenerUtil.getInstant().startListenRssi();
			PhoneBatteryListenerUtil.getInstant().startListenEmbraceBattery();
		}

		/**
		 * Handles a completed characteristic read. The payload is interpreted
		 * as the Embrace battery level (little-endian unsigned 16-bit); the
		 * value is forwarded to embraceHandler and, when at/below the
		 * low-battery threshold, a low-battery effect command is sent once
		 * per wakeup.
		 */
		@Override
		public void onCharacteristicRead(BluetoothGatt gatt,
				BluetoothGattCharacteristic characteristic, int status) {
			byte[] notifValue = characteristic.getValue();
			// A successful read implies the link is alive.
			BluetoothLeService.this.setIsConnected(true);
			// int embraceBattery = (int)notifValue[0];
			int embraceBattery = 0;
			try {
				embraceBattery = readUnsignedShort(notifValue);
			} catch (IOException e) {
				e.printStackTrace(); // TODO(review): log instead of printStackTrace
			}
			// System.out.println("onCharacteristicRead() embraceBattery:"
			// + embraceBattery);
			if (embraceHandler != null) {
				Message msg = Message.obtain(embraceHandler,
						Constants.getEmbraceBattery);
				msg.obj = embraceBattery;
				msg.sendToTarget();
			}
			// Converts the raw reading into a "NN%" display string; empty/null
			// means the reading could not be mapped.
			String embraceBatteryValue = EmbraceBatteryUtil
					.getEmbraceBatteryValue(embraceBattery);
			if (embraceBatteryValue == null || embraceBatteryValue.equals("")) {
				return;
			}
			String embraceBatteryStringValue = embraceBatteryValue.substring(0,
					embraceBatteryValue.indexOf("%"));
			int embraceBatteryIntValue = Integer
					.parseInt(embraceBatteryStringValue);
			if (embraceBatteryIntValue <= Constants.minBattery
					&& !ServiceManager.getInstant()
							.isThisWakeupEmbraceLowPowerMsgSended()) {
				// final Optional<EmbraceMsg> EmbraceBatteryMsg = DbBuilder
				// .getInstant().getExCommandByNotification(
				// Constants.notification_type_BATTERYEMBRACCE);
				final Optional<EmbraceMsg> EmbraceBatteryMsg = ExCommandManager.getInstance().
						getExCommandByNotification(Constants.notification_type_BATTERYEMBRACCE);
				if (EmbraceBatteryMsg.notEmpty()) {
					ServiceManager
							.getInstant()
							.getBluetoothService()
							.writeEffectCommand(
									EmbraceBatteryMsg.get().getFXCommand());
					PhoneBatteryListenerUtil.getInstant()
							.setEmbraceBatteryThreadActived(true);
					// Send the low-battery alert at most once per wakeup.
					ServiceManager.getInstant()
							.setThisWakeupEmbraceLowPowerMsgSended(true);
				}
			}
		}

		/**
		 * Decodes the first two bytes of the buffer as a little-endian
		 * unsigned 16-bit value (zero-extended into a 3-byte big-endian
		 * BigInteger so the sign bit can never be set). Returns 0 for a
		 * null/short buffer.
		 */
		private int readUnsignedShort(byte[] readBuffer) throws IOException {
			if (readBuffer == null || readBuffer.length < 2)
				return 0;
			byte[] uint64 = new byte[3];
			uint64[2] = 0;
			System.arraycopy(readBuffer, 0, uint64, 0, 2);
			BigInteger intg = new BigInteger(reverse(uint64));
			return intg.intValue();
		}

		/** Returns a new array with the bytes of b in reverse order. */
		private byte[] reverse(byte[] b) {
			byte[] temp = new byte[b.length];
			for (int i = 0; i < b.length; i++) {
				temp[i] = b[b.length - 1 - i];
			}
			return temp;
		}

		@Override
		public void onCharacteristicWrite(BluetoothGatt gatt,
				BluetoothGattCharacteristic characteristic, int status) {
			// System.out.println("::onCharacteristicWrite()");
			// System.out.println(status);
			// Intentionally ignored: write completion needs no handling here.
		}

		@Override
		public void onCharacteristicChanged(BluetoothGatt gatt,
				BluetoothGattCharacteristic characteristic) {
			// Intentionally ignored: no notifications are consumed here.
		}
	};

	/** Binder giving bound clients direct access to this service instance. */
	public class LocalBinder extends Binder {
		public BluetoothLeService getService() {
			return BluetoothLeService.this;
		}
	}

	@Override
	public IBinder onBind(Intent intent) {
		return mBinder;
	}

	@Override
	public boolean onUnbind(Intent intent) {
		// After using a given device, you should make sure that
		// BluetoothGatt.close() is called
		// such that resources are cleaned up properly. In this particular
		// example, close() is
		// invoked when the UI is disconnected from the Service.
		close();
		return super.onUnbind(intent);
	}

	private final IBinder mBinder = new LocalBinder();

	/**
	 * Initializes a reference to the local Bluetooth adapter.
	 *
	 * @return Return true if the initialization is successful.
	 */
	public boolean initialize() {
		// For API level 18 and above, get a reference to BluetoothAdapter
		// through
		// BluetoothManager.
		if (mBluetoothManager == null) {
			mBluetoothManager = (BluetoothManager) getSystemService(Context.BLUETOOTH_SERVICE);
			if (mBluetoothManager == null) {
				Log.e(TAG, "Unable to initialize BluetoothManager.");
				return false;
			}
		}

		mBluetoothAdapter = mBluetoothManager.getAdapter();
		if (mBluetoothAdapter == null) {
			Log.e(TAG, "Unable to obtain a BluetoothAdapter.");
			return false;
		}

		return true;
	}

	/**
	 * Updates the connected flag and broadcasts the state change to any
	 * registered UI handler.
	 */
	private void setIsConnected(boolean connected) {
		if (connected) {
			embraceConnected = true;
			notifyStateChange(true);
		} else {
			embraceConnected = false;
			notifyStateChange(false);
		}
	}

	/**
	 * Tells if the device is reliabily connected. True mean connected and ready
	 * to use False means connecting, disconnecting or disconnected.
	 *
	 * @return true when the peripheral is connected and usable
	 */
	public boolean isEmbraceAvailable() {
		return embraceConnected;
	}

	/**
	 * Posts a connected/disconnected message to the globally registered demo
	 * handler, if one is present.
	 */
	public void notifyStateChange(boolean connected) {
		int connectedMessage = Constants.handler_msg_disconnect_server;
		if (connected)
			connectedMessage = Constants.handler_msg_connected_server;
		if (GlobalHandlerUtil.getInstant().getDemoHandler().notEmpty()) {
			Message msg = Message.obtain(GlobalHandlerUtil.getInstant()
					.getDemoHandler().get(), connectedMessage);
			msg.sendToTarget();
		}
	}

	/**
	 * Opens a GATT connection to the device at the given address. Any existing
	 * GATT connection is disconnected and closed first.
	 *
	 * @param address Bluetooth MAC address of the target device
	 * @return false when the adapter cannot be initialized or the address is
	 *         null; true once a connection attempt has been started (the
	 *         result arrives asynchronously via the GATT callback)
	 */
	public boolean connect(final String address) {
		if (!initialize()) {
			return false;
		}
		if (mBluetoothAdapter == null || address == null) {
			return false;
		}

		final BluetoothDevice device = mBluetoothAdapter
				.getRemoteDevice(address);

		// Previously connected device. Try to reconnect.
		/*if (mBluetoothDeviceAddress != null
				&& address.equals(mBluetoothDeviceAddress)
				&& mBluetoothGatt != null) {
			mBluetoothGatt.disconnect();
			Log.d(TAG,
					"Trying to use an existing mBluetoothGatt for connection.");
			if (mBluetoothGatt.connect()) {
				return true;
			} else {
				return false;
			}
		}*/
		// Tear down any stale connection before opening a new one.
		if(mBluetoothGatt != null) {
			mBluetoothGatt.disconnect();
			mBluetoothGatt.close();
		}

		if (device == null) {
			Log.w(TAG, "Device not found.  Unable to connect.");
			return false;
		}
		// We want to directly connect to the device, so we are setting the
		// autoConnect
		// parameter to false.
		mBluetoothGatt = device.connectGatt(this, false, mGattCallback);
		// mBluetoothGatt = device.connectGatt(this, true, mGattCallback);
		Log.d(TAG, "Trying to create a new connection.");
		// mBluetoothDeviceAddress = address;
		return true;
	}

	/**
	 * Disconnects an existing connection or cancel a pending connection. The
	 * disconnection result is reported asynchronously through the
	 * {@code BluetoothGattCallback#onConnectionStateChange(android.bluetooth.BluetoothGatt, int, int)}
	 * callback.
	 */
	public void disconnect() {
		BluetoothLeService.this.setIsConnected(false);
		if (mBluetoothAdapter == null || mBluetoothGatt == null) {
			Log.w(TAG, "BluetoothAdapter not initialized");
			return;
		}
		mBluetoothGatt.disconnect();
		mBluetoothGatt.close();
	}

	/**
	 * After using a given BLE device, the app must call this method to ensure
	 * resources are released properly.
	 */
	public void close() {
		if (mBluetoothGatt == null) {
			return;
		}
		mBluetoothGatt.close();
		mBluetoothGatt = null;
	}

	/**
	 * Request a read on a given {@code BluetoothGattCharacteristic}. The read
	 * result is reported asynchronously through the
	 * {@code BluetoothGattCallback#onCharacteristicRead(android.bluetooth.BluetoothGatt, android.bluetooth.BluetoothGattCharacteristic, int)}
	 * callback.
	 *
	 * @param characteristic
	 *            The characteristic to read from.
	 */
	public void readCharacteristic(BluetoothGattCharacteristic characteristic) {
		if (mBluetoothAdapter == null || mBluetoothGatt == null) {
			Log.w(TAG, "BluetoothAdapter not initialized");
			return;
		}
		if (characteristic == null) {
			Log.w(TAG, "Characteristic is null");
			return;
		}
		mBluetoothGatt.readCharacteristic(characteristic);
	}

	/**
	 * Requests an asynchronous read of the standard battery-level
	 * characteristic; the value arrives via onCharacteristicRead. Silently a
	 * no-op when the battery service is not (yet) discovered.
	 */
	public void readEmbraceBattery() {
		// use below code to read battery...
		// ServiceManager.getInstant().getBluetoothService().readEmbraceBattery();
		UUID batteryserviceUUID = UUID
				.fromString(Constants.Battery_service_uuid);
		UUID batteryNotifyUUID = UUID
				.fromString(Constants.Battery_service_characteristics_uuid);
		if (mBluetoothGatt != null
				&& mBluetoothGatt.getService(batteryserviceUUID) != null) {
			BluetoothGattCharacteristic notifyCharacteristic3 = mBluetoothGatt
					.getService(batteryserviceUUID).getCharacteristic(
							batteryNotifyUUID);
			readCharacteristic(notifyCharacteristic3);
		}
	}

	/**
	 * Writes an effect command payload to the device's effect characteristic.
	 * A no-op (with a warning) when the adapter/GATT is not ready, the payload
	 * is null, or the effect service has not been discovered.
	 *
	 * @param msg raw command bytes to write
	 */
	public void writeEffectCommand(byte[] msg) {
		if (mBluetoothAdapter == null || mBluetoothGatt == null) {
			Log.w(TAG, "BluetoothAdapter not initialized");
			return;
		}
		if (msg == null) {
			Log.w(TAG, "Message is null");
			return;
		}
		// NOTE(review): 'services' is never used - candidate for removal.
		List<BluetoothGattService> services = mBluetoothGatt.getServices();
		UUID serviceUUID = UUID.fromString(Constants.service_uuid);
		UUID writeUUID = UUID.fromString(Constants.write_uuid_effect);
		/*
		 * UUID serviceUUID =
		 * UUID.fromString("00001802-0000-1000-8000-00805f9b34fb"); UUID
		 * writeUUID = UUID.fromString("00002A06-0000-1000-8000-00805f9b34fb");
		 */
		if (mBluetoothGatt == null
				|| mBluetoothGatt.getService(serviceUUID) == null) {
			/*
			 * System.out.println(mBluetoothGatt);
			 * System.out.println(mBluetoothGatt.getService(serviceUUID));
			 * System.out.println("aaaaaa");
			 */
			return;
		}
		BluetoothGattCharacteristic writeCharacteristic = mBluetoothGatt
				.getService(serviceUUID).getCharacteristic(writeUUID);
		writeCharacteristic.setValue(msg);
		mBluetoothGatt.writeCharacteristic(writeCharacteristic);
	}

	/** Requests an asynchronous RSSI read; result arrives in onReadRemoteRssi. */
	public void readRssi() {
		if (mBluetoothGatt != null) {
			mBluetoothGatt.readRemoteRssi();
		}
	}

	public Handler getMyHandler() {
		return myHandler;
	}

	public void setMyHandler(Handler myHandler) {
		this.myHandler = myHandler;
	}

	public Handler getEmbraceHandler() {
		return embraceHandler;
	}

	/** Registers the handler that receives battery-level messages. */
	public void setEmbraceHandler(Handler embraceHandler) {
		this.embraceHandler = embraceHandler;
	}

	// NOTE(review): onStart is deprecated in favor of onStartCommand - confirm
	// minimum supported API level before migrating.
	@Override
	public void onStart(Intent intent, int startId) {
		super.onStart(intent, startId);
		// Publish this instance globally so other components can reach it.
		ServiceManager.getInstant().setBluetoothService(this);
		SearchActivity.i = 200;
	}

	Handler handler = new Handler();

	/**
	 * On destruction, restarts this service (apparently intended to keep it
	 * alive across teardown - TODO confirm) and, if a device was connected,
	 * attempts to reconnect to the last stored address after 3 seconds.
	 */
	@Override
	public void onDestroy() {
		super.onDestroy();
		Intent gattServiceIntent = new Intent(this, BluetoothLeService.class);
		gattServiceIntent.putExtra("needConnect", embraceConnected);
		this.startService(gattServiceIntent);
		if(embraceConnected){
			handler.postDelayed(new Runnable() {
				@Override
				public void run() {
					SharedPreferences deviceadd = BluetoothLeService.this.getSharedPreferences("deviceAddress", 0);
					String previousAddress = deviceadd.getString("address", "");
					if (ServiceManager.getInstant().getBluetoothService() != null && previousAddress != null){
						ServiceManager.getInstant().getBluetoothService().connect(previousAddress);
					}
				}
			}, 3000);
		}
	}

	/*	@Override
	public int onStartCommand(Intent intent, int flags, int startId) {
		flags = START_STICKY;
		return super.onStartCommand(intent, flags, startId);
		// return START_REDELIVER_INTENT;
	}*/

	//@Override
	//public int onStartCommand(Intent intent, int flags, int startId) {
	//	return START_STICKY;
	//}
}
package org.cirdles.squid.prawnLegacy;

import org.cirdles.squid.Squid;
import org.cirdles.squid.prawn.PrawnFile;
import org.cirdles.squid.prawn.RunParameterNames;
import org.cirdles.squid.prawn.RunTableEntryParameterNames;
import org.cirdles.squid.prawn.SetParameterNames;
import org.cirdles.squid.shrimp.ShrimpDataFileInterface;
import org.cirdles.squid.shrimp.ShrimpDataLegacyFileInterface;

import java.util.List;

/**
 * Translates legacy SHRIMP II v2 ("PRAWN legacy") data files into the modern
 * {@link PrawnFile} representation used by Squid3.
 * <p>
 * The translation is mechanical: each legacy run's parameters, run table,
 * set parameters, and per-scan measurements are copied into the equivalent
 * {@code PrawnFile} structures. Values that the legacy format does not carry
 * (stage coordinates, trim AMU, detector selection, MC positions, ...) are
 * filled with fixed placeholders.
 */
public class PrawnLegacyFileHandler {

    /**
     * Converts a legacy PRAWN file into a {@link PrawnFile}.
     *
     * @param prawnLegacyFile the parsed legacy file
     * @return a populated {@link PrawnFile} with one run per legacy run
     */
    public static ShrimpDataFileInterface convertPrawnLegacyFileToPrawnFile(ShrimpDataLegacyFileInterface prawnLegacyFile) {
        ShrimpDataFileInterface prawnFile = new PrawnFile();
        prawnFile.setSoftwareVersion("Squid3 v" + Squid.VERSION + " - translated from SHRIMP II v2 SW");
        prawnFile.setRuns((short) prawnLegacyFile.getRun().size());

        for (PrawnLegacyFile.Run legacyRun : prawnLegacyFile.getRun()) {
            PrawnFile.Run run = new PrawnFile.Run();
            addRunParameters(run, legacyRun);
            addRunTable(run, legacyRun);
            addSet(run, legacyRun);
            prawnFile.getRun().add(run);
        }

        return prawnFile;
    }

    /** Copies the run-level parameters; stage x/y/z get "0" placeholders. */
    private static void addRunParameters(PrawnFile.Run run, PrawnLegacyFile.Run legacyRun) {
        List<PrawnFile.Run.Par> pars = run.getPar();
        pars.add(runPar(RunParameterNames.TITLE, legacyRun.getTitle()));
        pars.add(runPar(RunParameterNames.SETS, "" + legacyRun.getSets()));
        pars.add(runPar(RunParameterNames.MEASUREMENTS, "" + legacyRun.getPeaks()));
        pars.add(runPar(RunParameterNames.SCANS, "" + legacyRun.getScans()));
        pars.add(runPar(RunParameterNames.DEAD_TIME_NS, "" + legacyRun.getDeadTimeNs()));
        pars.add(runPar(RunParameterNames.SBM_ZERO_CPS, "" + legacyRun.getSbmZeroCps()));
        pars.add(runPar(RunParameterNames.AUTOCENTERING, "" + legacyRun.getAutocentering()));
        pars.add(runPar(RunParameterNames.QT_1_Y_MODE, "" + legacyRun.getQt1YMode()));
        pars.add(runPar(RunParameterNames.DEFLECT_BEAM_BETWEEN_PEAKS, "" + legacyRun.getDeflectBeamBetweenPeaks()));
        pars.add(runPar(RunParameterNames.AUTOCENTER_METHOD, "" + legacyRun.getAutocenterMethod()));
        // Legacy files carry no stage coordinates - placeholders required by the new format.
        pars.add(runPar(RunParameterNames.STAGE_X, "0"));
        pars.add(runPar(RunParameterNames.STAGE_Y, "0"));
        pars.add(runPar(RunParameterNames.STAGE_Z, "0"));
    }

    /**
     * Builds the run table: one entry per legacy species name, with
     * placeholders ("0.0", "-1.000") for fields absent from the legacy format.
     */
    private static void addRunTable(PrawnFile.Run run, PrawnLegacyFile.Run legacyRun) {
        run.setRunTable(new PrawnFile.Run.RunTable());
        PrawnLegacyFile.Run.RunTable legacyRunTable = legacyRun.getRunTable();
        for (int i = 0; i < legacyRunTable.getName().size(); i++) {
            PrawnFile.Run.RunTable.Entry entry = new PrawnFile.Run.RunTable.Entry();
            List<PrawnFile.Run.RunTable.Entry.Par> pars = entry.getPar();
            pars.add(entryPar(RunTableEntryParameterNames.LABEL, "" + legacyRunTable.getName().get(i)));
            pars.add(entryPar(RunTableEntryParameterNames.AMU, "" + legacyRunTable.getTrimMass().get(i)));
            pars.add(entryPar(RunTableEntryParameterNames.TRIM_AMU, "0.0"));
            pars.add(entryPar(RunTableEntryParameterNames.AUTOCENTER_OFFSET_AMU, "0.0"));
            pars.add(entryPar(RunTableEntryParameterNames.COUNT_TIME_SEC, "" + legacyRunTable.getCountTimeSec().get(i)));
            pars.add(entryPar(RunTableEntryParameterNames.DELAY_SEC, "" + legacyRunTable.getDelaySec().get(i)));
            pars.add(entryPar(RunTableEntryParameterNames.COLLECTOR_FOCUS, "" + legacyRunTable.getCollectorFocus().get(i)));
            pars.add(entryPar(RunTableEntryParameterNames.CENTERING_TIME_SEC, "" + legacyRunTable.getCenteringTimeSec().get(i)));
            pars.add(entryPar(RunTableEntryParameterNames.CENTERING_FREQUENCY, "" + legacyRunTable.getCenteringFrequency().get(i)));
            pars.add(entryPar(RunTableEntryParameterNames.DETECTOR_SELECTION, "0.0"));
            pars.add(entryPar(RunTableEntryParameterNames.MC_LM_POS, "-1.000"));
            pars.add(entryPar(RunTableEntryParameterNames.MC_HM_POS, "-1.000"));
            pars.add(entryPar(RunTableEntryParameterNames.SC_REFERENCE, "" + legacyRunTable.getReference().get(i)));
            pars.add(entryPar(RunTableEntryParameterNames.SC_DETECTOR, "" + legacyRunTable.getDetector().get(i)));
            run.getRunTable().getEntry().add(entry);
        }
    }

    /**
     * Copies the set parameters and redistributes measurements. The legacy
     * format groups all scans of one peak together, whereas the modern format
     * groups all peaks of one scan together, so the scans are pre-created and
     * each peak's i-th measurement is appended to scan i.
     */
    private static void addSet(PrawnFile.Run run, PrawnLegacyFile.Run legacyRun) {
        run.setSet(new PrawnFile.Run.Set());
        List<PrawnFile.Run.Set.Par> pars = run.getSet().getPar();
        pars.add(setPar(SetParameterNames.DATE, legacyRun.getSet().getDate()));
        pars.add(setPar(SetParameterNames.TIME, legacyRun.getSet().getTime()));
        pars.add(setPar(SetParameterNames.QT_1_Y, "" + legacyRun.getSet().getQt1Y()));
        pars.add(setPar(SetParameterNames.QT_1_Y_VOLTS, "0.0"));
        pars.add(setPar(SetParameterNames.QT_1_Z, "0"));
        pars.add(setPar(SetParameterNames.PBM, "" + legacyRun.getSet().getPbm()));

        List<PrawnFile.Run.Set.Scan> scanSet = run.getSet().getScan();
        scanSet.clear();
        for (int i = 0; i < legacyRun.getScans(); i++) {
            PrawnFile.Run.Set.Scan scan = new PrawnFile.Run.Set.Scan();
            scan.setNumber((short) (i + 1));
            scanSet.add(scan);
        }
        for (PrawnLegacyFile.Run.Set.Data.Peak legacyPeak : legacyRun.getSet().getData().getPeak()) {
            for (int i = 0; i < legacyPeak.getScan().size(); i++) {
                scanSet.get(i).getMeasurement().add(
                        makeMeasurement(legacyPeak.getScan().get(i), legacyPeak.getPeakName()));
            }
        }
    }

    /**
     * Translates one legacy peak-scan into a measurement: fixed bookkeeping
     * parameters plus the comma-joined ion counts (named after the peak) and
     * SBM (normalization) counts.
     */
    private static PrawnFile.Run.Set.Scan.Measurement makeMeasurement(
            PrawnLegacyFile.Run.Set.Data.Peak.Scan legacyScan, String peakName) {
        PrawnFile.Run.Set.Scan.Measurement measurement = new PrawnFile.Run.Set.Scan.Measurement();

        measurement.getPar().add(measurementPar("detectors", "0"));
        measurement.getPar().add(measurementPar("trim_mass", "" + legacyScan.getTrimMass()));
        measurement.getPar().add(measurementPar("time_stamp_sec", "" + legacyScan.getTimeStampSec()));
        measurement.getPar().add(measurementPar("autocentering_result", "ok"));
        measurement.getPar().add(measurementPar("autocentering_detector", "1"));

        PrawnFile.Run.Set.Scan.Measurement.Data ionCount = new PrawnFile.Run.Set.Scan.Measurement.Data();
        ionCount.setName(peakName);
        ionCount.setValue(joinCounts(legacyScan.getIonCount()));
        measurement.getData().add(ionCount);

        PrawnFile.Run.Set.Scan.Measurement.Data sbm = new PrawnFile.Run.Set.Scan.Measurement.Data();
        sbm.setName("SBM");
        sbm.setValue(joinCounts(legacyScan.getNormCount()));
        measurement.getData().add(sbm);

        return measurement;
    }

    /** Joins count values with commas (same formatting as string concatenation). */
    private static String joinCounts(List<?> counts) {
        StringBuilder joined = new StringBuilder();
        for (int i = 0; i < counts.size(); i++) {
            if (i > 0) {
                joined.append(",");
            }
            joined.append(counts.get(i));
        }
        return joined.toString();
    }

    /** Creates a named run-level parameter. */
    private static PrawnFile.Run.Par runPar(String name, String value) {
        PrawnFile.Run.Par par = new PrawnFile.Run.Par();
        par.setName(name);
        par.setValue(value);
        return par;
    }

    /** Creates a named run-table-entry parameter. */
    private static PrawnFile.Run.RunTable.Entry.Par entryPar(String name, String value) {
        PrawnFile.Run.RunTable.Entry.Par par = new PrawnFile.Run.RunTable.Entry.Par();
        par.setName(name);
        par.setValue(value);
        return par;
    }

    /** Creates a named set-level parameter. */
    private static PrawnFile.Run.Set.Par setPar(String name, String value) {
        PrawnFile.Run.Set.Par par = new PrawnFile.Run.Set.Par();
        par.setName(name);
        par.setValue(value);
        return par;
    }

    /** Creates a named measurement-level parameter. */
    private static PrawnFile.Run.Set.Scan.Measurement.Par measurementPar(String name, String value) {
        PrawnFile.Run.Set.Scan.Measurement.Par par = new PrawnFile.Run.Set.Scan.Measurement.Par();
        par.setName(name);
        par.setValue(value);
        return par;
    }
}
package net.bytebuddy.implementation.bytecode.constant;

import net.bytebuddy.description.method.MethodDescription;
import net.bytebuddy.description.type.TypeDescription;
import net.bytebuddy.implementation.Implementation;
import net.bytebuddy.implementation.auxiliary.PrivilegedMemberLookupAction;
import net.bytebuddy.implementation.bytecode.Duplication;
import net.bytebuddy.implementation.bytecode.StackManipulation;
import net.bytebuddy.implementation.bytecode.TypeCreation;
import net.bytebuddy.implementation.bytecode.assign.TypeCasting;
import net.bytebuddy.implementation.bytecode.collection.ArrayFactory;
import net.bytebuddy.implementation.bytecode.member.FieldAccess;
import net.bytebuddy.implementation.bytecode.member.MethodInvocation;
import org.objectweb.asm.MethodVisitor;

import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.security.AccessController;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.List;

import static net.bytebuddy.matcher.ElementMatchers.isConstructor;

/**
 * Represents the creation of a {@link java.lang.reflect.Method} value which can be created from a given
 * set of constant pool values and can therefore be considered a constant in the broader meaning.
 */
public abstract class MethodConstant implements StackManipulation {

    /**
     * A description of the method to be loaded onto the stack.
     */
    protected final MethodDescription.InDefinedShape methodDescription;

    /**
     * Creates a new method constant.
     *
     * @param methodDescription The method description for which the {@link java.lang.reflect.Method} representation
     *                          should be created.
     */
    protected MethodConstant(MethodDescription.InDefinedShape methodDescription) {
        this.methodDescription = methodDescription;
    }

    /**
     * Creates a stack manipulation that loads a method constant onto the operand stack.
     *
     * @param methodDescription The method to be loaded onto the stack.
     * @return A stack manipulation that assigns a method constant for the given method description.
     */
    public static CanCache of(MethodDescription.InDefinedShape methodDescription) {
        // A type initializer has no java.lang.reflect representation and therefore cannot be loaded.
        if (methodDescription.isTypeInitializer()) {
            return CanCacheIllegal.INSTANCE;
        } else if (methodDescription.isConstructor()) {
            return new ForConstructor(methodDescription);
        } else {
            return new ForMethod(methodDescription);
        }
    }

    /**
     * Creates a stack manipulation that loads a method constant onto the operand stack using an {@link AccessController}.
     *
     * @param methodDescription The method to be loaded onto the stack.
     * @return A stack manipulation that assigns a method constant for the given method description.
     */
    public static CanCache ofPrivileged(MethodDescription.InDefinedShape methodDescription) {
        // Mirrors of(...) but wraps the reflective lookup in a privileged action via privileged().
        if (methodDescription.isTypeInitializer()) {
            return CanCacheIllegal.INSTANCE;
        } else if (methodDescription.isConstructor()) {
            return new ForConstructor(methodDescription).privileged();
        } else {
            return new ForMethod(methodDescription).privileged();
        }
    }

    /**
     * Returns a list of type constant load operations for the given list of parameters.
     *
     * @param parameterTypes A list of all type descriptions that should be represented as type constant
     *                       load operations.
     * @return A corresponding list of type constant load operations.
     */
    protected static List<StackManipulation> typeConstantsFor(List<TypeDescription> parameterTypes) {
        List<StackManipulation> typeConstants = new ArrayList<StackManipulation>(parameterTypes.size());
        for (TypeDescription parameterType : parameterTypes) {
            typeConstants.add(ClassConstant.of(parameterType));
        }
        return typeConstants;
    }

    @Override
    public boolean isValid() {
        return true;
    }

    @Override
    public Size apply(MethodVisitor methodVisitor, Implementation.Context implementationContext) {
        // Operand-stack sequence (order matters): push the declaring Class, push the method name
        // (subclasses may contribute nothing here, e.g. for constructors), build a Class[] of the
        // erased parameter types, then invoke the reflective accessor (getMethod/getDeclaredMethod
        // or the constructor equivalents) on the declaring class.
        return new Compound(
                ClassConstant.of(methodDescription.getDeclaringType()),
                methodName(),
                ArrayFactory.forType(TypeDescription.Generic.OfNonGenericType.CLASS)
                        .withValues(typeConstantsFor(methodDescription.getParameters().asTypeList().asErasures())),
                MethodInvocation.invoke(accessorMethod())
        ).apply(methodVisitor, implementationContext);
    }

    /**
     * Returns a method constant that uses an {@link AccessController} to look up this constant.
     *
     * @return A method constant that uses an {@link AccessController} to look up this constant.
     */
    protected CanCache privileged() {
        return new PrivilegedLookup(methodDescription, methodName());
    }

    /**
     * Returns a stack manipulation that loads the method name onto the operand stack if this is required.
     *
     * @return A stack manipulation that loads the method name onto the operand stack if this is required.
     */
    protected abstract StackManipulation methodName();

    /**
     * Returns the method for loading a declared method or constructor onto the operand stack.
     *
     * @return The method for loading a declared method or constructor onto the operand stack.
     */
    protected abstract MethodDescription.InDefinedShape accessorMethod();

    @Override
    public int hashCode() {
        return methodDescription.hashCode();
    }

    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        } else if (other == null || getClass() != other.getClass()) {
            return false;
        }
        MethodConstant methodConstant = (MethodConstant) other;
        return methodDescription.equals(methodConstant.methodDescription);
    }

    /**
     * Represents a method constant that cannot be represented by Java's reflection API.
     */
    protected enum CanCacheIllegal implements CanCache {

        /**
         * The singleton instance.
         */
        INSTANCE;

        @Override
        public StackManipulation cached() {
            // Caching an illegal constant is equally illegal; no field cache is created.
            return Illegal.INSTANCE;
        }

        @Override
        public boolean isValid() {
            return false;
        }

        @Override
        public Size apply(MethodVisitor methodVisitor, Implementation.Context implementationContext) {
            return Illegal.INSTANCE.apply(methodVisitor, implementationContext);
        }
    }

    /**
     * Represents a {@link net.bytebuddy.implementation.bytecode.constant.MethodConstant} that is
     * directly loaded onto the operand stack without caching the value. Since the look-up of a Java method bares
     * some costs that sometimes need to be avoided, such a stack manipulation offers a convenience method for
     * defining this loading instruction as the retrieval of a field value that is initialized in the instrumented
     * type's type initializer.
     */
    public interface CanCache extends StackManipulation {

        /**
         * Returns this method constant as a cached version.
         *
         * @return A cached version of the method constant that is represented by this instance.
         */
        StackManipulation cached();
    }

    /**
     * Creates a {@link net.bytebuddy.implementation.bytecode.constant.MethodConstant} for loading
     * a {@link java.lang.reflect.Method} instance onto the operand stack.
     */
    protected static class ForMethod extends MethodConstant implements CanCache {

        /**
         * The {@link Class#getMethod(String, Class[])} method.
         */
        private static final MethodDescription.InDefinedShape GET_METHOD;

        /**
         * The {@link Class#getDeclaredMethod(String, Class[])} method.
         */
        private static final MethodDescription.InDefinedShape GET_DECLARED_METHOD;

        /*
         * Looks up methods used for creating the manipulation.
         */
        static {
            try {
                GET_METHOD = new MethodDescription.ForLoadedMethod(Class.class.getMethod("getMethod", String.class, Class[].class));
                GET_DECLARED_METHOD = new MethodDescription.ForLoadedMethod(Class.class.getMethod("getDeclaredMethod", String.class, Class[].class));
            } catch (NoSuchMethodException exception) {
                // Unreachable on a conforming JVM; failing fast keeps the constants final.
                throw new IllegalStateException("Could not locate method lookup", exception);
            }
        }

        /**
         * Creates a new {@link net.bytebuddy.implementation.bytecode.constant.MethodConstant} for
         * creating a {@link java.lang.reflect.Method} instance.
         *
         * @param methodDescription The method to be loaded onto the stack.
         */
        protected ForMethod(MethodDescription.InDefinedShape methodDescription) {
            super(methodDescription);
        }

        @Override
        protected StackManipulation methodName() {
            return new TextConstant(methodDescription.getInternalName());
        }

        @Override
        protected MethodDescription.InDefinedShape accessorMethod() {
            // Public methods are found via getMethod; everything else requires getDeclaredMethod.
            return methodDescription.isPublic()
                    ? GET_METHOD
                    : GET_DECLARED_METHOD;
        }

        @Override
        public StackManipulation cached() {
            return new CachedMethod(this);
        }
    }

    /**
     * Creates a {@link net.bytebuddy.implementation.bytecode.constant.MethodConstant} for loading
     * a {@link java.lang.reflect.Constructor} instance onto the operand stack.
     */
    protected static class ForConstructor extends MethodConstant implements CanCache {

        /**
         * The {@link Class#getConstructor(Class[])} method.
         */
        private static final MethodDescription.InDefinedShape GET_CONSTRUCTOR;

        /**
         * The {@link Class#getDeclaredConstructor(Class[])} method.
         */
        private static final MethodDescription.InDefinedShape GET_DECLARED_CONSTRUCTOR;

        /*
         * Looks up the method used for creating the manipulation.
         */
        static {
            try {
                GET_CONSTRUCTOR = new MethodDescription.ForLoadedMethod(Class.class.getMethod("getConstructor", Class[].class));
                GET_DECLARED_CONSTRUCTOR = new MethodDescription.ForLoadedMethod(Class.class.getMethod("getDeclaredConstructor", Class[].class));
            } catch (NoSuchMethodException exception) {
                throw new IllegalStateException("Could not locate Class::getDeclaredConstructor", exception);
            }
        }

        /**
         * Creates a new {@link net.bytebuddy.implementation.bytecode.constant.MethodConstant} for
         * creating a {@link java.lang.reflect.Constructor} instance.
         *
         * @param methodDescription The constructor to be loaded onto the stack.
         */
        protected ForConstructor(MethodDescription.InDefinedShape methodDescription) {
            super(methodDescription);
        }

        @Override
        protected StackManipulation methodName() {
            // Constructor lookup takes no name argument, so nothing is pushed for the name slot.
            return Trivial.INSTANCE;
        }

        @Override
        protected MethodDescription.InDefinedShape accessorMethod() {
            return methodDescription.isPublic()
                    ? GET_CONSTRUCTOR
                    : GET_DECLARED_CONSTRUCTOR;
        }

        @Override
        public StackManipulation cached() {
            return new CachedConstructor(this);
        }
    }

    /**
     * Performs a privileged lookup of a method constant by using an {@link AccessController}.
     */
    protected static class PrivilegedLookup implements StackManipulation, CanCache {

        /**
         * The {@link AccessController#doPrivileged(PrivilegedExceptionAction)} method.
         */
        private static final MethodDescription.InDefinedShape DO_PRIVILEGED;

        /*
         * Locates the access controller's do privileged method.
         */
        static {
            try {
                DO_PRIVILEGED = new MethodDescription.ForLoadedMethod(AccessController.class.getMethod("doPrivileged", PrivilegedExceptionAction.class));
            } catch (NoSuchMethodException exception) {
                throw new IllegalStateException("Cannot locate AccessController::doPrivileged", exception);
            }
        }

        /**
         * The method constant to load.
         */
        private final MethodDescription.InDefinedShape methodDescription;

        /**
         * The stack manipulation for locating the method name.
         */
        private final StackManipulation methodName;

        /**
         * Creates a new privileged lookup.
         *
         * @param methodDescription The method constant to load.
         * @param methodName        The stack manipulation for locating the method name.
         */
        protected PrivilegedLookup(MethodDescription.InDefinedShape methodDescription, StackManipulation methodName) {
            this.methodDescription = methodDescription;
            this.methodName = methodName;
        }

        @Override
        public boolean isValid() {
            return methodName.isValid();
        }

        @Override
        public Size apply(MethodVisitor methodVisitor, Implementation.Context implementationContext) {
            // Registers an auxiliary PrivilegedExceptionAction type for this member, then emits:
            // new action; dup; push ctor args (declaring Class, name, Class[] of parameter types);
            // invoke the action's constructor; pass the action to AccessController.doPrivileged;
            // finally cast doPrivileged's Object result to Constructor or Method as appropriate.
            TypeDescription auxiliaryType = implementationContext.register(PrivilegedMemberLookupAction.of(methodDescription));
            return new Compound(
                    TypeCreation.of(auxiliaryType),
                    Duplication.SINGLE,
                    ClassConstant.of(methodDescription.getDeclaringType()),
                    methodName,
                    ArrayFactory.forType(TypeDescription.Generic.OfNonGenericType.CLASS)
                            .withValues(typeConstantsFor(methodDescription.getParameters().asTypeList().asErasures())),
                    MethodInvocation.invoke(auxiliaryType.getDeclaredMethods().filter(isConstructor()).getOnly()),
                    MethodInvocation.invoke(DO_PRIVILEGED),
                    TypeCasting.to(TypeDescription.ForLoadedType.of(methodDescription.isConstructor()
                            ? Constructor.class
                            : Method.class))
            ).apply(methodVisitor, implementationContext);
        }

        @Override
        public StackManipulation cached() {
            return methodDescription.isConstructor()
                    ? new CachedConstructor(this)
                    : new CachedMethod(this);
        }

        @Override
        public int hashCode() {
            // NOTE: methodName is deliberately excluded; equality is keyed on the described member only.
            return methodDescription.hashCode();
        }

        @Override
        public boolean equals(Object other) {
            if (this == other) {
                return true;
            } else if (other == null || getClass() != other.getClass()) {
                return false;
            }
            PrivilegedLookup privilegedLookup = (PrivilegedLookup) other;
            return methodDescription.equals(privilegedLookup.methodDescription);
        }
    }

    /**
     * Represents a cached method for a {@link net.bytebuddy.implementation.bytecode.constant.MethodConstant}.
     */
    protected static class CachedMethod implements StackManipulation {

        /**
         * A description of the {@link java.lang.reflect.Method} type.
         */
        private static final TypeDescription METHOD_TYPE = TypeDescription.ForLoadedType.of(Method.class);

        /**
         * The stack manipulation that is represented by this caching wrapper.
         */
        private final StackManipulation methodConstant;

        /**
         * Creates a new cached {@link net.bytebuddy.implementation.bytecode.constant.MethodConstant}.
         *
         * @param methodConstant The method constant to store in the field cache.
         */
        protected CachedMethod(StackManipulation methodConstant) {
            this.methodConstant = methodConstant;
        }

        @Override
        public boolean isValid() {
            return methodConstant.isValid();
        }

        @Override
        public Size apply(MethodVisitor methodVisitor, Implementation.Context implementationContext) {
            // Delegates the one-time lookup to a cache field managed by the implementation context
            // and emits a plain field read here.
            return FieldAccess.forField(implementationContext.cache(methodConstant, METHOD_TYPE))
                    .read()
                    .apply(methodVisitor, implementationContext);
        }

        @Override
        public int hashCode() {
            return methodConstant.hashCode();
        }

        @Override
        public boolean equals(Object other) {
            if (this == other) {
                return true;
            } else if (other == null || getClass() != other.getClass()) {
                return false;
            }
            CachedMethod cachedMethod = (CachedMethod) other;
            return methodConstant.equals(cachedMethod.methodConstant);
        }
    }

    /**
     * Represents a cached constructor for a {@link net.bytebuddy.implementation.bytecode.constant.MethodConstant}.
     */
    protected static class CachedConstructor implements StackManipulation {

        /**
         * A description of the {@link java.lang.reflect.Constructor} type.
         */
        private static final TypeDescription CONSTRUCTOR_TYPE = TypeDescription.ForLoadedType.of(Constructor.class);

        /**
         * The stack manipulation that is represented by this caching wrapper.
         */
        private final StackManipulation constructorConstant;

        /**
         * Creates a new cached {@link net.bytebuddy.implementation.bytecode.constant.MethodConstant}.
         *
         * @param constructorConstant The method constant to store in the field cache.
         */
        protected CachedConstructor(StackManipulation constructorConstant) {
            this.constructorConstant = constructorConstant;
        }

        @Override
        public boolean isValid() {
            return constructorConstant.isValid();
        }

        @Override
        public Size apply(MethodVisitor methodVisitor, Implementation.Context implementationContext) {
            // Same field-cache scheme as CachedMethod, but typed as java.lang.reflect.Constructor.
            return FieldAccess.forField(implementationContext.cache(constructorConstant, CONSTRUCTOR_TYPE))
                    .read()
                    .apply(methodVisitor, implementationContext);
        }

        @Override
        public int hashCode() {
            return constructorConstant.hashCode();
        }

        @Override
        public boolean equals(Object other) {
            if (this == other) {
                return true;
            } else if (other == null || getClass() != other.getClass()) {
                return false;
            }
            CachedConstructor cachedConstructor = (CachedConstructor) other;
            return constructorConstant.equals(cachedConstructor.constructorConstant);
        }
    }
}
/* * Copyright (c) 2010-2014 William Bittle http://www.dyn4j.org/ * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, are permitted * provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, this list of conditions * and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, this list of conditions * and the following disclaimer in the documentation and/or other materials provided with the * distribution. * * Neither the name of dyn4j nor the names of its contributors may be used to endorse or * promote products derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER * IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package org.dyn4j.sandbox.panels; import java.awt.Window; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.ItemEvent; import java.awt.event.ItemListener; import java.text.DecimalFormat; import java.text.MessageFormat; import javax.swing.BorderFactory; import javax.swing.GroupLayout; import javax.swing.JButton; import javax.swing.JCheckBox; import javax.swing.JComboBox; import javax.swing.JFormattedTextField; import javax.swing.JLabel; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JToggleButton; import javax.swing.border.TitledBorder; import org.dyn4j.dynamics.joint.Joint; import org.dyn4j.dynamics.joint.RevoluteJoint; import org.dyn4j.geometry.Vector2; import org.dyn4j.sandbox.SandboxBody; import org.dyn4j.sandbox.icons.Icons; import org.dyn4j.sandbox.listeners.SelectTextFocusListener; import org.dyn4j.sandbox.resources.Messages; import org.dyn4j.sandbox.utilities.ControlUtilities; /** * Panel used to create or edit an revolute joint. 
* @author William Bittle * @version 1.0.1 * @since 1.0.0 */ public class RevoluteJointPanel extends JointPanel implements InputPanel, ActionListener, ItemListener { /** The version id */ private static final long serialVersionUID = 8812128051146951491L; /** The body 1 drop down label */ private JLabel lblBody1; /** The body 2 drop down label */ private JLabel lblBody2; /** The body 1 drop down */ private JComboBox cmbBody1; /** The body 2 drop down */ private JComboBox cmbBody2; // reference angle /** The reference angle label */ private JLabel lblReferenceAngle; /** The reference angle text field */ private JFormattedTextField txtReferenceAngle; /** The reference angle auto compute button */ private JToggleButton tglReferenceAngle; /** The button used to reset the reference angle (only used in edit mode) */ private JButton btnResetReferenceAngle; // anchor points /** The anchor label */ private JLabel lblAnchor; /** The x label for the anchor point */ private JLabel lblX1; /** The y label for the anchor point */ private JLabel lblY1; /** The anchor's x text field */ private JFormattedTextField txtX1; /** The anchor's y text field */ private JFormattedTextField txtY1; /** The button to set anchor1 to body1's center of mass */ private JButton btnUseCenter1; /** The button to set anchor2 to body2's center of mass */ private JButton btnUseCenter2; // limits /** The limit enabled label */ private JLabel lblLimitEnabled; /** The limit enable check box */ private JCheckBox chkLimitEnabled; /** The upper limit label */ private JLabel lblUpperLimit; /** The lower limit label */ private JLabel lblLowerLimit; /** The upper limit text field */ private JFormattedTextField txtUpperLimit; /** The lower limit text field */ private JFormattedTextField txtLowerLimit; // motor /** The motor enabled label */ private JLabel lblMotorEnabled; /** The motor enabled check box */ private JCheckBox chkMotorEnabled; /** The motor speed label */ private JLabel lblMotorSpeed; /** The motor 
speed text field */ private JFormattedTextField txtMotorSpeed; /** The max motor force label */ private JLabel lblMaxMotorTorque; /** The max motor force text field */ private JFormattedTextField txtMaxMotorTorque; /** * Full constructor. * @param joint the original joint; null if creating * @param bodies the list of bodies to choose from * @param edit true if the joint is being edited */ public RevoluteJointPanel(RevoluteJoint joint, SandboxBody[] bodies, boolean edit) { super(); // get initial values String name = (String)joint.getUserData(); boolean collision = joint.isCollisionAllowed(); SandboxBody b1 = (SandboxBody)joint.getBody1(); SandboxBody b2 = (SandboxBody)joint.getBody2(); Vector2 an = joint.getAnchor1(); boolean limit = joint.isLimitEnabled(); boolean motor = joint.isMotorEnabled(); double ul = joint.getUpperLimit(); double ll = joint.getLowerLimit(); double ms = joint.getMotorSpeed(); double mt = joint.getMaximumMotorTorque(); double ref = joint.getReferenceAngle(); // set the super classes defaults this.txtName.setText(name); this.txtName.setColumns(15); this.chkCollision.setSelected(collision); this.lblBody1 = new JLabel(Messages.getString("panel.joint.body1"), Icons.INFO, JLabel.LEFT); this.lblBody2 = new JLabel(Messages.getString("panel.joint.body2"), Icons.INFO, JLabel.LEFT); this.lblBody1.setToolTipText(Messages.getString("panel.joint.body1.tooltip")); this.lblBody2.setToolTipText(Messages.getString("panel.joint.body2.tooltip")); this.cmbBody1 = new JComboBox(bodies); this.cmbBody2 = new JComboBox(bodies); this.lblAnchor = new JLabel(Messages.getString("panel.joint.anchor"), Icons.INFO, JLabel.LEFT); this.lblAnchor.setToolTipText(Messages.getString("panel.joint.revolute.anchor.tooltip")); this.lblX1 = new JLabel(Messages.getString("x")); this.lblY1 = new JLabel(Messages.getString("y")); this.txtX1 = new JFormattedTextField(new DecimalFormat(Messages.getString("panel.joint.anchor.format"))); this.txtX1.addFocusListener(new 
SelectTextFocusListener(this.txtX1)); this.txtX1.setColumns(7); this.txtY1 = new JFormattedTextField(new DecimalFormat(Messages.getString("panel.joint.anchor.format"))); this.txtY1.addFocusListener(new SelectTextFocusListener(this.txtY1)); this.txtY1.setColumns(7); this.btnUseCenter1 = new JButton(Messages.getString("panel.joint.useCenter")); this.btnUseCenter1.setToolTipText(Messages.getString("panel.joint.useCenter.tooltip")); this.btnUseCenter1.setActionCommand("use-com1"); this.btnUseCenter1.addActionListener(this); this.btnUseCenter2 = new JButton(Messages.getString("panel.joint.useCenter")); this.btnUseCenter2.setToolTipText(Messages.getString("panel.joint.useCenter.tooltip")); this.btnUseCenter2.setActionCommand("use-com2"); this.btnUseCenter2.addActionListener(this); this.lblReferenceAngle = new JLabel(Messages.getString("panel.joint.referenceAngle"), Icons.INFO, JLabel.LEFT); this.lblReferenceAngle.setToolTipText(MessageFormat.format(Messages.getString("panel.joint.referenceAngle.tooltip"), Messages.getString("unit.rotation"))); this.txtReferenceAngle = new JFormattedTextField(new DecimalFormat(Messages.getString("panel.joint.referenceAngle.format"))); this.txtReferenceAngle.addFocusListener(new SelectTextFocusListener(this.txtReferenceAngle)); this.txtReferenceAngle.setValue(Math.toDegrees(ref)); this.tglReferenceAngle = new JToggleButton(Messages.getString("panel.joint.referenceAngle.autoCompute")); this.tglReferenceAngle.setToolTipText(Messages.getString("panel.joint.referenceAngle.autoCompute.tooltip")); this.tglReferenceAngle.setActionCommand("toggle-auto-compute"); this.tglReferenceAngle.setSelected(true); this.btnResetReferenceAngle = new JButton(Messages.getString("panel.joint.referenceAngle.reset")); this.btnResetReferenceAngle.setToolTipText(Messages.getString("panel.joint.referenceAngle.reset.tooltip")); this.btnResetReferenceAngle.setActionCommand("reset-reference-angle"); this.lblLimitEnabled = new 
JLabel(Messages.getString("panel.joint.limitsEnabled"), Icons.INFO, JLabel.LEFT); this.lblLimitEnabled.setToolTipText(Messages.getString("panel.joint.limitsEnabled.tooltip")); this.chkLimitEnabled = new JCheckBox(); this.lblUpperLimit = new JLabel(Messages.getString("panel.joint.upperLimit"), Icons.INFO, JLabel.LEFT); this.lblUpperLimit.setToolTipText(MessageFormat.format(Messages.getString("panel.joint.upperLimit.tooltip"), Messages.getString("unit.rotation"))); this.txtUpperLimit = new JFormattedTextField(new DecimalFormat(Messages.getString("panel.joint.revolute.upperLimit.format"))); this.txtUpperLimit.addFocusListener(new SelectTextFocusListener(this.txtUpperLimit)); this.txtUpperLimit.setColumns(8); this.lblLowerLimit = new JLabel(Messages.getString("panel.joint.lowerLimit"), Icons.INFO, JLabel.LEFT); this.lblLowerLimit.setToolTipText(MessageFormat.format(Messages.getString("panel.joint.lowerLimit.tooltip"), Messages.getString("unit.rotation"))); this.txtLowerLimit = new JFormattedTextField(new DecimalFormat(Messages.getString("panel.joint.revolute.lowerLimit.format"))); this.txtLowerLimit.addFocusListener(new SelectTextFocusListener(this.txtLowerLimit)); this.txtLowerLimit.setColumns(8); this.lblMotorEnabled = new JLabel(Messages.getString("panel.joint.motorEnabled"), Icons.INFO, JLabel.LEFT); this.lblMotorEnabled.setToolTipText(Messages.getString("panel.joint.revolute.motorEnabled.tooltip")); this.chkMotorEnabled = new JCheckBox(); this.lblMotorSpeed = new JLabel(Messages.getString("panel.joint.motorSpeed"), Icons.INFO, JLabel.LEFT); this.lblMotorSpeed.setToolTipText(MessageFormat.format(Messages.getString("panel.joint.motorSpeed.tooltip"), Messages.getString("unit.velocity.angular"))); this.txtMotorSpeed = new JFormattedTextField(new DecimalFormat(Messages.getString("panel.joint.revolute.motorSpeed.format"))); this.txtMotorSpeed.addFocusListener(new SelectTextFocusListener(this.txtMotorSpeed)); this.lblMaxMotorTorque = new 
JLabel(Messages.getString("panel.joint.motorMaximumTorque"), Icons.INFO, JLabel.LEFT); this.lblMaxMotorTorque.setToolTipText(MessageFormat.format(Messages.getString("panel.joint.motorMaximumTorque.tooltip"), Messages.getString("unit.torque"))); this.txtMaxMotorTorque = new JFormattedTextField(new DecimalFormat(Messages.getString("panel.joint.revolute.motorMaximumTorque.format"))); this.txtMaxMotorTorque.addFocusListener(new SelectTextFocusListener(this.txtMaxMotorTorque)); // set defaults this.cmbBody1.setSelectedItem(b1); this.cmbBody2.setSelectedItem(b2); this.txtX1.setValue(an.x); this.txtY1.setValue(an.y); this.chkLimitEnabled.setSelected(limit); this.txtUpperLimit.setValue(Math.toDegrees(ul)); this.txtLowerLimit.setValue(Math.toDegrees(ll)); this.chkMotorEnabled.setSelected(motor); this.txtMaxMotorTorque.setValue(mt); this.txtMotorSpeed.setValue(Math.toDegrees(ms)); // setup edit mode if necessary if (edit) { // disable/hide certain controls this.cmbBody1.setEnabled(false); this.cmbBody2.setEnabled(false); this.txtX1.setEnabled(false); this.txtY1.setEnabled(false); this.btnUseCenter1.setEnabled(false); this.btnUseCenter2.setEnabled(false); this.tglReferenceAngle.setVisible(false); } else { this.btnResetReferenceAngle.setVisible(false); } // add listeners after all the values have been set // this will preserve the initial values this.cmbBody1.addItemListener(this); this.cmbBody2.addItemListener(this); this.tglReferenceAngle.addActionListener(this); this.btnResetReferenceAngle.addActionListener(this); // setup the sections GroupLayout layout; // setup the general section JPanel pnlGeneral = new JPanel(); TitledBorder border = BorderFactory.createTitledBorder(BorderFactory.createEtchedBorder(), Messages.getString("panel.section.general")); border.setTitlePosition(TitledBorder.TOP); pnlGeneral.setBorder(border); layout = new GroupLayout(pnlGeneral); pnlGeneral.setLayout(layout); layout.setAutoCreateContainerGaps(true); layout.setAutoCreateGaps(true); 
layout.setHorizontalGroup(layout.createSequentialGroup() .addGroup(layout.createParallelGroup() .addComponent(this.lblName) .addComponent(this.lblCollision) .addComponent(this.lblBody1) .addComponent(this.lblBody2) .addComponent(this.lblAnchor) .addComponent(this.lblReferenceAngle)) .addGroup(layout.createParallelGroup() .addComponent(this.txtName) .addComponent(this.chkCollision) .addGroup(layout.createSequentialGroup() .addComponent(this.cmbBody1) .addComponent(this.btnUseCenter1)) .addGroup(layout.createSequentialGroup() .addComponent(this.cmbBody2) .addComponent(this.btnUseCenter2)) .addGroup(layout.createSequentialGroup() .addComponent(this.txtX1) .addComponent(this.lblX1) .addComponent(this.txtY1) .addComponent(this.lblY1)) .addGroup(layout.createSequentialGroup() .addComponent(this.txtReferenceAngle) .addComponent(this.tglReferenceAngle) .addComponent(this.btnResetReferenceAngle)))); layout.setVerticalGroup(layout.createSequentialGroup() .addGroup(layout.createParallelGroup(GroupLayout.Alignment.CENTER) .addComponent(this.lblName) .addComponent(this.txtName, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)) .addGroup(layout.createParallelGroup(GroupLayout.Alignment.CENTER) .addComponent(this.lblCollision) .addComponent(this.chkCollision, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)) .addGroup(layout.createParallelGroup(GroupLayout.Alignment.CENTER) .addComponent(this.lblBody1) .addComponent(this.cmbBody1, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE) .addComponent(this.btnUseCenter1, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)) .addGroup(layout.createParallelGroup(GroupLayout.Alignment.CENTER) .addComponent(this.lblBody2) .addComponent(this.cmbBody2, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE) .addComponent(this.btnUseCenter2, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, 
GroupLayout.PREFERRED_SIZE))
				// row: anchor point label plus the x/y coordinate fields
				.addGroup(layout.createParallelGroup(GroupLayout.Alignment.CENTER)
						.addComponent(this.lblAnchor)
						.addComponent(this.txtX1, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)
						.addComponent(this.lblX1)
						.addComponent(this.txtY1, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)
						.addComponent(this.lblY1))
				// row: reference angle field, auto-compute toggle, and reset button
				.addGroup(layout.createParallelGroup(GroupLayout.Alignment.CENTER)
						.addComponent(this.lblReferenceAngle)
						.addComponent(this.txtReferenceAngle, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)
						.addComponent(this.tglReferenceAngle, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)
						.addComponent(this.btnResetReferenceAngle, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)));
		
		// setup the limits section
		JPanel pnlLimits = new JPanel();
		border = BorderFactory.createTitledBorder(BorderFactory.createEtchedBorder(), Messages.getString("panel.joint.section.limits"));
		border.setTitlePosition(TitledBorder.TOP);
		pnlLimits.setBorder(border);
		
		layout = new GroupLayout(pnlLimits);
		pnlLimits.setLayout(layout);
		layout.setAutoCreateContainerGaps(true);
		layout.setAutoCreateGaps(true);
		// two columns: labels on the left, editors on the right
		layout.setHorizontalGroup(layout.createSequentialGroup()
				.addGroup(layout.createParallelGroup()
						.addComponent(this.lblLimitEnabled)
						.addComponent(this.lblLowerLimit)
						.addComponent(this.lblUpperLimit))
				.addGroup(layout.createParallelGroup()
						.addComponent(this.chkLimitEnabled)
						.addComponent(this.txtLowerLimit)
						.addComponent(this.txtUpperLimit)));
		layout.setVerticalGroup(layout.createSequentialGroup()
				.addGroup(layout.createParallelGroup(GroupLayout.Alignment.CENTER)
						.addComponent(this.lblLimitEnabled)
						.addComponent(this.chkLimitEnabled))
				.addGroup(layout.createParallelGroup(GroupLayout.Alignment.CENTER)
						.addComponent(this.lblLowerLimit)
						.addComponent(this.txtLowerLimit, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE))
				.addGroup(layout.createParallelGroup(GroupLayout.Alignment.CENTER)
						.addComponent(this.lblUpperLimit)
						.addComponent(this.txtUpperLimit, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)));
		
		// setup the motor section
		JPanel pnlMotor = new JPanel();
		border = BorderFactory.createTitledBorder(BorderFactory.createEtchedBorder(), Messages.getString("panel.joint.section.motor"));
		border.setTitlePosition(TitledBorder.TOP);
		pnlMotor.setBorder(border);
		
		layout = new GroupLayout(pnlMotor);
		pnlMotor.setLayout(layout);
		layout.setAutoCreateContainerGaps(true);
		layout.setAutoCreateGaps(true);
		layout.setHorizontalGroup(layout.createSequentialGroup()
				.addGroup(layout.createParallelGroup()
						.addComponent(this.lblMotorEnabled)
						.addComponent(this.lblMotorSpeed)
						.addComponent(this.lblMaxMotorTorque))
				.addGroup(layout.createParallelGroup()
						.addComponent(this.chkMotorEnabled)
						.addComponent(this.txtMotorSpeed)
						.addComponent(this.txtMaxMotorTorque)));
		layout.setVerticalGroup(layout.createSequentialGroup()
				.addGroup(layout.createParallelGroup(GroupLayout.Alignment.CENTER)
						.addComponent(this.lblMotorEnabled)
						.addComponent(this.chkMotorEnabled))
				.addGroup(layout.createParallelGroup(GroupLayout.Alignment.CENTER)
						.addComponent(this.lblMotorSpeed)
						.addComponent(this.txtMotorSpeed, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE))
				.addGroup(layout.createParallelGroup(GroupLayout.Alignment.CENTER)
						.addComponent(this.lblMaxMotorTorque)
						.addComponent(this.txtMaxMotorTorque, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)));
		
		// setup the layout of the sections
		// the three titled panels are stacked vertically and stretched horizontally
		layout = new GroupLayout(this);
		this.setLayout(layout);
		layout.setAutoCreateContainerGaps(true);
		layout.setAutoCreateGaps(true);
		layout.setHorizontalGroup(layout.createParallelGroup()
				.addComponent(pnlGeneral)
				.addComponent(pnlLimits)
				.addComponent(pnlMotor));
		layout.setVerticalGroup(layout.createSequentialGroup()
				.addComponent(pnlGeneral)
				.addComponent(pnlLimits)
				.addComponent(pnlMotor));
	}
	
	/**
	 * Returns the computed reference angle between the two bodies.
	 * <p>
	 * This is the difference of the two selected bodies' current transform
	 * rotations (body1 - body2), in radians.
	 * @return double
	 */
	private double computeReferenceAngle() {
		double r1 = ((SandboxBody)this.cmbBody1.getSelectedItem()).getTransform().getRotation();
		double r2 = ((SandboxBody)this.cmbBody2.getSelectedItem()).getTransform().getRotation();
		return r1 - r2;
	}
	
	/* (non-Javadoc)
	 * @see java.awt.event.ActionListener#actionPerformed(java.awt.event.ActionEvent)
	 */
	@Override
	public void actionPerformed(ActionEvent e) {
		// "use-com1"/"use-com2" copy the chosen body's world center into the
		// anchor fields; both buttons target the same txtX1/txtY1 pair because
		// a revolute joint has a single anchor point.
		if ("use-com1".equals(e.getActionCommand())) {
			Vector2 c = ((SandboxBody)this.cmbBody1.getSelectedItem()).getWorldCenter();
			this.txtX1.setValue(c.x);
			this.txtY1.setValue(c.y);
		} else if ("use-com2".equals(e.getActionCommand())) {
			Vector2 c = ((SandboxBody)this.cmbBody2.getSelectedItem()).getWorldCenter();
			this.txtX1.setValue(c.x);
			this.txtY1.setValue(c.y);
		} else if ("reset-reference-angle".equals(e.getActionCommand())) {
			// the text field displays degrees; the computation returns radians
			this.txtReferenceAngle.setValue(Math.toDegrees(this.computeReferenceAngle()));
		} else if ("toggle-auto-compute".equals(e.getActionCommand())) {
			// if the state of the toggle button changes, check if its selected now, if so
			// then recompute the reference angle
			if (this.tglReferenceAngle.isSelected()) {
				this.txtReferenceAngle.setValue(Math.toDegrees(this.computeReferenceAngle()));
			}
		}
	}
	
	/* (non-Javadoc)
	 * @see java.awt.event.ItemListener#itemStateChanged(java.awt.event.ItemEvent)
	 */
	@Override
	public void itemStateChanged(ItemEvent e) {
		// when the items change in either drop down, check if the auto compute button is
		// selected, if so, then compute the reference angle
		if (this.tglReferenceAngle.isSelected()) {
			this.txtReferenceAngle.setValue(Math.toDegrees(this.computeReferenceAngle()));
		}
	}
	
	/* (non-Javadoc)
	 * @see org.dyn4j.sandbox.panels.JointPanel#setJoint(org.dyn4j.dynamics.joint.Joint)
	 */
	@Override
	public void setJoint(Joint joint) {
		// only the mutable properties are pushed onto an existing joint;
		// the bodies and the anchor point cannot be changed after creation
		if (joint instanceof RevoluteJoint) {
			RevoluteJoint rj = (RevoluteJoint)joint;
			// set the super class properties
			rj.setUserData(this.txtName.getText());
			rj.setCollisionAllowed(this.chkCollision.isSelected());
			// set the properties that can change
			// (all angular fields are edited in degrees, stored in radians)
			rj.setLimitEnabled(this.chkLimitEnabled.isSelected());
			rj.setLimits(
					Math.toRadians(ControlUtilities.getDoubleValue(this.txtLowerLimit)),
					Math.toRadians(ControlUtilities.getDoubleValue(this.txtUpperLimit)));
			rj.setMaximumMotorTorque(ControlUtilities.getDoubleValue(this.txtMaxMotorTorque));
			rj.setMotorEnabled(this.chkMotorEnabled.isSelected());
			rj.setMotorSpeed(Math.toRadians(ControlUtilities.getDoubleValue(this.txtMotorSpeed)));
			rj.setReferenceAngle(Math.toRadians(ControlUtilities.getDoubleValue(this.txtReferenceAngle)));
		}
	}
	
	/* (non-Javadoc)
	 * @see org.dyn4j.sandbox.panels.JointPanel#getJoint()
	 */
	@Override
	public Joint getJoint() {
		// get the selected bodies
		SandboxBody body1 = (SandboxBody)this.cmbBody1.getSelectedItem();
		SandboxBody body2 = (SandboxBody)this.cmbBody2.getSelectedItem();
		
		// get the anchor points
		Vector2 a = new Vector2(
				ControlUtilities.getDoubleValue(this.txtX1),
				ControlUtilities.getDoubleValue(this.txtY1));
		
		RevoluteJoint rj = new RevoluteJoint(body1, body2, a);
		// set the super class properties
		rj.setUserData(this.txtName.getText());
		rj.setCollisionAllowed(this.chkCollision.isSelected());
		// set the other properties
		// (all angular fields are edited in degrees, stored in radians)
		rj.setLimitEnabled(this.chkLimitEnabled.isSelected());
		rj.setLimits(
				Math.toRadians(ControlUtilities.getDoubleValue(this.txtLowerLimit)),
				Math.toRadians(ControlUtilities.getDoubleValue(this.txtUpperLimit)));
		rj.setMaximumMotorTorque(ControlUtilities.getDoubleValue(this.txtMaxMotorTorque));
		rj.setMotorEnabled(this.chkMotorEnabled.isSelected());
		rj.setMotorSpeed(Math.toRadians(ControlUtilities.getDoubleValue(this.txtMotorSpeed)));
		rj.setReferenceAngle(Math.toRadians(ControlUtilities.getDoubleValue(this.txtReferenceAngle)));
		
		return rj;
	}
	
	/* (non-Javadoc)
	 * @see org.dyn4j.sandbox.panels.InputPanel#isValidInput()
	 */
	@Override
	public boolean isValidInput() {
		// must have some name
		String name = this.txtName.getText();
		if (name == null || name.isEmpty()) {
			return false;
		}
		// they can't be the same body
		if (this.cmbBody1.getSelectedItem() == this.cmbBody2.getSelectedItem()) {
			return false;
		}
		// check the limit
		if (ControlUtilities.getDoubleValue(this.txtLowerLimit) > ControlUtilities.getDoubleValue(this.txtUpperLimit)) {
			return false;
		}
		// check the maximum motor torque
		if (ControlUtilities.getDoubleValue(this.txtMaxMotorTorque) < 0.0) {
			return false;
		}
		return true;
	}
	
	/* (non-Javadoc)
	 * @see org.dyn4j.sandbox.panels.InputPanel#showInvalidInputMessage(java.awt.Window)
	 */
	@Override
	public void showInvalidInputMessage(Window owner) {
		// mirrors isValidInput(): shows one dialog per failed check
		String name = this.txtName.getText();
		if (name == null || name.isEmpty()) {
			JOptionPane.showMessageDialog(owner, Messages.getString("panel.joint.missingName"), Messages.getString("panel.invalid.title"), JOptionPane.ERROR_MESSAGE);
		}
		// they can't be the same body
		if (this.cmbBody1.getSelectedItem() == this.cmbBody2.getSelectedItem()) {
			JOptionPane.showMessageDialog(owner, Messages.getString("panel.joint.sameBody"), Messages.getString("panel.invalid.title"), JOptionPane.ERROR_MESSAGE);
		}
		// check the limit
		if (ControlUtilities.getDoubleValue(this.txtLowerLimit) > ControlUtilities.getDoubleValue(this.txtUpperLimit)) {
			JOptionPane.showMessageDialog(owner, Messages.getString("panel.joint.invalidLimits"), Messages.getString("panel.invalid.title"), JOptionPane.ERROR_MESSAGE);
		}
		// check the maximum motor torque
		if (ControlUtilities.getDoubleValue(this.txtMaxMotorTorque) < 0.0) {
			JOptionPane.showMessageDialog(owner, Messages.getString("panel.joint.invalidMaximumMotorTorque"), Messages.getString("panel.invalid.title"), JOptionPane.ERROR_MESSAGE);
		}
	}
}
/*
 * Copyright 2013-2014 Richard M. Hightower
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.boon.core.reflection.fields;

import org.boon.Exceptions;

import java.lang.reflect.Field;
import java.lang.reflect.Method;

import static org.boon.Boon.sputs;

/**
 * A field-style accessor backed by a JavaBean getter/setter {@link Method}
 * pair rather than a reflective {@link Field}.
 * <p>
 * All reads go through {@link #getObject(Object)} (the getter) and all
 * writes through {@link #setObject(Object, Object)} (the setter); the
 * primitive-typed accessors simply unbox the result of those two methods.
 * {@link #getField()} therefore returns {@code null} — there is no backing
 * reflective field.
 */
public class PropertyField extends BaseField {

    /** Read accessor; invoked by {@link #getObject(Object)}. */
    final Method getter;
    /** Write accessor; invoked by {@link #setObject(Object, Object)}. */
    final Method setter;

    /**
     * Creates a property-backed field.
     * <p>
     * Note the parameter order: the setter comes before the getter, the
     * opposite of the order passed to {@code super}.
     *
     * @param name   the property name
     * @param setter the write accessor
     * @param getter the read accessor
     */
    public PropertyField( String name, Method setter, Method getter ) {
        super ( name, getter, setter);

        this.getter = getter;
        this.setter = setter;
    }

    /**
     * Reads the property from {@code obj} via the getter.
     * Failures are routed through {@link Exceptions#handle}.
     */
    @Override
    public Object getObject( Object obj ) {
        try {
            return getter.invoke ( obj );
        } catch ( Throwable e ) {
            return Exceptions.handle( Object.class, sputs( "unable to call getObject for property ", this.name,
                    "for class ", this.type ), e );
        }
    }

    /**
     * Writes the property on {@code obj} via the setter.
     * Silently does nothing when the property is read only.
     */
    @Override
    public final void setObject( Object obj, Object value ) {
        try {
            if (!isReadOnly()) setter.invoke ( obj, value );
        } catch ( Throwable e ) {
            Exceptions.handle( String.format( "You tried to modify property %s of %s for instance %s " +
                    "with set %s using %s, and this property read only status is %s", name,
                    obj.getClass().getSimpleName(), obj, value, name(), isReadOnly () ), e );
        }
    }

    // NOTE(review): the accessors below are inconsistent — boolean/int/short/char
    // route failures through Exceptions.handle while long/double/float/byte wrap
    // in RuntimeException. Left as-is because callers may depend on the exact
    // exception type; consider unifying.

    /** Reads the property and unboxes it as a {@code boolean}. */
    @Override
    public final boolean getBoolean( Object obj ) {
        try {
            return ( Boolean ) this.getObject ( obj );
        } catch ( Exception e ) {
            return Exceptions.handle( boolean.class, sputs( "unable to call getObject for property", this.name ), e );
        }
    }

    /** Reads the property and unboxes it as an {@code int}. */
    @Override
    public final int getInt( Object obj ) {
        try {
            return ( Integer ) this.getObject ( obj );
        } catch ( Exception e ) {
            return Exceptions.handle( int.class, sputs( "unable to call getObject for property", this.name ), e );
        }
    }

    /** Reads the property and unboxes it as a {@code short}. */
    @Override
    public final short getShort( Object obj ) {
        try {
            return ( Short ) this.getObject ( obj );
        } catch ( Exception e ) {
            return Exceptions.handle( short.class, sputs( "unable to call getObject for property", this.name ), e );
        }
    }

    /** Reads the property and unboxes it as a {@code char}. */
    @Override
    public final char getChar( Object obj ) {
        try {
            return ( Character ) this.getObject ( obj );
        } catch ( Exception e ) {
            return Exceptions.handle( char.class, sputs( "unable to call getObject for property", this.name ), e );
        }
    }

    /** Reads the property and unboxes it as a {@code long}. */
    @Override
    public final long getLong( Object obj ) {
        try {
            return ( Long ) this.getObject ( obj );
        } catch ( Exception e ) {
            throw new RuntimeException( e );
        }
    }

    /** Reads the property and unboxes it as a {@code double}. */
    @Override
    public final double getDouble( Object obj ) {
        try {
            return ( Double ) this.getObject ( obj );
        } catch ( Exception e ) {
            throw new RuntimeException( e );
        }
    }

    /** Reads the property and unboxes it as a {@code float}. */
    @Override
    public final float getFloat( Object obj ) {
        try {
            return ( Float ) this.getObject ( obj );
        } catch ( Exception e ) {
            throw new RuntimeException( e );
        }
    }

    /** Reads the property and unboxes it as a {@code byte}. */
    @Override
    public final byte getByte( Object obj ) {
        try {
            return ( Byte ) this.getObject ( obj );
        } catch ( Exception e ) {
            throw new RuntimeException( e );
        }
    }

    /** Always {@code null}: a property pair has no backing reflective field. */
    @Override
    public final Field getField() {
        return null;
    }

    /** No-op: static values are not supported for getter/setter properties. */
    @Override
    public void setStaticValue(Object newValue) {
    }

    /** Boxes and writes a {@code boolean} via {@link #setObject(Object, Object)}. */
    @Override
    public final void setBoolean( Object obj, boolean value ) {
        try {
            this.setObject( obj, value );
        } catch ( Exception e ) {
            throw new RuntimeException( e );
        }
    }

    /** Boxes and writes an {@code int} via {@link #setObject(Object, Object)}. */
    @Override
    public final void setInt( Object obj, int value ) {
        try {
            this.setObject( obj, value );
        } catch ( Exception e ) {
            throw new RuntimeException( e );
        }
    }

    /** Boxes and writes a {@code short} via {@link #setObject(Object, Object)}. */
    @Override
    public final void setShort( Object obj, short value ) {
        try {
            this.setObject( obj, value );
        } catch ( Exception e ) {
            throw new RuntimeException( e );
        }
    }

    /** Boxes and writes a {@code char} via {@link #setObject(Object, Object)}. */
    @Override
    public final void setChar( Object obj, char value ) {
        try {
            this.setObject( obj, value );
        } catch ( Exception e ) {
            throw new RuntimeException( e );
        }
    }

    /** Boxes and writes a {@code long} via {@link #setObject(Object, Object)}. */
    @Override
    public final void setLong( Object obj, long value ) {
        try {
            this.setObject( obj, value );
        } catch ( Exception e ) {
            throw new RuntimeException( e );
        }
    }

    /** Boxes and writes a {@code double} via {@link #setObject(Object, Object)}. */
    @Override
    public final void setDouble( Object obj, double value ) {
        try {
            this.setObject( obj, value );
        } catch ( Exception e ) {
            throw new RuntimeException( e );
        }
    }

    /** Boxes and writes a {@code float} via {@link #setObject(Object, Object)}. */
    @Override
    public final void setFloat( Object obj, float value ) {
        try {
            this.setObject( obj, value );
        } catch ( Exception e ) {
            throw new RuntimeException( e );
        }
    }

    /** Boxes and writes a {@code byte} via {@link #setObject(Object, Object)}. */
    @Override
    public final void setByte( Object obj, byte value ) {
        try {
            this.setObject ( obj, value );
        } catch ( Exception e ) {
            throw new RuntimeException( e );
        }
    }
}
/** * Copyright (c) 2013, impossibl.com * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of impossibl.com nor the names of its contributors may * be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
 */
package com.impossibl.postgres.system.procs;

import com.impossibl.postgres.system.Context;
import com.impossibl.postgres.types.Modifiers;
import com.impossibl.postgres.types.PrimitiveType;
import com.impossibl.postgres.types.Type;

import static com.impossibl.postgres.system.Settings.FIELD_VARYING_LENGTH_MAX;
import static com.impossibl.postgres.types.Modifiers.LENGTH;
import static com.impossibl.postgres.types.PrimitiveType.String;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import static java.lang.Math.min;

import io.netty.buffer.ByteBuf;

/**
 * Text- and binary-protocol codecs for PostgreSQL's textual types
 * (text, varchar, bpchar, char, enum, json, cstring, citext, unknown).
 */
public class Strings extends SimpleProcProvider {

  public static final BinDecoder BINARY_DECODER = new BinDecoder();
  public static final BinEncoder BINARY_ENCODER = new BinEncoder();

  public Strings() {
    // registers the four codecs (text/binary x encode/decode) and the
    // modifier parser for every textual proc-name prefix listed here
    super(new TxtEncoder(), new TxtDecoder(), BINARY_ENCODER, BINARY_DECODER, new ModParser(), "text", "varchar", "bpchar", "char", "enum_", "json_", "cstring_", "citext", "unknown");
  }

  /** Binary-format decoder: length-prefixed bytes, decoded with the connection charset. */
  public static class BinDecoder extends BinaryDecoder {

    @Override
    public PrimitiveType getInputPrimitiveType() {
      return String;
    }

    @Override
    public Class<?> getOutputType() {
      return String.class;
    }

    @Override
    public String decode(Type type, Short typeLength, Integer typeModifier, ByteBuf buffer, Context context) throws IOException {

      // 4-byte length prefix; -1 marks SQL NULL
      int length = buffer.readInt();
      if (length == -1) {
        return null;
      }

      byte[] bytes;

      // honor the optional max-field-length setting by truncating the
      // decoded value — but the full field is always consumed from the
      // buffer (readBytes + skipBytes) so the stream stays aligned
      Integer maxLength = (Integer) context.getSetting(FIELD_VARYING_LENGTH_MAX);
      if (maxLength != null) {
        bytes = new byte[min(maxLength, length)];
      }
      else {
        bytes = new byte[length];
      }

      buffer.readBytes(bytes);
      buffer.skipBytes(length - bytes.length);

      return new String(bytes, context.getCharset());
    }

  }

  /** Binary-format encoder: writes a 4-byte length followed by charset-encoded bytes. */
  public static class BinEncoder extends BinaryEncoder {

    @Override
    public Class<?> getInputType() {
      return String.class;
    }

    @Override
    public PrimitiveType getOutputPrimitiveType() {
      return String;
    }

    // encodes via toString(), so any non-null value type is accepted
    byte[] toBytes(Object val, Context context) {
      return val.toString().getBytes(context.getCharset());
    }

    @Override
    public void encode(Type type, ByteBuf buffer, Object val, Context context) throws IOException {

      if (val == null) {
        // SQL NULL is encoded as length -1 with no payload
        buffer.writeInt(-1);
      }
      else {
        byte[] bytes = toBytes(val, context);
        buffer.writeInt(bytes.length);
        buffer.writeBytes(bytes);
      }

    }

    @Override
    public int length(Type type, Object val, Context context) throws IOException {
      // 4 bytes for the length prefix, plus the encoded payload when non-null
      return val == null ? 4 : 4 + toBytes(val, context).length;
    }

  }

  /** Text-format decoder: the wire text already is the value. */
  public static class TxtDecoder extends TextDecoder {

    @Override
    public PrimitiveType getInputPrimitiveType() {
      return String;
    }

    @Override
    public Class<?> getOutputType() {
      return String.class;
    }

    @Override
    public String decode(Type type, Short typeLength, Integer typeModifier, CharSequence buffer, Context context) throws IOException {
      return buffer.toString();
    }

  }

  /** Text-format encoder: accepts {@code String} or {@code byte[]} values only. */
  public static class TxtEncoder extends TextEncoder {

    @Override
    public Class<?> getInputType() {
      return String.class;
    }

    @Override
    public PrimitiveType getOutputPrimitiveType() {
      return String;
    }

    @Override
    public void encode(Type type, StringBuilder buffer, Object val, Context context) throws IOException {

      if (val instanceof String) {
        buffer.append((String)val);
      }
      else if (val.getClass() == byte[].class) {
        // raw bytes are first decoded with the connection charset
        buffer.append(new String((byte[]) val, context.getCharset()));
      }
      else {
        // unlike the binary encoder, arbitrary types are rejected here
        throw new IOException(val.getClass() + " cannot be encoded as a String");
      }

    }

  }

  /** Parses the type modifier into a declared-LENGTH entry. */
  static class ModParser implements Modifiers.Parser {

    @Override
    public Map<String, Object> parse(long mod) {

      Map<String, Object> mods = new HashMap<String, Object>();

      // NOTE(review): the "- 4" suggests the modifier stores the declared
      // length offset by a 4-byte header, with values <= 4 meaning "no
      // declared length" — confirm against the server's typmod encoding
      if (mod > 4) {
        mods.put(LENGTH, (int)(mod - 4));
      }

      return mods;
    }

  }

}
/*
 * This file is part of Locutil2.
 *
 * Copyright (c) 2007 Thomas King <king@informatik.uni-mannheim.de>,
 * University of Mannheim, Germany
 *
 * All rights reserved.
 *
 * Loclib is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * Loclib is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with Loclib; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
 */
package org.pi4.locutil;

import java.io.Serializable;
import java.util.StringTokenizer;

/**
 * A geographical position: x/y/z coordinates plus an optional orientation
 * in degrees. Unset components are represented by {@link Double#NaN}.
 *
 * @author faerber
 * @author abiskop
 * @author koelsch
 */
public class GeoPosition implements Comparable, Serializable {
	private static final long serialVersionUID = 7318226428301060903L;

	// x/y/z coordinates; NaN marks an unset coordinate
	double[] coords;
	// 0 <= orientation < 360, or NaN when unset
	double orientation; // in degrees

	/** Creates an uninitialized position: all coordinates and the orientation are NaN. */
	public GeoPosition() {
		coords = new double[] { Double.NaN, Double.NaN, Double.NaN };
		orientation = Double.NaN;
	}

	/**
	 * Creates a new <code>GeoPosition</code> with the specified X-/Y-coordinates.
	 * Altitude (Z-coordinate) is set to 0.0. Orientation is set to Double.NaN.
	 *
	 * @param x
	 * @param y
	 */
	public GeoPosition(double x, double y) {
		coords = new double[] {x, y, 0.0};
		orientation = Double.NaN;
	}

	/**
	 * Creates a new <code>GeoPosition</code> with the specified coordinates.
	 * Orientation is set to Double.NaN.
	 *
	 * @param x
	 * @param y
	 * @param z
	 */
	public GeoPosition(double x, double y, double z) {
		coords = new double[] { x, y, z };
		orientation = Double.NaN;
	}

	/**
	 * Creates an instance of <code>GeoPosition</code>.
	 *
	 * @param x x-coord
	 * @param y y-coord
	 * @param z z-coord
	 * @param orientation must be greater than or equal to 0 and less than 360,
	 *                    or NaN for "unset"
	 * @throws IllegalArgumentException if orientation is out of range
	 */
	public GeoPosition(double x, double y, double z, double orientation) {
		this(x, y, z);
		// NaN is explicitly allowed here (meaning "no orientation")
		if (((orientation < 0.0) || (orientation >= 360.0)) && (!Double.isNaN(orientation)))
			throw new IllegalArgumentException("Orientation must be 0 <= x < 360 degrees.");
		this.orientation = orientation;
	}

	/**
	 * Creates an instance of <code>GeoPosition</code> based on a given string. The string format
	 * is "x.x y.y z.z orientation.orientation" or "x.x y.y z.z" or "x.x,y.y,z.z,orientation.orientation" or
	 * "x.x,y.y,z.z". A literal "NaN" orientation token is treated as unset.
	 *
	 * @param pos String that represents a GeoPosition
	 * @return GeoPosition object
	 * @throws IllegalArgumentException if the string has neither 3 nor 4 tokens
	 */
	public static GeoPosition parse(String pos) {
		// tokens are separated either by commas or by whitespace
		StringTokenizer st;
		if (pos.contains(",")) {
			st = new StringTokenizer(pos, ",");
		} else {
			st = new StringTokenizer(pos);
		}
		if ((st.countTokens() == 3) || (st.countTokens() == 4)) {
			double x = Double.parseDouble(st.nextToken());
			double y = Double.parseDouble(st.nextToken());
			double z = Double.parseDouble(st.nextToken());
			double orientation = Double.NaN;
			if (st.hasMoreTokens()) {
				String currentToken = (st.nextToken()).trim();
				if (! currentToken.equals("NaN")) {
					orientation = Double.parseDouble(currentToken);
				}
			}
			return new GeoPosition(x, y, z, orientation);
		} else {
			throw new IllegalArgumentException("Given string does not represent a GeoPosition.");
		}
	}

	/**
	 * Returns the X-coordinate of this <code>GeoPosition</code> object.
	 *
	 * @return X-coordinate
	 */
	public double getX() {
		return coords[0];
	}

	/**
	 * Returns the Y-coordinate of this <code>GeoPosition</code> object.
	 *
	 * @return Y-coordinate
	 */
	public double getY() {
		return coords[1];
	}

	/**
	 * Returns the Z-coordinate of this <code>GeoPosition</code> object.
	 *
	 * @return Z-coordinate
	 */
	public double getZ() {
		return coords[2];
	}

	/**
	 * Returns the orientation
	 *
	 * @return orientation, greater than or equal to 0 and less than 360, or NaN
	 */
	public double getOrientation() {
		return orientation;
	}

	/**
	 * Sets the X-coordinate of this <code>GeoPosition</code> object to the specified value.
	 *
	 * @param x the new X-coordinate
	 */
	public void setX(double x) {
		coords[0] = x;
	}

	/**
	 * Sets the Y-coordinate of this <code>GeoPosition</code> object to the specified value.
	 *
	 * @param y the new Y-coordinate
	 */
	public void setY(double y) {
		coords[1] = y;
	}

	/**
	 * Sets the Z-coordinate of this <code>GeoPosition</code> object to the specified value.
	 *
	 * @param z the new Z-coordinate
	 */
	public void setZ(double z) {
		coords[2] = z;
	}

	/**
	 * Sets the orientation to the specified value. NaN (meaning "unset") is accepted,
	 * because NaN comparisons are always false.
	 *
	 * @param orientation must be greater than or equal to 0 and less than 360
	 * @throws IllegalArgumentException if orientation is out of range
	 */
	public void setOrientation(double orientation) {
		if ((orientation < 0.0) || (orientation >= 360.0))
			throw new IllegalArgumentException("Orientation must be 0 <= x < 360 degrees.");
		this.orientation = orientation;
	}

	/**
	 * Check if GeoPosition is initialized or not. A GeoPosition is initialized if and only if all coordinates (X, Y, Z)
	 * are not set to NaN.
	 * @return <code>true</code> if this object is initialized, <code>false</code> otherwise
	 */
	public boolean isInitialized() {
		if (Double.isNaN(coords[0]) && Double.isNaN(coords[1]) && Double.isNaN(coords[2]))
			return false;
		return true;
	}

	/**
	 * Double equality that, unlike {@code ==}, treats NaN as equal to NaN.
	 * This keeps {@link #equals(Object)} reflexive for positions with unset
	 * (NaN) components, which plain {@code ==} comparison would break.
	 */
	private static boolean same(double a, double b) {
		return (a == b) || (Double.isNaN(a) && Double.isNaN(b));
	}

	/**
	 * Determines whether two <code>GeoPosition</code>s are equal. Two positions are
	 * equal when every coordinate and the orientation match, where an unset (NaN)
	 * component matches an unset component.
	 *
	 * @param o the <code>GeoPosition</code> to compare this instance to
	 * @return <code>true</code> if equal, <code>false</code> otherwise
	 */
	public boolean equals(Object o) {
		if (o == null)
			return false;
		if (o.getClass() != this.getClass())
			return false;
		GeoPosition pos2 = (GeoPosition) o;
		// component-wise comparison with NaN == NaN; this generalizes the old
		// all-NaN special case and fixes reflexivity for partially-set positions
		return same(coords[0], pos2.coords[0])
				&& same(coords[1], pos2.coords[1])
				&& same(coords[2], pos2.coords[2])
				&& same(orientation, pos2.orientation);
	}

	/**
	 * Determines whether two <code>GeoPosition</code>s are equal by only considering the positions.
	 *
	 * @param o the <code>GeoPosition</code> to compare this instance to
	 * @return <code>true</code> if equal, <code>false</code> otherwise
	 */
	public boolean equalsWithoutOrientation(Object o) {
		if (o == null)
			return false;
		if (o.getClass() != this.getClass())
			return false;
		GeoPosition pos2 = (GeoPosition) o;
		return same(coords[0], pos2.coords[0])
				&& same(coords[1], pos2.coords[1])
				&& same(coords[2], pos2.coords[2]);
	}

	/**
	 * Returns a hashcode that encodes the coordinates and the orientation of this
	 * <code>GeoPosition</code> object as <code>Integer</code>. Collision-free only
	 * while the coordinates are integer values smaller than 128. NaN components cast
	 * to 0, so equal objects (including NaN-for-NaN matches) hash identically.
	 */
	public int hashCode() {
		int x = ((int) coords[0]) % 127;
		int y = ((int) coords[1]) % 127;
		int z = ((int) coords[2]) % 127;
		int bits29to23 = x << 23;
		int bits22to16 = y << 16;
		int bits15to9 = z << 9;
		int bits8to0 = ((int) orientation);
		return bits29to23 + bits22to16 + bits15to9 + bits8to0;
	}

	/**
	 * Returns the Euclidean distance between this <code>GeoPosition</code> and the argument.
	 */
	public double distance(GeoPosition pos2) {
		return Math.sqrt(
				(coords[0] - pos2.coords[0]) * (coords[0] - pos2.coords[0])
				+ (coords[1] - pos2.coords[1]) * (coords[1] - pos2.coords[1])
				+ (coords[2] - pos2.coords[2]) * (coords[2] - pos2.coords[2]));
	}

	/**
	 * Adds the coordinates to the current position and returns the newly calculated location.
	 * The orientation of the result is this position's orientation.
	 *
	 * @param pos
	 * @return the new position
	 */
	public GeoPosition addPosition(GeoPosition pos) {
		GeoPosition newPosition = new GeoPosition();
		newPosition.setX(getX() + pos.getX());
		newPosition.setY(getY() + pos.getY());
		newPosition.setZ(getZ() + pos.getZ());
		newPosition.setOrientation(getOrientation());
		return newPosition;
	}

	/**
	 * Multiplies the coordinates of a GeoPosition with a factor and returns
	 * the newly calculated location. The orientation is copied unscaled.
	 *
	 * @param factor
	 * @return the scaled position
	 */
	public GeoPosition stretch(double factor) {
		GeoPosition newPosition = new GeoPosition();
		newPosition.setX(getX() * factor);
		newPosition.setY(getY() * factor);
		newPosition.setZ(getZ() * factor);
		newPosition.setOrientation(getOrientation());
		return newPosition;
	}

	/**
	 * Returns the coordinates and orientation of this <code>GeoPosition</code> object as <code>String</code>.
	 *
	 * @return coordinates and orientation as String
	 */
	public String toString() {
		return "(" + coords[0] + ", " + coords[1] + ", " + coords[2] + ", " + orientation + ")";
	}

	/** Returns only the coordinates, omitting the orientation. */
	public String toStringWithoutOrientation() {
		return "(" + coords[0] + ", " + coords[1] + ", " + coords[2] + ")";
	}

	/**
	 * Lexicographic ordering over (x, y, z, orientation). NaN components compare
	 * as "equal" here because all NaN relational comparisons are false; this
	 * ordering is therefore not strictly consistent with equals for NaN values.
	 */
	public int compareTo(Object o) {
		if (o == null)
			return 1;
		if (o.getClass() != this.getClass())
			return 1;
		GeoPosition g = (GeoPosition) o;
		if (this.getX() < g.getX()) {
			return -1;
		} else if (this.getX() > g.getX()) {
			return 1;
		} else {
			if (this.getY() < g.getY()) {
				return -1;
			} else if (this.getY() > g.getY()) {
				return 1;
			} else {
				if (this.getZ() < g.getZ()) {
					return -1;
				} else if (this.getZ() > g.getZ()) {
					return 1;
				} else {
					if (this.getOrientation() < g.getOrientation()) {
						return -1;
					} else if (this.getOrientation() > g.getOrientation()) {
						return 1;
					} else {
						return 0;
					}
				}
			}
		}
	}

	/**
	 * Comparator-style helper with the same lexicographic ordering as
	 * {@link #compareTo(Object)}; nulls sort first.
	 */
	public int compare(Object o1, Object o2) {
		if (o1 == null)
			return -1;
		if (o2 == null)
			return 1;
		if (o1.getClass() != o2.getClass())
			return 1;
		GeoPosition g1 = (GeoPosition) o1;
		GeoPosition g2 = (GeoPosition) o2;
		if (g1.getX() < g2.getX()) {
			return -1;
		} else if (g1.getX() > g2.getX()) {
			return 1;
		} else {
			if (g1.getY() < g2.getY()) {
				return -1;
			} else if (g1.getY() > g2.getY()) {
				return 1;
			} else {
				if (g1.getZ() < g2.getZ()) {
					return -1;
				} else if (g1.getZ() > g2.getZ()) {
					return 1;
				} else {
					if (g1.getOrientation() < g2.getOrientation()) {
						return -1;
					} else if (g1.getOrientation() > g2.getOrientation()) {
						return 1;
					} else {
						return 0;
					}
				}
			}
		}
	}

	/**
	 * Returns a deep copy of the GeoPosition object.
	 * (Implemented as a copy constructor call; Object.clone() is not used.)
	 */
	public GeoPosition clone() {
		GeoPosition pos = new GeoPosition(getX(), getY(), getZ(), getOrientation());
		return pos;
	}
}
/* * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.ec2.model; import java.io.Serializable; import javax.annotation.Generated; /** * <p> * Describes an instance export task. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/ExportToS3TaskSpecification" target="_top">AWS * API Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class ExportToS3TaskSpecification implements Serializable, Cloneable { /** * <p> * The format for the exported image. * </p> */ private String diskImageFormat; /** * <p> * The container format used to combine disk images with metadata (such as OVF). If absent, only the disk image is * exported. * </p> */ private String containerFormat; /** * <p> * The S3 bucket for the destination image. The destination bucket must exist and grant WRITE and READ_ACP * permissions to the AWS account <code>vm-import-export@amazon.com</code>. * </p> */ private String s3Bucket; /** * <p> * The image is written to a single object in the S3 bucket at the S3 key s3prefix + exportTaskId + '.' + * diskImageFormat. * </p> */ private String s3Prefix; /** * <p> * The format for the exported image. * </p> * * @param diskImageFormat * The format for the exported image. * @see DiskImageFormat */ public void setDiskImageFormat(String diskImageFormat) { this.diskImageFormat = diskImageFormat; } /** * <p> * The format for the exported image. 
* </p> * * @return The format for the exported image. * @see DiskImageFormat */ public String getDiskImageFormat() { return this.diskImageFormat; } /** * <p> * The format for the exported image. * </p> * * @param diskImageFormat * The format for the exported image. * @return Returns a reference to this object so that method calls can be chained together. * @see DiskImageFormat */ public ExportToS3TaskSpecification withDiskImageFormat(String diskImageFormat) { setDiskImageFormat(diskImageFormat); return this; } /** * <p> * The format for the exported image. * </p> * * @param diskImageFormat * The format for the exported image. * @see DiskImageFormat */ public void setDiskImageFormat(DiskImageFormat diskImageFormat) { this.diskImageFormat = diskImageFormat.toString(); } /** * <p> * The format for the exported image. * </p> * * @param diskImageFormat * The format for the exported image. * @return Returns a reference to this object so that method calls can be chained together. * @see DiskImageFormat */ public ExportToS3TaskSpecification withDiskImageFormat(DiskImageFormat diskImageFormat) { setDiskImageFormat(diskImageFormat); return this; } /** * <p> * The container format used to combine disk images with metadata (such as OVF). If absent, only the disk image is * exported. * </p> * * @param containerFormat * The container format used to combine disk images with metadata (such as OVF). If absent, only the disk * image is exported. * @see ContainerFormat */ public void setContainerFormat(String containerFormat) { this.containerFormat = containerFormat; } /** * <p> * The container format used to combine disk images with metadata (such as OVF). If absent, only the disk image is * exported. * </p> * * @return The container format used to combine disk images with metadata (such as OVF). If absent, only the disk * image is exported. 
* @see ContainerFormat */ public String getContainerFormat() { return this.containerFormat; } /** * <p> * The container format used to combine disk images with metadata (such as OVF). If absent, only the disk image is * exported. * </p> * * @param containerFormat * The container format used to combine disk images with metadata (such as OVF). If absent, only the disk * image is exported. * @return Returns a reference to this object so that method calls can be chained together. * @see ContainerFormat */ public ExportToS3TaskSpecification withContainerFormat(String containerFormat) { setContainerFormat(containerFormat); return this; } /** * <p> * The container format used to combine disk images with metadata (such as OVF). If absent, only the disk image is * exported. * </p> * * @param containerFormat * The container format used to combine disk images with metadata (such as OVF). If absent, only the disk * image is exported. * @see ContainerFormat */ public void setContainerFormat(ContainerFormat containerFormat) { this.containerFormat = containerFormat.toString(); } /** * <p> * The container format used to combine disk images with metadata (such as OVF). If absent, only the disk image is * exported. * </p> * * @param containerFormat * The container format used to combine disk images with metadata (such as OVF). If absent, only the disk * image is exported. * @return Returns a reference to this object so that method calls can be chained together. * @see ContainerFormat */ public ExportToS3TaskSpecification withContainerFormat(ContainerFormat containerFormat) { setContainerFormat(containerFormat); return this; } /** * <p> * The S3 bucket for the destination image. The destination bucket must exist and grant WRITE and READ_ACP * permissions to the AWS account <code>vm-import-export@amazon.com</code>. * </p> * * @param s3Bucket * The S3 bucket for the destination image. 
The destination bucket must exist and grant WRITE and READ_ACP * permissions to the AWS account <code>vm-import-export@amazon.com</code>. */ public void setS3Bucket(String s3Bucket) { this.s3Bucket = s3Bucket; } /** * <p> * The S3 bucket for the destination image. The destination bucket must exist and grant WRITE and READ_ACP * permissions to the AWS account <code>vm-import-export@amazon.com</code>. * </p> * * @return The S3 bucket for the destination image. The destination bucket must exist and grant WRITE and READ_ACP * permissions to the AWS account <code>vm-import-export@amazon.com</code>. */ public String getS3Bucket() { return this.s3Bucket; } /** * <p> * The S3 bucket for the destination image. The destination bucket must exist and grant WRITE and READ_ACP * permissions to the AWS account <code>vm-import-export@amazon.com</code>. * </p> * * @param s3Bucket * The S3 bucket for the destination image. The destination bucket must exist and grant WRITE and READ_ACP * permissions to the AWS account <code>vm-import-export@amazon.com</code>. * @return Returns a reference to this object so that method calls can be chained together. */ public ExportToS3TaskSpecification withS3Bucket(String s3Bucket) { setS3Bucket(s3Bucket); return this; } /** * <p> * The image is written to a single object in the S3 bucket at the S3 key s3prefix + exportTaskId + '.' + * diskImageFormat. * </p> * * @param s3Prefix * The image is written to a single object in the S3 bucket at the S3 key s3prefix + exportTaskId + '.' + * diskImageFormat. */ public void setS3Prefix(String s3Prefix) { this.s3Prefix = s3Prefix; } /** * <p> * The image is written to a single object in the S3 bucket at the S3 key s3prefix + exportTaskId + '.' + * diskImageFormat. * </p> * * @return The image is written to a single object in the S3 bucket at the S3 key s3prefix + exportTaskId + '.' + * diskImageFormat. 
*/ public String getS3Prefix() { return this.s3Prefix; } /** * <p> * The image is written to a single object in the S3 bucket at the S3 key s3prefix + exportTaskId + '.' + * diskImageFormat. * </p> * * @param s3Prefix * The image is written to a single object in the S3 bucket at the S3 key s3prefix + exportTaskId + '.' + * diskImageFormat. * @return Returns a reference to this object so that method calls can be chained together. */ public ExportToS3TaskSpecification withS3Prefix(String s3Prefix) { setS3Prefix(s3Prefix); return this; } /** * Returns a string representation of this object; useful for testing and debugging. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getDiskImageFormat() != null) sb.append("DiskImageFormat: ").append(getDiskImageFormat()).append(","); if (getContainerFormat() != null) sb.append("ContainerFormat: ").append(getContainerFormat()).append(","); if (getS3Bucket() != null) sb.append("S3Bucket: ").append(getS3Bucket()).append(","); if (getS3Prefix() != null) sb.append("S3Prefix: ").append(getS3Prefix()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof ExportToS3TaskSpecification == false) return false; ExportToS3TaskSpecification other = (ExportToS3TaskSpecification) obj; if (other.getDiskImageFormat() == null ^ this.getDiskImageFormat() == null) return false; if (other.getDiskImageFormat() != null && other.getDiskImageFormat().equals(this.getDiskImageFormat()) == false) return false; if (other.getContainerFormat() == null ^ this.getContainerFormat() == null) return false; if (other.getContainerFormat() != null && other.getContainerFormat().equals(this.getContainerFormat()) == false) return false; if (other.getS3Bucket() == null ^ this.getS3Bucket() == null) return false; if 
(other.getS3Bucket() != null && other.getS3Bucket().equals(this.getS3Bucket()) == false) return false; if (other.getS3Prefix() == null ^ this.getS3Prefix() == null) return false; if (other.getS3Prefix() != null && other.getS3Prefix().equals(this.getS3Prefix()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getDiskImageFormat() == null) ? 0 : getDiskImageFormat().hashCode()); hashCode = prime * hashCode + ((getContainerFormat() == null) ? 0 : getContainerFormat().hashCode()); hashCode = prime * hashCode + ((getS3Bucket() == null) ? 0 : getS3Bucket().hashCode()); hashCode = prime * hashCode + ((getS3Prefix() == null) ? 0 : getS3Prefix().hashCode()); return hashCode; } @Override public ExportToS3TaskSpecification clone() { try { return (ExportToS3TaskSpecification) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
/*
 * Copyright 2014 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package io.netty.handler.codec.http.websocketx.extensions;

import io.netty.channel.ChannelPromise;
import io.netty.channel.embedded.EmbeddedChannel;
import io.netty.handler.codec.http.HttpHeaderNames;
import io.netty.handler.codec.http.HttpRequest;
import io.netty.handler.codec.http.HttpResponse;

import java.io.IOException;
import java.util.Collections;
import java.util.List;

import org.junit.jupiter.api.Test;

import static io.netty.handler.codec.http.websocketx.extensions.WebSocketExtensionTestUtil.*;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.*;

/**
 * Tests {@code WebSocketServerExtensionHandler}: negotiation of extensions offered in the
 * client's Sec-WebSocket-Extensions header against a chain of handshakers, installation of
 * the negotiated encoder/decoder into the pipeline, and removal (or retention) of the
 * handler itself after the upgrade response is written.
 */
public class WebSocketServerExtensionHandlerTest {

    // Two mocked handshakers form the negotiation chain: "main" is tried first, "fallback" second.
    WebSocketServerExtensionHandshaker mainHandshakerMock =
            mock(WebSocketServerExtensionHandshaker.class, "mainHandshaker");
    WebSocketServerExtensionHandshaker fallbackHandshakerMock =
            mock(WebSocketServerExtensionHandshaker.class, "fallbackHandshaker");
    WebSocketServerExtension mainExtensionMock =
            mock(WebSocketServerExtension.class, "mainExtension");
    WebSocketServerExtension fallbackExtensionMock =
            mock(WebSocketServerExtension.class, "fallbackExtension");

    /**
     * Both offered extensions use RSV1, so only the first ("main") can be accepted; the
     * response must advertise exactly that one and its codec must be installed, while the
     * extension handler removes itself from the pipeline.
     */
    @Test
    public void testMainSuccess() {
        // initialize
        // Each handshaker only recognizes its own extension name and returns null for the other.
        when(mainHandshakerMock.handshakeExtension(webSocketExtensionDataMatcher("main"))).
                thenReturn(mainExtensionMock);
        when(mainHandshakerMock.handshakeExtension(webSocketExtensionDataMatcher("fallback"))).
                thenReturn(null);
        when(fallbackHandshakerMock.handshakeExtension(webSocketExtensionDataMatcher("fallback"))).
                thenReturn(fallbackExtensionMock);
        when(fallbackHandshakerMock.handshakeExtension(webSocketExtensionDataMatcher("main"))).
                thenReturn(null);
        when(mainExtensionMock.rsv()).thenReturn(WebSocketExtension.RSV1);
        when(mainExtensionMock.newReponseData()).thenReturn(
                new WebSocketExtensionData("main", Collections.<String, String>emptyMap()));
        when(mainExtensionMock.newExtensionEncoder()).thenReturn(new DummyEncoder());
        when(mainExtensionMock.newExtensionDecoder()).thenReturn(new DummyDecoder());
        // Same RSV bit as "main" -> the two extensions conflict and "fallback" must be dropped.
        when(fallbackExtensionMock.rsv()).thenReturn(WebSocketExtension.RSV1);

        // execute
        WebSocketServerExtensionHandler extensionHandler =
                new WebSocketServerExtensionHandler(mainHandshakerMock, fallbackHandshakerMock);
        EmbeddedChannel ch = new EmbeddedChannel(extensionHandler);

        HttpRequest req = newUpgradeRequest("main, fallback");
        ch.writeInbound(req);

        HttpResponse res = newUpgradeResponse(null);
        ch.writeOutbound(res);

        HttpResponse res2 = ch.readOutbound();
        List<WebSocketExtensionData> resExts = WebSocketExtensionUtil.extractExtensions(
                res2.headers().get(HttpHeaderNames.SEC_WEBSOCKET_EXTENSIONS));

        // test
        // Handler removed itself after the successful upgrade response.
        assertNull(ch.pipeline().context(extensionHandler));
        assertEquals(1, resExts.size());
        assertEquals("main", resExts.get(0).name());
        assertTrue(resExts.get(0).parameters().isEmpty());
        assertNotNull(ch.pipeline().get(DummyDecoder.class));
        assertNotNull(ch.pipeline().get(DummyEncoder.class));
        verify(mainHandshakerMock, atLeastOnce()).handshakeExtension(webSocketExtensionDataMatcher("main"));
        verify(mainHandshakerMock, atLeastOnce()).handshakeExtension(webSocketExtensionDataMatcher("fallback"));
        verify(fallbackHandshakerMock, atLeastOnce()).handshakeExtension(webSocketExtensionDataMatcher("fallback"));
        verify(mainExtensionMock, atLeastOnce()).rsv();
        verify(mainExtensionMock).newReponseData();
        verify(mainExtensionMock).newExtensionEncoder();
        verify(mainExtensionMock).newExtensionDecoder();
        verify(fallbackExtensionMock, atLeastOnce()).rsv();
    }

    /**
     * The two extensions use different RSV bits (RSV1 vs RSV2), so both can be negotiated
     * together: the response advertises both and both codec pairs are installed.
     */
    @Test
    public void testCompatibleExtensionTogetherSuccess() {
        // initialize
        when(mainHandshakerMock.handshakeExtension(webSocketExtensionDataMatcher("main"))).
                thenReturn(mainExtensionMock);
        when(mainHandshakerMock.handshakeExtension(webSocketExtensionDataMatcher("fallback"))).
                thenReturn(null);
        when(fallbackHandshakerMock.handshakeExtension(webSocketExtensionDataMatcher("fallback"))).
                thenReturn(fallbackExtensionMock);
        when(fallbackHandshakerMock.handshakeExtension(webSocketExtensionDataMatcher("main"))).
                thenReturn(null);
        when(mainExtensionMock.rsv()).thenReturn(WebSocketExtension.RSV1);
        when(mainExtensionMock.newReponseData()).thenReturn(
                new WebSocketExtensionData("main", Collections.<String, String>emptyMap()));
        when(mainExtensionMock.newExtensionEncoder()).thenReturn(new DummyEncoder());
        when(mainExtensionMock.newExtensionDecoder()).thenReturn(new DummyDecoder());
        // Distinct RSV bit -> no conflict with "main".
        when(fallbackExtensionMock.rsv()).thenReturn(WebSocketExtension.RSV2);
        when(fallbackExtensionMock.newReponseData()).thenReturn(
                new WebSocketExtensionData("fallback", Collections.<String, String>emptyMap()));
        when(fallbackExtensionMock.newExtensionEncoder()).thenReturn(new Dummy2Encoder());
        when(fallbackExtensionMock.newExtensionDecoder()).thenReturn(new Dummy2Decoder());

        // execute
        WebSocketServerExtensionHandler extensionHandler =
                new WebSocketServerExtensionHandler(mainHandshakerMock, fallbackHandshakerMock);
        EmbeddedChannel ch = new EmbeddedChannel(extensionHandler);

        HttpRequest req = newUpgradeRequest("main, fallback");
        ch.writeInbound(req);

        HttpResponse res = newUpgradeResponse(null);
        ch.writeOutbound(res);

        HttpResponse res2 = ch.readOutbound();
        List<WebSocketExtensionData> resExts = WebSocketExtensionUtil.extractExtensions(
                res2.headers().get(HttpHeaderNames.SEC_WEBSOCKET_EXTENSIONS));

        // test
        assertNull(ch.pipeline().context(extensionHandler));
        assertEquals(2, resExts.size());
        assertEquals("main", resExts.get(0).name());
        assertEquals("fallback", resExts.get(1).name());
        assertNotNull(ch.pipeline().get(DummyDecoder.class));
        assertNotNull(ch.pipeline().get(DummyEncoder.class));
        assertNotNull(ch.pipeline().get(Dummy2Decoder.class));
        assertNotNull(ch.pipeline().get(Dummy2Encoder.class));
        verify(mainHandshakerMock).handshakeExtension(webSocketExtensionDataMatcher("main"));
        verify(mainHandshakerMock).handshakeExtension(webSocketExtensionDataMatcher("fallback"));
        verify(fallbackHandshakerMock).handshakeExtension(webSocketExtensionDataMatcher("fallback"));
        // rsv() is queried once per negotiation pass for each accepted extension.
        verify(mainExtensionMock, times(2)).rsv();
        verify(mainExtensionMock).newReponseData();
        verify(mainExtensionMock).newExtensionEncoder();
        verify(mainExtensionMock).newExtensionDecoder();
        verify(fallbackExtensionMock, times(2)).rsv();
        verify(fallbackExtensionMock).newReponseData();
        verify(fallbackExtensionMock).newExtensionEncoder();
        verify(fallbackExtensionMock).newExtensionDecoder();
    }

    /**
     * No handshaker recognizes any offered extension: the response must carry no
     * Sec-WebSocket-Extensions header, yet the handler still removes itself.
     */
    @Test
    public void testNoneExtensionMatchingSuccess() {
        // initialize
        when(mainHandshakerMock.handshakeExtension(webSocketExtensionDataMatcher("unknown"))).
                thenReturn(null);
        when(mainHandshakerMock.handshakeExtension(webSocketExtensionDataMatcher("unknown2"))).
                thenReturn(null);
        when(fallbackHandshakerMock.handshakeExtension(webSocketExtensionDataMatcher("unknown"))).
                thenReturn(null);
        when(fallbackHandshakerMock.handshakeExtension(webSocketExtensionDataMatcher("unknown2"))).
                thenReturn(null);

        // execute
        WebSocketServerExtensionHandler extensionHandler =
                new WebSocketServerExtensionHandler(mainHandshakerMock, fallbackHandshakerMock);
        EmbeddedChannel ch = new EmbeddedChannel(extensionHandler);

        HttpRequest req = newUpgradeRequest("unknown, unknown2");
        ch.writeInbound(req);

        HttpResponse res = newUpgradeResponse(null);
        ch.writeOutbound(res);

        HttpResponse res2 = ch.readOutbound();

        // test
        assertNull(ch.pipeline().context(extensionHandler));
        assertFalse(res2.headers().contains(HttpHeaderNames.SEC_WEBSOCKET_EXTENSIONS));
        verify(mainHandshakerMock).handshakeExtension(webSocketExtensionDataMatcher("unknown"));
        verify(mainHandshakerMock).handshakeExtension(webSocketExtensionDataMatcher("unknown2"));
        verify(fallbackHandshakerMock).handshakeExtension(webSocketExtensionDataMatcher("unknown"));
        verify(fallbackHandshakerMock).handshakeExtension(webSocketExtensionDataMatcher("unknown2"));
    }

    /**
     * If writing the upgrade response fails, the handler must stay in the pipeline so a
     * later (retried) upgrade can still be processed.
     */
    @Test
    public void testExtensionHandlerNotRemovedByFailureWritePromise() {
        // initialize
        when(mainHandshakerMock.handshakeExtension(webSocketExtensionDataMatcher("main")))
                .thenReturn(mainExtensionMock);
        when(mainExtensionMock.newReponseData()).thenReturn(
                new WebSocketExtensionData("main", Collections.<String, String>emptyMap()));

        // execute
        WebSocketServerExtensionHandler extensionHandler =
                new WebSocketServerExtensionHandler(mainHandshakerMock);
        EmbeddedChannel ch = new EmbeddedChannel(extensionHandler);

        HttpRequest req = newUpgradeRequest("main");
        ch.writeInbound(req);

        HttpResponse res = newUpgradeResponse(null);
        ChannelPromise failurePromise = ch.newPromise();
        ch.writeOneOutbound(res, failurePromise);
        failurePromise.setFailure(new IOException("Cannot write response"));

        // test
        // Nothing was flushed, and the handler is still installed.
        assertNull(ch.readOutbound());
        assertNotNull(ch.pipeline().context(extensionHandler));
        assertTrue(ch.finish());
    }
}
package com.bkahlert.nebula.utils; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.eclipse.core.runtime.Assert; import org.eclipse.jface.util.Policy; import org.eclipse.jface.viewers.AbstractTreeViewer; import org.eclipse.jface.viewers.CellLabelProvider; import org.eclipse.jface.viewers.ColumnViewer; import org.eclipse.jface.viewers.IContentProvider; import org.eclipse.jface.viewers.IStructuredContentProvider; import org.eclipse.jface.viewers.ITreeContentProvider; import org.eclipse.jface.viewers.ITreeSelection; import org.eclipse.jface.viewers.StructuredViewer; import org.eclipse.jface.viewers.TableViewer; import org.eclipse.jface.viewers.TableViewerColumn; import org.eclipse.jface.viewers.TreePath; import org.eclipse.jface.viewers.TreeViewer; import org.eclipse.jface.viewers.TreeViewerColumn; import org.eclipse.jface.viewers.Viewer; import org.eclipse.jface.viewers.ViewerColumn; import org.eclipse.swt.SWT; import org.eclipse.swt.graphics.Rectangle; import org.eclipse.swt.widgets.Control; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Item; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.Table; import org.eclipse.swt.widgets.Tree; import com.bkahlert.nebula.utils.DistributionUtils.AbsoluteWidth; import com.bkahlert.nebula.utils.DistributionUtils.Width; /** * Utility class for manipulation of {@link Viewer Viewers}. * <p> * Tries to call alternative methods if a special operation is not supported by * the viewer. All calls are automatically done in the synchronous SWT thread. * <p> * <b>Example 1:</b><br/> * If you try to update a specific element in a viewer and the viewer does not * support updating single elements, the whole viewer will be refreshed. * <p> * <b>Example 2:</b><br/> * One element of your model has been removed. 
For performance reasons you don't * want to refresh the whole viewer but manually remove the element from the * viewer in order to reflect the model. If your viewer supports this action the * element is removed. Otherwise the viewer is advised to reload the model. * * @author bkahlert */ public class ViewerUtils { public static class FullWidthResizer { private final ColumnViewer columnViewer; private final Map<Integer, Width> numbers = new HashMap<Integer, Width>(); private final Listener resizeListener = event -> FullWidthResizer.this .resize(); public FullWidthResizer(ColumnViewer columnViewer) { this.columnViewer = columnViewer; this.columnViewer.getControl().addListener(SWT.Resize, this.resizeListener); } public void setWidth(int column, Width width) { this.numbers.put(column, width); this.resize(); } public void resize() { if (this.columnViewer != null && this.columnViewer.getControl() != null && !this.columnViewer.getControl().isDisposed()) { Control control = this.columnViewer.getControl(); if (control instanceof Tree) { Tree tree = (Tree) control; Width[] input = new Width[tree.getColumnCount()]; for (int i = 0; i < input.length; i++) { input[i] = this.numbers.containsKey(i) ? this.numbers .get(i) : new AbsoluteWidth( Width.DEFAULT_MIN_WIDTH); } double[] distribution = DistributionUtils.distribute(input, tree.getClientArea().width); for (int i = 0; i < distribution.length; i++) { tree.getColumn(i).setWidth( (int) Math.round(distribution[i])); } } else if (control instanceof Table) { Table table = (Table) control; Width[] input = new Width[table.getColumnCount()]; for (int i = 0; i < input.length; i++) { input[i] = this.numbers.containsKey(i) ? 
this.numbers .get(i) : new AbsoluteWidth( Width.DEFAULT_MIN_WIDTH); } double[] distribution = DistributionUtils.distribute(input, table.getClientArea().width); for (int i = 0; i < distribution.length; i++) { table.getColumn(i).setWidth( (int) Math.round(distribution[i])); } } } } public void dispose() { if (this.columnViewer != null && this.columnViewer.getControl() != null && !this.columnViewer.getControl().isDisposed()) { this.columnViewer.getControl().removeListener(SWT.Resize, this.resizeListener); } } } private ViewerUtils() { // no instantiation allowed } /** * Sets a viewer's input and makes sure it runs in the SWT thread * * @param viewer * @param input * * @see Viewer#setInput(Object) */ public static void setInput(final Viewer viewer, final Object input) { Display.getDefault().syncExec(() -> { if (viewer == null || viewer.getControl().isDisposed()) { return; } viewer.setInput(input); }); } /** * Gets a viewer's input * * @param viewer * @return * * @see StructuredViewer#setInput(Object) */ public static Object getInput(final Viewer viewer) { if (viewer instanceof StructuredViewer) { return ((StructuredViewer) viewer).getInput(); } return null; } /** * Add the a new element to a given element in a viewer and makes sure it * runs in the SWT thread. Runs a refresh in case the viewer does not * support additions. * * @param viewer * @param parentElementOrTreePath * @param childElement * * @see StructuredViewer#refresh(boolean) */ public static void add(final Viewer viewer, final Object parentElementOrTreePath, final Object childElement) { Display.getDefault().syncExec(() -> { if (viewer == null || viewer.getControl().isDisposed()) { return; } if (viewer instanceof AbstractTreeViewer) { AbstractTreeViewer treeViewer = (AbstractTreeViewer) viewer; treeViewer.add(parentElementOrTreePath, childElement); } else { viewer.refresh(); } }); } /** * Add the new elements to a given element in a viewer and makes sure it * runs in the SWT thread. 
Runs a refresh in case the viewer does not * support additions. * * @param viewer * @param parentElementOrTreePath * @param childElements * * @see StructuredViewer#refresh(boolean) */ public static void add(final Viewer viewer, final Object parentElementOrTreePath, final Object[] childElements) { Display.getDefault().syncExec(() -> { if (viewer == null || viewer.getControl().isDisposed()) { return; } if (viewer instanceof AbstractTreeViewer) { AbstractTreeViewer treeViewer = (AbstractTreeViewer) viewer; treeViewer.add(parentElementOrTreePath, childElements); } else { viewer.refresh(); } }); } /** * Removes an existing element from a viewer and makes sure it runs in the * SWT thread. Runs a refresh in case the viewer does not support removals. * * @param viewer * @param elementsOrTreePaths * * @see StructuredViewer#refresh(boolean) */ public static void remove(final Viewer viewer, final Object elementsOrTreePaths) { Display.getDefault().syncExec(() -> { if (viewer == null || viewer.getControl().isDisposed()) { return; } if (viewer instanceof AbstractTreeViewer) { AbstractTreeViewer treeViewer = (AbstractTreeViewer) viewer; treeViewer.remove(elementsOrTreePaths); } else { viewer.refresh(); } }); } /** * Removes existing elements from a viewer and makes sure it runs in the SWT * thread. Runs a refresh in case the viewer does not support removals. * * @param viewer * @param elementsOrTreePaths * * @see StructuredViewer#refresh(boolean) */ public static void remove(final Viewer viewer, final Object[] elementsOrTreePaths) { Display.getDefault().syncExec(() -> { if (viewer == null || viewer.getControl().isDisposed()) { return; } if (viewer instanceof AbstractTreeViewer) { AbstractTreeViewer treeViewer = (AbstractTreeViewer) viewer; treeViewer.remove(elementsOrTreePaths); } else { viewer.refresh(); } }); } /** * Updates a viewer's element and makes sure it runs in the SWT thread. Runs * a refresh in case the viewer does not support updates. 
* * @param viewer * @param element * @param properties * * @see StructuredViewer#update(Object, String[]) */ public static void update(final Viewer viewer, final Object element, final String[] properties) { Display.getDefault().syncExec(() -> { if (viewer == null || viewer.getControl().isDisposed()) { return; } if (viewer instanceof StructuredViewer) { StructuredViewer structuredViewer = (StructuredViewer) viewer; structuredViewer.update(element, properties); } else { viewer.refresh(); } }); } /** * Updates a viewer's elements and makes sure it runs in the SWT thread. * Runs a refresh in case the viewer does not support updates. * * @param viewer * @param elements * @param properties * * @see StructuredViewer#update(Object[], String[]) */ public static void update(final Viewer viewer, final Object[] elements, final String[] properties) { Display.getDefault().syncExec(() -> { if (viewer == null || viewer.getControl().isDisposed()) { return; } if (viewer instanceof StructuredViewer) { StructuredViewer structuredViewer = (StructuredViewer) viewer; structuredViewer.update(elements, properties); } else { viewer.refresh(); } }); } /** * Refreshes a viewer's display and makes sure it runs in the SWT thread. * * @param viewer * @param updateLabels * * @see Viewer#refresh() * @see StructuredViewer#refresh(boolean) */ public static void refresh(final Viewer viewer, final boolean updateLabels) { Display.getDefault().syncExec(() -> { if (viewer == null || viewer.getControl().isDisposed()) { return; } if (viewer instanceof StructuredViewer) { StructuredViewer structuredViewer = (StructuredViewer) viewer; structuredViewer.refresh(updateLabels); } else { viewer.refresh(); } }); } /** * Refreshes a viewer's display and makes sure it runs in the SWT thread. 
* * @param viewer * @param element * @param updateLabels * * @see Viewer#refresh() * @see StructuredViewer#refresh(Object, boolean) */ public static void refresh(final Viewer viewer, final Object element, final boolean updateLabels) { Display.getDefault().syncExec(() -> { if (viewer == null || viewer.getControl().isDisposed()) { return; } if (viewer instanceof StructuredViewer) { StructuredViewer structuredViewer = (StructuredViewer) viewer; structuredViewer.refresh(element, updateLabels); } else { viewer.refresh(); } }); } /** * @see AbstractTreeViewer#expandToLevel(int) */ public static void expandToLevel(final Viewer viewer, final int level) { Display.getDefault().syncExec(() -> { if (viewer == null || viewer.getControl().isDisposed()) { return; } if (viewer instanceof AbstractTreeViewer) { AbstractTreeViewer treeViewer = (AbstractTreeViewer) viewer; treeViewer.expandToLevel(level); } }); } /** * @see AbstractTreeViewer#expandToLevel(int) */ public static void expandToLevel(final Viewer viewer, final Object elementOrTreePath, final int level) { Display.getDefault().syncExec(() -> { if (viewer == null || viewer.getControl().isDisposed()) { return; } if (viewer instanceof AbstractTreeViewer) { AbstractTreeViewer treeViewer = (AbstractTreeViewer) viewer; treeViewer.expandToLevel(elementOrTreePath, level); } }); } /** * If supported by the viewer expands all elements and makes sure it runs in * the SWT thread. 
* * @param viewer * * @see AbstractTreeViewer#expandAll() */ public static void expandAll(final Viewer viewer) { Display.getDefault().syncExec(() -> { if (viewer == null || viewer.getControl().isDisposed()) { return; } if (viewer instanceof AbstractTreeViewer) { AbstractTreeViewer treeViewer = (AbstractTreeViewer) viewer; treeViewer.expandAll(); } }); } /** * @see AbstractTreeViewer#expandToLevel(int) */ public static void expandAll(final Viewer viewer, final Object elementOrTreePath) { Display.getDefault().syncExec(() -> { if (viewer == null || viewer.getControl().isDisposed()) { return; } if (viewer instanceof AbstractTreeViewer) { AbstractTreeViewer treeViewer = (AbstractTreeViewer) viewer; treeViewer.expandToLevel(elementOrTreePath, Integer.MAX_VALUE); } }); } /** * Returns all elements contained in the given viewer. This calculation is * independent of what is currently displayed. * * @param viewer * @return * @throws Exception */ public static List<Object> getAllItems(final Viewer viewer) throws Exception { final List<Object> objects = new ArrayList<Object>(); ExecUtils.syncExec(() -> { for (Object topLevelElement : getTopLevelItems(viewer)) { objects.add(topLevelElement); objects.addAll(getDescendants(viewer, topLevelElement)); } return null; }); return objects; } /** * Returns all top-level elements contained in the given viewer. This * calculation is independent of what is currently displayed. 
* * @param viewer * @return * @throws Exception */ public static List<Object> getTopLevelItems(final Viewer viewer) throws Exception { final List<Object> topLevelElements = new ArrayList<Object>(); if (viewer instanceof StructuredViewer) { ExecUtils .syncExec(() -> { IContentProvider cp = ((StructuredViewer) viewer) .getContentProvider(); if (cp instanceof IStructuredContentProvider) { IStructuredContentProvider scp = (IStructuredContentProvider) cp; for (Object object : scp.getElements(viewer .getInput())) { topLevelElements.add(object); } } return null; }); } return topLevelElements; } /** * Returns all descendants of the given element. * * @param viewer * @param parent * @return * @throws Exception */ public static List<Object> getDescendants(final Viewer viewer, final Object parent) throws Exception { final List<Object> descendants = new ArrayList<Object>(); if (viewer instanceof StructuredViewer) { ExecUtils.syncExec(() -> { IContentProvider cp = ((StructuredViewer) viewer) .getContentProvider(); if (cp instanceof ITreeContentProvider) { ITreeContentProvider tcp = (ITreeContentProvider) cp; for (Object child : tcp.getChildren(parent)) { descendants.add(child); descendants.addAll(getDescendants(viewer, child)); } } return null; }); } return descendants; } /** * Merges an array of {@link TreePath}s to one {@link TreePath}. * * Example: {@link TreePath}s * * <pre> * A<br/> * | -B * </pre> * * and * * <pre> * C<br/> * | -D * </pre> * * become * * <pre> * A<br/> * | -B<br/> * | -C<br/> * | -D * </pre> * * @param treePaths * @return */ public static TreePath merge(TreePath... 
treePaths) { ArrayList<Object> segments = new ArrayList<Object>(); for (TreePath treePath : treePaths) { for (int i = 0; i < treePath.getSegmentCount(); i++) { segments.add(treePath.getSegment(i)); } } return new TreePath(segments.toArray()); } public static Rectangle getBounds(ViewerColumn column) { int index = ViewerUtils.getIndex(column); Control control = column.getViewer().getControl(); int x = 0; int w; if (control instanceof Table) { for (int i = 0; i < index; i++) { x += TreeTableUtils.getColumn((Table) control, i).getWidth(); } w = TreeTableUtils.getColumn((Table) control, index).getWidth(); } else { for (int i = 0; i < index; i++) { x += TreeTableUtils.getColumn((Tree) control, i).getWidth(); } w = TreeTableUtils.getColumn((Tree) control, index).getWidth(); ; } return new Rectangle(x, 0, w, control.getBounds().height); } public static int getIndex(ViewerColumn viewerColumn) { Control control = viewerColumn.getViewer().getControl(); Item column = viewerColumn instanceof TableViewerColumn ? ((TableViewerColumn) viewerColumn) .getColumn() : ((TreeViewerColumn) viewerColumn).getColumn(); Item[] columns = control instanceof Table ? ((Table) control) .getColumns() : ((Tree) control).getColumns(); for (int i = 0, m = columns.length; i < m; i++) { if (columns[i] == column) { int[] order = control instanceof Table ? 
// NOTE(review): this is the tail of a method whose beginning lies above this
// excerpt. The dangling ternary picks the column-order array of either a Table
// or a Tree control; the loop then maps a creation index i to its current
// visual position, returning -1 when it is not found.
((Table) control).getColumnOrder() : ((Tree) control).getColumnOrder();
            for (int j = 0, n = order.length; j < n; j++) {
                if (order[j] == i) {
                    return j;
                }
            }
        }
    }
    return -1;
}

/**
 * Returns all {@link TableViewerColumn}s of the given {@link TableViewer},
 * in creation order. Columns for which no viewer column can be resolved are
 * silently skipped.
 *
 * @param tableViewer the viewer to inspect
 * @return the (possibly empty) list of viewer columns
 */
public static List<TableViewerColumn> getColumns(TableViewer tableViewer) {
    List<TableViewerColumn> tableViewerColumns = new ArrayList<TableViewerColumn>();
    for (int i = 0, m = tableViewer.getTable().getColumnCount(); i < m; i++) {
        TableViewerColumn tableViewerColumn = getColumn(tableViewer, i);
        if (tableViewerColumn != null) {
            tableViewerColumns.add(tableViewerColumn);
        }
    }
    return tableViewerColumns;
}

/**
 * Returns all {@link TreeViewerColumn}s of the given {@link TreeViewer},
 * in creation order. Columns for which no viewer column can be resolved are
 * silently skipped.
 *
 * @param treeViewer the viewer to inspect
 * @return the (possibly empty) list of viewer columns
 */
public static List<TreeViewerColumn> getColumns(TreeViewer treeViewer) {
    List<TreeViewerColumn> treeViewerColumns = new ArrayList<TreeViewerColumn>();
    for (int i = 0, m = treeViewer.getTree().getColumnCount(); i < m; i++) {
        TreeViewerColumn treeViewerColumn = getColumn(treeViewer, i);
        if (treeViewerColumn != null) {
            treeViewerColumns.add(treeViewerColumn);
        }
    }
    return treeViewerColumns;
}

/**
 * Resolves the {@link TableViewerColumn} for the column at the given creation
 * index. Relies on JFace storing the viewer column in the widget's data slot
 * under the key {@code Policy.JFACE + ".columnViewer"} — an internal JFace
 * convention, so this may break on JFace upgrades.
 *
 * @param tableViewer the viewer owning the column
 * @param index the creation index of the column
 * @return the viewer column, or {@code null} if none is attached
 */
public static TableViewerColumn getColumn(TableViewer tableViewer, int index) {
    Object data = TreeTableUtils.getColumn(tableViewer.getTable(), index)
            .getData(Policy.JFACE + ".columnViewer");
    if (data instanceof TableViewerColumn) {
        return (TableViewerColumn) data;
    }
    return null;
}

/**
 * Tree variant of {@link #getColumn(TableViewer, int)}; uses the same
 * internal JFace data-slot convention.
 *
 * @param treeViewer the viewer owning the column
 * @param index the creation index of the column
 * @return the viewer column, or {@code null} if none is attached
 */
public static TreeViewerColumn getColumn(TreeViewer treeViewer, int index) {
    Object data = TreeTableUtils.getColumn(treeViewer.getTree(), index)
            .getData(Policy.JFACE + ".columnViewer");
    if (data instanceof TreeViewerColumn) {
        return (TreeViewerColumn) data;
    }
    return null;
}

/**
 * Reads the label provider of a {@link ViewerColumn} via reflection, because
 * {@code ViewerColumn.getLabelProvider()} is not part of JFace's public API.
 * Any reflection failure is printed to stderr and {@code null} is returned.
 *
 * @param viewerColumn the column to query; must not be {@code null}
 * @return the label provider, or {@code null} when reflection fails
 */
public static CellLabelProvider getLabelProvider(ViewerColumn viewerColumn) {
    Assert.isLegal(viewerColumn != null);
    CellLabelProvider cellLabelProvider = null;
    try {
        Method m = ViewerColumn.class.getDeclaredMethod("getLabelProvider");
        m.setAccessible(true);
        cellLabelProvider = (CellLabelProvider) m.invoke(viewerColumn);
        m.setAccessible(false);
    } catch (NoSuchMethodException | SecurityException | IllegalAccessException
            | IllegalArgumentException | InvocationTargetException e) {
        e.printStackTrace();
    }
    return cellLabelProvider;
}

/**
 * Refreshes the given viewer on the SWT UI thread (synchronously).
 * A {@code null} viewer is a no-op.
 */
public static void refresh(final Viewer viewer) {
    if (viewer != null) {
        Display.getDefault().syncExec(() -> viewer.refresh());
    }
}

/**
 * Returns a copy of the given {@link TreePath}. The segments array is new
 * but the segment objects themselves are shared with the original.
 */
public static TreePath clone(TreePath treePath) {
    Object[] segments = new Object[treePath.getSegmentCount()];
    for (int i = 0; i < segments.length; i++) {
        segments[i] = treePath.getSegment(i);
    }
    return new TreePath(segments);
}

/** Returns the segments of the given {@link TreePath} as a new array. */
public static Object[] getSegments(TreePath treePath) {
    Object[] segments = new Object[treePath.getSegmentCount()];
    for (int i = 0; i < segments.length; i++) {
        segments[i] = treePath.getSegment(i);
    }
    return segments;
}

/**
 * Concatenates two tree-path arrays into a new array. If the first array is
 * {@code null} the second (varargs) array is returned as-is — note that the
 * caller then shares the varargs array.
 *
 * @param treePaths1 the first array, may be {@code null}
 * @param treePaths2 the paths to append
 * @return the concatenation
 */
public static TreePath[] addTreePath(TreePath[] treePaths1, TreePath... treePaths2) {
    if (treePaths1 != null) {
        TreePath[] copy = new TreePath[treePaths1.length + treePaths2.length];
        System.arraycopy(treePaths1, 0, copy, 0, treePaths1.length);
        System.arraycopy(treePaths2, 0, copy, treePaths1.length, treePaths2.length);
        return copy;
    } else {
        return treePaths2;
    }
}

/**
 * Creates a {@link TreePath} array that - passed to
 * {@link TreeViewer#setExpandedTreePaths(TreePath[])} - will not only
 * expand the deepest child of the given {@link TreePath} but also all of
 * its parents.
 * <p>
 * The result holds one path per segment: index 0 is the root-most parent and
 * the last entry is the given path itself.
 *
 * @param treePath the path to complete; must not be {@code null}
 * @return the path plus all of its ancestor paths, parents first
 */
public static TreePath[] createCompletedTreePath(TreePath treePath) {
    Assert.isNotNull(treePath);
    TreePath[] completedTreePaths = new TreePath[treePath.getSegmentCount()];
    for (int i = 0; i < completedTreePaths.length; i++) {
        // Fill from the end of the array: the full path first, then
        // successively shorter parent paths derived from the previous entry.
        completedTreePaths[completedTreePaths.length - i - 1] = i == 0 ? treePath
                : completedTreePaths[completedTreePaths.length - i]
                        .getParentPath();
    }
    return completedTreePaths;
}

/**
 * Creates a {@link TreePath} array that - passed to
 * {@link TreeViewer#setExpandedTreePaths(TreePath[])} - will not only
 * expand the deepest children of the given {@link TreePath}[] but also all
 * of its parents. Duplicate paths appear only once in the result.
 *
 * @param treePaths the paths to complete; must not be {@code null}
 * @return the de-duplicated union of all completed paths
 */
public static TreePath[] createCompletedTreePaths(TreePath... treePaths) {
    Assert.isNotNull(treePaths);
    List<TreePath> completedTreePaths = new ArrayList<TreePath>();
    for (TreePath treePath : treePaths) {
        for (TreePath completedTreePath : createCompletedTreePath(treePath)) {
            // Linear contains() scan — fine for the small path counts
            // typical of viewer selections.
            if (!completedTreePaths.contains(completedTreePath)) {
                completedTreePaths.add(completedTreePath);
            }
        }
    }
    return completedTreePaths.toArray(new TreePath[0]);
}

/**
 * Creates a {@link TreePath} array that - passed to
 * {@link TreeViewer#setExpandedTreePaths(TreePath[])} - will expand all
 * elements so the selection is visible. Note that only the parents of the
 * selected paths are completed, not the selected elements themselves.
 *
 * @param treeSelection the selection to make visible; must not be {@code null}
 * @return the completed parent paths of every selected path
 */
public static TreePath[] createCompletedTreePaths(
        ITreeSelection treeSelection) {
    Assert.isNotNull(treeSelection);
    List<TreePath> parentTreePaths = new ArrayList<TreePath>();
    for (TreePath treePath : treeSelection.getPaths()) {
        parentTreePaths.add(treePath.getParentPath());
    }
    return createCompletedTreePaths(parentTreePaths
            .toArray(new TreePath[0]));
}
}
/* * $Id$ * Copyright 2006 by Softstart Services Inc. */ package stellar.dialog; import stellar.data.Astrogation; import stellar.data.GroupRecord; import stellar.data.HexID; import stellar.map.MapLabel; import stellar.map.SquareIcons; import stellar.data.GroupType; import stellar.map.MapScale; import stellar.swing.HTMLEditPanel; import java.io.IOException; import javax.swing.JDialog; import javax.swing.JTree; import java.awt.BorderLayout; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.SpinnerNumberModel; import javax.swing.event.TreeSelectionEvent; import javax.swing.event.TreeSelectionListener; import javax.swing.tree.MutableTreeNode; import javax.swing.tree.TreeSelectionModel; import javax.swing.JButton; import java.awt.event.ActionListener; import java.awt.event.ActionEvent; import javax.swing.JLabel; import javax.swing.JTextField; import java.awt.GridBagLayout; import java.awt.GridBagConstraints; import java.awt.Insets; import javax.swing.JComboBox; import javax.swing.JTabbedPane; import org.jibx.runtime.JiBXException; import javax.swing.JSpinner; import java.awt.event.FocusEvent; import stellar.MapPreferences; /** * The EditGroups dialog allows users to create, view and edit the groups of * stars in the map. Traveller defines a set of groups: domain, sector, quadrant, * and subsectors, each nested within each other. Cartrogrpher adds a generic * "group" group to allow users to define their own groups as needed. * <BR> * TODO: * <UL> * <LI>The New/Update button combination does not work. It's confusing and * implemented wrong here. We need to have the valueChanges()/bOKActionPerformed() * do the Update and make the update button go away. * <LI>The New button really needs to actually finish the creation of the new group * and put it entirely in the list. * <LI>Need a delete button to remove groups. 
 * </UL>
 */
public class EditGroups extends JDialog implements TreeSelectionListener {
    // The astrogation data set whose group records are edited by this dialog.
    private Astrogation data;
    // Root of the group tree, computed from data by createGroupTree().
    private MutableTreeNode root;
    // Set when an update is applied; never read within this class —
    // NOTE(review): presumably intended for callers to poll; confirm usage.
    private boolean dataChanged = false;
    private BorderLayout borderLayout1 = new BorderLayout();
    private JScrollPane treeView = new JScrollPane();
    // Tree of groups; remains null until data with at least one group exists.
    private JTree groupTree;
    private ButtonPanel buttonPanel = new ButtonPanel();
    private JButton bNewGroup = new JButton();
    // The Update button is built but not added to the UI (see jbInit()).
    private JButton bUpdate = new JButton();
    private HTMLEditPanel commentPanel = new HTMLEditPanel();
    private JTabbedPane jTabbedPane1 = new JTabbedPane();
    private JPanel infoPanel = new JPanel();
    private GridBagLayout gridBagLayout1 = new GridBagLayout();
    private JLabel jLabel1 = new JLabel();
    private JTextField groupName = new JTextField();
    private JLabel jLabel2 = new JLabel();
    private JComboBox groupType = new JComboBox();
    // NOTE(review): jLabel3 is added to the layout but never given any text.
    private JLabel jLabel3 = new JLabel();
    private JTextField groupLocation = new JTextField();
    private JLabel jLabel4 = new JLabel();
    private JScrollPane mapPane = new JScrollPane();
    private MapLabel map;
    private SquareIcons squareMap = new SquareIcons (false);
    private JLabel jLabel5 = new JLabel();
    // Extent spinners: range 1..99, step 1; only enabled for GROUP-type groups.
    private JSpinner extentX = new JSpinner(new SpinnerNumberModel (1, 1, 99, 1));
    private JLabel jLabel6 = new JLabel();
    private JSpinner extentY = new JSpinner(new SpinnerNumberModel (1, 1, 99, 1));

    /**
     * Builds the dialog around the given astrogation data. Any exception
     * thrown during UI construction is printed and swallowed, leaving the
     * dialog partially initialized.
     */
    public EditGroups(Astrogation data) {
        try {
            this.data = data;
            jbInit();
        } catch(Exception e) {
            e.printStackTrace();
        }
    }

    // Adapter forwarding the shared ButtonPanel's OK action to bOK_actionPerformed.
    private class OKActionListener implements ActionListener {
        public void actionPerformed(ActionEvent e) {
            bOK_actionPerformed(e);
        }
    }

    // Adapter forwarding the shared ButtonPanel's Cancel action to bCancel_actionPerformed.
    private class CancelActionListener implements ActionListener {
        public void actionPerformed(ActionEvent e) {
            bCancel_actionPerformed(e);
        }
    }

    /**
     * Builds and lays out all widgets, wires listeners, constructs the group
     * tree (when data is available) and packs the dialog. Invoked once from
     * the constructor.
     */
    private void jbInit() throws Exception {
        this.getContentPane().setLayout(borderLayout1);
        infoPanel.setLayout(gridBagLayout1);
        buttonPanel.addOKActionListener(new OKActionListener());
        buttonPanel.addCancelActionListener(new CancelActionListener());
        bNewGroup.setText("New");
        bNewGroup.setMnemonic('N');
        bNewGroup.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                bNewGroup_actionPerformed(e);
            }
        });
        bUpdate.setText("Update");
        bUpdate.setMnemonic('U');
        bUpdate.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                bUpdate_actionPerformed(e);
            }
        });
        buttonPanel.add (bNewGroup);
        // The Update button is deliberately disabled pending the rework
        // described in the class TODO list above.
        //buttonPanel.add (bUpdate);
        jLabel1.setText("Name");
        jLabel2.setText("Type");
        jLabel4.setText("Location");
        jLabel5.setText("Extent X");
        jLabel6.setText("Extent Y");
        groupLocation.setText("0000");
        groupLocation.setHorizontalAlignment(JTextField.TRAILING);
        groupLocation.setColumns(6);
        groupLocation.setMinimumSize(groupLocation.getPreferredSize());
        // Populate the type combo with the five group kinds, largest first.
        groupType.addItem(GroupType.DOMAIN);
        groupType.addItem(GroupType.SECTOR);
        groupType.addItem(GroupType.QUADRANT);
        groupType.addItem(GroupType.SUBSECTOR);
        groupType.addItem(GroupType.GROUP);
        groupType.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                groupType_actionPerformed(e);
            }
        });
        // Grid layout: row 0 = type, row 1 = name, row 2 = location,
        // rows 3/4 = extents.
        infoPanel.add(jLabel1, new GridBagConstraints(0, 1, 1, 1, 0.0, 0.0,
            GridBagConstraints.EAST, GridBagConstraints.NONE, new Insets(5, 5, 0, 5), 0, 0));
        infoPanel.add(groupName, new GridBagConstraints(1, 1, 1, 1, 1.0, 0.0,
            GridBagConstraints.WEST, GridBagConstraints.HORIZONTAL, new Insets(5, 0, 5, 5), 0, 0));
        infoPanel.add(jLabel2, new GridBagConstraints(0, 0, 1, 1, 0.0, 0.0,
            GridBagConstraints.EAST, GridBagConstraints.NONE, new Insets(0, 5, 0, 5), 0, 0));
        infoPanel.add(groupType, new GridBagConstraints(1, 0, 1, 1, 0.0, 0.0,
            GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0));
        // NOTE(review): jLabel3 (no text) and jLabel4 ("Location") are both
        // placed at grid cell (0, 2) — likely one of them is a mistake.
        infoPanel.add(jLabel3, new GridBagConstraints(0, 2, 1, 1, 0.0, 0.0,
            GridBagConstraints.EAST, GridBagConstraints.NONE, new Insets(0, 5, 0, 5), 0, 0));
        infoPanel.add(groupLocation, new GridBagConstraints(1, 2, 1, 1, 0.0, 0.0,
            GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0));
        infoPanel.add(jLabel4, new GridBagConstraints(0, 2, 1, 1, 0.0, 0.0,
            GridBagConstraints.CENTER, GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0));
        infoPanel.add(jLabel5, new GridBagConstraints(0, 3, 1, 1, 0.0, 0.0,
            GridBagConstraints.CENTER, GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0));
        infoPanel.add(extentX, new GridBagConstraints(1, 3, 1, 1, 0.0, 0.0,
            GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0));
        infoPanel.add(jLabel6, new GridBagConstraints(0, 4, 1, 1, 0.0, 0.0,
            GridBagConstraints.CENTER, GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0));
        infoPanel.add(extentY, new GridBagConstraints(1, 4, 1, 1, 0.0, 0.0,
            GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0));
        // Superseded HexLabel-based map setup, kept for reference:
        /* map.setMapStyle(HexLabel.STYLE_SQUARE); map.setMapData(data);
           map.setMapScale (MapIcon.SCALE_5); map.setMapSize(4,4);
           map.setHexLayout(MapIcon.SCALE_5, EditOptions.getInstance().getScaleLayout(MapIcon.SCALE_5));
           map.setLevel(MapIcon.GROUP_DOMAIN); */
        squareMap.setMapData(data);
        squareMap.setScale(MapScale.SCALE_5);
        squareMap.setSize(4,4);
        squareMap.setLayout(MapPreferences.getInstance().getScaleLayout(MapScale.SCALE_5));
        squareMap.setLevel(GroupType.DOMAIN);
        map = new MapLabel (squareMap);
        // Push edits back into the selected record when a field loses focus.
        groupLocation.addFocusListener(new java.awt.event.FocusAdapter() {
            public void focusLost(FocusEvent e) {
                groupLocation_focusLost(e);
            }
        });
        groupName.addFocusListener(new java.awt.event.FocusAdapter() {
            public void focusLost(FocusEvent e) {
                groupName_focusLost(e);
            }
        });
        mapPane.getViewport().add(map, null);
        // Only build the tree when there is at least one group to show;
        // otherwise groupTree stays null and the tree pane is omitted.
        if (data != null && data.getGroup(0) != null) {
            createGroupTree();
            groupTree = new JTree (root);
            groupTree.getSelectionModel().setSelectionMode (TreeSelectionModel.SINGLE_TREE_SELECTION);
            groupTree.addTreeSelectionListener(this);
            treeView.getViewport().add(groupTree, null);
            this.getContentPane().add(treeView, BorderLayout.WEST);
            groupTree.setSelectionRow(0);
        }
        jTabbedPane1.addTab("Info", infoPanel);
        jTabbedPane1.addTab("Comments", commentPanel);
        jTabbedPane1.addTab("Map", mapPane);
        this.getContentPane().add(buttonPanel, BorderLayout.SOUTH);
        this.getContentPane().add(jTabbedPane1, BorderLayout.CENTER);
        this.setTitle("Edit Groups");
        this.pack();
        // Populate the editor fields from the initial selection; the event
        // argument is unused by valueChanged(), so null is safe here.
        if (groupTree != null) {
            valueChanged (null);
        }
    }

    /**
     * TreeSelectionListener callback: loads the currently selected group
     * record into the editor fields. The event argument is ignored; the
     * selection is read from the tree directly (jbInit calls this with null).
     */
    public void valueChanged(TreeSelectionEvent e) {
        MutableTreeNode node = (MutableTreeNode)groupTree.getLastSelectedPathComponent();
        if (node == null) return;
        // Group records double as tree nodes, so the cast is structural.
        GroupRecord g = (GroupRecord)node;
        groupType.setSelectedItem(g.getType());
        groupName.setText(g.getName());
        groupLocation.setText(g.getLocation().toString());
        commentPanel.setDocument(g.getComment());
        squareMap.setLevel (g.getType());
        groupTypeSetExtentsEnabled();
        //squareMap.redrawMap();
        map.redrawMap();
    }

    // OK simply hides the dialog; edits are applied eagerly on focus loss.
    private void bOK_actionPerformed(ActionEvent e) {
        this.setVisible(false);
    }

    // NOTE(review): Cancel behaves identically to OK — it does not revert
    // the focus-loss edits already applied to the records.
    private void bCancel_actionPerformed(ActionEvent e) {
        this.setVisible(false);
    }

    /**
     * "New" button: clears the editor fields and creates a fresh record.
     * NOTE(review): per the class TODO, this is incomplete — the empty
     * null-branch below looks inverted (the new record is only added when a
     * tree already exists, and it is never inserted into the tree itself).
     */
    private void bNewGroup_actionPerformed(ActionEvent e) {
        jTabbedPane1.setSelectedIndex(0);
        groupType.setSelectedIndex(0);
        groupName.setText("");
        groupLocation.setText("");
        commentPanel.newDocument();
        GroupRecord g = new GroupRecord();
        g.setProvider (MapPreferences.getInstance().getUserData());
        if (groupTree == null) {} else {
            data.addGroup(g);
        }
    }

    /**
     * "Update" button (currently not shown in the UI): copies the editor
     * fields into the selected record, creating record, data set and tree
     * on first use when none exist yet.
     */
    private void bUpdate_actionPerformed (ActionEvent e) {
        dataChanged = true;
        GroupRecord g;
        if (groupTree == null) {
            g = new GroupRecord();
            g.setProvider(MapPreferences.getInstance().getUserData());
        } else {
            g = (GroupRecord) groupTree.getLastSelectedPathComponent();
        }
        g.setType((GroupType)groupType.getSelectedItem());
        // Only rewrite the location when the text actually differs, to avoid
        // clobbering hex type/group metadata unnecessarily.
        if ((g.getLocation() == null) ||
            !(groupLocation.getText().equals(g.getLocation().toString()))) {
            g.setLocation(new HexID (groupLocation.getText()));
            g.getLocation().setHexType(GroupType.GROUP);
            g.getLocation().setHexGroup (g.getParentName());
        }
        if ((g.getName() == null) || !(groupName.getText().equals (g.getName()))) {
            g.setName(groupName.getText());
        }
        if (g.getKey() == null) {
            // Key = first four name characters + location.
            // NOTE(review): throws StringIndexOutOfBoundsException for names
            // shorter than four characters — confirm intended invariant.
            g.setKey(g.getName().substring(0,4).toLowerCase() + "." + g.getLocation());
        }
        g.setComment(commentPanel.getDocument());
        // First-ever group: bootstrap the data set from the external refs
        // file and build the tree around this record as root.
        if (groupTree == null) {
            try {
                data = new Astrogation (MapPreferences.getInstance().getExternalRefsFileName());
            } catch (IOException ex) {
                ex.printStackTrace();
            } catch (JiBXException ex) {
                ex.printStackTrace();
            }
            data.addGroup(g);
            root = g;
            groupTree = new JTree (root);
            groupTree.getSelectionModel().setSelectionMode (TreeSelectionModel.SINGLE_TREE_SELECTION);
            groupTree.addTreeSelectionListener(this);
            treeView.getViewport().add(groupTree, null);
            this.getContentPane().add(treeView, BorderLayout.WEST);
            groupTree.setSelectionRow(0);
            this.pack();
        }
    }

    /**
     * Type combo callback: pushes the chosen type onto the selected record
     * and refreshes the extent spinners from that record.
     */
    private void groupType_actionPerformed(ActionEvent e) {
        GroupRecord g;
        if (groupTree != null) {
            g = (GroupRecord) groupTree.getLastSelectedPathComponent();
            g.setType((GroupType)groupType.getSelectedItem());
            extentX.setValue (new Integer (g.getExtentX()));
            extentY.setValue (new Integer (g.getExtentY()));
            groupTypeSetExtentsEnabled();
        }
    }

    // Extent spinners are only meaningful for user-defined GROUP groups;
    // the standard Traveller group sizes are fixed.
    private void groupTypeSetExtentsEnabled () {
        if (groupType.getSelectedItem().equals(GroupType.GROUP)) {
            extentX.setEnabled(true);
            extentY.setEnabled(true);
        } else {
            extentX.setEnabled(false);
            extentY.setEnabled(false);
        }
    }

    /**
     * Links every group record under its parent (by name) and sets
     * {@code root} to the top of the resulting tree. O(n^2) over the group
     * count, which is presumably small.
     */
    private void createGroupTree () {
        for (int i = 0; i < data.getGroupCount(); i++) {
            GroupRecord g = data.getGroup(i);
            /*
             * Look through all the groups to find the parent record of this
             * group.
             */
            if (g.getParentName() != null) {
                for (int j = 0; j < data.getGroupCount(); j++) {
                    GroupRecord parent = data.getGroup(j);
                    if (g.getParentName().equals (parent.getName())) {
                        // Guard against inserting the same child twice.
                        if (parent.getIndex(g) < 0) {
                            parent.insert (g, parent.getChildCount());
                        }
                        break;
                    }
                }
            }
        }
        /*
         * Find the root of the group list tree by walking up from any group.
         */
        root = data.getGroup(0);
        while (root.getParent() != null) {
            root = (MutableTreeNode)root.getParent();
        }
    }

    // Focus-loss handler: commit an edited name to the selected record.
    private void groupName_focusLost(FocusEvent e) {
        GroupRecord g = (GroupRecord) groupTree.getLastSelectedPathComponent();
        if (!(groupName.getText().equals (g.getName())))
            g.setName(groupName.getText());
    }

    // Focus-loss handler: commit an edited location, mirroring the location
    // update logic in bUpdate_actionPerformed.
    private void groupLocation_focusLost(FocusEvent e) {
        GroupRecord g = (GroupRecord) groupTree.getLastSelectedPathComponent();
        if ((g.getLocation() == null) ||
            !(groupLocation.getText().equals(g.getLocation().toString()))) {
            g.setLocation(new HexID (groupLocation.getText()));
            g.getLocation().setHexType(GroupType.GROUP);
            g.getLocation().setHexGroup (g.getParentName());
        }
    }
}
/****************************************************************** * File: LibReg.java * Created by: Dave Reynolds * Created on: 31 Jan 2013 * * (c) Copyright 2013, Epimorphics Limited * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * *****************************************************************/ package com.epimorphics.registry.webapi; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.shiro.SecurityUtils; import org.apache.shiro.authz.Permission; import org.apache.shiro.subject.Subject; import com.epimorphics.rdfutil.ModelWrapper; import com.epimorphics.rdfutil.QueryUtil; import com.epimorphics.rdfutil.RDFNodeWrapper; import com.epimorphics.registry.core.Description; import com.epimorphics.registry.core.Register; import com.epimorphics.registry.core.RegisterItem; import com.epimorphics.registry.core.Registry; import com.epimorphics.registry.core.Status; import com.epimorphics.registry.security.RegAuthorizationInfo; import com.epimorphics.registry.security.RegPermission; import com.epimorphics.registry.security.UserInfo; import com.epimorphics.registry.store.RegisterEntryInfo; import com.epimorphics.registry.store.StoreAPI; import com.epimorphics.registry.util.Prefixes; import com.epimorphics.registry.util.TypedTemplateIndex; import com.epimorphics.registry.vocab.RegistryVocab; 
import com.epimorphics.server.core.Service;
import com.epimorphics.server.core.ServiceBase;
import com.epimorphics.server.templates.LibPlugin;
import com.epimorphics.server.webapi.facets.FacetResultEntry;
import com.epimorphics.util.EpiException;
import com.epimorphics.util.PrefixUtils;
import com.hp.hpl.jena.query.QuerySolution;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.rdf.model.Literal;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.shared.PrefixMapping;
import com.hp.hpl.jena.vocabulary.DCTerms;
import com.hp.hpl.jena.vocabulary.OWL;
import com.hp.hpl.jena.vocabulary.RDFS;

/**
 * Some supporting methods to help Velocity UI access the registry store.
 *
 * @author <a href="mailto:dave@epimorphics.com">Dave Reynolds</a>
 */
public class LibReg extends ServiceBase implements LibPlugin, Service {

    /**
     * Raw access to the registry store
     */
    public StoreAPI getStore() {
        return Registry.get().getStore();
    }

    /**
     * Return a resource known to the store, wrapped for scripting.
     * Relative URIs are resolved against the registry base URI; returns
     * null when the store has no current version of the resource.
     */
    public RDFNodeWrapper getResource(String uri) {
        if ( ! uri.startsWith("http") ) {
            uri = Registry.get().getBaseURI() + uri;
        }
        Description d = getStore().getCurrentVersion(uri);
        if (d == null) {
            return null;
        }
        return wrapNode( d.getRoot() );
    }

    // Wrap a model for scripting, stamping the registry's standard prefixes on it.
    private ModelWrapper wrapModel(Model m) {
        m.setNsPrefixes( Prefixes.get() );
        return new ModelWrapper( m );
    }

    // Wrap a single resource via its owning model.
    private RDFNodeWrapper wrapNode(Resource root) {
        return wrapModel( root.getModel() ).getNode(root);
    }

    /**
     * Helper to list members of a register. Accepts a register given as a
     * URI string (absolute or registry-relative), a wrapped node, or a
     * Register instance; returns null for unknown URIs or unsupported types.
     */
    public List<RegisterEntryInfo> listMembers(Object arg) {
        // NOTE(review): stray second semicolon below is a harmless empty statement.
        Register reg = null;;
        if (arg instanceof String) {
            String uri = (String)arg;
            if ( ! uri.startsWith("http") ) {
                uri = Registry.get().getBaseURI() + uri;
            }
            Description d = getStore().getCurrentVersion(uri);
            if (d != null) {
                reg = d.asRegister();
            } else {
                return null;
            }
        } else if (arg instanceof RDFNodeWrapper) {
            reg = new Register( ((RDFNodeWrapper)arg).asResource() );
        } else if (arg instanceof Register) {
            reg = (Register) arg;
        } else {
            return null;
        }
        return getStore().listMembers(reg);
    }

    /**
     * Convert a resource, maybe wrapped, to a status code
     */
    public Status asStatus(Object state) {
        if (state instanceof Status) {
            return (Status)state;
        } else if (state instanceof Resource) {
            return Status.forResource((Resource)state);
        } else if (state instanceof RDFNodeWrapper) {
            return Status.forResource(((RDFNodeWrapper)state).asResource());
        } else {
            return null;
        }
    }

    /**
     * List the legal next states after this state. The current state is
     * excluded from the result; an unrecognized state yields an empty list.
     */
    public List<Status> nextStates(RDFNodeWrapper state) {
        Status current = asStatus(state);
        if (current == null) return new ArrayList<Status>();
        List<Status> next = current.nextStates();
        next.remove(current);
        return next;
    }

    /**
     * Check if the given action(s) are permitted on the given URI for the
     * current subject. Unauthenticated subjects are never permitted.
     */
    public boolean isPermitted(String action, String uri) {
        Subject subject = SecurityUtils.getSubject();
        if (!subject.isAuthenticated()) {
            return false;
        }
        return subject.isPermitted(action + ":/" + uri);
    }

    /**
     * Return the subject (logged in user if any).
     * Needed for simple UI pages that aren't rendered as part of visiting the
     * registry body. Returns null when no security manager is available.
     */
    public Subject getSubject() {
        try {
            return SecurityUtils.getSubject();
        } catch (Exception e) {
            return null;
        }
    }

    /**
     * List permissions for current authenticated user (if any), ordered by path.
     * NOTE(review): getSubject() can return null (exception path), which would
     * NPE on the isAuthenticated() call below — confirm whether callers
     * guarantee an initialized security manager.
     */
    public List<RegPermission> getPermissions() {
        Subject subject = getSubject();
        if (subject.isAuthenticated()) {
            String id = ((UserInfo)subject.getPrincipal()).getOpenid();
            RegAuthorizationInfo auth = Registry.get().getUserStore().getPermissions(id);
            List<RegPermission> perms = new ArrayList<RegPermission>( );
            if (auth.getObjectPermissions() != null) {
                for (Permission p : auth.getObjectPermissions()) {
                    perms.add( (RegPermission) p);
                }
                Collections.sort(perms, new Comparator<RegPermission>(){
                    @Override
                    public int compare(RegPermission arg0, RegPermission arg1) {
                        return arg0.getPath().compareTo(arg1.getPath());
                    }
                });
            }
            return perms;
        } else {
            return new ArrayList<RegPermission>();
        }
    }

    /**
     * Return string giving the Turtle prefixes header for a model/node.
     * Accepts a wrapped model, a wrapped node, or a raw Jena Model (which
     * implements PrefixMapping directly — hence the cast below).
     */
    public String turtlePrefixes(Object obj) {
        PrefixMapping prefixes;
        if (obj instanceof ModelWrapper) {
            prefixes = ((ModelWrapper)obj).getPrefixes();
        } else if (obj instanceof RDFNodeWrapper) {
            prefixes = ((RDFNodeWrapper)obj).getModelW().getPrefixes();
        } else if (obj instanceof Model) {
            prefixes = (Model)obj;
        } else {
            throw new EpiException("Not a type with prefixes: " + obj);
        }
        StringBuffer result = new StringBuffer();
        for (Map.Entry<String, String> mapping : prefixes.getNsPrefixMap().entrySet()) {
            result.append(String.format("@prefix %s: <%s>. \n", mapping.getKey(), mapping.getValue()));
        }
        return result.toString();
    }

    /**
     * Convert the URI for a managed entity or an item to a path relative to
     * the registry base; URIs outside the base are returned unchanged.
     */
    public String pathFor(String uri) {
        String base = Registry.get().getBaseURI();
        if (uri.startsWith(base)) {
            // +1 also strips the separating "/" after the base URI.
            return uri.substring(base.length() + 1);
        }
        return uri;
    }

    /**
     * Test whether RegisterItem property should be allowed into an edit dialog
     */
    // System-managed RegisterItem properties that must not be hand-edited.
    protected static Set<Resource> NonEditableRIProps = new HashSet<Resource>();
    static {
        NonEditableRIProps.add( RegistryVocab.definition);
        NonEditableRIProps.add( RegistryVocab.itemClass);
        NonEditableRIProps.add( RegistryVocab.notation);
        NonEditableRIProps.add( RegistryVocab.register);
        NonEditableRIProps.add( RegistryVocab.submitter);
        NonEditableRIProps.add( DCTerms.dateAccepted );
        NonEditableRIProps.add( DCTerms.dateSubmitted );
        NonEditableRIProps.add( OWL.versionInfo );
        NonEditableRIProps.add( RDFS.label );
    }

    /**
     * True when the given property (wrapped or raw resource) may be edited.
     * @throws EpiException for any other argument type
     */
    public boolean isEditable(Object prop) {
        Resource p;
        if (prop instanceof RDFNodeWrapper) {
            p = ((RDFNodeWrapper)prop).asResource();
        } else if (prop instanceof Resource) {
            p = (Resource)prop;
        } else {
            throw new EpiException("Illegal type");
        }
        return ! NonEditableRIProps.contains(p);
    }

    /**
     * Test if a URI corresponds to a RegisterItem — i.e. its final path
     * segment starts with "_".
     */
    public boolean isItem(String uri) {
        return uri.matches(".*/_[^/]+$");
    }

    /**
     * Create a list of item/entity pairs corresponding to a list of (wrapper)
     * entities in a paged register listing. Entities with no linked item get
     * a null item; multiple linked items are sorted by their reg:notation and
     * de-duplicated across the whole listing.
     */
    public List<ItemMember> asItemList(List<RDFNodeWrapper> members) {
        List<ItemMember> items = new ArrayList<ItemMember>();
        Set<RDFNodeWrapper> seenItems = new HashSet<RDFNodeWrapper>();
        for (RDFNodeWrapper member : members) {
            // Walk backwards from the entity to its owning item(s).
            List<RDFNodeWrapper> linkedItems = member.connectedNodes("^reg:entity/^reg:definition");
            if (linkedItems.isEmpty()) {
                items.add( new ItemMember(member, null) );
            } else {
                Collections.sort(linkedItems, new Comparator<RDFNodeWrapper>(){
                    @SuppressWarnings({ "rawtypes", "unchecked" })
                    @Override
                    public int compare(RDFNodeWrapper arg0, RDFNodeWrapper arg1) {
                        Object o0 = arg0.getPropertyValue(RegistryVocab.notation).asLiteral().getValue();
                        Object o1 = arg1.getPropertyValue(RegistryVocab.notation).asLiteral().getValue();
                        // Prefer the literals' natural order (e.g. numeric);
                        // fall back to string comparison otherwise.
                        if (o0 instanceof Comparable && o1 instanceof Comparable) {
                            return ((Comparable)o0).compareTo((Comparable)o1);
                        } else {
                            return (o0.toString()).compareTo(o1.toString());
                        }
                    }
                });
                for (RDFNodeWrapper item : linkedItems) {
                    if (seenItems.add(item)) {
                        items.add( new ItemMember(member, item) );
                    }
                }
            }
        }
        return items;
    }

    // Simple (member, item) pair used by register listing templates; item may
    // be null when the entity has no linked RegisterItem.
    public class ItemMember {
        protected RDFNodeWrapper member;
        protected RDFNodeWrapper item;

        public ItemMember(RDFNodeWrapper member, RDFNodeWrapper item) {
            this.member = member;
            this.item = item;
        }

        public RDFNodeWrapper getMember() {
            return member;
        }

        public RDFNodeWrapper getItem() {
            return item;
        }
    }

    /**
     * Run a sparql query, expanding prefixes from the prefix registry, return
     * as an array of variable bindings. All bindings are wrapped over a
     * single shared (initially empty) model.
     */
    public List<Map<String, RDFNodeWrapper>> query(String query, Object... params) {
        String expandedQuery = PrefixUtils.expandQuery(query, Prefixes.get());
        expandedQuery = QueryUtil.substituteInQuery(expandedQuery, params);
        ResultSet rs = Registry.get().getStore().query(expandedQuery);
        ModelWrapper mw = new ModelWrapper( ModelFactory.createDefaultModel() );
        List<Map<String, RDFNodeWrapper>> result = new ArrayList<Map<String,RDFNodeWrapper>>();
        while (rs.hasNext()) {
            QuerySolution soln = rs.next();
            Map<String, RDFNodeWrapper> map = new HashMap<String, RDFNodeWrapper>();
            for (Iterator<String> ni = soln.varNames(); ni.hasNext(); ) {
                String name = ni.next();
                RDFNode node = soln.get(name);
                if (node != null) {
                    map.put(name, new RDFNodeWrapper(mw, node) );
                }
            }
            result.add( map );
        }
        return result;
    }

    /**
     * Utility for incrementally building up compacted range notation
     * for reserved entries
     */
    public ReservationList addReserved(ReservationList reservations, RDFNode notation) {
        reservations.add(notation);
        return reservations;
    }

    // Factory for an empty reservation accumulator (for template use).
    public ReservationList startReservationList() {
        return new ReservationList();
    }

    /**
     * Accumulates reserved-entry notations, compacting consecutive integers
     * into "start-end" ranges; non-integer notations are appended verbatim.
     * Not thread-safe; intended for single-template use.
     */
    public static class ReservationList {
        StringBuffer reservations = new StringBuffer();
        int last;            // last integer seen, not yet flushed
        int rangeStart;      // first integer of the open range (when inRange)
        boolean pendingNumeric = false;  // an unflushed integer exists
        boolean inRange = false;         // last/pendingNumeric form a run

        /** Feed the next notation literal into the accumulator. */
        public void add(RDFNode notation) {
            Literal l = notation.asLiteral();
            Object value = l.getValue();
            if (value instanceof Integer) {
                int n = ((Integer) value).intValue();
                if (pendingNumeric) {
                    if (n == last + 1) {
                        // Consecutive value: open (or extend) a range.
                        if (!inRange) {
                            rangeStart = last;
                            inRange = true;
                        }
                    } else {
                        finishNumeric();
                    }
                }
                last = n;
                pendingNumeric = true;
            } else {
                finishNumeric();
                if (reservations.length() != 0) {
                    reservations.append(", ");
                }
                reservations.append(l.getLexicalForm());
            }
        }

        // Flush the pending integer (or range) into the string buffer.
        private void finishNumeric() {
            if (pendingNumeric) {
                if (reservations.length() != 0) {
                    reservations.append(", ");
                }
                if (inRange) {
                    reservations.append(rangeStart);
                    reservations.append("-");
                }
                reservations.append(last);
                inRange = false;
                pendingNumeric = false;
            }
        }

        /** Final compacted reservation string (flushes any open range). */
        public String getReservations() {
            if (pendingNumeric) {
                finishNumeric();
            }
            return reservations.toString();
        }

        public boolean isEmpty() {
            return reservations.length() == 0 && !pendingNumeric;
        }

        @Override
        public String toString() {
            return getReservations();
        }
    }

    // Lazily built index mapping rdf:type to a rendering template.
    protected TypedTemplateIndex typedTemplateIndex;

    /**
     * Return the name of the template, if any, to use for rendering the given entity
     * @param arg can be a wrapped resource, raw resource or a register item
     */
    public String templateFor(Object arg) {
        if (arg instanceof RDFNodeWrapper) {
            return templateForResource( ((RDFNodeWrapper)arg).asResource() );
        } else if (arg instanceof RegisterItem) {
            return templateForResource( ((RegisterItem)arg).getEntity() );
        } else if (arg instanceof Resource) {
            return templateForResource( (Resource)arg );
        } else {
            return null;
        }
    }

    // Resolve the template via the lazily-created type index.
    // NOTE(review): the lazy init is not synchronized — benign only if
    // concurrent double-creation of TypedTemplateIndex is acceptable.
    private String templateForResource(Resource r) {
        if (typedTemplateIndex == null) {
            typedTemplateIndex = new TypedTemplateIndex();
        }
        return typedTemplateIndex.templateFor(r);
    }

    /**
     * Take a list of facet search results (assumed over register items), fetch
     * the corresponding register items to a local model and return a list of
     * wrapped nodes over that model. Non-resource facet values are skipped.
     */
    public List<RDFNodeWrapper> wrap(List<FacetResultEntry> results) {
        List<RDFNodeWrapper> wrappedResults = new ArrayList<>(results.size());
        StoreAPI store = Registry.get().getStore();
        Model model = ModelFactory.createDefaultModel();
        ModelWrapper modelw = new ModelWrapper(model);
        model.setNsPrefixes( Prefixes.get() );
        for (FacetResultEntry result : results) {
            RDFNode value = result.getItem();
            if (value.isResource()) {
                Resource valueR = value.asResource();
                RegisterItem item = store.getItem(valueR.getURI(), false);
                Resource root = item.getRoot();
                // Accumulate every item's statements into the one shared model.
                model.add( root.getModel() );
                wrappedResults.add( new RDFNodeWrapper(modelw, root.inModel(model)) );
            }
        }
        return wrappedResults;
    }
}
/* * oxAuth is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text. * * Copyright (c) 2014, Gluu */ package org.gluu.oxauth.client; import static org.gluu.oxauth.model.register.RegisterRequestParam.ACCESS_TOKEN_AS_JWT; import static org.gluu.oxauth.model.register.RegisterRequestParam.ACCESS_TOKEN_LIFETIME; import static org.gluu.oxauth.model.register.RegisterRequestParam.ACCESS_TOKEN_SIGNING_ALG; import static org.gluu.oxauth.model.register.RegisterRequestParam.ALLOW_SPONTANEOUS_SCOPES; import static org.gluu.oxauth.model.register.RegisterRequestParam.APPLICATION_TYPE; import static org.gluu.oxauth.model.register.RegisterRequestParam.AUTHORIZED_ORIGINS; import static org.gluu.oxauth.model.register.RegisterRequestParam.BACKCHANNEL_AUTHENTICATION_REQUEST_SIGNING_ALG; import static org.gluu.oxauth.model.register.RegisterRequestParam.BACKCHANNEL_CLIENT_NOTIFICATION_ENDPOINT; import static org.gluu.oxauth.model.register.RegisterRequestParam.BACKCHANNEL_LOGOUT_SESSION_REQUIRED; import static org.gluu.oxauth.model.register.RegisterRequestParam.BACKCHANNEL_LOGOUT_URI; import static org.gluu.oxauth.model.register.RegisterRequestParam.BACKCHANNEL_TOKEN_DELIVERY_MODE; import static org.gluu.oxauth.model.register.RegisterRequestParam.BACKCHANNEL_USER_CODE_PARAMETER; import static org.gluu.oxauth.model.register.RegisterRequestParam.CLAIMS; import static org.gluu.oxauth.model.register.RegisterRequestParam.CLAIMS_REDIRECT_URIS; import static org.gluu.oxauth.model.register.RegisterRequestParam.CLIENT_NAME; import static org.gluu.oxauth.model.register.RegisterRequestParam.CLIENT_URI; import static org.gluu.oxauth.model.register.RegisterRequestParam.CONTACTS; import static org.gluu.oxauth.model.register.RegisterRequestParam.DEFAULT_ACR_VALUES; import static org.gluu.oxauth.model.register.RegisterRequestParam.DEFAULT_MAX_AGE; import static org.gluu.oxauth.model.register.RegisterRequestParam.FRONT_CHANNEL_LOGOUT_SESSION_REQUIRED; import static 
org.gluu.oxauth.model.register.RegisterRequestParam.FRONT_CHANNEL_LOGOUT_URI; import static org.gluu.oxauth.model.register.RegisterRequestParam.GRANT_TYPES; import static org.gluu.oxauth.model.register.RegisterRequestParam.ID_TOKEN_ENCRYPTED_RESPONSE_ALG; import static org.gluu.oxauth.model.register.RegisterRequestParam.ID_TOKEN_ENCRYPTED_RESPONSE_ENC; import static org.gluu.oxauth.model.register.RegisterRequestParam.ID_TOKEN_SIGNED_RESPONSE_ALG; import static org.gluu.oxauth.model.register.RegisterRequestParam.ID_TOKEN_TOKEN_BINDING_CNF; import static org.gluu.oxauth.model.register.RegisterRequestParam.INITIATE_LOGIN_URI; import static org.gluu.oxauth.model.register.RegisterRequestParam.JWKS; import static org.gluu.oxauth.model.register.RegisterRequestParam.JWKS_URI; import static org.gluu.oxauth.model.register.RegisterRequestParam.KEEP_CLIENT_AUTHORIZATION_AFTER_EXPIRATION; import static org.gluu.oxauth.model.register.RegisterRequestParam.LOGO_URI; import static org.gluu.oxauth.model.register.RegisterRequestParam.POLICY_URI; import static org.gluu.oxauth.model.register.RegisterRequestParam.POST_LOGOUT_REDIRECT_URIS; import static org.gluu.oxauth.model.register.RegisterRequestParam.REDIRECT_URIS; import static org.gluu.oxauth.model.register.RegisterRequestParam.REQUEST_OBJECT_ENCRYPTION_ALG; import static org.gluu.oxauth.model.register.RegisterRequestParam.REQUEST_OBJECT_ENCRYPTION_ENC; import static org.gluu.oxauth.model.register.RegisterRequestParam.REQUEST_OBJECT_SIGNING_ALG; import static org.gluu.oxauth.model.register.RegisterRequestParam.REQUEST_URIS; import static org.gluu.oxauth.model.register.RegisterRequestParam.REQUIRE_AUTH_TIME; import static org.gluu.oxauth.model.register.RegisterRequestParam.RESPONSE_TYPES; import static org.gluu.oxauth.model.register.RegisterRequestParam.RPT_AS_JWT; import static org.gluu.oxauth.model.register.RegisterRequestParam.RUN_INTROSPECTION_SCRIPT_BEFORE_ACCESS_TOKEN_CREATION_AS_JWT_AND_INCLUDE_CLAIMS; import static 
org.gluu.oxauth.model.register.RegisterRequestParam.SCOPE; import static org.gluu.oxauth.model.register.RegisterRequestParam.SCOPES; import static org.gluu.oxauth.model.register.RegisterRequestParam.SECTOR_IDENTIFIER_URI; import static org.gluu.oxauth.model.register.RegisterRequestParam.SOFTWARE_ID; import static org.gluu.oxauth.model.register.RegisterRequestParam.SOFTWARE_STATEMENT; import static org.gluu.oxauth.model.register.RegisterRequestParam.SOFTWARE_VERSION; import static org.gluu.oxauth.model.register.RegisterRequestParam.SPONTANEOUS_SCOPES; import static org.gluu.oxauth.model.register.RegisterRequestParam.SUBJECT_TYPE; import static org.gluu.oxauth.model.register.RegisterRequestParam.TLS_CLIENT_AUTH_SUBJECT_DN; import static org.gluu.oxauth.model.register.RegisterRequestParam.TOKEN_ENDPOINT_AUTH_METHOD; import static org.gluu.oxauth.model.register.RegisterRequestParam.TOKEN_ENDPOINT_AUTH_SIGNING_ALG; import static org.gluu.oxauth.model.register.RegisterRequestParam.TOS_URI; import static org.gluu.oxauth.model.register.RegisterRequestParam.USERINFO_ENCRYPTED_RESPONSE_ALG; import static org.gluu.oxauth.model.register.RegisterRequestParam.USERINFO_ENCRYPTED_RESPONSE_ENC; import static org.gluu.oxauth.model.register.RegisterRequestParam.USERINFO_SIGNED_RESPONSE_ALG; import static org.gluu.oxauth.model.util.StringUtils.implode; import java.util.List; import java.util.Map; import javax.ws.rs.HttpMethod; import javax.ws.rs.client.Entity; import javax.ws.rs.client.Invocation.Builder; import javax.ws.rs.core.MediaType; import org.apache.commons.lang.StringUtils; import org.apache.log4j.Logger; import org.gluu.oxauth.model.register.ApplicationType; import org.gluu.oxauth.util.ClientUtil; import org.jboss.resteasy.client.jaxrs.ClientHttpEngine; import org.jboss.resteasy.client.jaxrs.ResteasyClientBuilder; import org.json.JSONArray; import org.json.JSONObject; /** * Encapsulates functionality to make Register request calls to an authorization server via REST 
Services.
 *
 * @author Javier Rojas Blum
 * @author Yuriy Zabrovarnyy
 * @author Yuriy Movchan
 * @version August 20, 2019
 */
public class RegisterClient extends BaseClient<RegisterRequest, RegisterResponse> {

    private static final Logger LOG = Logger.getLogger(RegisterClient.class);

    /**
     * Construct a register client by providing an URL where the REST service is located.
     *
     * @param url The REST service location.
     */
    public RegisterClient(String url) {
        super(url);
    }

    /**
     * Resolves the HTTP method for the call: an explicitly configured method wins;
     * otherwise a request carrying a registration access token is treated as a
     * client read (GET) and everything else as a registration (POST).
     */
    @Override
    public String getHttpMethod() {
        if (getRequest() != null) {
            if (StringUtils.isNotBlank(getRequest().getHttpMethod())) {
                return getRequest().getHttpMethod();
            }
            if (getRequest().getRegistrationAccessToken() != null) {
                return HttpMethod.GET;
            }
        }
        return HttpMethod.POST;
    }

    /**
     * Executes the call to the REST service requesting to register and process the response.
     *
     * @param applicationType The application type.
     * @param clientName      The client name.
     * @param redirectUri     A list of space-delimited redirection URIs.
     * @return The service response.
     */
    public RegisterResponse execRegister(ApplicationType applicationType,
                                         String clientName, List<String> redirectUri) {
        setRequest(new RegisterRequest(applicationType, clientName, redirectUri));
        return exec();
    }

    /**
     * Executes the previously configured request and returns the parsed response.
     *
     * @return The service response.
     */
    public RegisterResponse exec() {
        initClientRequest();
        return _exec();
    }

    /**
     * Executes the request using the supplied HTTP engine.
     *
     * @deprecated use {@link #exec()} instead.
     */
    @Deprecated
    public RegisterResponse exec(ClientHttpEngine engine) {
        resteasyClient = ((ResteasyClientBuilder) ResteasyClientBuilder.newBuilder()).httpEngine(engine).build();
        webTarget = resteasyClient.target(getUrl());
        return _exec();
    }

    /**
     * Builds the request entity (for POST/PUT), applies headers and cookies,
     * invokes the REST call and stores the parsed {@link RegisterResponse}.
     *
     * @return the parsed response, or {@code null} when the call failed before a
     *         response was set (errors are logged and swallowed — kept for
     *         backward compatibility with existing callers).
     */
    private RegisterResponse _exec() {
        try {
            // POST (client register) and PUT (client update) carry a JSON body built
            // from every non-empty request attribute; GET/DELETE send no entity.
            Entity<String> requestEntity = null;
            if (getHttpMethod().equals(HttpMethod.POST) || getHttpMethod().equals(HttpMethod.PUT)) {
                JSONObject requestBody = new JSONObject();
                if (getRequest().getRedirectUris() != null && !getRequest().getRedirectUris().isEmpty()) {
                    requestBody.put(REDIRECT_URIS.toString(), new JSONArray(getRequest().getRedirectUris()));
                }
                if (getRequest().getClaimsRedirectUris() != null && !getRequest().getClaimsRedirectUris().isEmpty()) {
                    requestBody.put(CLAIMS_REDIRECT_URIS.toString(), new JSONArray(getRequest().getClaimsRedirectUris()));
                }
                if (getRequest().getResponseTypes() != null && !getRequest().getResponseTypes().isEmpty()) {
                    requestBody.put(RESPONSE_TYPES.toString(), new JSONArray(getRequest().getResponseTypes_()));
                }
                if (getRequest().getGrantTypes() != null && !getRequest().getGrantTypes().isEmpty()) {
                    requestBody.put(GRANT_TYPES.toString(), new JSONArray(getRequest().getGrantTypes()));
                }
                if (getRequest().getApplicationType() != null) {
                    requestBody.put(APPLICATION_TYPE.toString(), getRequest().getApplicationType());
                }
                if (getRequest().getContacts() != null && !getRequest().getContacts().isEmpty()) {
                    requestBody.put(CONTACTS.toString(), new JSONArray(getRequest().getContacts()));
                }
                if (StringUtils.isNotBlank(getRequest().getClientName())) {
                    requestBody.put(CLIENT_NAME.toString(), getRequest().getClientName());
                }
                if (StringUtils.isNotBlank(getRequest().getIdTokenTokenBindingCnf())) {
                    requestBody.put(ID_TOKEN_TOKEN_BINDING_CNF.toString(), getRequest().getIdTokenTokenBindingCnf());
                }
                if (StringUtils.isNotBlank(getRequest().getLogoUri())) {
                    requestBody.put(LOGO_URI.toString(), getRequest().getLogoUri());
                }
                if (StringUtils.isNotBlank(getRequest().getClientUri())) {
                    requestBody.put(CLIENT_URI.toString(), getRequest().getClientUri());
                }
                if (StringUtils.isNotBlank(getRequest().getPolicyUri())) {
                    requestBody.put(POLICY_URI.toString(), getRequest().getPolicyUri());
                }
                if (StringUtils.isNotBlank(getRequest().getTosUri())) {
                    requestBody.put(TOS_URI.toString(), getRequest().getTosUri());
                }
                if (StringUtils.isNotBlank(getRequest().getJwksUri())) {
                    requestBody.put(JWKS_URI.toString(), getRequest().getJwksUri());
                }
                if (StringUtils.isNotBlank(getRequest().getJwks())) {
                    requestBody.put(JWKS.toString(), getRequest().getJwks());
                }
                if (StringUtils.isNotBlank(getRequest().getSectorIdentifierUri())) {
                    requestBody.put(SECTOR_IDENTIFIER_URI.toString(), getRequest().getSectorIdentifierUri());
                }
                if (getRequest().getSubjectType() != null) {
                    requestBody.put(SUBJECT_TYPE.toString(), getRequest().getSubjectType());
                }
                if (getRequest().getAccessTokenAsJwt() != null) {
                    requestBody.put(ACCESS_TOKEN_AS_JWT.toString(), getRequest().getAccessTokenAsJwt().toString());
                }
                if (getRequest().getAccessTokenSigningAlg() != null) {
                    requestBody.put(ACCESS_TOKEN_SIGNING_ALG.toString(), getRequest().getAccessTokenSigningAlg().toString());
                }
                if (getRequest().getRptAsJwt() != null) {
                    requestBody.put(RPT_AS_JWT.toString(), getRequest().getRptAsJwt().toString());
                }
                if (getRequest().getTlsClientAuthSubjectDn() != null) {
                    requestBody.put(TLS_CLIENT_AUTH_SUBJECT_DN.toString(), getRequest().getTlsClientAuthSubjectDn());
                }
                if (getRequest().getAllowSpontaneousScopes() != null) {
                    requestBody.put(ALLOW_SPONTANEOUS_SCOPES.toString(), getRequest().getAllowSpontaneousScopes());
                }
                if (getRequest().getSpontaneousScopes() != null) {
                    requestBody.put(SPONTANEOUS_SCOPES.toString(), new JSONArray(getRequest().getSpontaneousScopes()));
                }
                if (getRequest().getRunIntrospectionScriptBeforeAccessTokenAsJwtCreationAndIncludeClaims() != null) {
                    requestBody.put(RUN_INTROSPECTION_SCRIPT_BEFORE_ACCESS_TOKEN_CREATION_AS_JWT_AND_INCLUDE_CLAIMS.toString(),
                            getRequest().getRunIntrospectionScriptBeforeAccessTokenAsJwtCreationAndIncludeClaims().toString());
                }
                if (getRequest().getKeepClientAuthorizationAfterExpiration() != null) {
                    requestBody.put(KEEP_CLIENT_AUTHORIZATION_AFTER_EXPIRATION.toString(),
                            getRequest().getKeepClientAuthorizationAfterExpiration().toString());
                }
                // Signing/encryption algorithms are serialized by their registered name.
                if (getRequest().getIdTokenSignedResponseAlg() != null) {
                    requestBody.put(ID_TOKEN_SIGNED_RESPONSE_ALG.toString(), getRequest().getIdTokenSignedResponseAlg().getName());
                }
                if (getRequest().getIdTokenEncryptedResponseAlg() != null) {
                    requestBody.put(ID_TOKEN_ENCRYPTED_RESPONSE_ALG.toString(), getRequest().getIdTokenEncryptedResponseAlg().getName());
                }
                if (getRequest().getIdTokenEncryptedResponseEnc() != null) {
                    requestBody.put(ID_TOKEN_ENCRYPTED_RESPONSE_ENC.toString(), getRequest().getIdTokenEncryptedResponseEnc().getName());
                }
                if (getRequest().getUserInfoSignedResponseAlg() != null) {
                    requestBody.put(USERINFO_SIGNED_RESPONSE_ALG.toString(), getRequest().getUserInfoSignedResponseAlg().getName());
                }
                if (getRequest().getUserInfoEncryptedResponseAlg() != null) {
                    requestBody.put(USERINFO_ENCRYPTED_RESPONSE_ALG.toString(), getRequest().getUserInfoEncryptedResponseAlg().getName());
                }
                if (getRequest().getUserInfoEncryptedResponseEnc() != null) {
                    requestBody.put(USERINFO_ENCRYPTED_RESPONSE_ENC.toString(), getRequest().getUserInfoEncryptedResponseEnc().getName());
                }
                if (getRequest().getRequestObjectSigningAlg() != null) {
                    requestBody.put(REQUEST_OBJECT_SIGNING_ALG.toString(), getRequest().getRequestObjectSigningAlg().getName());
                }
                if (getRequest().getRequestObjectEncryptionAlg() != null) {
                    requestBody.put(REQUEST_OBJECT_ENCRYPTION_ALG.toString(), getRequest().getRequestObjectEncryptionAlg().getName());
                }
                if (getRequest().getRequestObjectEncryptionEnc() != null) {
                    requestBody.put(REQUEST_OBJECT_ENCRYPTION_ENC.toString(), getRequest().getRequestObjectEncryptionEnc().getName());
                }
                if (getRequest().getTokenEndpointAuthMethod() != null) {
                    requestBody.put(TOKEN_ENDPOINT_AUTH_METHOD.toString(), getRequest().getTokenEndpointAuthMethod());
                }
                if (getRequest().getTokenEndpointAuthSigningAlg() != null) {
                    requestBody.put(TOKEN_ENDPOINT_AUTH_SIGNING_ALG.toString(), getRequest().getTokenEndpointAuthSigningAlg());
                }
                if (getRequest().getDefaultMaxAge() != null) {
                    requestBody.put(DEFAULT_MAX_AGE.toString(), getRequest().getDefaultMaxAge());
                }
                if (getRequest().getRequireAuthTime() != null) {
                    requestBody.put(REQUIRE_AUTH_TIME.toString(), getRequest().getRequireAuthTime());
                }
                if (getRequest().getDefaultAcrValues() != null && !getRequest().getDefaultAcrValues().isEmpty()) {
                    requestBody.put(DEFAULT_ACR_VALUES.toString(), getRequest().getDefaultAcrValues());
                }
                if (StringUtils.isNotBlank(getRequest().getInitiateLoginUri())) {
                    requestBody.put(INITIATE_LOGIN_URI.toString(), getRequest().getInitiateLoginUri());
                }
                if (getRequest().getPostLogoutRedirectUris() != null && !getRequest().getPostLogoutRedirectUris().isEmpty()) {
                    requestBody.put(POST_LOGOUT_REDIRECT_URIS.toString(), getRequest().getPostLogoutRedirectUris());
                }
                if (getRequest().getFrontChannelLogoutUris() != null && !getRequest().getFrontChannelLogoutUris().isEmpty()) {
                    requestBody.put(FRONT_CHANNEL_LOGOUT_URI.getName(), getRequest().getFrontChannelLogoutUris());
                }
                if (getRequest().getFrontChannelLogoutSessionRequired() != null) {
                    requestBody.put(FRONT_CHANNEL_LOGOUT_SESSION_REQUIRED.getName(), getRequest().getFrontChannelLogoutSessionRequired());
                }
                if (getRequest().getBackchannelLogoutUris() != null && !getRequest().getBackchannelLogoutUris().isEmpty()) {
                    requestBody.put(BACKCHANNEL_LOGOUT_URI.getName(), getRequest().getBackchannelLogoutUris());
                }
                if (getRequest().getBackchannelLogoutSessionRequired() != null) {
                    requestBody.put(BACKCHANNEL_LOGOUT_SESSION_REQUIRED.getName(), getRequest().getBackchannelLogoutSessionRequired());
                }
                if (getRequest().getRequestUris() != null && !getRequest().getRequestUris().isEmpty()) {
                    requestBody.put(REQUEST_URIS.toString(), new JSONArray(getRequest().getRequestUris()));
                }
                if (getRequest().getAuthorizedOrigins() != null && !getRequest().getAuthorizedOrigins().isEmpty()) {
                    requestBody.put(AUTHORIZED_ORIGINS.toString(), new JSONArray(getRequest().getAuthorizedOrigins()));
                }
                if (getRequest().getAccessTokenLifetime() != null) {
                    requestBody.put(ACCESS_TOKEN_LIFETIME.toString(), getRequest().getAccessTokenLifetime());
                }
                if (StringUtils.isNotBlank(getRequest().getSoftwareId())) {
                    requestBody.put(SOFTWARE_ID.toString(), getRequest().getSoftwareId());
                }
                if (StringUtils.isNotBlank(getRequest().getSoftwareVersion())) {
                    requestBody.put(SOFTWARE_VERSION.toString(), getRequest().getSoftwareVersion());
                }
                if (StringUtils.isNotBlank(getRequest().getSoftwareStatement())) {
                    requestBody.put(SOFTWARE_STATEMENT.toString(), getRequest().getSoftwareStatement());
                }
                // "scopes" (JSON array) takes precedence over the legacy space-separated "scope".
                if (getRequest().getScopes() != null && !getRequest().getScopes().isEmpty()) {
                    requestBody.put(SCOPES.toString(), new JSONArray(getRequest().getScopes()));
                } else if (getRequest().getScope() != null && !getRequest().getScope().isEmpty()) {
                    String spaceSeparatedScope = implode(getRequest().getScope(), " ");
                    requestBody.put(SCOPE.toString(), spaceSeparatedScope);
                }
                if (getRequest().getClaims() != null && !getRequest().getClaims().isEmpty()) {
                    String spaceSeparatedClaims = implode(getRequest().getClaims(), " ");
                    requestBody.put(CLAIMS.toString(), spaceSeparatedClaims);
                }
                // CIBA
                if (getRequest().getBackchannelTokenDeliveryMode() != null) {
                    requestBody.put(BACKCHANNEL_TOKEN_DELIVERY_MODE.toString(), getRequest().getBackchannelTokenDeliveryMode());
                }
                if (StringUtils.isNotBlank(getRequest().getBackchannelClientNotificationEndpoint())) {
                    requestBody.put(BACKCHANNEL_CLIENT_NOTIFICATION_ENDPOINT.toString(), getRequest().getBackchannelClientNotificationEndpoint());
                }
                if (getRequest().getBackchannelAuthenticationRequestSigningAlg() != null) {
                    requestBody.put(BACKCHANNEL_AUTHENTICATION_REQUEST_SIGNING_ALG.toString(), getRequest().getBackchannelAuthenticationRequestSigningAlg());
                }
                if (getRequest().getBackchannelUserCodeParameter() != null) {
                    requestBody.put(BACKCHANNEL_USER_CODE_PARAMETER.toString(), getRequest().getBackchannelUserCodeParameter());
                }
                // Custom params: only non-blank name/value pairs are forwarded.
                final Map<String, String> customAttributes = getRequest().getCustomAttributes();
                if (customAttributes != null && !customAttributes.isEmpty()) {
                    for (Map.Entry<String, String> entry : customAttributes.entrySet()) {
                        final String name = entry.getKey();
                        final String value = entry.getValue();
                        if (StringUtils.isNotBlank(name) && StringUtils.isNotBlank(value)) {
                            requestBody.put(name, value);
                        }
                    }
                }
                requestEntity = Entity.json(ClientUtil.toPrettyJson(requestBody));
            }

            Builder clientRequest = webTarget.request();
            applyCookies(clientRequest);

            if (getHttpMethod().equals(HttpMethod.POST) || getHttpMethod().equals(HttpMethod.PUT)) {
                clientRequest.header("Content-Type", getRequest().getContentType());
                clientRequest.accept(getRequest().getMediaType());
                if (StringUtils.isNotBlank(getRequest().getRegistrationAccessToken())) {
                    clientRequest.header("Authorization", "Bearer " + getRequest().getRegistrationAccessToken());
                }
            } else { // GET, Client Read
                clientRequest.accept(MediaType.APPLICATION_JSON);
                if (StringUtils.isNotBlank(getRequest().getRegistrationAccessToken())) {
                    clientRequest.header("Authorization", "Bearer " + getRequest().getRegistrationAccessToken());
                }
            }

            // Call REST Service and handle response
            if (getHttpMethod().equals(HttpMethod.POST)) {
                clientResponse = clientRequest.buildPost(requestEntity).invoke();
            } else if (getHttpMethod().equals(HttpMethod.PUT)) {
                clientResponse = clientRequest.buildPut(requestEntity).invoke();
            } else if (getHttpMethod().equals(HttpMethod.DELETE)) {
                clientResponse = clientRequest.buildDelete().invoke();
            } else { // GET
                clientResponse = clientRequest.buildGet().invoke();
            }
            setResponse(new RegisterResponse(clientResponse));
        } catch (Exception e) {
            // NOTE(review): failures are logged and swallowed; callers may receive
            // null from getResponse(). Preserved for backward compatibility.
            LOG.error(e.getMessage(), e);
        } finally {
            closeConnection();
        }

        return getResponse();
    }
}
/* * Copyright (c) 2008-2016 Haulmont. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.haulmont.cuba.gui.components.filter.condition; import com.google.common.base.Strings; import com.haulmont.chile.core.annotations.MetaClass; import com.haulmont.chile.core.model.MetaPropertyPath; import com.haulmont.cuba.core.app.dynamicattributes.DynamicAttributesUtils; import com.haulmont.cuba.core.entity.annotation.SystemLevel; import com.haulmont.cuba.core.global.AppBeans; import com.haulmont.cuba.core.global.MessageTools; import com.haulmont.cuba.core.global.Messages; import com.haulmont.cuba.core.global.QueryUtils; import com.haulmont.cuba.core.global.filter.ConditionType; import com.haulmont.cuba.core.global.filter.Op; import com.haulmont.cuba.gui.components.filter.ConditionParamBuilder; import com.haulmont.cuba.gui.components.filter.Param; import com.haulmont.cuba.gui.components.filter.descriptor.AbstractConditionDescriptor; import com.haulmont.cuba.gui.components.filter.operationedit.AbstractOperationEditor; import com.haulmont.cuba.gui.components.filter.operationedit.DynamicAttributesOperationEditor; import org.apache.commons.lang3.BooleanUtils; import org.apache.commons.lang3.RandomStringUtils; import org.apache.commons.lang3.StringEscapeUtils; import org.dom4j.Element; import java.util.List; import java.util.Objects; import java.util.UUID; import java.util.regex.Matcher; import java.util.regex.Pattern; import static org.apache.commons.lang3.StringUtils.isBlank; import 
static org.apache.commons.lang3.StringUtils.isNotBlank; @MetaClass(name = "sec$DynamicAttributesCondition") @SystemLevel public class DynamicAttributesCondition extends AbstractCondition { protected UUID categoryId; protected UUID categoryAttributeId; protected boolean isCollection; protected String propertyPath; protected String join; private static Pattern LIKE_PATTERN = Pattern.compile("(like \\S+)\\s+(?!ESCAPE)"); public DynamicAttributesCondition(DynamicAttributesCondition condition) { super(condition); this.join = condition.getJoin(); this.categoryId = condition.getCategoryId(); this.categoryAttributeId = condition.getCategoryAttributeId(); this.isCollection = condition.getIsCollection(); } public DynamicAttributesCondition(AbstractConditionDescriptor descriptor, String entityAlias, String propertyPath) { super(descriptor); this.entityAlias = entityAlias; this.name = RandomStringUtils.randomAlphabetic(10); Messages messages = AppBeans.get(Messages.class); this.locCaption = messages.getMainMessage("newDynamicAttributeCondition"); this.propertyPath = propertyPath; } public DynamicAttributesCondition(Element element, String messagesPack, String filterComponentName, com.haulmont.chile.core.model.MetaClass metaClass) { super(element, messagesPack, filterComponentName, metaClass); propertyPath = element.attributeValue("propertyPath"); MessageTools messageTools = AppBeans.get(MessageTools.NAME); locCaption = isBlank(caption) ? 
element.attributeValue("locCaption") : messageTools.loadString(messagesPack, caption); entityAlias = element.attributeValue("entityAlias"); text = element.getText(); join = element.attributeValue("join"); categoryId = UUID.fromString(element.attributeValue("category")); String categoryAttributeValue = element.attributeValue("categoryAttribute"); if (!Strings.isNullOrEmpty(categoryAttributeValue)) { categoryAttributeId = UUID.fromString(categoryAttributeValue); } else { //for backward compatibility List<Element> paramElements = element.elements("param"); for (Element paramElement : paramElements) { if (BooleanUtils.toBoolean(paramElement.attributeValue("hidden", "false"), "true", "false")) { categoryAttributeId = UUID.fromString(paramElement.getText()); String paramName = paramElement.attributeValue("name"); text = text.replace(":" + paramName, "'" + categoryAttributeId + "'"); } } } isCollection = Boolean.parseBoolean(element.attributeValue("isCollection")); resolveParam(element); } @Override public void toXml(Element element, Param.ValueProperty valueProperty) { super.toXml(element, valueProperty); element.addAttribute("type", ConditionType.RUNTIME_PROPERTY.name()); if (isBlank(caption)) { element.addAttribute("locCaption", locCaption); } element.addAttribute("category", categoryId.toString()); element.addAttribute("categoryAttribute", categoryAttributeId.toString()); element.addAttribute("entityAlias", entityAlias); if (!isBlank(propertyPath)) { element.addAttribute("propertyPath", propertyPath); } if (!isBlank(join)) { element.addAttribute("join", StringEscapeUtils.escapeXml(join)); } if (isCollection) { element.addAttribute("isCollection", "true"); } } public UUID getCategoryId() { return categoryId; } public void setCategoryId(UUID id) { categoryId = id; } public UUID getCategoryAttributeId() { return categoryAttributeId; } public void setCategoryAttributeId(UUID categoryAttributeId) { this.categoryAttributeId = categoryAttributeId; } public boolean 
getIsCollection() { return isCollection; } public void setIsCollection(boolean collection) { isCollection = collection; } @Override public void setOperator(Op operator) { if (!Objects.equals(this.operator, operator)) { this.operator = operator; String paramName = param.getName(); ConditionParamBuilder paramBuilder = AppBeans.get(ConditionParamBuilder.class); if (operator.isUnary()) { unary = true; inExpr = false; Param param = Param.Builder.getInstance() .setName(paramName) .setJavaClass(Boolean.class) .setInExpr(false) .setRequired(required).build(); setParam(param); } else { unary = false; inExpr = operator.equals(Op.IN) || operator.equals(Op.NOT_IN); Param param = paramBuilder.createParam(this); setParam(param); } } } @Override public String getOperationCaption() { return operator.getLocCaption(); } @Override public AbstractOperationEditor createOperationEditor() { operationEditor = new DynamicAttributesOperationEditor(this); return operationEditor; } @Override protected void updateText() { if (operator == Op.NOT_EMPTY) { if (BooleanUtils.isTrue((Boolean) param.getValue())) { text = text.replace("not exists", "exists"); } else if (BooleanUtils.isFalse((Boolean) param.getValue()) && !text.contains("not exists")) { text = text.replace("exists ", "not exists "); } } if (!isCollection) { if (operator == Op.ENDS_WITH || operator == Op.STARTS_WITH || operator == Op.CONTAINS || operator == Op.DOES_NOT_CONTAIN) { Matcher matcher = LIKE_PATTERN.matcher(text); if (matcher.find()) { String escapeCharacter = ("\\".equals(QueryUtils.ESCAPE_CHARACTER) || "$".equals(QueryUtils.ESCAPE_CHARACTER)) ? 
QueryUtils.ESCAPE_CHARACTER + QueryUtils.ESCAPE_CHARACTER : QueryUtils.ESCAPE_CHARACTER; text = matcher.replaceAll("$1 ESCAPE '" + escapeCharacter + "' "); } } } else { if (operator == Op.CONTAINS) { text = text.replace("not exists", "exists"); } else if (operator == Op.DOES_NOT_CONTAIN && !text.contains("not exists")) { text = text.replace("exists ", "not exists "); } } } public String getJoin() { return join; } public void setJoin(String join) { this.join = join; } public String getWhere() { updateText(); return text; } public void setWhere(String where) { this.text = where; } public String getPropertyPath() { return propertyPath; } @Override public AbstractCondition createCopy() { return new DynamicAttributesCondition(this); } @Override public String getLocCaption() { if (isBlank(caption) && !isBlank(propertyPath)) { MessageTools messageTools = AppBeans.get(MessageTools.class); String propertyCaption = messageTools.getPropertyCaption(metaClass, propertyPath); if (!isBlank(propertyCaption)) { return propertyCaption + "." + locCaption; } } else if (isNotBlank(caption)) { MessageTools messageTools = AppBeans.get(MessageTools.class); return messageTools.loadString(messagesPack, caption); } MetaPropertyPath mpp = DynamicAttributesUtils.getMetaPropertyPath(metaClass, getCategoryAttributeId()); if (mpp != null) { return DynamicAttributesUtils.getCategoryAttribute(mpp.getMetaProperty()).getLocaleName(); } return super.getLocCaption(); } }
package apoc.export.csv;

import apoc.export.util.BatchTransaction;
import apoc.export.util.CountingReader;
import apoc.export.util.ProgressReporter;
import apoc.load.LoadCsv;
import apoc.util.FileUtils;
import com.opencsv.CSVReader;
import org.neo4j.graphdb.GraphDatabaseService;
import org.neo4j.graphdb.Label;
import org.neo4j.graphdb.Node;
import org.neo4j.graphdb.Relationship;
import org.neo4j.graphdb.RelationshipType;

import java.io.IOException;
import java.util.AbstractMap;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * Loads nodes and relationships from neo4j-admin-import-style CSV files into a
 * running database, keeping a CSV-id → internal-node-id mapping between the two
 * passes.
 */
public class CsvEntityLoader {

    private final CsvLoaderConfig clc;
    private final ProgressReporter reporter;

    /**
     * @param clc      configuration object
     * @param reporter progress reporter updated per created entity
     */
    public CsvEntityLoader(CsvLoaderConfig clc, ProgressReporter reporter) {
        this.clc = clc;
        this.reporter = reporter;
    }

    /**
     * Loads nodes from a CSV file with given labels to an online database, and fills the {@code idMapping},
     * which will be used by the {@link #loadRelationships(String, String, GraphDatabaseService, Map)}
     * method.
     *
     * @param fileName  URI of the CSV file representing the node
     * @param labels    list of node labels to be applied to each node
     * @param db        running database instance
     * @param idMapping to be filled with the mapping between the CSV ids and the DB's internal node ids
     * @throws IOException on read failure
     */
    public void loadNodes(final String fileName, final List<String> labels, final GraphDatabaseService db,
                          final Map<String, Map<String, Long>> idMapping) throws IOException {
        final CountingReader reader = FileUtils.readerFor(fileName);
        final String header = readFirstLine(reader);
        reader.skip(clc.getSkipLines() - 1);

        final List<CsvHeaderField> fields = CsvHeaderFields.processHeader(header, clc.getDelimiter(), clc.getQuotationCharacter());

        final Optional<CsvHeaderField> idField = fields.stream()
                .filter(f -> CsvLoaderConstants.ID_FIELD.equals(f.getType()))
                .findFirst();
        final Optional<String> idAttribute = idField.isPresent()
                ? Optional.of(idField.get().getName())
                : Optional.empty();
        final String idSpace = idField.isPresent()
                ? idField.get().getIdSpace()
                : CsvLoaderConstants.DEFAULT_IDSPACE;

        idMapping.putIfAbsent(idSpace, new HashMap<>());
        final Map<String, Long> idspaceIdMapping = idMapping.get(idSpace);

        final Map<String, LoadCsv.Mapping> mapping = createMapping(fields);

        final CSVReader csv = new CSVReader(reader, clc.getDelimiter(), clc.getQuotationCharacter());
        final String[] loadCsvCompatibleHeader = fields.stream().map(f -> f.getName()).toArray(String[]::new);
        int lineNo = 0;
        try (BatchTransaction tx = new BatchTransaction(db, clc.getBatchSize(), reporter)) {
            for (String[] line : csv.readAll()) {
                lineNo++;

                final EnumSet<LoadCsv.Results> results = EnumSet.of(LoadCsv.Results.map);
                final LoadCsv.CSVResult result = new LoadCsv.CSVResult(
                        loadCsvCompatibleHeader, line, lineNo, false, mapping, Collections.emptyList(), results
                );

                // BUGFIX: the id lookup used to run unconditionally — on a header
                // without an :ID field, idAttribute.get() threw NoSuchElementException.
                final String nodeCsvId = idField.isPresent()
                        ? result.map.get(idAttribute.get()).toString()
                        : null;

                // if there is an id field and the mapping already has the current id,
                // we either skip the line or fail, per the 'ignore duplicate nodes' setting
                if (idField.isPresent() && idspaceIdMapping.containsKey(nodeCsvId)) {
                    if (clc.getIgnoreDuplicateNodes()) {
                        continue;
                    } else {
                        throw new IllegalStateException("Duplicate node with id " + nodeCsvId + " found on line " + lineNo + "\n"
                                + Arrays.toString(line));
                    }
                }

                // create node and add its id to the mapping
                final Node node = db.createNode();
                if (idField.isPresent()) {
                    idspaceIdMapping.put(nodeCsvId, node.getId());
                }

                // add labels
                for (String label : labels) {
                    node.addLabel(Label.label(label));
                }

                // add properties
                int props = 0;
                for (CsvHeaderField field : fields) {
                    final String name = field.getName();
                    Object value = result.map.get(name);

                    if (field.isMeta()) {
                        // :LABEL column — extra labels beyond the configured ones
                        final List<String> customLabels = (List<String>) value;
                        for (String customLabel : customLabels) {
                            node.addLabel(Label.label(customLabel));
                        }
                    } else if (field.isId()) {
                        final Object idValue;
                        if (clc.getStringIds()) {
                            idValue = value;
                        } else {
                            idValue = Long.valueOf((String) value);
                        }
                        node.setProperty(field.getName(), idValue);
                        props++;
                    } else {
                        boolean propertyAdded = CsvPropertyConverter.addPropertyToGraphEntity(node, field, value);
                        props += propertyAdded ? 1 : 0;
                    }
                }
                // BUGFIX: was reporter.update(1, 0, props++) — the post-increment was dead.
                reporter.update(1, 0, props);
            }
        }
    }

    /**
     * Loads relationships from a CSV file with given relationship types to an online database,
     * using the {@code idMapping} created by the
     * {@link #loadNodes(String, List, GraphDatabaseService, Map)} method.
     *
     * @param fileName  URI of the CSV file representing the relationship
     * @param type      relationship type to be applied to each relationships
     * @param db        running database instance
     * @param idMapping stores mapping between the CSV ids and the DB's internal node ids
     * @throws IOException on read failure
     */
    public void loadRelationships(
            final String fileName, final String type, final GraphDatabaseService db,
            final Map<String, Map<String, Long>> idMapping) throws IOException {
        final CountingReader reader = FileUtils.readerFor(fileName);
        final String header = readFirstLine(reader);
        final List<CsvHeaderField> fields = CsvHeaderFields.processHeader(header, clc.getDelimiter(), clc.getQuotationCharacter());

        final CsvHeaderField startIdField = fields.stream()
                .filter(f -> CsvLoaderConstants.START_ID_FIELD.equals(f.getType()))
                .findFirst().get();
        final CsvHeaderField endIdField = fields.stream()
                .filter(f -> CsvLoaderConstants.END_ID_FIELD.equals(f.getType()))
                .findFirst().get();

        final List<CsvHeaderField> edgePropertiesFields = fields.stream()
                .filter(field -> !CsvLoaderConstants.START_ID_FIELD.equals(field.getType()))
                .filter(field -> !CsvLoaderConstants.END_ID_FIELD.equals(field.getType()))
                .collect(Collectors.toList());

        final Map<String, LoadCsv.Mapping> mapping = createMapping(fields);

        // BUGFIX(consistency): pass the configured quotation character, as loadNodes does;
        // previously this reader fell back to opencsv's default quote character.
        final CSVReader csv = new CSVReader(reader, clc.getDelimiter(), clc.getQuotationCharacter());
        final String[] loadCsvCompatibleHeader = fields.stream().map(f -> f.getName()).toArray(String[]::new);
        int lineNo = 0;
        try (BatchTransaction tx = new BatchTransaction(db, clc.getBatchSize(), reporter)) {
            for (String[] line : csv.readAll()) {
                lineNo++;

                final EnumSet<LoadCsv.Results> results = EnumSet.of(LoadCsv.Results.map);
                final LoadCsv.CSVResult result = new LoadCsv.CSVResult(
                        loadCsvCompatibleHeader, line, lineNo, false, mapping, Collections.emptyList(), results
                );

                final Object startId = result.map.get(CsvLoaderConstants.START_ID_ATTR);
                final Object startInternalId = idMapping.get(startIdField.getIdSpace()).get(startId);
                if (startInternalId == null) {
                    // BUGFIX: the message used to report the END field's id space for a START lookup failure.
                    throw new IllegalStateException("Node for id space " + startIdField.getIdSpace() + " and id " + startId + " not found");
                }
                final Node source = db.getNodeById((long) startInternalId);

                final Object endId = result.map.get(CsvLoaderConstants.END_ID_ATTR);
                final Object endInternalId = idMapping.get(endIdField.getIdSpace()).get(endId);
                if (endInternalId == null) {
                    throw new IllegalStateException("Node for id space " + endIdField.getIdSpace() + " and id " + endId + " not found");
                }
                final Node target = db.getNodeById((long) endInternalId);

                // a per-row :TYPE value overrides the configured relationship type
                final String currentType;
                final Object overridingType = result.map.get(CsvLoaderConstants.TYPE_ATTR);
                if (overridingType != null && !((String) overridingType).isEmpty()) {
                    currentType = (String) overridingType;
                } else {
                    currentType = type;
                }
                final Relationship rel = source.createRelationshipTo(target, RelationshipType.withName(currentType));

                // add properties
                int props = 0;
                for (CsvHeaderField field : edgePropertiesFields) {
                    final String name = field.getName();
                    Object value = result.map.get(name);
                    boolean propertyAdded = CsvPropertyConverter.addPropertyToGraphEntity(rel, field, value);
                    props += propertyAdded ? 1 : 0;
                }
                reporter.update(0, 1, props);
            }
        }
    }

    /**
     * Builds the LoadCsv field-name → Mapping table shared by the node and
     * relationship loaders (extracted from the previously duplicated code).
     */
    private Map<String, LoadCsv.Mapping> createMapping(List<CsvHeaderField> fields) {
        return fields.stream().collect(
                Collectors.toMap(
                        CsvHeaderField::getName,
                        f -> {
                            final Map<String, Object> mappingMap = Collections
                                    .unmodifiableMap(Stream.of(
                                            new AbstractMap.SimpleEntry<>("type", f.getType()),
                                            new AbstractMap.SimpleEntry<>("array", f.isArray())
                                    ).collect(Collectors.toMap(AbstractMap.SimpleEntry::getKey, AbstractMap.SimpleEntry::getValue)));
                            return new LoadCsv.Mapping(f.getName(), mappingMap, clc.getArrayDelimiter(), false);
                        }
                )
        );
    }

    /**
     * Reads characters up to (excluding) the first '\n' or end of stream.
     */
    private static String readFirstLine(CountingReader reader) throws IOException {
        // BUGFIX: the original looped while read() != 0, but Reader#read() signals
        // end-of-stream with -1 — a file without a trailing newline spun forever
        // appending (char) -1. Also switched to StringBuilder to avoid O(n^2) concat.
        final StringBuilder line = new StringBuilder();
        int i;
        while ((i = reader.read()) != -1) {
            char c = (char) i;
            if (c == '\n') break;
            line.append(c);
        }
        return line.toString();
    }
}