code
stringlengths
130
281k
code_dependency
stringlengths
182
306k
public class class_name { private Set<String> filterByExistingColumns(String namespace, String... columnsToCheck) { // Collect existing columns here. Set<String> columnsForTable = new HashSet<>(); // Check table metadata for presence of columns. Connection connection = null; try { connection = GTFSGraphQL.getConnection(); ResultSet columns = connection .getMetaData() .getColumns(null, namespace, tableName, null); while (columns.next()) { // Column name is in the 4th index String column = columns.getString(4); columnsForTable.add(column); } } catch (SQLException e) { e.printStackTrace(); } finally { DbUtils.closeQuietly(connection); } // Filter available columns in table by search columns. columnsForTable.retainAll(Arrays.asList(columnsToCheck)); return columnsForTable; } }
public class class_name { private Set<String> filterByExistingColumns(String namespace, String... columnsToCheck) { // Collect existing columns here. Set<String> columnsForTable = new HashSet<>(); // Check table metadata for presence of columns. Connection connection = null; try { connection = GTFSGraphQL.getConnection(); // depends on control dependency: [try], data = [none] ResultSet columns = connection .getMetaData() .getColumns(null, namespace, tableName, null); while (columns.next()) { // Column name is in the 4th index String column = columns.getString(4); columnsForTable.add(column); // depends on control dependency: [while], data = [none] } } catch (SQLException e) { e.printStackTrace(); } finally { // depends on control dependency: [catch], data = [none] DbUtils.closeQuietly(connection); } // Filter available columns in table by search columns. columnsForTable.retainAll(Arrays.asList(columnsToCheck)); return columnsForTable; } }
public class class_name { public void startup(boolean forceStartup) throws GreenPepperServerException { log.info("Starting Plugin"); if (!isPluginEnabled) return; final GreenPepperServerConfiguration configuration = getConfiguration(); if ((configuration.isSetupComplete() && !isServerStarted) || forceStartup) { isServerStarted = false; try { closeSession(); Properties properties = configuration.getProperties(); injectAdditionalProperties(properties); HibernateSessionService sessionService = new HibernateSessionService(properties); log.info("Boostrapping datas"); new BootstrapData(sessionService, properties).execute(); new GreenPepperUserGroup().createIfNeeded(); Authorizer authorizer = new OpenSourceAuthorizer(sessionService, properties); authorizer.initialize(GreenPepperServer.versionDate()); ProjectDao projectDao = new HibernateProjectDao(sessionService); RepositoryDao repositoryDao = new HibernateRepositoryDao(sessionService); SystemUnderTestDao sutDao = new HibernateSystemUnderTestDao(sessionService); DocumentDao documentDao = new HibernateDocumentDao(sessionService); Object object = ContainerManager.getComponent("greenPepperServerService"); GreenPepperServerServiceImpl service = (GreenPepperServerServiceImpl) object; service.setAuthorizer(authorizer); service.setDocumentDao(documentDao); service.setProjectDao(projectDao); service.setRepositoryDao(repositoryDao); service.setSessionService(sessionService); service.setSutDao(sutDao); object = ContainerManager.getComponent("greenPepperXmlRpcServerService"); GreenPepperXmlRpcServer xmlRpcServer = (GreenPepperXmlRpcServer) object; xmlRpcServer.setService(service); hibernateSessionService = sessionService; configuration.setSetupComplete(true); storeConfiguration(configuration); isServerStarted = true; } catch (Exception ex) { log.error("Starting up GreenPepper plugin", ex); throw new GreenPepperServerException(GreenPepperServerErrorKey.GENERAL_ERROR, ex); } } } }
public class class_name { public void startup(boolean forceStartup) throws GreenPepperServerException { log.info("Starting Plugin"); if (!isPluginEnabled) return; final GreenPepperServerConfiguration configuration = getConfiguration(); if ((configuration.isSetupComplete() && !isServerStarted) || forceStartup) { isServerStarted = false; try { closeSession(); // depends on control dependency: [try], data = [none] Properties properties = configuration.getProperties(); injectAdditionalProperties(properties); // depends on control dependency: [try], data = [none] HibernateSessionService sessionService = new HibernateSessionService(properties); log.info("Boostrapping datas"); // depends on control dependency: [try], data = [none] new BootstrapData(sessionService, properties).execute(); // depends on control dependency: [try], data = [none] new GreenPepperUserGroup().createIfNeeded(); // depends on control dependency: [try], data = [none] Authorizer authorizer = new OpenSourceAuthorizer(sessionService, properties); authorizer.initialize(GreenPepperServer.versionDate()); // depends on control dependency: [try], data = [none] ProjectDao projectDao = new HibernateProjectDao(sessionService); RepositoryDao repositoryDao = new HibernateRepositoryDao(sessionService); SystemUnderTestDao sutDao = new HibernateSystemUnderTestDao(sessionService); DocumentDao documentDao = new HibernateDocumentDao(sessionService); Object object = ContainerManager.getComponent("greenPepperServerService"); GreenPepperServerServiceImpl service = (GreenPepperServerServiceImpl) object; service.setAuthorizer(authorizer); // depends on control dependency: [try], data = [none] service.setDocumentDao(documentDao); // depends on control dependency: [try], data = [none] service.setProjectDao(projectDao); // depends on control dependency: [try], data = [none] service.setRepositoryDao(repositoryDao); // depends on control dependency: [try], data = [none] service.setSessionService(sessionService); // depends on 
control dependency: [try], data = [none] service.setSutDao(sutDao); // depends on control dependency: [try], data = [none] object = ContainerManager.getComponent("greenPepperXmlRpcServerService"); // depends on control dependency: [try], data = [none] GreenPepperXmlRpcServer xmlRpcServer = (GreenPepperXmlRpcServer) object; xmlRpcServer.setService(service); // depends on control dependency: [try], data = [none] hibernateSessionService = sessionService; // depends on control dependency: [try], data = [none] configuration.setSetupComplete(true); // depends on control dependency: [try], data = [none] storeConfiguration(configuration); // depends on control dependency: [try], data = [none] isServerStarted = true; // depends on control dependency: [try], data = [none] } catch (Exception ex) { log.error("Starting up GreenPepper plugin", ex); throw new GreenPepperServerException(GreenPepperServerErrorKey.GENERAL_ERROR, ex); } // depends on control dependency: [catch], data = [none] } } }
public class class_name { public String getFullyQualifiedNameForPropertyName(String propertyName) { if (!propertyNameToFullyQualifiedName.containsKey(propertyName)) { return propertyName; } return propertyNameToFullyQualifiedName.get(propertyName); } }
public class class_name { public String getFullyQualifiedNameForPropertyName(String propertyName) { if (!propertyNameToFullyQualifiedName.containsKey(propertyName)) { return propertyName; // depends on control dependency: [if], data = [none] } return propertyNameToFullyQualifiedName.get(propertyName); } }
public class class_name { private boolean hasNextLazy() { try { while (true) { switch (this.queryState) { case REGULAR: if (this.elements != null) { return true; } else { if (this.queryResult != null) { iterateRegularQueries(); this.first = false; } if (this.elements == null) { closePreparedStatement(); //try the next distinctQueryStack if (this.distinctQueriesIterator.hasNext()) { this.currentDistinctQueryStack = this.distinctQueriesIterator.next(); this.subQueryStacks = SchemaTableTree.splitIntoSubStacks(this.currentDistinctQueryStack); this.currentRootSchemaTableTree.resetColumnAliasMaps(); //if there are duplicates in the stack we can not execute drop steps. //execute the query as per normal and the proper DropStep will do the rest. if (this.currentDistinctQueryStack.getLast().isDrop() && !this.currentRootSchemaTableTree.duplicatesInStack(currentDistinctQueryStack)) { executeDropQuery(); } else { executeRegularQuery(); } this.first = true; } else { //try the next rootSchemaTableTree if (this.rootSchemaTableTreeIterator.hasNext()) { this.currentRootSchemaTableTree = this.rootSchemaTableTreeIterator.next(); this.distinctQueriesIterator = this.currentRootSchemaTableTree.constructDistinctQueries().iterator(); } else { if (this.currentRootSchemaTableTree != null) { this.currentRootSchemaTableTree.resetColumnAliasMaps(); } this.queryState = QUERY.OPTIONAL; this.rootSchemaTableTreeIterator = this.rootSchemaTableTrees.iterator(); break; } } } } break; case OPTIONAL: if (this.elements != null) { return true; } else { if (this.queryResult != null) { iterateOptionalQueries(); this.first = false; } if (this.elements == null) { closePreparedStatement(); //try the next distinctQueryStack if (this.optionalLeftJoinResultsIterator.hasNext()) { this.optionalCurrentLeftJoinResult = this.optionalLeftJoinResultsIterator.next(); this.subQueryStacks = SchemaTableTree.splitIntoSubStacks(this.optionalCurrentLeftJoinResult.getLeft()); 
this.currentRootSchemaTableTree.resetColumnAliasMaps(); executeOptionalQuery(); this.first = true; } else { //try the next rootSchemaTableTree if (this.rootSchemaTableTreeIterator.hasNext()) { this.currentRootSchemaTableTree = this.rootSchemaTableTreeIterator.next(); List<Pair<LinkedList<SchemaTableTree>, Set<SchemaTableTree>>> leftJoinResult = new ArrayList<>(); SchemaTableTree.constructDistinctOptionalQueries(this.currentRootSchemaTableTree, leftJoinResult); this.optionalLeftJoinResultsIterator = leftJoinResult.iterator(); } else { if (this.currentRootSchemaTableTree != null) { this.currentRootSchemaTableTree.resetColumnAliasMaps(); } this.queryState = QUERY.EMIT; this.rootSchemaTableTreeIterator = this.rootSchemaTableTrees.iterator(); break; } } } } break; case EMIT: if (this.elements != null) { return true; } else { if (this.queryResult != null) { iterateEmitQueries(); this.first = false; } if (this.elements == null) { closePreparedStatement(); //try the next distinctQueryStack if (this.emitLeftJoinResultsIterator.hasNext()) { this.emitCurrentLeftJoinResult = this.emitLeftJoinResultsIterator.next(); this.subQueryStacks = SchemaTableTree.splitIntoSubStacks(this.emitCurrentLeftJoinResult); this.currentRootSchemaTableTree.resetColumnAliasMaps(); executeEmitQuery(); this.first = true; } else { //try the next rootSchemaTableTree if (this.rootSchemaTableTreeIterator.hasNext()) { this.currentRootSchemaTableTree = this.rootSchemaTableTreeIterator.next(); List<LinkedList<SchemaTableTree>> leftJoinResult = new ArrayList<>(); SchemaTableTree.constructDistinctEmitBeforeQueries(this.currentRootSchemaTableTree, leftJoinResult); this.emitLeftJoinResultsIterator = leftJoinResult.iterator(); if (currentRootSchemaTableTree.isFakeEmit()) { List<Emit<SqlgElement>> fake = new ArrayList<>(); fake.add(new Emit<>()); this.elements = fake; this.currentRootSchemaTableTree.setFakeEmit(false); } } else { if (this.currentRootSchemaTableTree != null) { 
this.currentRootSchemaTableTree.resetColumnAliasMaps(); } return false; } } } } break; } } } catch (SQLException e) { throw new RuntimeException(e); } } }
public class class_name { private boolean hasNextLazy() { try { while (true) { switch (this.queryState) { case REGULAR: if (this.elements != null) { return true; // depends on control dependency: [if], data = [none] } else { if (this.queryResult != null) { iterateRegularQueries(); // depends on control dependency: [if], data = [none] this.first = false; // depends on control dependency: [if], data = [none] } if (this.elements == null) { closePreparedStatement(); // depends on control dependency: [if], data = [none] //try the next distinctQueryStack if (this.distinctQueriesIterator.hasNext()) { this.currentDistinctQueryStack = this.distinctQueriesIterator.next(); // depends on control dependency: [if], data = [none] this.subQueryStacks = SchemaTableTree.splitIntoSubStacks(this.currentDistinctQueryStack); // depends on control dependency: [if], data = [none] this.currentRootSchemaTableTree.resetColumnAliasMaps(); // depends on control dependency: [if], data = [none] //if there are duplicates in the stack we can not execute drop steps. //execute the query as per normal and the proper DropStep will do the rest. 
if (this.currentDistinctQueryStack.getLast().isDrop() && !this.currentRootSchemaTableTree.duplicatesInStack(currentDistinctQueryStack)) { executeDropQuery(); // depends on control dependency: [if], data = [none] } else { executeRegularQuery(); // depends on control dependency: [if], data = [none] } this.first = true; // depends on control dependency: [if], data = [none] } else { //try the next rootSchemaTableTree if (this.rootSchemaTableTreeIterator.hasNext()) { this.currentRootSchemaTableTree = this.rootSchemaTableTreeIterator.next(); // depends on control dependency: [if], data = [none] this.distinctQueriesIterator = this.currentRootSchemaTableTree.constructDistinctQueries().iterator(); // depends on control dependency: [if], data = [none] } else { if (this.currentRootSchemaTableTree != null) { this.currentRootSchemaTableTree.resetColumnAliasMaps(); // depends on control dependency: [if], data = [none] } this.queryState = QUERY.OPTIONAL; // depends on control dependency: [if], data = [none] this.rootSchemaTableTreeIterator = this.rootSchemaTableTrees.iterator(); // depends on control dependency: [if], data = [none] break; } } } } break; case OPTIONAL: if (this.elements != null) { return true; // depends on control dependency: [if], data = [none] } else { if (this.queryResult != null) { iterateOptionalQueries(); // depends on control dependency: [if], data = [none] this.first = false; // depends on control dependency: [if], data = [none] } if (this.elements == null) { closePreparedStatement(); // depends on control dependency: [if], data = [none] //try the next distinctQueryStack if (this.optionalLeftJoinResultsIterator.hasNext()) { this.optionalCurrentLeftJoinResult = this.optionalLeftJoinResultsIterator.next(); // depends on control dependency: [if], data = [none] this.subQueryStacks = SchemaTableTree.splitIntoSubStacks(this.optionalCurrentLeftJoinResult.getLeft()); // depends on control dependency: [if], data = [none] 
this.currentRootSchemaTableTree.resetColumnAliasMaps(); // depends on control dependency: [if], data = [none] executeOptionalQuery(); // depends on control dependency: [if], data = [none] this.first = true; // depends on control dependency: [if], data = [none] } else { //try the next rootSchemaTableTree if (this.rootSchemaTableTreeIterator.hasNext()) { this.currentRootSchemaTableTree = this.rootSchemaTableTreeIterator.next(); // depends on control dependency: [if], data = [none] List<Pair<LinkedList<SchemaTableTree>, Set<SchemaTableTree>>> leftJoinResult = new ArrayList<>(); SchemaTableTree.constructDistinctOptionalQueries(this.currentRootSchemaTableTree, leftJoinResult); // depends on control dependency: [if], data = [none] this.optionalLeftJoinResultsIterator = leftJoinResult.iterator(); // depends on control dependency: [if], data = [none] } else { if (this.currentRootSchemaTableTree != null) { this.currentRootSchemaTableTree.resetColumnAliasMaps(); // depends on control dependency: [if], data = [none] } this.queryState = QUERY.EMIT; // depends on control dependency: [if], data = [none] this.rootSchemaTableTreeIterator = this.rootSchemaTableTrees.iterator(); // depends on control dependency: [if], data = [none] break; } } } } break; case EMIT: if (this.elements != null) { return true; // depends on control dependency: [if], data = [none] } else { if (this.queryResult != null) { iterateEmitQueries(); // depends on control dependency: [if], data = [none] this.first = false; // depends on control dependency: [if], data = [none] } if (this.elements == null) { closePreparedStatement(); // depends on control dependency: [if], data = [none] //try the next distinctQueryStack if (this.emitLeftJoinResultsIterator.hasNext()) { this.emitCurrentLeftJoinResult = this.emitLeftJoinResultsIterator.next(); // depends on control dependency: [if], data = [none] this.subQueryStacks = SchemaTableTree.splitIntoSubStacks(this.emitCurrentLeftJoinResult); // depends on control 
dependency: [if], data = [none] this.currentRootSchemaTableTree.resetColumnAliasMaps(); // depends on control dependency: [if], data = [none] executeEmitQuery(); // depends on control dependency: [if], data = [none] this.first = true; // depends on control dependency: [if], data = [none] } else { //try the next rootSchemaTableTree if (this.rootSchemaTableTreeIterator.hasNext()) { this.currentRootSchemaTableTree = this.rootSchemaTableTreeIterator.next(); // depends on control dependency: [if], data = [none] List<LinkedList<SchemaTableTree>> leftJoinResult = new ArrayList<>(); SchemaTableTree.constructDistinctEmitBeforeQueries(this.currentRootSchemaTableTree, leftJoinResult); // depends on control dependency: [if], data = [none] this.emitLeftJoinResultsIterator = leftJoinResult.iterator(); // depends on control dependency: [if], data = [none] if (currentRootSchemaTableTree.isFakeEmit()) { List<Emit<SqlgElement>> fake = new ArrayList<>(); fake.add(new Emit<>()); // depends on control dependency: [if], data = [none] this.elements = fake; // depends on control dependency: [if], data = [none] this.currentRootSchemaTableTree.setFakeEmit(false); // depends on control dependency: [if], data = [none] } } else { if (this.currentRootSchemaTableTree != null) { this.currentRootSchemaTableTree.resetColumnAliasMaps(); // depends on control dependency: [if], data = [none] } return false; // depends on control dependency: [if], data = [none] } } } } break; } } } catch (SQLException e) { throw new RuntimeException(e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public void cacheDescriptorValue(IAtom atom, IAtomContainer container, IDescriptorResult value) { if (cachedDescriptorValues == null) { cachedDescriptorValues = new HashMap(); cachedDescriptorValues.put(PREVIOUS_ATOMCONTAINER, container); } else if (cachedDescriptorValues.get(PREVIOUS_ATOMCONTAINER) != container) { cachedDescriptorValues.clear(); cachedDescriptorValues.put(PREVIOUS_ATOMCONTAINER, container); } cachedDescriptorValues.put(atom, value); } }
public class class_name { public void cacheDescriptorValue(IAtom atom, IAtomContainer container, IDescriptorResult value) { if (cachedDescriptorValues == null) { cachedDescriptorValues = new HashMap(); // depends on control dependency: [if], data = [none] cachedDescriptorValues.put(PREVIOUS_ATOMCONTAINER, container); // depends on control dependency: [if], data = [none] } else if (cachedDescriptorValues.get(PREVIOUS_ATOMCONTAINER) != container) { cachedDescriptorValues.clear(); // depends on control dependency: [if], data = [none] cachedDescriptorValues.put(PREVIOUS_ATOMCONTAINER, container); // depends on control dependency: [if], data = [container)] } cachedDescriptorValues.put(atom, value); } }
public class class_name { @Override protected boolean initiateClient() { for (String host : hosts) { if (host == null) { LOGGER.error("Host Name should not be null."); throw new IllegalArgumentException("Host Name should not be null."); } } cluster = CouchbaseCluster.create(hosts); if (userName != null && password != null) { clusterManager = cluster.clusterManager(userName, password); } else { clusterManager = cluster.clusterManager(); } return true; } }
public class class_name { @Override protected boolean initiateClient() { for (String host : hosts) { if (host == null) { LOGGER.error("Host Name should not be null."); // depends on control dependency: [if], data = [none] throw new IllegalArgumentException("Host Name should not be null."); } } cluster = CouchbaseCluster.create(hosts); if (userName != null && password != null) { clusterManager = cluster.clusterManager(userName, password); // depends on control dependency: [if], data = [(userName] } else { clusterManager = cluster.clusterManager(); // depends on control dependency: [if], data = [none] } return true; } }
public class class_name { public static File createTempFile(String prefix, String suffix) { String p = System.getProperty(ND4JSystemProperties.ND4J_TEMP_DIR_PROPERTY); try { if (p == null || p.isEmpty()) { return File.createTempFile(prefix, suffix); } else { return File.createTempFile(prefix, suffix, new File(p)); } } catch (IOException e){ throw new RuntimeException("Error creating temporary file", e); } } }
public class class_name { public static File createTempFile(String prefix, String suffix) { String p = System.getProperty(ND4JSystemProperties.ND4J_TEMP_DIR_PROPERTY); try { if (p == null || p.isEmpty()) { return File.createTempFile(prefix, suffix); // depends on control dependency: [if], data = [(p] } else { return File.createTempFile(prefix, suffix, new File(p)); // depends on control dependency: [if], data = [(p] } } catch (IOException e){ throw new RuntimeException("Error creating temporary file", e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { private static Bundle _loadBundle(BundleContext context, String path, InputStream is, boolean closeStream) throws BundleException { log(Log.LEVEL_INFO, "add bundle:" + path); try { // we make this very simply so an old loader that is calling this still works return context.installBundle(path, is); } finally { // we make this very simply so an old loader that is calling this still works if (closeStream && is != null) { try { is.close(); } catch (Throwable t) { ExceptionUtil.rethrowIfNecessary(t); } } } } }
public class class_name { private static Bundle _loadBundle(BundleContext context, String path, InputStream is, boolean closeStream) throws BundleException { log(Log.LEVEL_INFO, "add bundle:" + path); try { // we make this very simply so an old loader that is calling this still works return context.installBundle(path, is); } finally { // we make this very simply so an old loader that is calling this still works if (closeStream && is != null) { try { is.close(); // depends on control dependency: [try], data = [none] } catch (Throwable t) { ExceptionUtil.rethrowIfNecessary(t); } // depends on control dependency: [catch], data = [none] } } } }
public class class_name { private void withHashCode() { JMethod hashCode = this.pojo.method(JMod.PUBLIC, int.class, "hashCode"); Class<?> hashCodeBuilderClass = org.apache.commons.lang3.builder.HashCodeBuilder.class; if (!Config.getPojoConfig().isUseCommonsLang3()) { hashCodeBuilderClass = org.apache.commons.lang.builder.HashCodeBuilder.class; } JClass hashCodeBuilderRef = this.pojo.owner().ref(hashCodeBuilderClass); JInvocation hashCodeBuilderInvocation = appendFieldsToHashCode(getNonTransientAndNonStaticFields(), hashCodeBuilderRef); hashCode.body()._return(hashCodeBuilderInvocation.invoke("toHashCode")); } }
public class class_name { private void withHashCode() { JMethod hashCode = this.pojo.method(JMod.PUBLIC, int.class, "hashCode"); Class<?> hashCodeBuilderClass = org.apache.commons.lang3.builder.HashCodeBuilder.class; if (!Config.getPojoConfig().isUseCommonsLang3()) { hashCodeBuilderClass = org.apache.commons.lang.builder.HashCodeBuilder.class; // depends on control dependency: [if], data = [none] } JClass hashCodeBuilderRef = this.pojo.owner().ref(hashCodeBuilderClass); JInvocation hashCodeBuilderInvocation = appendFieldsToHashCode(getNonTransientAndNonStaticFields(), hashCodeBuilderRef); hashCode.body()._return(hashCodeBuilderInvocation.invoke("toHashCode")); } }
public class class_name { public static String extension(String file) { if (StringUtils.isNullOrBlank(file)) { return StringUtils.EMPTY; } file = StringUtils.trim(file); int index = indexOfFileExtension(file); if (index == -1) { return StringUtils.EMPTY; } return file.substring(index + 1); } }
public class class_name { public static String extension(String file) { if (StringUtils.isNullOrBlank(file)) { return StringUtils.EMPTY; // depends on control dependency: [if], data = [none] } file = StringUtils.trim(file); int index = indexOfFileExtension(file); if (index == -1) { return StringUtils.EMPTY; // depends on control dependency: [if], data = [none] } return file.substring(index + 1); } }
public class class_name { public Map<String, String> decode(byte[] bytes) throws DeserializationException { Map<String, String> map = new HashMap<String, String>(); if (bytes == null || bytes.length == 0) { return map; } UnsafeByteArrayInputStream in = new UnsafeByteArrayInputStream(bytes); try { while (in.available() > 0) { String key = readString(in); String value = readString(in); map.put(key, value); } return map; } catch (IOException ex) { throw new DeserializationException(ex.getMessage(), ex); } } }
public class class_name { public Map<String, String> decode(byte[] bytes) throws DeserializationException { Map<String, String> map = new HashMap<String, String>(); if (bytes == null || bytes.length == 0) { return map; } UnsafeByteArrayInputStream in = new UnsafeByteArrayInputStream(bytes); try { while (in.available() > 0) { String key = readString(in); String value = readString(in); map.put(key, value); // depends on control dependency: [while], data = [none] } return map; } catch (IOException ex) { throw new DeserializationException(ex.getMessage(), ex); } } }
public class class_name { private int countChar(String string, char c) { int count = 0; final int max = string.length(); for (int i = 0; i < max; i++) { if (c == string.charAt(i)) { count++; } } return count; } }
public class class_name { private int countChar(String string, char c) { int count = 0; final int max = string.length(); for (int i = 0; i < max; i++) { if (c == string.charAt(i)) { count++; // depends on control dependency: [if], data = [none] } } return count; } }
public class class_name { @Override // DataNodeMXBean public String getNamenodeAddresses() { final Map<String, Integer> info = new HashMap<String, Integer>(); for (NamespaceService ns : namespaceManager.getAllNamenodeThreads()) { if (ns != null && ns.initialized()) { info.put(ns.getNNSocketAddress().getAddress().getHostAddress(), ns.getNamespaceId()); } } return JSON.toString(info); } }
public class class_name { @Override // DataNodeMXBean public String getNamenodeAddresses() { final Map<String, Integer> info = new HashMap<String, Integer>(); for (NamespaceService ns : namespaceManager.getAllNamenodeThreads()) { if (ns != null && ns.initialized()) { info.put(ns.getNNSocketAddress().getAddress().getHostAddress(), ns.getNamespaceId()); // depends on control dependency: [if], data = [(ns] } } return JSON.toString(info); } }
public class class_name { protected Long getProcessDefinedPriority(ProcessDefinitionImpl processDefinition, String propertyKey, ExecutionEntity execution, String errorMsgHead) { if (processDefinition != null) { ParameterValueProvider priorityProvider = (ParameterValueProvider) processDefinition.getProperty(propertyKey); if (priorityProvider != null) { return evaluateValueProvider(priorityProvider, execution, errorMsgHead); } } return null; } }
public class class_name { protected Long getProcessDefinedPriority(ProcessDefinitionImpl processDefinition, String propertyKey, ExecutionEntity execution, String errorMsgHead) { if (processDefinition != null) { ParameterValueProvider priorityProvider = (ParameterValueProvider) processDefinition.getProperty(propertyKey); if (priorityProvider != null) { return evaluateValueProvider(priorityProvider, execution, errorMsgHead); // depends on control dependency: [if], data = [(priorityProvider] } } return null; } }
public class class_name { public void startup(String container, String deployPath, String contextPath) { StandardLogger logger = null; try { long before = System.currentTimeMillis(); System.out.println("MDW initialization..."); System.out.println(" deployPath: " + deployPath); System.out.println(" contextPath: " + contextPath); logger = LoggerUtil.getStandardLogger(); ApplicationContext.setDeployPath(deployPath); // initialize ApplicationContext logger.info("Initialize " + ApplicationContext.class.getName()); ApplicationContext.onStartup(container, null); // initialize db access and set database time try { DatabaseAccess db = new DatabaseAccess(null); db.checkAndUpgradeSchema(); // set db time difference so that later call does not go to db long dbtime = db.getDatabaseTime(); System.out.println("Database time: " + StringHelper.dateToString(new Date(dbtime))); // automatically update the ASSET_REF table as a safety check DataAccess.updateAssetRefs(); } catch (Exception e) { throw new StartupException("Failed to connect through database connection pool", e); } String v = PropertyManager.getProperty(PropertyNames.MDW_DB_VERSION_SUPPORTED); if (v!=null) DataAccess.supportedSchemaVersion = Integer.parseInt(v); logger.info("Initialize " + CacheRegistration.class.getName()); (new CacheRegistration()).onStartup(); DatabaseAccess.initDocumentDb(); logger.info("Starting Thread Pool"); threadPool = ApplicationContext.getThreadPoolProvider(); threadPool.start(); MessengerFactory.init(contextPath); logger.info("Initialize " + RMIListener.class.getName()); try { listener = new RMIListenerImpl(threadPool); ApplicationContext.getNamingProvider().bind(RMIListener.JNDI_NAME, listener); } catch (Exception e) { throw new StartupException("Failed to start RMI listener", e); } if (MessengerFactory.internalMessageUsingJms()) { internalEventListener = new InternalEventListener(threadPool); internalEventListener.start(); } if (ApplicationContext.getJmsProvider() != null) { 
intraMdwEventListener = new ExternalEventListener(JMSDestinationNames.INTRA_MDW_EVENT_HANDLER_QUEUE, threadPool); intraMdwEventListener.start(); externalEventListener = new ExternalEventListener(JMSDestinationNames.EXTERNAL_EVENT_HANDLER_QUEUE, threadPool); externalEventListener.start(); configurationEventListener = new ConfigurationEventListener(); configurationEventListener.start(); } logger.info("Initialize " + TimerTaskRegistration.class.getName()); (new TimerTaskRegistration()).onStartup(); List<StartupService> startupServices = StartupRegistry.getInstance().getDynamicStartupServices(); for (StartupService startupService : startupServices) { if (startupService.isEnabled()) { logger.info("Running startup service " + startupService.getClass()); startupService.onStartup(); } } logger.info("Initialize " + AssetImportMonitor.class.getName()); (new AssetImportMonitor()).onStartup(); logger.info("Initialize " + UserGroupMonitor.class.getName()); (new UserGroupMonitor()).onStartup(); logger.info("MDW initialization completed after " + (System.currentTimeMillis() - before) + " ms"); } catch (Exception e) { e.printStackTrace(); if (logger != null) logger.severeException(e.getMessage(), e); System.out.println("Starting up MDW failed, shut down now - " + e.getMessage()); shutdown(); } } }
public class class_name { public void startup(String container, String deployPath, String contextPath) { StandardLogger logger = null; try { long before = System.currentTimeMillis(); System.out.println("MDW initialization..."); // depends on control dependency: [try], data = [none] System.out.println(" deployPath: " + deployPath); // depends on control dependency: [try], data = [none] System.out.println(" contextPath: " + contextPath); // depends on control dependency: [try], data = [none] logger = LoggerUtil.getStandardLogger(); // depends on control dependency: [try], data = [none] ApplicationContext.setDeployPath(deployPath); // depends on control dependency: [try], data = [none] // initialize ApplicationContext logger.info("Initialize " + ApplicationContext.class.getName()); // depends on control dependency: [try], data = [none] ApplicationContext.onStartup(container, null); // depends on control dependency: [try], data = [none] // initialize db access and set database time try { DatabaseAccess db = new DatabaseAccess(null); db.checkAndUpgradeSchema(); // depends on control dependency: [try], data = [none] // set db time difference so that later call does not go to db long dbtime = db.getDatabaseTime(); System.out.println("Database time: " + StringHelper.dateToString(new Date(dbtime))); // depends on control dependency: [try], data = [none] // automatically update the ASSET_REF table as a safety check DataAccess.updateAssetRefs(); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new StartupException("Failed to connect through database connection pool", e); } // depends on control dependency: [catch], data = [none] String v = PropertyManager.getProperty(PropertyNames.MDW_DB_VERSION_SUPPORTED); if (v!=null) DataAccess.supportedSchemaVersion = Integer.parseInt(v); logger.info("Initialize " + CacheRegistration.class.getName()); // depends on control dependency: [try], data = [none] (new CacheRegistration()).onStartup(); // 
depends on control dependency: [try], data = [none] DatabaseAccess.initDocumentDb(); // depends on control dependency: [try], data = [none] logger.info("Starting Thread Pool"); // depends on control dependency: [try], data = [none] threadPool = ApplicationContext.getThreadPoolProvider(); // depends on control dependency: [try], data = [none] threadPool.start(); // depends on control dependency: [try], data = [none] MessengerFactory.init(contextPath); // depends on control dependency: [try], data = [none] logger.info("Initialize " + RMIListener.class.getName()); // depends on control dependency: [try], data = [none] try { listener = new RMIListenerImpl(threadPool); // depends on control dependency: [try], data = [none] ApplicationContext.getNamingProvider().bind(RMIListener.JNDI_NAME, listener); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new StartupException("Failed to start RMI listener", e); } // depends on control dependency: [catch], data = [none] if (MessengerFactory.internalMessageUsingJms()) { internalEventListener = new InternalEventListener(threadPool); // depends on control dependency: [if], data = [none] internalEventListener.start(); // depends on control dependency: [if], data = [none] } if (ApplicationContext.getJmsProvider() != null) { intraMdwEventListener = new ExternalEventListener(JMSDestinationNames.INTRA_MDW_EVENT_HANDLER_QUEUE, threadPool); // depends on control dependency: [if], data = [none] intraMdwEventListener.start(); // depends on control dependency: [if], data = [none] externalEventListener = new ExternalEventListener(JMSDestinationNames.EXTERNAL_EVENT_HANDLER_QUEUE, threadPool); // depends on control dependency: [if], data = [none] externalEventListener.start(); // depends on control dependency: [if], data = [none] configurationEventListener = new ConfigurationEventListener(); // depends on control dependency: [if], data = [none] configurationEventListener.start(); // depends on control 
dependency: [if], data = [none] } logger.info("Initialize " + TimerTaskRegistration.class.getName()); // depends on control dependency: [try], data = [none] (new TimerTaskRegistration()).onStartup(); // depends on control dependency: [try], data = [none] List<StartupService> startupServices = StartupRegistry.getInstance().getDynamicStartupServices(); for (StartupService startupService : startupServices) { if (startupService.isEnabled()) { logger.info("Running startup service " + startupService.getClass()); // depends on control dependency: [if], data = [none] startupService.onStartup(); // depends on control dependency: [if], data = [none] } } logger.info("Initialize " + AssetImportMonitor.class.getName()); // depends on control dependency: [try], data = [none] (new AssetImportMonitor()).onStartup(); // depends on control dependency: [try], data = [none] logger.info("Initialize " + UserGroupMonitor.class.getName()); // depends on control dependency: [try], data = [none] (new UserGroupMonitor()).onStartup(); // depends on control dependency: [try], data = [none] logger.info("MDW initialization completed after " + (System.currentTimeMillis() - before) + " ms"); // depends on control dependency: [try], data = [none] } catch (Exception e) { e.printStackTrace(); if (logger != null) logger.severeException(e.getMessage(), e); System.out.println("Starting up MDW failed, shut down now - " + e.getMessage()); shutdown(); } // depends on control dependency: [catch], data = [none] } }
public class class_name { private void measureComponents() { // TODO This would be a good place to check that all settings have sensible values or throw // illegal state exception. // TODO Put this somewhere so it only gets created once. BufferedImage chartImage = new BufferedImage(1, 1, BufferedImage.TYPE_INT_ARGB); Graphics2D tempGraphics = chartImage.createGraphics(); // Calculate title dimensions. if (title != null) { tempGraphics.setFont(titleFont); FontMetrics metrics = tempGraphics.getFontMetrics(); titleSize = new Dimension(metrics.stringWidth(title), metrics.getHeight()); titleAscent = metrics.getAscent(); } else { titleSize = new Dimension(0, 0); } // Calculate x-axis label dimensions. if (xAxisLabel != null) { tempGraphics.setFont(axisLabelsFont); FontMetrics metrics = tempGraphics.getFontMetrics(); xAxisLabelSize = new Dimension(metrics.stringWidth(xAxisLabel), metrics.getHeight()); xAxisLabelDescent = metrics.getDescent(); } else { xAxisLabelSize = new Dimension(0, 0); } // Calculate y-axis label dimensions. if (yAxisLabel != null) { tempGraphics.setFont(axisLabelsFont); FontMetrics metrics = tempGraphics.getFontMetrics(); yAxisLabelSize = new Dimension(metrics.stringWidth(yAxisLabel), metrics.getHeight()); yAxisLabelAscent = metrics.getAscent(); } else { yAxisLabelSize = new Dimension(0, 0); } // Calculate x-axis value dimensions. if (showXAxisValues) { tempGraphics.setFont(axisValuesFont); FontMetrics metrics = tempGraphics.getFontMetrics(); xAxisValuesHeight = metrics.getHeight(); xAxisValuesWidthMax = 0; for (Object o : xValues) { int w = metrics.stringWidth(o.toString()); if (w > xAxisValuesWidthMax) { xAxisValuesWidthMax = w; } } } else { xAxisValuesHeight = 0; } // Calculate y-axis value dimensions. 
if (showYAxisValues) { tempGraphics.setFont(axisValuesFont); FontMetrics metrics = tempGraphics.getFontMetrics(); yAxisValuesHeight = metrics.getHeight(); yAxisValuesAscent = metrics.getAscent(); yAxisValuesWidthMax = 0; for (Object o : yValues) { int w = metrics.stringWidth(o.toString()); if (w > yAxisValuesWidthMax) { yAxisValuesWidthMax = w; } } } else { yAxisValuesHeight = 0; } // Calculate heatmap dimensions. int heatMapWidth = (zValues[0].length * cellSize.width); int heatMapHeight = (zValues.length * cellSize.height); heatMapSize = new Dimension(heatMapWidth, heatMapHeight); int yValuesHorizontalSize = 0; if (yValuesHorizontal) { yValuesHorizontalSize = yAxisValuesWidthMax; } else { yValuesHorizontalSize = yAxisValuesHeight; } int xValuesVerticalSize = 0; if (xValuesHorizontal) { xValuesVerticalSize = xAxisValuesHeight; } else { xValuesVerticalSize = xAxisValuesWidthMax; } // Calculate chart dimensions. int chartWidth = heatMapWidth + (2 * margin) + yAxisLabelSize.height + yValuesHorizontalSize + axisThickness; int chartHeight = heatMapHeight + (2 * margin) + xAxisLabelSize.height + xValuesVerticalSize + titleSize.height + axisThickness; chartSize = new Dimension(chartWidth, chartHeight); } }
public class class_name { private void measureComponents() { // TODO This would be a good place to check that all settings have sensible values or throw // illegal state exception. // TODO Put this somewhere so it only gets created once. BufferedImage chartImage = new BufferedImage(1, 1, BufferedImage.TYPE_INT_ARGB); Graphics2D tempGraphics = chartImage.createGraphics(); // Calculate title dimensions. if (title != null) { tempGraphics.setFont(titleFont); // depends on control dependency: [if], data = [(title] FontMetrics metrics = tempGraphics.getFontMetrics(); titleSize = new Dimension(metrics.stringWidth(title), metrics.getHeight()); // depends on control dependency: [if], data = [(title] titleAscent = metrics.getAscent(); // depends on control dependency: [if], data = [none] } else { titleSize = new Dimension(0, 0); // depends on control dependency: [if], data = [none] } // Calculate x-axis label dimensions. if (xAxisLabel != null) { tempGraphics.setFont(axisLabelsFont); // depends on control dependency: [if], data = [none] FontMetrics metrics = tempGraphics.getFontMetrics(); xAxisLabelSize = new Dimension(metrics.stringWidth(xAxisLabel), metrics.getHeight()); // depends on control dependency: [if], data = [(xAxisLabel] xAxisLabelDescent = metrics.getDescent(); // depends on control dependency: [if], data = [none] } else { xAxisLabelSize = new Dimension(0, 0); // depends on control dependency: [if], data = [none] } // Calculate y-axis label dimensions. 
if (yAxisLabel != null) { tempGraphics.setFont(axisLabelsFont); // depends on control dependency: [if], data = [none] FontMetrics metrics = tempGraphics.getFontMetrics(); yAxisLabelSize = new Dimension(metrics.stringWidth(yAxisLabel), metrics.getHeight()); // depends on control dependency: [if], data = [(yAxisLabel] yAxisLabelAscent = metrics.getAscent(); // depends on control dependency: [if], data = [none] } else { yAxisLabelSize = new Dimension(0, 0); // depends on control dependency: [if], data = [none] } // Calculate x-axis value dimensions. if (showXAxisValues) { tempGraphics.setFont(axisValuesFont); // depends on control dependency: [if], data = [none] FontMetrics metrics = tempGraphics.getFontMetrics(); xAxisValuesHeight = metrics.getHeight(); // depends on control dependency: [if], data = [none] xAxisValuesWidthMax = 0; // depends on control dependency: [if], data = [none] for (Object o : xValues) { int w = metrics.stringWidth(o.toString()); if (w > xAxisValuesWidthMax) { xAxisValuesWidthMax = w; // depends on control dependency: [if], data = [none] } } } else { xAxisValuesHeight = 0; // depends on control dependency: [if], data = [none] } // Calculate y-axis value dimensions. if (showYAxisValues) { tempGraphics.setFont(axisValuesFont); // depends on control dependency: [if], data = [none] FontMetrics metrics = tempGraphics.getFontMetrics(); yAxisValuesHeight = metrics.getHeight(); // depends on control dependency: [if], data = [none] yAxisValuesAscent = metrics.getAscent(); // depends on control dependency: [if], data = [none] yAxisValuesWidthMax = 0; // depends on control dependency: [if], data = [none] for (Object o : yValues) { int w = metrics.stringWidth(o.toString()); if (w > yAxisValuesWidthMax) { yAxisValuesWidthMax = w; // depends on control dependency: [if], data = [none] } } } else { yAxisValuesHeight = 0; // depends on control dependency: [if], data = [none] } // Calculate heatmap dimensions. 
int heatMapWidth = (zValues[0].length * cellSize.width); int heatMapHeight = (zValues.length * cellSize.height); heatMapSize = new Dimension(heatMapWidth, heatMapHeight); int yValuesHorizontalSize = 0; if (yValuesHorizontal) { yValuesHorizontalSize = yAxisValuesWidthMax; // depends on control dependency: [if], data = [none] } else { yValuesHorizontalSize = yAxisValuesHeight; // depends on control dependency: [if], data = [none] } int xValuesVerticalSize = 0; if (xValuesHorizontal) { xValuesVerticalSize = xAxisValuesHeight; // depends on control dependency: [if], data = [none] } else { xValuesVerticalSize = xAxisValuesWidthMax; // depends on control dependency: [if], data = [none] } // Calculate chart dimensions. int chartWidth = heatMapWidth + (2 * margin) + yAxisLabelSize.height + yValuesHorizontalSize + axisThickness; int chartHeight = heatMapHeight + (2 * margin) + xAxisLabelSize.height + xValuesVerticalSize + titleSize.height + axisThickness; chartSize = new Dimension(chartWidth, chartHeight); } }
public class class_name { public HalResource setLink(String relation, Link link) { if (link == null) { return this; } return addResources(HalResourceType.LINKS, relation, false, new Link[] { link }); } }
public class class_name { public HalResource setLink(String relation, Link link) { if (link == null) { return this; // depends on control dependency: [if], data = [none] } return addResources(HalResourceType.LINKS, relation, false, new Link[] { link }); } }
public class class_name { static public File computeBinDir(File installDir) { if( null != installDir ) { // Command-line package File binDir = new File(installDir, "bin"); if( binDir.exists() && binDir.isDirectory() ) { return binDir; } // Development environment File nunaliit2Dir = computeNunaliitDir(installDir); binDir = new File(nunaliit2Dir, "nunaliit2-couch-sdk/target/appassembler/bin"); if( binDir.exists() && binDir.isDirectory() ) { return binDir; } } return null; } }
public class class_name { static public File computeBinDir(File installDir) { if( null != installDir ) { // Command-line package File binDir = new File(installDir, "bin"); if( binDir.exists() && binDir.isDirectory() ) { return binDir; // depends on control dependency: [if], data = [none] } // Development environment File nunaliit2Dir = computeNunaliitDir(installDir); binDir = new File(nunaliit2Dir, "nunaliit2-couch-sdk/target/appassembler/bin"); // depends on control dependency: [if], data = [none] if( binDir.exists() && binDir.isDirectory() ) { return binDir; // depends on control dependency: [if], data = [none] } } return null; } }
public class class_name { @SuppressWarnings("unchecked") protected StorableProperty<S>[] gatherAllDataProperties() { Map<String, ? extends StorableProperty<S>> map = StorableIntrospector.examine(mType).getDataProperties(); List<StorableProperty<S>> list = new ArrayList<StorableProperty<S>>(map.size()); for (StorableProperty<S> property : map.values()) { if (!property.isDerived()) { list.add(property); } } return list.toArray(new StorableProperty[list.size()]); } }
public class class_name { @SuppressWarnings("unchecked") protected StorableProperty<S>[] gatherAllDataProperties() { Map<String, ? extends StorableProperty<S>> map = StorableIntrospector.examine(mType).getDataProperties(); List<StorableProperty<S>> list = new ArrayList<StorableProperty<S>>(map.size()); for (StorableProperty<S> property : map.values()) { if (!property.isDerived()) { list.add(property); // depends on control dependency: [if], data = [none] } } return list.toArray(new StorableProperty[list.size()]); } }
public class class_name { private void constructContentMapper() { // Initialize the request resolver String contentMapperClassName = getParam(INIT_CONTENT_MAPPER_CLASS_NAME); Logger.getLogger(getClass()).debug("WebDAV Servlet content mapper class name = " + contentMapperClassName); if (contentMapperClassName == null) { this.contentMapper = new DefaultContentMapper(); } else { try { Class<? extends ContentMapper> clazz = Class.forName(contentMapperClassName).asSubclass(ContentMapper.class); this.contentMapper = clazz.newInstance(); } catch (Exception ex) { throw new IllegalStateException(ex); } } Logger.getLogger(getClass()).debug("WebDAV Servlet using content mapper class = " + contentMapper.getClass().getName()); this.contentMapper.initialize(getServletContext()); } }
public class class_name { private void constructContentMapper() { // Initialize the request resolver String contentMapperClassName = getParam(INIT_CONTENT_MAPPER_CLASS_NAME); Logger.getLogger(getClass()).debug("WebDAV Servlet content mapper class name = " + contentMapperClassName); if (contentMapperClassName == null) { this.contentMapper = new DefaultContentMapper(); // depends on control dependency: [if], data = [none] } else { try { Class<? extends ContentMapper> clazz = Class.forName(contentMapperClassName).asSubclass(ContentMapper.class); this.contentMapper = clazz.newInstance(); // depends on control dependency: [try], data = [none] } catch (Exception ex) { throw new IllegalStateException(ex); } // depends on control dependency: [catch], data = [none] } Logger.getLogger(getClass()).debug("WebDAV Servlet using content mapper class = " + contentMapper.getClass().getName()); this.contentMapper.initialize(getServletContext()); } }
public class class_name { public static String hostNameOfDeployment(final WarMetaData metaData, final String defaultHost) { Collection<String> hostNames = null; if (metaData.getMergedJBossWebMetaData() != null) { hostNames = metaData.getMergedJBossWebMetaData().getVirtualHosts(); } if (hostNames == null || hostNames.isEmpty()) { hostNames = Collections.singleton(defaultHost); } String hostName = hostNames.iterator().next(); if (hostName == null) { throw ROOT_LOGGER.nullHostName(); } return hostName; } }
public class class_name { public static String hostNameOfDeployment(final WarMetaData metaData, final String defaultHost) { Collection<String> hostNames = null; if (metaData.getMergedJBossWebMetaData() != null) { hostNames = metaData.getMergedJBossWebMetaData().getVirtualHosts(); // depends on control dependency: [if], data = [none] } if (hostNames == null || hostNames.isEmpty()) { hostNames = Collections.singleton(defaultHost); // depends on control dependency: [if], data = [none] } String hostName = hostNames.iterator().next(); if (hostName == null) { throw ROOT_LOGGER.nullHostName(); } return hostName; } }
public class class_name { @Override protected void onSharedObject(RTMPConnection conn, Channel channel, Header source, SharedObjectMessage object) { log.trace("onSharedObject"); ClientSharedObject so = sharedObjects.get(object.getName()); if (so != null) { if (so.isPersistent() == object.isPersistent()) { log.debug("Received SO request: {}", object); so.dispatchEvent(object); } else { log.error("Ignoring request for wrong-persistent SO: {}", object); } } else { log.error("Ignoring request for non-existend SO: {}", object); } } }
public class class_name { @Override protected void onSharedObject(RTMPConnection conn, Channel channel, Header source, SharedObjectMessage object) { log.trace("onSharedObject"); ClientSharedObject so = sharedObjects.get(object.getName()); if (so != null) { if (so.isPersistent() == object.isPersistent()) { log.debug("Received SO request: {}", object); // depends on control dependency: [if], data = [none] so.dispatchEvent(object); // depends on control dependency: [if], data = [none] } else { log.error("Ignoring request for wrong-persistent SO: {}", object); // depends on control dependency: [if], data = [none] } } else { log.error("Ignoring request for non-existend SO: {}", object); // depends on control dependency: [if], data = [none] } } }
public class class_name { protected void listFilesRecursive(final List<Class> classes, final File base, final ClassLoader cld, final String pckgname) { base.listFiles(new FileFilter() { public boolean accept(File file) { if (file.isDirectory()) { listFilesRecursive(classes, file, cld, pckgname + "." + file.getName()); return false; } if (!file.getName().toLowerCase().endsWith(".class")) { return false; } String className = filenameToClassname(pckgname + "." + file.getName()); loadClass(classes, cld, className); return false; } }); } }
public class class_name { protected void listFilesRecursive(final List<Class> classes, final File base, final ClassLoader cld, final String pckgname) { base.listFiles(new FileFilter() { public boolean accept(File file) { if (file.isDirectory()) { listFilesRecursive(classes, file, cld, pckgname + "." + file.getName()); // depends on control dependency: [if], data = [none] return false; // depends on control dependency: [if], data = [none] } if (!file.getName().toLowerCase().endsWith(".class")) { return false; // depends on control dependency: [if], data = [none] } String className = filenameToClassname(pckgname + "." + file.getName()); loadClass(classes, cld, className); return false; } }); } }
public class class_name { private void cacheLinkedObject(String owningObjID, String linkFieldName, DBObject linkedObject) { // Find or create map for the owning object. Map<String, Map<String, DBObject>> objMap = m_linkedObjectMap.get(owningObjID); if (objMap == null) { objMap = new HashMap<String, Map<String, DBObject>>(); m_linkedObjectMap.put(owningObjID, objMap); } // Find or create map for the link field. Map<String, DBObject> linkMap = objMap.get(linkFieldName); if (linkMap == null) { linkMap = new HashMap<String, DBObject>(); objMap.put(linkFieldName, linkMap); } // Add the object to the link map. linkMap.put(linkedObject.getObjectID(), linkedObject); } }
public class class_name { private void cacheLinkedObject(String owningObjID, String linkFieldName, DBObject linkedObject) { // Find or create map for the owning object. Map<String, Map<String, DBObject>> objMap = m_linkedObjectMap.get(owningObjID); if (objMap == null) { objMap = new HashMap<String, Map<String, DBObject>>(); // depends on control dependency: [if], data = [none] m_linkedObjectMap.put(owningObjID, objMap); // depends on control dependency: [if], data = [none] } // Find or create map for the link field. Map<String, DBObject> linkMap = objMap.get(linkFieldName); if (linkMap == null) { linkMap = new HashMap<String, DBObject>(); // depends on control dependency: [if], data = [none] objMap.put(linkFieldName, linkMap); // depends on control dependency: [if], data = [none] } // Add the object to the link map. linkMap.put(linkedObject.getObjectID(), linkedObject); } }
public class class_name { public static Cookie findCookie(HttpServletRequest request, String name) { if (request != null) { Cookie[] cookies = request.getCookies(); if (cookies != null && cookies.length > 0) { for (Cookie cookie : cookies) { if (cookie.getName().equals(name)) { return cookie; } } } } return null; } }
public class class_name { public static Cookie findCookie(HttpServletRequest request, String name) { if (request != null) { Cookie[] cookies = request.getCookies(); if (cookies != null && cookies.length > 0) { for (Cookie cookie : cookies) { if (cookie.getName().equals(name)) { return cookie; // depends on control dependency: [if], data = [none] } } } } return null; } }
public class class_name { public static @NotNull String externalizeUrlWithoutMapping(@NotNull String url, @Nullable SlingHttpServletRequest request) { // apply externalization only path part String path = url; // split off query string or fragment that may be appended to the URL String urlRemainder = null; int urlRemainderPos = StringUtils.indexOfAny(path, '?', '#'); if (urlRemainderPos >= 0) { urlRemainder = path.substring(urlRemainderPos); path = path.substring(0, urlRemainderPos); } // apply namespace mangling (e.g. replace jcr: with _jcr_) path = mangleNamespaces(path); // add webapp context path if (request != null) { path = StringUtils.defaultString(request.getContextPath()) + path; //NOPMD } // url-encode path path = Escape.urlEncode(path); path = StringUtils.replace(path, "+", "%20"); // replace %2F back to / for better readability path = StringUtils.replace(path, "%2F", "/"); // build full URL again return path + (urlRemainder != null ? urlRemainder : ""); } }
public class class_name { public static @NotNull String externalizeUrlWithoutMapping(@NotNull String url, @Nullable SlingHttpServletRequest request) { // apply externalization only path part String path = url; // split off query string or fragment that may be appended to the URL String urlRemainder = null; int urlRemainderPos = StringUtils.indexOfAny(path, '?', '#'); if (urlRemainderPos >= 0) { urlRemainder = path.substring(urlRemainderPos); // depends on control dependency: [if], data = [(urlRemainderPos] path = path.substring(0, urlRemainderPos); // depends on control dependency: [if], data = [none] } // apply namespace mangling (e.g. replace jcr: with _jcr_) path = mangleNamespaces(path); // add webapp context path if (request != null) { path = StringUtils.defaultString(request.getContextPath()) + path; //NOPMD // depends on control dependency: [if], data = [(request] } // url-encode path path = Escape.urlEncode(path); path = StringUtils.replace(path, "+", "%20"); // replace %2F back to / for better readability path = StringUtils.replace(path, "%2F", "/"); // build full URL again return path + (urlRemainder != null ? urlRemainder : ""); } }
public class class_name { public boolean addDynamicRequire(String require) { if (!dynamicRequires.contains(require)) { dynamicRequires.add(require); return true; } return false; } }
public class class_name { public boolean addDynamicRequire(String require) { if (!dynamicRequires.contains(require)) { dynamicRequires.add(require); // depends on control dependency: [if], data = [none] return true; // depends on control dependency: [if], data = [none] } return false; } }
public class class_name { protected void loadImage(Image image) { MediaTracker mTracker = getTracker(); synchronized(mTracker) { int id = getNextID(); mTracker.addImage(image, id); try { mTracker.waitForID(id, 0); } catch (InterruptedException e) { System.out.println("INTERRUPTED while loading Image"); } //?int loadStatus = mTracker.statusID(id, false); mTracker.removeImage(image, id); } } }
public class class_name { protected void loadImage(Image image) { MediaTracker mTracker = getTracker(); synchronized(mTracker) { int id = getNextID(); mTracker.addImage(image, id); try { mTracker.waitForID(id, 0); // depends on control dependency: [try], data = [none] } catch (InterruptedException e) { System.out.println("INTERRUPTED while loading Image"); } // depends on control dependency: [catch], data = [none] //?int loadStatus = mTracker.statusID(id, false); mTracker.removeImage(image, id); } } }
public class class_name { @Override public boolean isTimeIncluded (final long timeStamp) { // Test the base calendar first. Only if the base calendar not already // excludes the time/date, continue evaluating this calendar instance. if (super.isTimeIncluded (timeStamp) == false) { return false; } final Calendar day = createJavaCalendar (timeStamp); return !(isDayExcluded (day)); } }
public class class_name { @Override public boolean isTimeIncluded (final long timeStamp) { // Test the base calendar first. Only if the base calendar not already // excludes the time/date, continue evaluating this calendar instance. if (super.isTimeIncluded (timeStamp) == false) { return false; // depends on control dependency: [if], data = [none] } final Calendar day = createJavaCalendar (timeStamp); return !(isDayExcluded (day)); } }
public class class_name { public void doEdit(@FormGroup("systemParameterDetailInfo") Group systemParameterDetailInfo, Navigator nav) throws Exception { SystemParameter systemParameter = new SystemParameter(); systemParameterDetailInfo.setProperties(systemParameter); String defaultAlarmReceiver = systemParameterDetailInfo.getField("defaultAlarmReceiver").getStringValue(); String defaultAlarmReceiverStrs[] = StringUtils.split(defaultAlarmReceiver, "="); if (defaultAlarmReceiverStrs.length != 2) { throw new ManagerException("defaultAlarmReceiver[" + defaultAlarmReceiver + "] is not valid!"); } systemParameter.setDefaultAlarmReceiveKey(defaultAlarmReceiverStrs[0]); systemParameter.setDefaultAlarmReceiver(defaultAlarmReceiverStrs[1]); String alarmReceiver = systemParameterDetailInfo.getField("alarmReceiver").getStringValue(); List<String> alarmReceivers = new ArrayList<String>(); String alarmReceiver1[] = StringUtils.split(alarmReceiver, "\n"); for (String alarmReceiverStr : alarmReceiver1) { String[] alarmReceiver2 = StringUtils.split(alarmReceiverStr, ";"); for (String alarmReceiverStr2 : alarmReceiver2) { alarmReceivers.add(alarmReceiverStr2); } } Map<String, String> alarmReceiverMap = new LinkedHashMap<String, String>(); for (String alarmReceiverStr : alarmReceivers) { String alarmReceiverStrs[] = StringUtils.split(alarmReceiverStr, "="); if (alarmReceiverStrs.length != 2) { throw new ManagerException("alarmReceiver[" + alarmReceiver + "] is not valid!"); } alarmReceiverMap.put(alarmReceiverStrs[0], alarmReceiverStrs[1]); } systemParameter.setAlarmReceiver(alarmReceiverMap); systemParameterService.createOrUpdate(systemParameter); nav.redirectToLocation("systemParameter.htm?edit=true"); } }
public class class_name { public void doEdit(@FormGroup("systemParameterDetailInfo") Group systemParameterDetailInfo, Navigator nav) throws Exception { SystemParameter systemParameter = new SystemParameter(); systemParameterDetailInfo.setProperties(systemParameter); String defaultAlarmReceiver = systemParameterDetailInfo.getField("defaultAlarmReceiver").getStringValue(); String defaultAlarmReceiverStrs[] = StringUtils.split(defaultAlarmReceiver, "="); if (defaultAlarmReceiverStrs.length != 2) { throw new ManagerException("defaultAlarmReceiver[" + defaultAlarmReceiver + "] is not valid!"); } systemParameter.setDefaultAlarmReceiveKey(defaultAlarmReceiverStrs[0]); systemParameter.setDefaultAlarmReceiver(defaultAlarmReceiverStrs[1]); String alarmReceiver = systemParameterDetailInfo.getField("alarmReceiver").getStringValue(); List<String> alarmReceivers = new ArrayList<String>(); String alarmReceiver1[] = StringUtils.split(alarmReceiver, "\n"); for (String alarmReceiverStr : alarmReceiver1) { String[] alarmReceiver2 = StringUtils.split(alarmReceiverStr, ";"); for (String alarmReceiverStr2 : alarmReceiver2) { alarmReceivers.add(alarmReceiverStr2); // depends on control dependency: [for], data = [alarmReceiverStr2] } } Map<String, String> alarmReceiverMap = new LinkedHashMap<String, String>(); for (String alarmReceiverStr : alarmReceivers) { String alarmReceiverStrs[] = StringUtils.split(alarmReceiverStr, "="); if (alarmReceiverStrs.length != 2) { throw new ManagerException("alarmReceiver[" + alarmReceiver + "] is not valid!"); } alarmReceiverMap.put(alarmReceiverStrs[0], alarmReceiverStrs[1]); } systemParameter.setAlarmReceiver(alarmReceiverMap); systemParameterService.createOrUpdate(systemParameter); nav.redirectToLocation("systemParameter.htm?edit=true"); } }
public class class_name { private void releaseJobs(String nodePath, MasterSlaveNodeData.Data nodeData) { if (ListHelper.isEmpty(nodeData.getJobPaths())) { return; } for (String path : nodeData.getJobPaths()) { MasterSlaveJobData.Data data = masterSlaveApiFactory.jobApi().getJob(path).getData(); if (this.nodePath.equals(nodePath)) { schedulerManager.shutdown(data.getGroupName(), data.getJobName()); } data.release(); masterSlaveApiFactory.jobApi().updateJob(data.getGroupName(), data.getJobName(), data); } } }
public class class_name { private void releaseJobs(String nodePath, MasterSlaveNodeData.Data nodeData) { if (ListHelper.isEmpty(nodeData.getJobPaths())) { return; // depends on control dependency: [if], data = [none] } for (String path : nodeData.getJobPaths()) { MasterSlaveJobData.Data data = masterSlaveApiFactory.jobApi().getJob(path).getData(); if (this.nodePath.equals(nodePath)) { schedulerManager.shutdown(data.getGroupName(), data.getJobName()); // depends on control dependency: [if], data = [none] } data.release(); // depends on control dependency: [for], data = [none] masterSlaveApiFactory.jobApi().updateJob(data.getGroupName(), data.getJobName(), data); // depends on control dependency: [for], data = [none] } } }
public class class_name { public byte[] getHash() { // digester.digest() resets the digester, so it cannot be called multiple times, // therefore we cache the hash if(hash == null) { hash = hasher.getDigester().digest(); if(hasher.getSalt() != null) { hasher.getDigester().update(hasher.getSalt()); } } return hash; } }
public class class_name { public byte[] getHash() { // digester.digest() resets the digester, so it cannot be called multiple times, // therefore we cache the hash if(hash == null) { hash = hasher.getDigester().digest(); // depends on control dependency: [if], data = [none] if(hasher.getSalt() != null) { hasher.getDigester().update(hasher.getSalt()); // depends on control dependency: [if], data = [(hasher.getSalt()] } } return hash; } }
public class class_name { public static String getFirstUpperName(String name) { if (StringUtils.isBlank(name)) { return null; } String firstChar = name.substring(0, 1).toUpperCase(); return firstChar + name.substring(1); } }
public class class_name { public static String getFirstUpperName(String name) { if (StringUtils.isBlank(name)) { return null; // depends on control dependency: [if], data = [none] } String firstChar = name.substring(0, 1).toUpperCase(); return firstChar + name.substring(1); } }
public class class_name { public String date (String format, Object arg) { Date when = massageDate(arg); if (when == null) { return format; } SimpleDateFormat fmt = new SimpleDateFormat(format, getLocale()); return fmt.format(when); } }
public class class_name { public String date (String format, Object arg) { Date when = massageDate(arg); if (when == null) { return format; // depends on control dependency: [if], data = [none] } SimpleDateFormat fmt = new SimpleDateFormat(format, getLocale()); return fmt.format(when); } }
public class class_name { public EClass getMGO() { if (mgoEClass == null) { mgoEClass = (EClass)EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(296); } return mgoEClass; } }
public class class_name { public EClass getMGO() { if (mgoEClass == null) { mgoEClass = (EClass)EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(296); // depends on control dependency: [if], data = [none] } return mgoEClass; } }
public class class_name { @Trivial @Override public String getLocalName() { String addr = this.connection.getLocalHostName(true); if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { Tr.debug(tc, "getLocalName: " + addr); } return addr; } }
public class class_name { @Trivial @Override public String getLocalName() { String addr = this.connection.getLocalHostName(true); if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { Tr.debug(tc, "getLocalName: " + addr); // depends on control dependency: [if], data = [none] } return addr; } }
public class class_name { private HtmlToken collapseSubsequent(HtmlToken token) { HtmlToken collapsed = token; for (HtmlToken next; (next= peekToken(0)) != null && next.type == token.type; readToken()) { collapsed = join(collapsed, next); } return collapsed; } }
public class class_name { private HtmlToken collapseSubsequent(HtmlToken token) { HtmlToken collapsed = token; for (HtmlToken next; (next= peekToken(0)) != null && next.type == token.type; readToken()) { collapsed = join(collapsed, next); // depends on control dependency: [for], data = [none] } return collapsed; } }
public class class_name { public EClass getIfcFlowMeterType() { if (ifcFlowMeterTypeEClass == null) { ifcFlowMeterTypeEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI) .getEClassifiers().get(245); } return ifcFlowMeterTypeEClass; } }
public class class_name { public EClass getIfcFlowMeterType() { if (ifcFlowMeterTypeEClass == null) { ifcFlowMeterTypeEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI) .getEClassifiers().get(245); // depends on control dependency: [if], data = [none] } return ifcFlowMeterTypeEClass; } }
public class class_name { public void marshall(Event event, ProtocolMarshaller protocolMarshaller) { if (event == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(event.getAppPackageName(), APPPACKAGENAME_BINDING); protocolMarshaller.marshall(event.getAppTitle(), APPTITLE_BINDING); protocolMarshaller.marshall(event.getAppVersionCode(), APPVERSIONCODE_BINDING); protocolMarshaller.marshall(event.getAttributes(), ATTRIBUTES_BINDING); protocolMarshaller.marshall(event.getClientSdkVersion(), CLIENTSDKVERSION_BINDING); protocolMarshaller.marshall(event.getEventType(), EVENTTYPE_BINDING); protocolMarshaller.marshall(event.getMetrics(), METRICS_BINDING); protocolMarshaller.marshall(event.getSdkName(), SDKNAME_BINDING); protocolMarshaller.marshall(event.getSession(), SESSION_BINDING); protocolMarshaller.marshall(event.getTimestamp(), TIMESTAMP_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(Event event, ProtocolMarshaller protocolMarshaller) { if (event == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(event.getAppPackageName(), APPPACKAGENAME_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(event.getAppTitle(), APPTITLE_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(event.getAppVersionCode(), APPVERSIONCODE_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(event.getAttributes(), ATTRIBUTES_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(event.getClientSdkVersion(), CLIENTSDKVERSION_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(event.getEventType(), EVENTTYPE_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(event.getMetrics(), METRICS_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(event.getSdkName(), SDKNAME_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(event.getSession(), SESSION_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(event.getTimestamp(), TIMESTAMP_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public void addWorkPanel(AbstractPanel panel) { if (workPanelList == null) { workPanelList = createList(); } workPanelList.add(panel); } }
public class class_name { public void addWorkPanel(AbstractPanel panel) { if (workPanelList == null) { workPanelList = createList(); // depends on control dependency: [if], data = [none] } workPanelList.add(panel); } }
public class class_name { public ClusterListener createClusterListener(boolean localOnly) { if (localOnly) return new NoClusterListener(); HazelcastInstance hazelcast = getHazelcastInstance(vertx); if (hazelcast != null) { return new HazelcastClusterListener(hazelcast, vertx); } else { return new NoClusterListener(); } } }
public class class_name { public ClusterListener createClusterListener(boolean localOnly) { if (localOnly) return new NoClusterListener(); HazelcastInstance hazelcast = getHazelcastInstance(vertx); if (hazelcast != null) { return new HazelcastClusterListener(hazelcast, vertx); // depends on control dependency: [if], data = [(hazelcast] } else { return new NoClusterListener(); // depends on control dependency: [if], data = [none] } } }
public class class_name { public static EncodingUtils getEncodingUtils() { HttpDispatcher f = instance.get().get(); EncodingUtils svc = null; if (f != null) { svc = f.encodingSvc; } if (svc == null) svc = new EncodingUtilsImpl(); return svc; } }
public class class_name { public static EncodingUtils getEncodingUtils() { HttpDispatcher f = instance.get().get(); EncodingUtils svc = null; if (f != null) { svc = f.encodingSvc; // depends on control dependency: [if], data = [none] } if (svc == null) svc = new EncodingUtilsImpl(); return svc; } }
public class class_name { private static String decodeFormFields(final String content, final Charset charset) { if (content == null) { return null; } return urlDecode(content, (charset != null) ? charset : Charsets.UTF_8, true); } }
public class class_name { private static String decodeFormFields(final String content, final Charset charset) { if (content == null) { return null; // depends on control dependency: [if], data = [none] } return urlDecode(content, (charset != null) ? charset : Charsets.UTF_8, true); } }
public class class_name { public SymbolReference<? extends ResolvedValueDeclaration> solveSymbolInType(ResolvedTypeDeclaration typeDeclaration, String name) { if (typeDeclaration instanceof JavaParserClassDeclaration) { Context ctx = ((JavaParserClassDeclaration) typeDeclaration).getContext(); return ctx.solveSymbol(name, typeSolver); } if (typeDeclaration instanceof JavaParserInterfaceDeclaration) { Context ctx = ((JavaParserInterfaceDeclaration) typeDeclaration).getContext(); return ctx.solveSymbol(name, typeSolver); } if (typeDeclaration instanceof JavaParserEnumDeclaration) { Context ctx = ((JavaParserEnumDeclaration) typeDeclaration).getContext(); return ctx.solveSymbol(name, typeSolver); } if (typeDeclaration instanceof ReflectionClassDeclaration) { return ((ReflectionClassDeclaration) typeDeclaration).solveSymbol(name, typeSolver); } if (typeDeclaration instanceof ReflectionInterfaceDeclaration) { return ((ReflectionInterfaceDeclaration) typeDeclaration).solveSymbol(name, typeSolver); } if (typeDeclaration instanceof JavassistClassDeclaration) { return ((JavassistClassDeclaration) typeDeclaration).solveSymbol(name, typeSolver); } if (typeDeclaration instanceof JavassistEnumDeclaration) { return ((JavassistEnumDeclaration) typeDeclaration).solveSymbol(name, typeSolver); } if (typeDeclaration instanceof JavassistInterfaceDeclaration) { return ((JavassistInterfaceDeclaration) typeDeclaration).solveSymbol(name, typeSolver); } return SymbolReference.unsolved(ResolvedValueDeclaration.class); } }
public class class_name { public SymbolReference<? extends ResolvedValueDeclaration> solveSymbolInType(ResolvedTypeDeclaration typeDeclaration, String name) { if (typeDeclaration instanceof JavaParserClassDeclaration) { Context ctx = ((JavaParserClassDeclaration) typeDeclaration).getContext(); return ctx.solveSymbol(name, typeSolver); // depends on control dependency: [if], data = [none] } if (typeDeclaration instanceof JavaParserInterfaceDeclaration) { Context ctx = ((JavaParserInterfaceDeclaration) typeDeclaration).getContext(); return ctx.solveSymbol(name, typeSolver); // depends on control dependency: [if], data = [none] } if (typeDeclaration instanceof JavaParserEnumDeclaration) { Context ctx = ((JavaParserEnumDeclaration) typeDeclaration).getContext(); return ctx.solveSymbol(name, typeSolver); // depends on control dependency: [if], data = [none] } if (typeDeclaration instanceof ReflectionClassDeclaration) { return ((ReflectionClassDeclaration) typeDeclaration).solveSymbol(name, typeSolver); // depends on control dependency: [if], data = [none] } if (typeDeclaration instanceof ReflectionInterfaceDeclaration) { return ((ReflectionInterfaceDeclaration) typeDeclaration).solveSymbol(name, typeSolver); // depends on control dependency: [if], data = [none] } if (typeDeclaration instanceof JavassistClassDeclaration) { return ((JavassistClassDeclaration) typeDeclaration).solveSymbol(name, typeSolver); // depends on control dependency: [if], data = [none] } if (typeDeclaration instanceof JavassistEnumDeclaration) { return ((JavassistEnumDeclaration) typeDeclaration).solveSymbol(name, typeSolver); // depends on control dependency: [if], data = [none] } if (typeDeclaration instanceof JavassistInterfaceDeclaration) { return ((JavassistInterfaceDeclaration) typeDeclaration).solveSymbol(name, typeSolver); // depends on control dependency: [if], data = [none] } return SymbolReference.unsolved(ResolvedValueDeclaration.class); } }
public class class_name { @Override public void removeByG_U(long groupId, long userId) { for (CommerceWishList commerceWishList : findByG_U(groupId, userId, QueryUtil.ALL_POS, QueryUtil.ALL_POS, null)) { remove(commerceWishList); } } }
public class class_name { @Override public void removeByG_U(long groupId, long userId) { for (CommerceWishList commerceWishList : findByG_U(groupId, userId, QueryUtil.ALL_POS, QueryUtil.ALL_POS, null)) { remove(commerceWishList); // depends on control dependency: [for], data = [commerceWishList] } } }
public class class_name { protected void startSelecting() { if (GuiScreen.isShiftKeyDown()) { if (!selectingText) selectionCursor.from(cursor); selectingText = true; } else selectingText = false; } }
public class class_name { protected void startSelecting() { if (GuiScreen.isShiftKeyDown()) { if (!selectingText) selectionCursor.from(cursor); selectingText = true; // depends on control dependency: [if], data = [none] } else selectingText = false; } }
public class class_name { @Override public Extension resolve(ExtensionId extensionId) throws ResolveException { ResolveException lastException = null; for (ExtensionRepository repository : this.repositories) { try { return repository.resolve(extensionId); } catch (ExtensionNotFoundException e1) { this.logger.debug("Could not find extension [{}] in repository [{}]", extensionId, repository.getDescriptor(), e1); } catch (ResolveException e2) { this.logger.error("Unexpected error when trying to find extension [{}] in repository [{}]", extensionId, repository.getDescriptor(), e2); lastException = e2; } } if (lastException != null) { throw new ResolveException(MessageFormat.format("Failed to resolve extension [{0}]", extensionId), lastException); } else { throw new ExtensionNotFoundException(MessageFormat.format("Could not find extension [{0}]", extensionId)); } } }
public class class_name { @Override public Extension resolve(ExtensionId extensionId) throws ResolveException { ResolveException lastException = null; for (ExtensionRepository repository : this.repositories) { try { return repository.resolve(extensionId); // depends on control dependency: [try], data = [none] } catch (ExtensionNotFoundException e1) { this.logger.debug("Could not find extension [{}] in repository [{}]", extensionId, repository.getDescriptor(), e1); } catch (ResolveException e2) { // depends on control dependency: [catch], data = [none] this.logger.error("Unexpected error when trying to find extension [{}] in repository [{}]", extensionId, repository.getDescriptor(), e2); lastException = e2; } // depends on control dependency: [catch], data = [none] } if (lastException != null) { throw new ResolveException(MessageFormat.format("Failed to resolve extension [{0}]", extensionId), lastException); } else { throw new ExtensionNotFoundException(MessageFormat.format("Could not find extension [{0}]", extensionId)); } } }
public class class_name { public Map<String, Object> getDebugInfo() { long currTime = System.currentTimeMillis(); Object notSuccessfullySyncedFor; if (lastSuccessfulSyncTime == 0) { notSuccessfullySyncedFor = "Never Successfully Synced"; } else { notSuccessfullySyncedFor = (currTime - lastSuccessfulSyncTime) / 1000; } return ImmutableMap.of( "notSyncedForSecs", lastSyncTime == 0 ? "Never Synced" : (currTime - lastSyncTime) / 1000, "notSuccessfullySyncedFor", notSuccessfullySyncedFor, "consecutiveFailedAttemptCount", consecutiveFailedAttemptCount, "syncScheduled", startStopLock.isStarted() ); } }
public class class_name { public Map<String, Object> getDebugInfo() { long currTime = System.currentTimeMillis(); Object notSuccessfullySyncedFor; if (lastSuccessfulSyncTime == 0) { notSuccessfullySyncedFor = "Never Successfully Synced"; // depends on control dependency: [if], data = [none] } else { notSuccessfullySyncedFor = (currTime - lastSuccessfulSyncTime) / 1000; // depends on control dependency: [if], data = [none] } return ImmutableMap.of( "notSyncedForSecs", lastSyncTime == 0 ? "Never Synced" : (currTime - lastSyncTime) / 1000, "notSuccessfullySyncedFor", notSuccessfullySyncedFor, "consecutiveFailedAttemptCount", consecutiveFailedAttemptCount, "syncScheduled", startStopLock.isStarted() ); } }
public class class_name { private Description checkSubtype(ClassTree tree, VisitorState state) { ClassSymbol sym = ASTHelpers.getSymbol(tree); if (sym == null) { return NO_MATCH; } Type superType = immutableSupertype(sym, state); if (superType == null) { return NO_MATCH; } String message = String.format( "Class extends @Immutable type %s, but is not annotated as immutable", superType); Fix fix = SuggestedFix.builder() .prefixWith(tree, "@Immutable ") .addImport(Immutable.class.getName()) .build(); return buildDescription(tree).setMessage(message).addFix(fix).build(); } }
public class class_name { private Description checkSubtype(ClassTree tree, VisitorState state) { ClassSymbol sym = ASTHelpers.getSymbol(tree); if (sym == null) { return NO_MATCH; // depends on control dependency: [if], data = [none] } Type superType = immutableSupertype(sym, state); if (superType == null) { return NO_MATCH; // depends on control dependency: [if], data = [none] } String message = String.format( "Class extends @Immutable type %s, but is not annotated as immutable", superType); Fix fix = SuggestedFix.builder() .prefixWith(tree, "@Immutable ") .addImport(Immutable.class.getName()) .build(); return buildDescription(tree).setMessage(message).addFix(fix).build(); } }
public class class_name { public static Map<String, Type> getReflectiveMappingTypes(Class<? extends MailChimpObject> clazz) { Map<String, Type> types = reflectiveMappingsTypes.get(clazz); if (types == null) { types = new LinkedHashMap<String, Type>(); for(Map.Entry<String, java.lang.reflect.Field> entry: getReflectiveMapping(clazz).entrySet()) { types.put(entry.getKey(), entry.getValue().getGenericType()); } reflectiveMappingsTypes.put(clazz, Collections.unmodifiableMap(types)); } return types; } }
public class class_name { public static Map<String, Type> getReflectiveMappingTypes(Class<? extends MailChimpObject> clazz) { Map<String, Type> types = reflectiveMappingsTypes.get(clazz); if (types == null) { types = new LinkedHashMap<String, Type>(); // depends on control dependency: [if], data = [none] for(Map.Entry<String, java.lang.reflect.Field> entry: getReflectiveMapping(clazz).entrySet()) { types.put(entry.getKey(), entry.getValue().getGenericType()); // depends on control dependency: [for], data = [entry] } reflectiveMappingsTypes.put(clazz, Collections.unmodifiableMap(types)); // depends on control dependency: [if], data = [(types] } return types; } }
public class class_name { @Override protected void _fit(Dataframe trainingData) { ModelParameters modelParameters = knowledgeBase.getModelParameters(); int n = trainingData.size(); Map<List<Object>, Double> lambdas = modelParameters.getLambdas(); Set<Object> classesSet = modelParameters.getClasses(); double Cmax = 0.0; //max number of activated features in the dataset. Required from the IIS algorithm //first we need to find all the classes for(Record r : trainingData) { Object theClass=r.getY(); classesSet.add(theClass); //counts the number of non-zero (active) features of the record int activeFeatures=(int) r.getX().values().stream().filter(e -> e !=null && TypeInference.toDouble(e) > 0.0).count(); //NOTE: we try to find the Cmax the maximum number of active featured in the training dataset. The total number of features in original IIS were required to be constant. NEVERTHELESS as it is mentioned here http://acl.ldc.upenn.edu/P/P02/P02-1002.pdf the Cmax only needs to constrain the number of features and not necessarily to be equal to them. //NOTE2: In this implementation the Cmax is equal to the maximum number of features that were found in the training dataset. We don't need to go through all the classes to find the Cmax. This is because of the way that the features are selected. if(activeFeatures>Cmax) { Cmax=activeFeatures; } } //create a temporary map for the observed probabilities in training set StorageEngine storageEngine = knowledgeBase.getStorageEngine(); Map<List<Object>, Double> tmp_EpFj_observed = storageEngine.getBigMap("tmp_EpFj_observed", (Class<List<Object>>)(Class<?>)List.class, Double.class, MapType.HASHMAP, StorageHint.IN_MEMORY, true, true); //Loop through all the classes to ensure that the feature-class combination is initialized for ALL the classes //The math REQUIRE us to have scores for all classes to make the probabilities comparable. 
streamExecutor.forEach(StreamMethods.stream(trainingData.getXDataTypes().keySet().stream(), isParallelized()), feature -> { for(Object theClass : classesSet) { List<Object> featureClassTuple = Arrays.asList(feature, theClass); tmp_EpFj_observed.put(featureClassTuple, 0.0); lambdas.put(featureClassTuple, 0.0); } }); double increment = 1.0/n; //this is done for speed reasons. We don't want to repeat the same division over and over //then we calculate the observed probabilities in training set streamExecutor.forEach(StreamMethods.stream(trainingData.stream(), isParallelized()), r -> { Object theClass = r.getY(); //store the occurrances of the features for(Map.Entry<Object, Object> entry : r.getX().entrySet()) { Double occurrences=TypeInference.toDouble(entry.getValue()); if (occurrences!=null && occurrences>0.0) { Object feature = entry.getKey(); //find the class of this particular example List<Object> featureClassTuple = Arrays.asList(feature, theClass); synchronized(tmp_EpFj_observed) { tmp_EpFj_observed.put(featureClassTuple, tmp_EpFj_observed.get(featureClassTuple) + increment); } } } }); //IIS algorithm IIS(trainingData, tmp_EpFj_observed, Cmax); //Drop the temporary Collection storageEngine.dropBigMap("tmp_EpFj_observed", tmp_EpFj_observed); } }
public class class_name { @Override protected void _fit(Dataframe trainingData) { ModelParameters modelParameters = knowledgeBase.getModelParameters(); int n = trainingData.size(); Map<List<Object>, Double> lambdas = modelParameters.getLambdas(); Set<Object> classesSet = modelParameters.getClasses(); double Cmax = 0.0; //max number of activated features in the dataset. Required from the IIS algorithm //first we need to find all the classes for(Record r : trainingData) { Object theClass=r.getY(); classesSet.add(theClass); // depends on control dependency: [for], data = [none] //counts the number of non-zero (active) features of the record int activeFeatures=(int) r.getX().values().stream().filter(e -> e !=null && TypeInference.toDouble(e) > 0.0).count(); //NOTE: we try to find the Cmax the maximum number of active featured in the training dataset. The total number of features in original IIS were required to be constant. NEVERTHELESS as it is mentioned here http://acl.ldc.upenn.edu/P/P02/P02-1002.pdf the Cmax only needs to constrain the number of features and not necessarily to be equal to them. //NOTE2: In this implementation the Cmax is equal to the maximum number of features that were found in the training dataset. We don't need to go through all the classes to find the Cmax. This is because of the way that the features are selected. 
if(activeFeatures>Cmax) { Cmax=activeFeatures; // depends on control dependency: [if], data = [none] } } //create a temporary map for the observed probabilities in training set StorageEngine storageEngine = knowledgeBase.getStorageEngine(); Map<List<Object>, Double> tmp_EpFj_observed = storageEngine.getBigMap("tmp_EpFj_observed", (Class<List<Object>>)(Class<?>)List.class, Double.class, MapType.HASHMAP, StorageHint.IN_MEMORY, true, true); //Loop through all the classes to ensure that the feature-class combination is initialized for ALL the classes //The math REQUIRE us to have scores for all classes to make the probabilities comparable. streamExecutor.forEach(StreamMethods.stream(trainingData.getXDataTypes().keySet().stream(), isParallelized()), feature -> { for(Object theClass : classesSet) { List<Object> featureClassTuple = Arrays.asList(feature, theClass); tmp_EpFj_observed.put(featureClassTuple, 0.0); lambdas.put(featureClassTuple, 0.0); } }); double increment = 1.0/n; //this is done for speed reasons. We don't want to repeat the same division over and over //then we calculate the observed probabilities in training set streamExecutor.forEach(StreamMethods.stream(trainingData.stream(), isParallelized()), r -> { Object theClass = r.getY(); //store the occurrances of the features for(Map.Entry<Object, Object> entry : r.getX().entrySet()) { Double occurrences=TypeInference.toDouble(entry.getValue()); if (occurrences!=null && occurrences>0.0) { Object feature = entry.getKey(); //find the class of this particular example List<Object> featureClassTuple = Arrays.asList(feature, theClass); synchronized(tmp_EpFj_observed) { tmp_EpFj_observed.put(featureClassTuple, tmp_EpFj_observed.get(featureClassTuple) + increment); } } } }); //IIS algorithm IIS(trainingData, tmp_EpFj_observed, Cmax); //Drop the temporary Collection storageEngine.dropBigMap("tmp_EpFj_observed", tmp_EpFj_observed); } }
public class class_name { @SuppressWarnings("unchecked") private <T> Callback<T> getFirstCallback(BeanMessageID type) { List<Callback<?>> callbacks = beanCallbacks.get(type); if (callbacks == null || callbacks.isEmpty()) { Log.w(TAG, "Got response without callback!"); return null; } return (Callback<T>) callbacks.remove(0); } }
public class class_name { @SuppressWarnings("unchecked") private <T> Callback<T> getFirstCallback(BeanMessageID type) { List<Callback<?>> callbacks = beanCallbacks.get(type); if (callbacks == null || callbacks.isEmpty()) { Log.w(TAG, "Got response without callback!"); // depends on control dependency: [if], data = [none] return null; // depends on control dependency: [if], data = [none] } return (Callback<T>) callbacks.remove(0); } }
public class class_name { public ValidationResult validateWithResult(IBaseResource theResource) { Validate.notNull(theResource, "theResource must not be null"); applyDefaultValidators(); IValidationContext<IBaseResource> ctx = ValidationContext.forResource(myContext, theResource); for (IValidatorModule next : myValidators) { next.validateResource(ctx); } return ctx.toResult(); } }
public class class_name { public ValidationResult validateWithResult(IBaseResource theResource) { Validate.notNull(theResource, "theResource must not be null"); applyDefaultValidators(); IValidationContext<IBaseResource> ctx = ValidationContext.forResource(myContext, theResource); for (IValidatorModule next : myValidators) { next.validateResource(ctx); // depends on control dependency: [for], data = [next] } return ctx.toResult(); } }
public class class_name { public static String getSLDUL(String[] soilParas) { if (soilParas != null && soilParas.length >= 3) { return divide(calcMoisture33Kpa(soilParas[0], soilParas[1], soilParas[2]), "100", 3); } else { return null; } } }
public class class_name { public static String getSLDUL(String[] soilParas) { if (soilParas != null && soilParas.length >= 3) { return divide(calcMoisture33Kpa(soilParas[0], soilParas[1], soilParas[2]), "100", 3); // depends on control dependency: [if], data = [(soilParas] } else { return null; // depends on control dependency: [if], data = [none] } } }
public class class_name { private boolean hasConstantField (ClassDoc classDoc) { VisibleMemberMap visibleMemberMapFields = new VisibleMemberMap(classDoc, VisibleMemberMap.FIELDS, configuration); List<?> fields = visibleMemberMapFields.getLeafClassMembers(configuration); for (Object f : fields) { FieldDoc field = (FieldDoc) f; if (field.constantValueExpression() != null) { classDocsWithConstFields.add(classDoc); return true; } } return false; } }
public class class_name { private boolean hasConstantField (ClassDoc classDoc) { VisibleMemberMap visibleMemberMapFields = new VisibleMemberMap(classDoc, VisibleMemberMap.FIELDS, configuration); List<?> fields = visibleMemberMapFields.getLeafClassMembers(configuration); for (Object f : fields) { FieldDoc field = (FieldDoc) f; if (field.constantValueExpression() != null) { classDocsWithConstFields.add(classDoc); // depends on control dependency: [if], data = [none] return true; // depends on control dependency: [if], data = [none] } } return false; } }
public class class_name { protected void handleInternalException( AbortCompilation abortException, CompilationUnitDeclaration unit) { /* special treatment for SilentAbort: silently cancelling the compilation process */ if (abortException.isSilent) { if (abortException.silentException == null) { return; } throw abortException.silentException; } /* uncomment following line to see where the abort came from */ // abortException.printStackTrace(); // Exception may tell which compilation result it is related, and which problem caused it CompilationResult result = abortException.compilationResult; if (result == null && unit != null) { result = unit.compilationResult; // current unit being processed ? } // Lookup environment may be in middle of connecting types if (result == null && this.lookupEnvironment.unitBeingCompleted != null) { result = this.lookupEnvironment.unitBeingCompleted.compilationResult; } if (result == null) { synchronized (this) { if (this.unitsToProcess != null && this.totalUnits > 0) result = this.unitsToProcess[this.totalUnits - 1].compilationResult; } } // last unit in beginToCompile ? if (result != null && !result.hasBeenAccepted) { /* distant problem which could not be reported back there? 
*/ if (abortException.problem != null) { recordDistantProblem: { CategorizedProblem distantProblem = abortException.problem; CategorizedProblem[] knownProblems = result.problems; for (int i = 0; i < result.problemCount; i++) { if (knownProblems[i] == distantProblem) { // already recorded break recordDistantProblem; } } if (distantProblem instanceof DefaultProblem) { // fixup filename TODO (philippe) should improve API to make this official ((DefaultProblem) distantProblem).setOriginatingFileName(result.getFileName()); } result.record(distantProblem, unit, true); } } else { /* distant internal exception which could not be reported back there */ if (abortException.exception != null) { this.handleInternalException(abortException.exception, null, result); return; } } /* hand back the compilation result */ if (!result.hasBeenAccepted) { this.requestor.acceptResult(result.tagAsAccepted()); } } else { abortException.printStackTrace(); } } }
public class class_name { protected void handleInternalException( AbortCompilation abortException, CompilationUnitDeclaration unit) { /* special treatment for SilentAbort: silently cancelling the compilation process */ if (abortException.isSilent) { if (abortException.silentException == null) { return; // depends on control dependency: [if], data = [none] } throw abortException.silentException; } /* uncomment following line to see where the abort came from */ // abortException.printStackTrace(); // Exception may tell which compilation result it is related, and which problem caused it CompilationResult result = abortException.compilationResult; if (result == null && unit != null) { result = unit.compilationResult; // current unit being processed ? // depends on control dependency: [if], data = [none] } // Lookup environment may be in middle of connecting types if (result == null && this.lookupEnvironment.unitBeingCompleted != null) { result = this.lookupEnvironment.unitBeingCompleted.compilationResult; // depends on control dependency: [if], data = [none] } if (result == null) { synchronized (this) { // depends on control dependency: [if], data = [none] if (this.unitsToProcess != null && this.totalUnits > 0) result = this.unitsToProcess[this.totalUnits - 1].compilationResult; } } // last unit in beginToCompile ? if (result != null && !result.hasBeenAccepted) { /* distant problem which could not be reported back there? 
*/ if (abortException.problem != null) { recordDistantProblem: { CategorizedProblem distantProblem = abortException.problem; CategorizedProblem[] knownProblems = result.problems; for (int i = 0; i < result.problemCount; i++) { if (knownProblems[i] == distantProblem) { // already recorded break recordDistantProblem; } } if (distantProblem instanceof DefaultProblem) { // fixup filename TODO (philippe) should improve API to make this official ((DefaultProblem) distantProblem).setOriginatingFileName(result.getFileName()); // depends on control dependency: [if], data = [none] } result.record(distantProblem, unit, true); } } else { /* distant internal exception which could not be reported back there */ if (abortException.exception != null) { this.handleInternalException(abortException.exception, null, result); // depends on control dependency: [if], data = [(abortException.exception] return; // depends on control dependency: [if], data = [none] } } /* hand back the compilation result */ if (!result.hasBeenAccepted) { this.requestor.acceptResult(result.tagAsAccepted()); // depends on control dependency: [if], data = [none] } } else { abortException.printStackTrace(); // depends on control dependency: [if], data = [none] } } }
public class class_name { public static int read( final UnsafeBuffer termBuffer, final int termOffset, final FragmentHandler handler, final int fragmentsLimit, final Header header, final ErrorHandler errorHandler, final long currentPosition, final Position subscriberPosition) { int fragmentsRead = 0; int offset = termOffset; final int capacity = termBuffer.capacity(); header.buffer(termBuffer); try { while (fragmentsRead < fragmentsLimit && offset < capacity) { final int frameLength = frameLengthVolatile(termBuffer, offset); if (frameLength <= 0) { break; } final int frameOffset = offset; offset += BitUtil.align(frameLength, FRAME_ALIGNMENT); if (!isPaddingFrame(termBuffer, frameOffset)) { header.offset(frameOffset); handler.onFragment(termBuffer, frameOffset + HEADER_LENGTH, frameLength - HEADER_LENGTH, header); ++fragmentsRead; } } } catch (final Throwable t) { errorHandler.onError(t); } finally { final long newPosition = currentPosition + (offset - termOffset); if (newPosition > currentPosition) { subscriberPosition.setOrdered(newPosition); } } return fragmentsRead; } }
public class class_name { public static int read( final UnsafeBuffer termBuffer, final int termOffset, final FragmentHandler handler, final int fragmentsLimit, final Header header, final ErrorHandler errorHandler, final long currentPosition, final Position subscriberPosition) { int fragmentsRead = 0; int offset = termOffset; final int capacity = termBuffer.capacity(); header.buffer(termBuffer); try { while (fragmentsRead < fragmentsLimit && offset < capacity) { final int frameLength = frameLengthVolatile(termBuffer, offset); if (frameLength <= 0) { break; } final int frameOffset = offset; offset += BitUtil.align(frameLength, FRAME_ALIGNMENT); // depends on control dependency: [while], data = [none] if (!isPaddingFrame(termBuffer, frameOffset)) { header.offset(frameOffset); // depends on control dependency: [if], data = [none] handler.onFragment(termBuffer, frameOffset + HEADER_LENGTH, frameLength - HEADER_LENGTH, header); // depends on control dependency: [if], data = [none] ++fragmentsRead; // depends on control dependency: [if], data = [none] } } } catch (final Throwable t) { errorHandler.onError(t); } // depends on control dependency: [catch], data = [none] finally { final long newPosition = currentPosition + (offset - termOffset); if (newPosition > currentPosition) { subscriberPosition.setOrdered(newPosition); // depends on control dependency: [if], data = [(newPosition] } } return fragmentsRead; } }
public class class_name { FormattingContext concat(FormattingContext other) { if (isEmpty()) { return other; } else if (other.isEmpty()) { return this; } else { curIndent = ""; // don't serialize trailing whitespace in front of the next FormattingContext. return append(other.toString()); } } }
public class class_name { FormattingContext concat(FormattingContext other) { if (isEmpty()) { return other; // depends on control dependency: [if], data = [none] } else if (other.isEmpty()) { return this; // depends on control dependency: [if], data = [none] } else { curIndent = ""; // don't serialize trailing whitespace in front of the next FormattingContext. // depends on control dependency: [if], data = [none] return append(other.toString()); // depends on control dependency: [if], data = [none] } } }
public class class_name { protected void addClassPathes(final GroovyClassLoader classLoader) { if (classpath != null) { for (int i = 0; i < classpath.list().length; i++) { classLoader.addClasspath(classpath.list()[i]); } } } }
public class class_name { protected void addClassPathes(final GroovyClassLoader classLoader) { if (classpath != null) { for (int i = 0; i < classpath.list().length; i++) { classLoader.addClasspath(classpath.list()[i]); // depends on control dependency: [for], data = [i] } } } }
public class class_name { @Override protected Statement withAfters(final FrameworkMethod method, final Object target, final Statement statement) { final Statement afters = super.withAfters(method, target, new NoopStatement()); return new Statement() { @Override public void evaluate() throws Throwable { statement.evaluate(); afters.evaluate(); TestClass tc = getTestClass(); // Non-static @Resource List<FrameworkField> fields = tc.getAnnotatedFields(Resource.class); if (fields != null && !fields.isEmpty()) { for (FrameworkField f : fields) { SecurityActions.setAccessible(f.getField()); if (!Modifier.isStatic(f.getField().getModifiers()) && !f.getField().getDeclaringClass().isPrimitive()) { f.getField().set(target, null); } } } // Non-static @Inject / @Named fields = tc.getAnnotatedFields(javax.inject.Inject.class); if (fields != null && !fields.isEmpty()) { for (FrameworkField f : fields) { SecurityActions.setAccessible(f.getField()); if (!Modifier.isStatic(f.getField().getModifiers()) && !f.getField().getDeclaringClass().isPrimitive()) { f.getField().set(target, null); } } } // Non-static @Deployment if (!deployments.isEmpty()) { for (int i = deployments.size() - 1; i >= 0; i--) { Object deployment = deployments.get(i); if (deployment instanceof URL) { embedded.undeploy((URL)deployment); } else if (deployment instanceof ResourceAdapterArchive) { embedded.undeploy((ResourceAdapterArchive)deployment); } else if (deployment instanceof Descriptor) { embedded.undeploy((Descriptor)deployment); } } } deployments.clear(); } }; } }
public class class_name { @Override protected Statement withAfters(final FrameworkMethod method, final Object target, final Statement statement) { final Statement afters = super.withAfters(method, target, new NoopStatement()); return new Statement() { @Override public void evaluate() throws Throwable { statement.evaluate(); afters.evaluate(); TestClass tc = getTestClass(); // Non-static @Resource List<FrameworkField> fields = tc.getAnnotatedFields(Resource.class); if (fields != null && !fields.isEmpty()) { for (FrameworkField f : fields) { SecurityActions.setAccessible(f.getField()); if (!Modifier.isStatic(f.getField().getModifiers()) && !f.getField().getDeclaringClass().isPrimitive()) { f.getField().set(target, null); // depends on control dependency: [if], data = [none] } } } // Non-static @Inject / @Named fields = tc.getAnnotatedFields(javax.inject.Inject.class); if (fields != null && !fields.isEmpty()) { for (FrameworkField f : fields) { SecurityActions.setAccessible(f.getField()); // depends on control dependency: [for], data = [f] if (!Modifier.isStatic(f.getField().getModifiers()) && !f.getField().getDeclaringClass().isPrimitive()) { f.getField().set(target, null); // depends on control dependency: [if], data = [none] } } } // Non-static @Deployment if (!deployments.isEmpty()) { for (int i = deployments.size() - 1; i >= 0; i--) { Object deployment = deployments.get(i); if (deployment instanceof URL) { embedded.undeploy((URL)deployment); // depends on control dependency: [if], data = [none] } else if (deployment instanceof ResourceAdapterArchive) { embedded.undeploy((ResourceAdapterArchive)deployment); // depends on control dependency: [if], data = [none] } else if (deployment instanceof Descriptor) { embedded.undeploy((Descriptor)deployment); // depends on control dependency: [if], data = [none] } } } deployments.clear(); } }; } }
public class class_name { @Override public long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier, long amount) throws IOException { LOG.trace("incrementColumnValue(byte[], byte[], byte[], long)"); try ( Scope scope = TRACER.spanBuilder("BigtableTable.incrementColumnValue").startScopedSpan()) { Increment incr = new Increment(row); incr.addColumn(family, qualifier, amount); Result result = increment(incr); Cell cell = result.getColumnLatestCell(family, qualifier); if (cell == null) { LOG.error("Failed to find a incremented value in result of increment"); throw new IOException( makeGenericExceptionMessage( "increment", options.getProjectId(), tableName.getQualifierAsString(), row)); } return Bytes.toLong(CellUtil.cloneValue(cell)); } } }
public class class_name { @Override public long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier, long amount) throws IOException { LOG.trace("incrementColumnValue(byte[], byte[], byte[], long)"); try ( Scope scope = TRACER.spanBuilder("BigtableTable.incrementColumnValue").startScopedSpan()) { Increment incr = new Increment(row); incr.addColumn(family, qualifier, amount); Result result = increment(incr); Cell cell = result.getColumnLatestCell(family, qualifier); if (cell == null) { LOG.error("Failed to find a incremented value in result of increment"); // depends on control dependency: [if], data = [none] throw new IOException( makeGenericExceptionMessage( "increment", options.getProjectId(), tableName.getQualifierAsString(), row)); } return Bytes.toLong(CellUtil.cloneValue(cell)); } } }
public class class_name { public static List<Nucleotide> getNucleotideList(PolymerNotation polymer) throws RNAUtilsException, HELM2HandledException, ChemistryException { checkRNA(polymer); List<Nucleotide> nucleotides = new ArrayList<Nucleotide>(); /* check for HELM2Elements */ List<MonomerNotation> monomerNotations = polymer.getPolymerElements().getListOfElements(); for (int i = 0; i < monomerNotations.size(); i++) { MonomerNotation monomerNotation = monomerNotations.get(i); if ((!(monomerNotation instanceof MonomerNotationUnitRNA)) || Integer.parseInt(monomerNotation.getCount()) != 1) { LOG.info("MonomerNotation contains HELM2 Elements " + monomerNotation); throw new HELM2HandledException("HELM2 Elements are involved"); } try { boolean last = false; if (i == monomerNotations.size() - 1) { last = true; } nucleotides.add(NucleotideParser.convertToNucleotide(monomerNotation.getUnit(), last)); } catch (MonomerException | NucleotideLoadingException | NotationException | org.helm.notation2.exception.NotationException e) { e.printStackTrace(); throw new RNAUtilsException("Nucleotide can not be read " + e.getMessage()); } } return nucleotides; } }
public class class_name { public static List<Nucleotide> getNucleotideList(PolymerNotation polymer) throws RNAUtilsException, HELM2HandledException, ChemistryException { checkRNA(polymer); List<Nucleotide> nucleotides = new ArrayList<Nucleotide>(); /* check for HELM2Elements */ List<MonomerNotation> monomerNotations = polymer.getPolymerElements().getListOfElements(); for (int i = 0; i < monomerNotations.size(); i++) { MonomerNotation monomerNotation = monomerNotations.get(i); if ((!(monomerNotation instanceof MonomerNotationUnitRNA)) || Integer.parseInt(monomerNotation.getCount()) != 1) { LOG.info("MonomerNotation contains HELM2 Elements " + monomerNotation); throw new HELM2HandledException("HELM2 Elements are involved"); } try { boolean last = false; if (i == monomerNotations.size() - 1) { last = true; // depends on control dependency: [if], data = [none] } nucleotides.add(NucleotideParser.convertToNucleotide(monomerNotation.getUnit(), last)); } catch (MonomerException | NucleotideLoadingException | NotationException | org.helm.notation2.exception.NotationException e) { e.printStackTrace(); throw new RNAUtilsException("Nucleotide can not be read " + e.getMessage()); } } return nucleotides; } }
public class class_name { private static void setContentTypeHeader(HttpResponse response, File file) { String contentType = StringKit.mimeType(file.getName()); if (null == contentType) { contentType = URLConnection.guessContentTypeFromName(file.getName()); } response.headers().set(HttpConst.CONTENT_TYPE, contentType); } }
public class class_name { private static void setContentTypeHeader(HttpResponse response, File file) { String contentType = StringKit.mimeType(file.getName()); if (null == contentType) { contentType = URLConnection.guessContentTypeFromName(file.getName()); // depends on control dependency: [if], data = [none] } response.headers().set(HttpConst.CONTENT_TYPE, contentType); } }
public class class_name { private static int parseUGO(CharSequenceScanner parse) { int ugo = 0; while (true) { char c = parse.forceNext(); if (c == 'u') { ugo = ugo | MASK_USER; } else if (c == 'g') { ugo = ugo | MASK_GROUP; } else if (c == 'o') { ugo = ugo | MASK_OTHERS; } else if (c == 'a') { ugo = MASK_ALL; } else { if (ugo == 0) { // if none of u/g/o/a was specified, then 'a' is the default ugo = MASK_ALL; } if (c != 0) { // we read too far parse.stepBack(); } return ugo; } } } }
public class class_name { private static int parseUGO(CharSequenceScanner parse) { int ugo = 0; while (true) { char c = parse.forceNext(); if (c == 'u') { ugo = ugo | MASK_USER; // depends on control dependency: [if], data = [none] } else if (c == 'g') { ugo = ugo | MASK_GROUP; // depends on control dependency: [if], data = [none] } else if (c == 'o') { ugo = ugo | MASK_OTHERS; // depends on control dependency: [if], data = [none] } else if (c == 'a') { ugo = MASK_ALL; // depends on control dependency: [if], data = [none] } else { if (ugo == 0) { // if none of u/g/o/a was specified, then 'a' is the default ugo = MASK_ALL; // depends on control dependency: [if], data = [none] } if (c != 0) { // we read too far parse.stepBack(); // depends on control dependency: [if], data = [none] } return ugo; // depends on control dependency: [if], data = [none] } } } }
public class class_name { public static RectL getBounds(final RectL pIn, final long pCenterX, final long pCenterY, final double pDegrees, final RectL pReuse) { final RectL out = pReuse != null ? pReuse : new RectL(); if (pDegrees == 0) { // optimization out.top = pIn.top; out.left = pIn.left; out.bottom = pIn.bottom; out.right = pIn.right; return out; } final double radians = pDegrees * Math.PI / 180.; final double cos = Math.cos(radians); final double sin = Math.sin(radians); long inputX; long inputY; long outputX; long outputY; inputX = pIn.left; // corner 1 inputY = pIn.top; outputX = getRotatedX(inputX, inputY, pCenterX, pCenterY, cos, sin); outputY = getRotatedY(inputX, inputY, pCenterX, pCenterY, cos, sin); out.top = out.bottom = outputY; out.left = out.right = outputX; inputX = pIn.right; // corner 2 inputY = pIn.top; outputX = getRotatedX(inputX, inputY, pCenterX, pCenterY, cos, sin); outputY = getRotatedY(inputX, inputY, pCenterX, pCenterY, cos, sin); if (out.top > outputY) { out.top = outputY; } if (out.bottom < outputY) { out.bottom = outputY; } if (out.left > outputX) { out.left = outputX; } if (out.right < outputX) { out.right = outputX; } inputX = pIn.right; // corner 3 inputY = pIn.bottom; outputX = getRotatedX(inputX, inputY, pCenterX, pCenterY, cos, sin); outputY = getRotatedY(inputX, inputY, pCenterX, pCenterY, cos, sin); if (out.top > outputY) { out.top = outputY; } if (out.bottom < outputY) { out.bottom = outputY; } if (out.left > outputX) { out.left = outputX; } if (out.right < outputX) { out.right = outputX; } inputX = pIn.left; // corner 4 inputY = pIn.bottom; outputX = getRotatedX(inputX, inputY, pCenterX, pCenterY, cos, sin); outputY = getRotatedY(inputX, inputY, pCenterX, pCenterY, cos, sin); if (out.top > outputY) { out.top = outputY; } if (out.bottom < outputY) { out.bottom = outputY; } if (out.left > outputX) { out.left = outputX; } if (out.right < outputX) { out.right = outputX; } return out; } }
public class class_name { public static RectL getBounds(final RectL pIn, final long pCenterX, final long pCenterY, final double pDegrees, final RectL pReuse) { final RectL out = pReuse != null ? pReuse : new RectL(); if (pDegrees == 0) { // optimization out.top = pIn.top; // depends on control dependency: [if], data = [none] out.left = pIn.left; // depends on control dependency: [if], data = [none] out.bottom = pIn.bottom; // depends on control dependency: [if], data = [none] out.right = pIn.right; // depends on control dependency: [if], data = [none] return out; // depends on control dependency: [if], data = [none] } final double radians = pDegrees * Math.PI / 180.; final double cos = Math.cos(radians); final double sin = Math.sin(radians); long inputX; long inputY; long outputX; long outputY; inputX = pIn.left; // corner 1 inputY = pIn.top; outputX = getRotatedX(inputX, inputY, pCenterX, pCenterY, cos, sin); outputY = getRotatedY(inputX, inputY, pCenterX, pCenterY, cos, sin); out.top = out.bottom = outputY; out.left = out.right = outputX; inputX = pIn.right; // corner 2 inputY = pIn.top; outputX = getRotatedX(inputX, inputY, pCenterX, pCenterY, cos, sin); outputY = getRotatedY(inputX, inputY, pCenterX, pCenterY, cos, sin); if (out.top > outputY) { out.top = outputY; // depends on control dependency: [if], data = [none] } if (out.bottom < outputY) { out.bottom = outputY; // depends on control dependency: [if], data = [none] } if (out.left > outputX) { out.left = outputX; // depends on control dependency: [if], data = [none] } if (out.right < outputX) { out.right = outputX; // depends on control dependency: [if], data = [none] } inputX = pIn.right; // corner 3 inputY = pIn.bottom; outputX = getRotatedX(inputX, inputY, pCenterX, pCenterY, cos, sin); outputY = getRotatedY(inputX, inputY, pCenterX, pCenterY, cos, sin); if (out.top > outputY) { out.top = outputY; // depends on control dependency: [if], data = [none] } if (out.bottom < outputY) { out.bottom = outputY; 
// depends on control dependency: [if], data = [none] } if (out.left > outputX) { out.left = outputX; // depends on control dependency: [if], data = [none] } if (out.right < outputX) { out.right = outputX; // depends on control dependency: [if], data = [none] } inputX = pIn.left; // corner 4 inputY = pIn.bottom; outputX = getRotatedX(inputX, inputY, pCenterX, pCenterY, cos, sin); outputY = getRotatedY(inputX, inputY, pCenterX, pCenterY, cos, sin); if (out.top > outputY) { out.top = outputY; // depends on control dependency: [if], data = [none] } if (out.bottom < outputY) { out.bottom = outputY; // depends on control dependency: [if], data = [none] } if (out.left > outputX) { out.left = outputX; // depends on control dependency: [if], data = [none] } if (out.right < outputX) { out.right = outputX; // depends on control dependency: [if], data = [none] } return out; } }
public class class_name { @Deprecated public static Matrix[] svd(Matrix m, Algorithm algorithm, int dimensions) { // Determine which algorithm is the fastest in order to decide which // format the matrix file should be written if (algorithm.equals(Algorithm.ANY)) algorithm = getFastestAvailableAlgorithm(); if (algorithm == null) throw new UnsupportedOperationException( "No SVD algorithm is available on this system"); Format fmt = null; switch (algorithm) { // In the case of COLT or JAMA, avoid writing the matrix to disk // altogether, and just pass it in directly. This avoids the I/O // overhead, althought both methods require that the matrix be converted // into arrays first. case COLT: return coltSVD(m.toDenseArray(), !(m instanceof SparseMatrix), dimensions); case JAMA: return jamaSVD(m.toDenseArray(), dimensions); // Otherwise, covert to binary SVDLIBC case SVDLIBC: fmt = Format.SVDLIBC_SPARSE_BINARY; break; // Or to Matlab sparse to cover Matlab and Octave default: fmt = Format.MATLAB_SPARSE; } try { File tmpFile = File.createTempFile("matrix-svd", ".dat"); tmpFile.deleteOnExit(); MatrixIO.writeMatrix(m, tmpFile, fmt); return svd(tmpFile, algorithm, fmt, dimensions); } catch (IOException ioe) { SVD_LOGGER.log(Level.SEVERE, "convertFormat", ioe); } throw new UnsupportedOperationException( "SVD algorithm failed in writing matrix to disk"); } }
public class class_name { @Deprecated public static Matrix[] svd(Matrix m, Algorithm algorithm, int dimensions) { // Determine which algorithm is the fastest in order to decide which // format the matrix file should be written if (algorithm.equals(Algorithm.ANY)) algorithm = getFastestAvailableAlgorithm(); if (algorithm == null) throw new UnsupportedOperationException( "No SVD algorithm is available on this system"); Format fmt = null; switch (algorithm) { // In the case of COLT or JAMA, avoid writing the matrix to disk // altogether, and just pass it in directly. This avoids the I/O // overhead, althought both methods require that the matrix be converted // into arrays first. case COLT: return coltSVD(m.toDenseArray(), !(m instanceof SparseMatrix), dimensions); case JAMA: return jamaSVD(m.toDenseArray(), dimensions); // Otherwise, covert to binary SVDLIBC case SVDLIBC: fmt = Format.SVDLIBC_SPARSE_BINARY; break; // Or to Matlab sparse to cover Matlab and Octave default: fmt = Format.MATLAB_SPARSE; } try { File tmpFile = File.createTempFile("matrix-svd", ".dat"); tmpFile.deleteOnExit(); // depends on control dependency: [try], data = [none] MatrixIO.writeMatrix(m, tmpFile, fmt); // depends on control dependency: [try], data = [none] return svd(tmpFile, algorithm, fmt, dimensions); // depends on control dependency: [try], data = [none] } catch (IOException ioe) { SVD_LOGGER.log(Level.SEVERE, "convertFormat", ioe); } // depends on control dependency: [catch], data = [none] throw new UnsupportedOperationException( "SVD algorithm failed in writing matrix to disk"); } }
public class class_name { private void appendSourceFilesFromSchemaResolver(ParseResult result) { for (Map.Entry<String, DataSchemaLocation> entry : _schemaResolver.nameToDataSchemaLocations().entrySet()) { final File sourceFile = entry.getValue().getSourceFile(); if (sourceFile != null) { result.getSourceFiles().add(sourceFile); } } } }
public class class_name { private void appendSourceFilesFromSchemaResolver(ParseResult result) { for (Map.Entry<String, DataSchemaLocation> entry : _schemaResolver.nameToDataSchemaLocations().entrySet()) { final File sourceFile = entry.getValue().getSourceFile(); if (sourceFile != null) { result.getSourceFiles().add(sourceFile); // depends on control dependency: [if], data = [(sourceFile] } } } }
public class class_name { public ReadOnlyStyledDocument<PS, SEG, S> mapParagraphs(UnaryOperator<Paragraph<PS, SEG, S>> mapper) { int n = tree.getLeafCount(); List<Paragraph<PS, SEG, S>> pars = new ArrayList<>(n); for(int i = 0; i < n; ++i) { pars.add(mapper.apply(tree.getLeaf(i))); } return new ReadOnlyStyledDocument<>(pars); } }
public class class_name { public ReadOnlyStyledDocument<PS, SEG, S> mapParagraphs(UnaryOperator<Paragraph<PS, SEG, S>> mapper) { int n = tree.getLeafCount(); List<Paragraph<PS, SEG, S>> pars = new ArrayList<>(n); for(int i = 0; i < n; ++i) { pars.add(mapper.apply(tree.getLeaf(i))); // depends on control dependency: [for], data = [i] } return new ReadOnlyStyledDocument<>(pars); } }
public class class_name { public String encrypt(final String transform, final byte[] bytes) { String encodedJSON = null; if (secretKey == null) throw new OSecurityException("OSymmetricKey.encrypt() SecretKey is null"); if (transform == null) throw new OSecurityException("OSymmetricKey.encrypt() Cannot determine cipher transformation"); try { // Throws NoSuchAlgorithmException and NoSuchPaddingException. Cipher cipher = Cipher.getInstance(transform); // If the cipher transformation requires an initialization vector then init() will create a random one. // (Use cipher.getIV() to retrieve the IV, if it exists.) cipher.init(Cipher.ENCRYPT_MODE, secretKey); // If the cipher does not use an IV, this will be null. byte[] initVector = cipher.getIV(); // byte[] initVector = encCipher.getParameters().getParameterSpec(IvParameterSpec.class).getIV(); byte[] encrypted = cipher.doFinal(bytes); encodedJSON = encodeJSON(encrypted, initVector); } catch (Exception ex) { throw OException.wrapException(new OSecurityException("OSymmetricKey.encrypt() Exception: " + ex.getMessage()), ex); } return encodedJSON; } }
public class class_name { public String encrypt(final String transform, final byte[] bytes) { String encodedJSON = null; if (secretKey == null) throw new OSecurityException("OSymmetricKey.encrypt() SecretKey is null"); if (transform == null) throw new OSecurityException("OSymmetricKey.encrypt() Cannot determine cipher transformation"); try { // Throws NoSuchAlgorithmException and NoSuchPaddingException. Cipher cipher = Cipher.getInstance(transform); // If the cipher transformation requires an initialization vector then init() will create a random one. // (Use cipher.getIV() to retrieve the IV, if it exists.) cipher.init(Cipher.ENCRYPT_MODE, secretKey); // depends on control dependency: [try], data = [none] // If the cipher does not use an IV, this will be null. byte[] initVector = cipher.getIV(); // byte[] initVector = encCipher.getParameters().getParameterSpec(IvParameterSpec.class).getIV(); byte[] encrypted = cipher.doFinal(bytes); encodedJSON = encodeJSON(encrypted, initVector); // depends on control dependency: [try], data = [none] } catch (Exception ex) { throw OException.wrapException(new OSecurityException("OSymmetricKey.encrypt() Exception: " + ex.getMessage()), ex); } // depends on control dependency: [catch], data = [none] return encodedJSON; } }
public class class_name { public static final RuntimeException addSuppressed(RuntimeException original, Throwable suppressed) { if (original == suppressed) { return original; } if (original == REJECTED_EXECUTION || original == NOT_TIME_CAPABLE_REJECTED_EXECUTION) { RejectedExecutionException ree = new RejectedExecutionException(original.getMessage()); ree.addSuppressed(suppressed); return ree; } else { original.addSuppressed(suppressed); return original; } } }
public class class_name { public static final RuntimeException addSuppressed(RuntimeException original, Throwable suppressed) { if (original == suppressed) { return original; // depends on control dependency: [if], data = [none] } if (original == REJECTED_EXECUTION || original == NOT_TIME_CAPABLE_REJECTED_EXECUTION) { RejectedExecutionException ree = new RejectedExecutionException(original.getMessage()); ree.addSuppressed(suppressed); // depends on control dependency: [if], data = [none] return ree; // depends on control dependency: [if], data = [none] } else { original.addSuppressed(suppressed); // depends on control dependency: [if], data = [none] return original; // depends on control dependency: [if], data = [none] } } }
public class class_name { @Override protected Class resolveClass(ObjectStreamClass classDesc) throws IOException, ClassNotFoundException { for (ClassLoader loader : classLoaderRegistration) { try { return resolveClass(classDesc, loader); } catch (ClassNotFoundException e) { // ignore } } return super.resolveClass(classDesc); } }
public class class_name { @Override protected Class resolveClass(ObjectStreamClass classDesc) throws IOException, ClassNotFoundException { for (ClassLoader loader : classLoaderRegistration) { try { return resolveClass(classDesc, loader); // depends on control dependency: [try], data = [none] } catch (ClassNotFoundException e) { // ignore } // depends on control dependency: [catch], data = [none] } return super.resolveClass(classDesc); } }
public class class_name { protected void makeCoordinateSystemsMaximal(NetcdfDataset ncDataset) { boolean requireCompleteCoordSys = !ncDataset.getEnhanceMode().contains(NetcdfDataset.Enhance.IncompleteCoordSystems); for (VarProcess vp : varList) { VariableEnhanced ve = (VariableEnhanced) vp.v; if (vp.hasCoordinateSystem() || !vp.isData()) continue; // look through all axes that fit List<CoordinateAxis> axisList = new ArrayList<>(); List<CoordinateAxis> axes = ncDataset.getCoordinateAxes(); for (CoordinateAxis axis : axes) { if (isCoordinateAxisForVariable(axis, ve)) axisList.add(axis); } if (axisList.size() < 2) continue; String csName = CoordinateSystem.makeName(axisList); CoordinateSystem cs = ncDataset.findCoordinateSystem(csName); boolean okToBuild = false; // do coordinate systems need to be complete? // default enhance mode is yes, they must be complete if (requireCompleteCoordSys) { if (cs != null) { // only build if coordinate system is complete okToBuild = cs.isComplete(ve); } } else { // coordinate system can be incomplete, so we're ok to build if we find something okToBuild = true; } if (cs != null && okToBuild) { ve.addCoordinateSystem(cs); parseInfo.format(" assigned maximal CoordSystem '%s' for var= %s%n", cs.getName(), ve.getFullName()); } else { CoordinateSystem csnew = new CoordinateSystem(ncDataset, axisList, null); // again, do coordinate systems need to be complete? // default enhance mode is yes, they must be complete if (requireCompleteCoordSys) { // only build if new coordinate system is complete okToBuild = csnew.isComplete(ve); } if (okToBuild) { csnew.setImplicit(true); ve.addCoordinateSystem(csnew); ncDataset.addCoordinateSystem(csnew); parseInfo.format(" created maximal CoordSystem '%s' for var= %s%n", csnew.getName(), ve.getFullName()); } } } } }
/*
 * NOTE(review): this block duplicates the method above, with machine-generated
 * "// depends on control dependency: [...]" markers appended to statements.
 * Because the source has been flattened onto single physical lines, each
 * inline "//" comment swallows the remainder of its line — this block is
 * analysis data rather than compilable Java. Left byte-identical below.
 */
public class class_name { protected void makeCoordinateSystemsMaximal(NetcdfDataset ncDataset) { boolean requireCompleteCoordSys = !ncDataset.getEnhanceMode().contains(NetcdfDataset.Enhance.IncompleteCoordSystems); for (VarProcess vp : varList) { VariableEnhanced ve = (VariableEnhanced) vp.v; if (vp.hasCoordinateSystem() || !vp.isData()) continue; // look through all axes that fit List<CoordinateAxis> axisList = new ArrayList<>(); List<CoordinateAxis> axes = ncDataset.getCoordinateAxes(); for (CoordinateAxis axis : axes) { if (isCoordinateAxisForVariable(axis, ve)) axisList.add(axis); } if (axisList.size() < 2) continue; String csName = CoordinateSystem.makeName(axisList); CoordinateSystem cs = ncDataset.findCoordinateSystem(csName); boolean okToBuild = false; // do coordinate systems need to be complete? // default enhance mode is yes, they must be complete if (requireCompleteCoordSys) { if (cs != null) { // only build if coordinate system is complete okToBuild = cs.isComplete(ve); // depends on control dependency: [if], data = [none] } } else { // coordinate system can be incomplete, so we're ok to build if we find something okToBuild = true; // depends on control dependency: [if], data = [none] } if (cs != null && okToBuild) { ve.addCoordinateSystem(cs); // depends on control dependency: [if], data = [(cs] parseInfo.format(" assigned maximal CoordSystem '%s' for var= %s%n", cs.getName(), ve.getFullName()); // depends on control dependency: [if], data = [none] } else { CoordinateSystem csnew = new CoordinateSystem(ncDataset, axisList, null); // again, do coordinate systems need to be complete? 
// default enhance mode is yes, they must be complete if (requireCompleteCoordSys) { // only build if new coordinate system is complete okToBuild = csnew.isComplete(ve); // depends on control dependency: [if], data = [none] } if (okToBuild) { csnew.setImplicit(true); // depends on control dependency: [if], data = [none] ve.addCoordinateSystem(csnew); // depends on control dependency: [if], data = [none] ncDataset.addCoordinateSystem(csnew); // depends on control dependency: [if], data = [none] parseInfo.format(" created maximal CoordSystem '%s' for var= %s%n", csnew.getName(), ve.getFullName()); // depends on control dependency: [if], data = [none] } } } } }
public class class_name { public ThinPhysicalDatabaseParent getPDatabaseParent(Map<String,Object> properties, boolean bCreateIfNew) { if (m_PhysicalDatabaseParent == null) if (bCreateIfNew) { Map<String,Object> map = new Hashtable<String,Object>(); if (properties != null) map.putAll(properties); if (map.get(PhysicalDatabaseParent.APP) == null) map.put(PhysicalDatabaseParent.APP, this); // Access to the server, etc. m_PhysicalDatabaseParent = (ThinPhysicalDatabaseParent)ClassServiceUtility.getClassService().makeObjectFromClassName(Constants.ROOT_PACKAGE + "thin.base.db.mem.base.PhysicalDatabaseParent"); if (m_PhysicalDatabaseParent != null) m_PhysicalDatabaseParent.init(map); // Init } if (properties != null) { for (String strKey : properties.keySet()) { m_PhysicalDatabaseParent.setProperty(strKey, properties.get(strKey)); } } return m_PhysicalDatabaseParent; } }
/*
 * NOTE(review): duplicate of the method above with machine-generated
 * "// depends on control dependency: [...]" markers. Flattened onto one
 * physical line, so the inline "//" comments swallow the rest of the line;
 * analysis data, not compilable Java. Left byte-identical below. The NPE on
 * a null m_PhysicalDatabaseParent noted on the original applies here too.
 */
public class class_name { public ThinPhysicalDatabaseParent getPDatabaseParent(Map<String,Object> properties, boolean bCreateIfNew) { if (m_PhysicalDatabaseParent == null) if (bCreateIfNew) { Map<String,Object> map = new Hashtable<String,Object>(); if (properties != null) map.putAll(properties); if (map.get(PhysicalDatabaseParent.APP) == null) map.put(PhysicalDatabaseParent.APP, this); // Access to the server, etc. m_PhysicalDatabaseParent = (ThinPhysicalDatabaseParent)ClassServiceUtility.getClassService().makeObjectFromClassName(Constants.ROOT_PACKAGE + "thin.base.db.mem.base.PhysicalDatabaseParent"); if (m_PhysicalDatabaseParent != null) m_PhysicalDatabaseParent.init(map); // Init // depends on control dependency: [if], data = [none] } if (properties != null) { for (String strKey : properties.keySet()) { m_PhysicalDatabaseParent.setProperty(strKey, properties.get(strKey)); // depends on control dependency: [for], data = [strKey] } } return m_PhysicalDatabaseParent; } }
public class class_name {
    /**
     * Splits the given keys into batch-sized chunks by repeatedly asking
     * {@code getBatchChunk} for successive pages until it returns null.
     *
     * @param keys the full list of keys to partition
     * @return a queue of key chunks, in page order
     */
    private Queue<List<Data>> createBatchChunks(List<Data> keys) {
        Queue<List<Data>> result = new LinkedList<>();
        final int batchSize = getLoadBatchSize();
        // A null chunk signals that all pages have been consumed.
        for (int page = 0; ; page++) {
            List<Data> chunk = getBatchChunk(keys, batchSize, page);
            if (chunk == null) {
                break;
            }
            result.add(chunk);
        }
        return result;
    }
}
/*
 * NOTE(review): duplicate of the method above with machine-generated
 * "// depends on control dependency: [...]" markers; flattened onto one line,
 * so it is analysis data rather than compilable Java. Left byte-identical.
 */
public class class_name { private Queue<List<Data>> createBatchChunks(List<Data> keys) { Queue<List<Data>> chunks = new LinkedList<>(); int loadBatchSize = getLoadBatchSize(); int page = 0; List<Data> tmpKeys; while ((tmpKeys = getBatchChunk(keys, loadBatchSize, page++)) != null) { chunks.add(tmpKeys); // depends on control dependency: [while], data = [none] } return chunks; } }
public class class_name {
    /**
     * URL-decodes {@code path} using the given character encoding.
     *
     * @param path     the URL-encoded string to decode
     * @param encoding the character encoding name passed to {@link URLDecoder}
     * @return the decoded string
     *         (throws the project-specific exception built by
     *         {@code MESSAGES.cannotDecode} when the encoding is unsupported)
     */
    public static String decode(String path, String encoding) {
        try {
            String decoded = URLDecoder.decode(path, encoding);
            return decoded;
        } catch (UnsupportedEncodingException e) {
            // Wrap in the project's message-based exception, preserving the cause.
            throw MESSAGES.cannotDecode(path, encoding, e);
        }
    }
}
/*
 * NOTE(review): duplicate of the method above with machine-generated
 * "// depends on control dependency: [...]" markers; flattened onto one line,
 * so it is analysis data rather than compilable Java. Left byte-identical.
 */
public class class_name { public static String decode(String path, String encoding) { try { return URLDecoder.decode(path, encoding); // depends on control dependency: [try], data = [none] } catch (UnsupportedEncodingException e) { throw MESSAGES.cannotDecode(path,encoding,e); } // depends on control dependency: [catch], data = [none] } }
/*
 * Builds the TableConfig tree (station -> profile -> z level) for a CF
 * timeSeriesProfile feature, dispatching on the detected encoding
 * (single / multidim / raggedIndex; raggedContiguous is rejected) and
 * validating time/z dimension shapes with asserts.
 *
 * NOTE(review): the source has been flattened onto single physical lines, so
 * the original inline "//" comments swallow the remainder of each line and
 * the block is not compilable as-is. The encoding dispatch and rank/dimension
 * assertions are too order-sensitive to safely reformat in review — left
 * byte-identical below.
 */
public class class_name { protected TableConfig getTimeSeriesProfileConfig(NetcdfDataset ds, EncodingInfo info, Formatter errlog) throws IOException { if (!identifyEncodingTimeSeriesProfile(ds, info, CF.FeatureType.timeSeriesProfile, errlog)) return null; VariableDS time = CoordSysEvaluator.findCoordByType(ds, AxisType.Time); if (time == null) return null; if (time.getRank() == 0 && time.getParentStructure() == null) { errlog.format("CFpointObs timeSeriesProfile cannot have a scalar time coordinate%n"); // why ? return null; } /* distinguish multidim from flat if ((info.encoding == Encoding.multidim) && (time.getRank() < 3) && (z.getRank() < 3)) { Variable parentId = identifyParent(ds, CF.FeatureType.timeSeriesProfile); if ((parentId != null) && (parentId.getRank() == 1) && (parentId.getDimension(0).equals(time.getDimension(0)))) { if (time.getRank() == 1) // multidim time must be 2 or 3 dim info = new EncodingInfo(Encoding.flat, parentId); else if (time.getRank() == 2) { Dimension zDim = z.getDimension(z.getRank() - 1); // may be z(z) or z(profile, z) if (zDim.equals(time.getDimension(1))) // flat 2D time will have time as inner dim info = new EncodingInfo(Encoding.flat, parentId); } } } */ TableConfig stationTable = makeStationTable(ds, FeatureType.STATION_PROFILE, info, errlog); if (stationTable == null) return null; //Dimension stationDim = ds.findDimension(stationTable.dimName); //Dimension profileDim = null; //Dimension zDim = null; VariableDS z = info.alt; switch (info.encoding) { case single: { assert ((time.getRank() >= 1) && (time.getRank() <= 2)) : "time must be rank 1 or 2"; assert ((z.getRank() >= 1) && (z.getRank() <= 2)) : "z must be rank 1 or 2"; if (time.getRank() == 2) { if (z.getRank() == 2) // 2d time, 2d z assert time.getDimensions().equals(z.getDimensions()) : "rank-2 time and z dimensions must be the same"; else // 2d time, 1d z assert time.getDimension(1).equals(z.getDimension(0)) : "rank-2 time must have z inner dimension"; //profileDim = 
time.getDimension(0); //zDim = time.getDimension(1); } else { // 1d time if (z.getRank() == 2) { // 1d time, 2d z assert z.getDimension(0).equals(time.getDimension(0)) : "rank-2 z must have time outer dimension"; //profileDim = z.getDimension(0); //zDim = z.getDimension(1); } else { // 1d time, 1d z assert !time.getDimension(0).equals(z.getDimension(0)) : "time and z dimensions must be different"; //profileDim = time.getDimension(0); //zDim = z.getDimension(0); } } // make profile table TableConfig profileTable = makeStructTable(ds, FeatureType.PROFILE, new EncodingInfo().set(Encoding.multidim, info.childDim), errlog); if (profileTable == null) return null; if (time.getRank() == 1) {// join time(time) //profileTable.addJoin(new JoinArray(time, JoinArray.Type.raw, 0)); profileTable.addJoin(new JoinArray(time, JoinArray.Type.level, 1)); profileTable.time = time.getFullName(); } stationTable.addChild(profileTable); // make the inner (z) table TableConfig zTable = makeMultidimInner(ds, profileTable, info.grandChildDim, info, errlog); if (z.getRank() == 1) { // join z(z) zTable.addJoin(new JoinArray(z, JoinArray.Type.raw, 0)); zTable.elev = z.getFullName(); } profileTable.addChild(zTable); break; } case multidim: { assert ((time.getRank() >= 1) && (time.getRank() <= 3)) : "time must be rank 2 or 3"; assert ((z.getRank() == 1) || (z.getRank() == 3)) : "z must be rank 1 or 3"; if (time.getRank() == 3) { if (z.getRank() == 3) // 3d time, 3d z assert time.getDimensions().equals(z.getDimensions()) : "rank-3 time and z dimensions must be the same"; else // 3d time, 1d z assert time.getDimension(2).equals(z.getDimension(0)) : "rank-3 time must have z inner dimension"; //profileDim = time.getDimension(1); //zDim = time.getDimension(2); } else if (time.getRank() == 2) { // 2d time if (z.getRank() == 3) { // 2d time, 3d z assert z.getDimension(1).equals(time.getDimension(1)) : "rank-2 time must have time inner dimension"; //profileDim = z.getDimension(1); //zDim = 
z.getDimension(2); } else { // 2d time, 1d z assert !time.getDimension(0).equals(z.getDimension(0)) : "time and z dimensions must be different"; assert !time.getDimension(1).equals(z.getDimension(0)) : "time and z dimensions must be different"; //profileDim = time.getDimension(1); //zDim = z.getDimension(0); } } else { // 1d time if (z.getRank() == 1) { assert !time.getDimension(0).equals(z.getDimension(0)) : "time and z dimensions must be different"; } } TableConfig profileTable = makeMultidimInner(ds, stationTable, info.childDim, info, errlog); if (profileTable == null) return null; if (time.getRank() == 1) {// join time(time) profileTable.addJoin(new JoinArray(time, JoinArray.Type.level, 1)); profileTable.time = time.getFullName(); } stationTable.addChild(profileTable); // make the inner (z) table TableConfig zTable = makeMultidimInner3D(ds, stationTable, profileTable, info.grandChildDim, errlog); if (z.getRank() == 1) { // join z(z) zTable.addJoin(new JoinArray(z, JoinArray.Type.raw, 0)); zTable.elev = z.getFullName(); } profileTable.addChild(zTable); break; } case raggedIndex: { TableConfig profileTable = makeRaggedIndexChildTable(ds, info.parentDim, info.childDim, info.ragged_parentIndex, errlog); stationTable.addChild(profileTable); profileTable.numRecords = info.ragged_rowSize.getFullName(); TableConfig obsTable = makeRaggedContiguousChildTable(ds, info.childDim, info.grandChildDim, info.grandChildStruct, errlog); profileTable.addChild(obsTable); break; } case raggedContiguous: // NOT USED throw new UnsupportedOperationException("CFpointObs: timeSeriesProfile raggedContiguous encoding not allowed"); /* case flat: //profileDim = time.getDimension(0); // may be time(profile) or time(profile, z) Variable parentId = identifyParent(ds, CF.FeatureType.timeSeriesProfile); TableConfig profileTable = makeStructTable(ds, FeatureType.PROFILE, info, errlog); profileTable.parentIndex = parentId.getName(); profileTable.stnId = 
findNameVariableWithStandardNameAndDimension(ds, CF.STATION_ID, info.childDim, errlog); profileTable.stnDesc = findNameVariableWithStandardNameAndDimension(ds, CF.STATION_DESC, info.childDim, errlog); profileTable.stnWmoId = findNameVariableWithStandardNameAndDimension(ds, CF.STATION_WMOID, info.childDim, errlog); profileTable.stnAlt = findNameVariableWithStandardNameAndDimension(ds, CF.STATION_ALTITUDE, info.childDim, errlog); stationTable.addChild(profileTable); //zDim = z.getDimension(z.getRank() - 1); // may be z(z) or z(profile, z) TableConfig zTable = makeMultidimInner(ds, profileTable, info.grandChildDim, errlog); if (z.getRank() == 1) // z(z) zTable.addJoin(new JoinArray(z, JoinArray.Type.raw, 0)); profileTable.addChild(zTable); break; */ } return stationTable; } }
/*
 * NOTE(review): duplicate of the method above with machine-generated
 * "// depends on control dependency: [...]" markers appended to statements.
 * Flattened onto single physical lines, so inline "//" comments swallow the
 * remainder of each line; analysis data, not compilable Java. Left
 * byte-identical below.
 */
public class class_name { protected TableConfig getTimeSeriesProfileConfig(NetcdfDataset ds, EncodingInfo info, Formatter errlog) throws IOException { if (!identifyEncodingTimeSeriesProfile(ds, info, CF.FeatureType.timeSeriesProfile, errlog)) return null; VariableDS time = CoordSysEvaluator.findCoordByType(ds, AxisType.Time); if (time == null) return null; if (time.getRank() == 0 && time.getParentStructure() == null) { errlog.format("CFpointObs timeSeriesProfile cannot have a scalar time coordinate%n"); // why ? return null; } /* distinguish multidim from flat if ((info.encoding == Encoding.multidim) && (time.getRank() < 3) && (z.getRank() < 3)) { Variable parentId = identifyParent(ds, CF.FeatureType.timeSeriesProfile); if ((parentId != null) && (parentId.getRank() == 1) && (parentId.getDimension(0).equals(time.getDimension(0)))) { if (time.getRank() == 1) // multidim time must be 2 or 3 dim info = new EncodingInfo(Encoding.flat, parentId); else if (time.getRank() == 2) { Dimension zDim = z.getDimension(z.getRank() - 1); // may be z(z) or z(profile, z) if (zDim.equals(time.getDimension(1))) // flat 2D time will have time as inner dim info = new EncodingInfo(Encoding.flat, parentId); } } } */ TableConfig stationTable = makeStationTable(ds, FeatureType.STATION_PROFILE, info, errlog); if (stationTable == null) return null; //Dimension stationDim = ds.findDimension(stationTable.dimName); //Dimension profileDim = null; //Dimension zDim = null; VariableDS z = info.alt; switch (info.encoding) { case single: { assert ((time.getRank() >= 1) && (time.getRank() <= 2)) : "time must be rank 1 or 2"; assert ((z.getRank() >= 1) && (z.getRank() <= 2)) : "z must be rank 1 or 2"; if (time.getRank() == 2) { if (z.getRank() == 2) // 2d time, 2d z assert time.getDimensions().equals(z.getDimensions()) : "rank-2 time and z dimensions must be the same"; // depends on control dependency: [if], data = [none] else // 2d time, 1d z assert time.getDimension(1).equals(z.getDimension(0)) : 
"rank-2 time must have z inner dimension"; // depends on control dependency: [if], data = [none] //profileDim = time.getDimension(0); //zDim = time.getDimension(1); } else { // 1d time if (z.getRank() == 2) { // 1d time, 2d z assert z.getDimension(0).equals(time.getDimension(0)) : "rank-2 z must have time outer dimension"; // depends on control dependency: [if], data = [none] //profileDim = z.getDimension(0); //zDim = z.getDimension(1); } else { // 1d time, 1d z assert !time.getDimension(0).equals(z.getDimension(0)) : "time and z dimensions must be different"; //profileDim = time.getDimension(0); //zDim = z.getDimension(0); } } // make profile table TableConfig profileTable = makeStructTable(ds, FeatureType.PROFILE, new EncodingInfo().set(Encoding.multidim, info.childDim), errlog); if (profileTable == null) return null; if (time.getRank() == 1) {// join time(time) //profileTable.addJoin(new JoinArray(time, JoinArray.Type.raw, 0)); profileTable.addJoin(new JoinArray(time, JoinArray.Type.level, 1)); // depends on control dependency: [if], data = [1)] profileTable.time = time.getFullName(); // depends on control dependency: [if], data = [none] } stationTable.addChild(profileTable); // make the inner (z) table TableConfig zTable = makeMultidimInner(ds, profileTable, info.grandChildDim, info, errlog); if (z.getRank() == 1) { // join z(z) zTable.addJoin(new JoinArray(z, JoinArray.Type.raw, 0)); // depends on control dependency: [if], data = [none] zTable.elev = z.getFullName(); // depends on control dependency: [if], data = [none] } profileTable.addChild(zTable); break; } case multidim: { assert ((time.getRank() >= 1) && (time.getRank() <= 3)) : "time must be rank 2 or 3"; assert ((z.getRank() == 1) || (z.getRank() == 3)) : "z must be rank 1 or 3"; if (time.getRank() == 3) { if (z.getRank() == 3) // 3d time, 3d z assert time.getDimensions().equals(z.getDimensions()) : "rank-3 time and z dimensions must be the same"; else // 3d time, 1d z assert 
time.getDimension(2).equals(z.getDimension(0)) : "rank-3 time must have z inner dimension"; //profileDim = time.getDimension(1); //zDim = time.getDimension(2); } else if (time.getRank() == 2) { // 2d time if (z.getRank() == 3) { // 2d time, 3d z assert z.getDimension(1).equals(time.getDimension(1)) : "rank-2 time must have time inner dimension"; //profileDim = z.getDimension(1); //zDim = z.getDimension(2); } else { // 2d time, 1d z assert !time.getDimension(0).equals(z.getDimension(0)) : "time and z dimensions must be different"; assert !time.getDimension(1).equals(z.getDimension(0)) : "time and z dimensions must be different"; //profileDim = time.getDimension(1); //zDim = z.getDimension(0); } } else { // 1d time if (z.getRank() == 1) { assert !time.getDimension(0).equals(z.getDimension(0)) : "time and z dimensions must be different"; } } TableConfig profileTable = makeMultidimInner(ds, stationTable, info.childDim, info, errlog); if (profileTable == null) return null; if (time.getRank() == 1) {// join time(time) profileTable.addJoin(new JoinArray(time, JoinArray.Type.level, 1)); profileTable.time = time.getFullName(); } stationTable.addChild(profileTable); // make the inner (z) table TableConfig zTable = makeMultidimInner3D(ds, stationTable, profileTable, info.grandChildDim, errlog); if (z.getRank() == 1) { // join z(z) zTable.addJoin(new JoinArray(z, JoinArray.Type.raw, 0)); zTable.elev = z.getFullName(); } profileTable.addChild(zTable); break; } case raggedIndex: { TableConfig profileTable = makeRaggedIndexChildTable(ds, info.parentDim, info.childDim, info.ragged_parentIndex, errlog); stationTable.addChild(profileTable); profileTable.numRecords = info.ragged_rowSize.getFullName(); TableConfig obsTable = makeRaggedContiguousChildTable(ds, info.childDim, info.grandChildDim, info.grandChildStruct, errlog); profileTable.addChild(obsTable); break; } case raggedContiguous: // NOT USED throw new UnsupportedOperationException("CFpointObs: timeSeriesProfile 
raggedContiguous encoding not allowed"); /* case flat: //profileDim = time.getDimension(0); // may be time(profile) or time(profile, z) Variable parentId = identifyParent(ds, CF.FeatureType.timeSeriesProfile); TableConfig profileTable = makeStructTable(ds, FeatureType.PROFILE, info, errlog); profileTable.parentIndex = parentId.getName(); profileTable.stnId = findNameVariableWithStandardNameAndDimension(ds, CF.STATION_ID, info.childDim, errlog); profileTable.stnDesc = findNameVariableWithStandardNameAndDimension(ds, CF.STATION_DESC, info.childDim, errlog); profileTable.stnWmoId = findNameVariableWithStandardNameAndDimension(ds, CF.STATION_WMOID, info.childDim, errlog); profileTable.stnAlt = findNameVariableWithStandardNameAndDimension(ds, CF.STATION_ALTITUDE, info.childDim, errlog); stationTable.addChild(profileTable); //zDim = z.getDimension(z.getRank() - 1); // may be z(z) or z(profile, z) TableConfig zTable = makeMultidimInner(ds, profileTable, info.grandChildDim, errlog); if (z.getRank() == 1) // z(z) zTable.addJoin(new JoinArray(z, JoinArray.Type.raw, 0)); profileTable.addChild(zTable); break; */ } return stationTable; } }
public class class_name { public static void beginSfsbCreation() { SFSBCallStackThreadData data = CURRENT.get(); int no = data.creationBeanNestingLevel; if (no == 0) { data.creationTimeXPCRegistration = new HashMap<String, ExtendedEntityManager>(); // create new tracking structure (passing in parent levels tracking structure or null if toplevel) data.creationTimeInjectedXPCs = new SFSBInjectedXPCs(data.creationTimeInjectedXPCs, null); } else { // create new tracking structure (passing in parent levels tracking structure or null if toplevel) SFSBInjectedXPCs parent = data.creationTimeInjectedXPCs; data.creationTimeInjectedXPCs = new SFSBInjectedXPCs(parent, parent.getTopLevel()); } data.creationBeanNestingLevel++; } }
/*
 * NOTE(review): duplicate of the method above with machine-generated
 * "// depends on control dependency: [...]" markers; flattened onto one line,
 * so it is analysis data rather than compilable Java. Left byte-identical.
 */
public class class_name { public static void beginSfsbCreation() { SFSBCallStackThreadData data = CURRENT.get(); int no = data.creationBeanNestingLevel; if (no == 0) { data.creationTimeXPCRegistration = new HashMap<String, ExtendedEntityManager>(); // depends on control dependency: [if], data = [none] // create new tracking structure (passing in parent levels tracking structure or null if toplevel) data.creationTimeInjectedXPCs = new SFSBInjectedXPCs(data.creationTimeInjectedXPCs, null); // depends on control dependency: [if], data = [none] } else { // create new tracking structure (passing in parent levels tracking structure or null if toplevel) SFSBInjectedXPCs parent = data.creationTimeInjectedXPCs; data.creationTimeInjectedXPCs = new SFSBInjectedXPCs(parent, parent.getTopLevel()); // depends on control dependency: [if], data = [none] } data.creationBeanNestingLevel++; } }
public class class_name {
    /**
     * Reports a one-shot exposure the first time this cell is bound, via the
     * {@code ExposureSupport} service if one is registered.
     *
     * @param offset      offset forwarded to the exposure callback
     * @param position    position forwarded to the exposure callback
     * @param showFromEnd not used by this method; part of the bind interface
     */
    public void onBindCell(int offset, int position, boolean showFromEnd) {
        if (mIsExposed || serviceManager == null) {
            return; // already reported, or no service manager available
        }
        ExposureSupport support = serviceManager.getService(ExposureSupport.class);
        if (support == null) {
            return;
        }
        // Record the exposure before dispatching, same ordering as the original.
        mIsExposed = true;
        support.onExposure(this, offset, position);
    }
}
/*
 * NOTE(review): duplicate of the method above with machine-generated
 * "// depends on control dependency: [...]" markers; flattened onto one line,
 * so it is analysis data rather than compilable Java. Left byte-identical.
 */
public class class_name { public void onBindCell(int offset, int position, boolean showFromEnd) { if (!mIsExposed && serviceManager != null) { ExposureSupport exposureSupport = serviceManager.getService(ExposureSupport.class); if (exposureSupport != null) { mIsExposed = true; // depends on control dependency: [if], data = [none] exposureSupport.onExposure(this, offset, position); // depends on control dependency: [if], data = [none] } } } }
public class class_name {
    /**
     * Resolves the site for a root path, falling back first to the given site
     * root and finally to the default site.
     *
     * @param rootPath         root path to resolve a site for
     * @param fallbackSiteRoot site root used when no site matches the root path
     * @return the resolved site (never skips the default-site fallback)
     */
    public CmsSite getSite(String rootPath, String fallbackSiteRoot) {
        CmsSite byRootPath = getSiteForRootPath(rootPath);
        if (byRootPath != null) {
            return byRootPath;
        }
        CmsSite byFallbackRoot = getSiteForSiteRoot(fallbackSiteRoot);
        if (byFallbackRoot != null) {
            return byFallbackRoot;
        }
        return getDefaultSite();
    }
}
/*
 * NOTE(review): duplicate of the method above with machine-generated
 * "// depends on control dependency: [...]" markers; flattened onto one line,
 * so it is analysis data rather than compilable Java. Left byte-identical.
 */
public class class_name { public CmsSite getSite(String rootPath, String fallbackSiteRoot) { CmsSite result = getSiteForRootPath(rootPath); if (result == null) { result = getSiteForSiteRoot(fallbackSiteRoot); // depends on control dependency: [if], data = [none] if (result == null) { result = getDefaultSite(); // depends on control dependency: [if], data = [none] } } return result; } }
public class class_name {
    /**
     * Appends a new node record to the DTM's parallel arrays and wires it into
     * the tree (parent / first-child / next-sibling links).
     *
     * @param type              DTM node type (e.g. DTM.NAMESPACE_NODE, DTM.ATTRIBUTE_NODE)
     * @param expandedTypeID    expanded type id stored in m_exptype
     * @param parentIndex       index of the parent node, or DTM.NULL
     * @param previousSibling   index of the previous sibling, or DTM.NULL
     * @param dataOrPrefix      value stored in m_dataOrQName
     * @param canHaveFirstChild not consulted in this body — TODO confirm it is
     *                          intentionally unused / reserved for subclasses
     * @return the index of the newly added node
     */
    protected final int addNode(int type, int expandedTypeID, int parentIndex, int previousSibling, int dataOrPrefix, boolean canHaveFirstChild) {
        // Common to all nodes:
        int nodeIndex = m_size++;
        // Have we overflowed a DTM Identity's addressing range?
        //if(m_dtmIdent.size() == (nodeIndex>>>DTMManager.IDENT_DTM_NODE_BITS))
        if (nodeIndex == m_maxNodeIndex) {
            addNewDTMID(nodeIndex);
            m_maxNodeIndex += (1 << DTMManager.IDENT_DTM_NODE_BITS);
        }
        // Append one record across all the parallel arrays; child/sibling links
        // start out as DTM.NULL and are patched below (or later).
        m_firstch.addElement(DTM.NULL);
        m_nextsib.addElement(DTM.NULL);
        m_parent.addElement(parentIndex);
        m_exptype.addElement(expandedTypeID);
        m_dataOrQName.addElement(dataOrPrefix);
        // m_prevsib is optional; only tracked when the array exists.
        if (m_prevsib != null) {
            m_prevsib.addElement(previousSibling);
        }
        if (m_locator != null && m_useSourceLocationProperty) {
            setSourceLocation();
        }
        // Note that nextSibling is not processed until charactersFlush()
        // is called, to handle successive characters() events.
        // Special handling by type: Declare namespaces, attach first child
        switch(type) {
        case DTM.NAMESPACE_NODE:
            declareNamespaceInContext(parentIndex,nodeIndex);
            break;
        case DTM.ATTRIBUTE_NODE:
            // Attributes are not linked into the sibling/first-child chain here.
            break;
        default:
            // Link as next sibling of the previous node, or as first child of
            // the parent when there is no previous sibling.
            if (DTM.NULL != previousSibling) {
                m_nextsib.setElementAt(nodeIndex,previousSibling);
            } else if (DTM.NULL != parentIndex) {
                m_firstch.setElementAt(nodeIndex,parentIndex);
            }
            break;
        }
        return nodeIndex;
    }
}
/*
 * NOTE(review): duplicate of the method above with machine-generated
 * "// depends on control dependency: [...]" markers; flattened onto one line,
 * so it is analysis data rather than compilable Java. Left byte-identical.
 */
public class class_name { protected final int addNode(int type, int expandedTypeID, int parentIndex, int previousSibling, int dataOrPrefix, boolean canHaveFirstChild) { // Common to all nodes: int nodeIndex = m_size++; // Have we overflowed a DTM Identity's addressing range? //if(m_dtmIdent.size() == (nodeIndex>>>DTMManager.IDENT_DTM_NODE_BITS)) if (nodeIndex == m_maxNodeIndex) { addNewDTMID(nodeIndex); // depends on control dependency: [if], data = [(nodeIndex] m_maxNodeIndex += (1 << DTMManager.IDENT_DTM_NODE_BITS); // depends on control dependency: [if], data = [none] } m_firstch.addElement(DTM.NULL); m_nextsib.addElement(DTM.NULL); m_parent.addElement(parentIndex); m_exptype.addElement(expandedTypeID); m_dataOrQName.addElement(dataOrPrefix); if (m_prevsib != null) { m_prevsib.addElement(previousSibling); // depends on control dependency: [if], data = [none] } if (m_locator != null && m_useSourceLocationProperty) { setSourceLocation(); // depends on control dependency: [if], data = [none] } // Note that nextSibling is not processed until charactersFlush() // is called, to handle successive characters() events. // Special handling by type: Declare namespaces, attach first child switch(type) { case DTM.NAMESPACE_NODE: declareNamespaceInContext(parentIndex,nodeIndex); break; case DTM.ATTRIBUTE_NODE: break; default: if (DTM.NULL != previousSibling) { m_nextsib.setElementAt(nodeIndex,previousSibling); // depends on control dependency: [if], data = [previousSibling)] } else if (DTM.NULL != parentIndex) { m_firstch.setElementAt(nodeIndex,parentIndex); // depends on control dependency: [if], data = [parentIndex)] } break; } return nodeIndex; } }
public class class_name { public void remove(Com4jObject obj) { ListIterator<WeakReference<Com4jObject>> itr = objects.listIterator(); while(itr.hasNext()) { Com4jObject o = itr.next().get(); if(o == obj) { //Intentional identity compare...each Wrapper instance owns a single ref. itr.remove(); break; } } } }
/*
 * NOTE(review): duplicate of the method above with machine-generated
 * "// depends on control dependency: [...]" markers; flattened onto one line,
 * so it is analysis data rather than compilable Java. Left byte-identical.
 */
public class class_name { public void remove(Com4jObject obj) { ListIterator<WeakReference<Com4jObject>> itr = objects.listIterator(); while(itr.hasNext()) { Com4jObject o = itr.next().get(); if(o == obj) { //Intentional identity compare...each Wrapper instance owns a single ref. itr.remove(); // depends on control dependency: [if], data = [none] break; } } } }
/*
 * Emits inferred UML dependency relations for a class: harvests candidate
 * types from method returns/parameters, field types (unless relationship
 * inference is on), generic type arguments and bounds, and (optionally)
 * imported classes, then records a DEPEND relation for each visible,
 * not-already-related destination class.
 *
 * NOTE(review): the source has been flattened onto single physical lines, so
 * the original inline "//" comments swallow the remainder of each line and
 * the block is not compilable as-is. Too large and option-coupled to safely
 * reformat in review — left byte-identical below.
 */
public class class_name { public void printInferredDependencies(ClassDoc c) { if (hidden(c)) return; Options opt = optionProvider.getOptionsFor(c); Set<Type> types = new HashSet<Type>(); // harvest method return and parameter types for (MethodDoc method : filterByVisibility(c.methods(false), opt.inferDependencyVisibility)) { types.add(method.returnType()); for (Parameter parameter : method.parameters()) { types.add(parameter.type()); } } // and the field types if (!opt.inferRelationships) { for (FieldDoc field : filterByVisibility(c.fields(false), opt.inferDependencyVisibility)) { types.add(field.type()); } } // see if there are some type parameters if (c.asParameterizedType() != null) { ParameterizedType pt = c.asParameterizedType(); types.addAll(Arrays.asList(pt.typeArguments())); } // see if type parameters extend something for(TypeVariable tv: c.typeParameters()) { if(tv.bounds().length > 0 ) types.addAll(Arrays.asList(tv.bounds())); } // and finally check for explicitly imported classes (this // assumes there are no unused imports...) 
if (opt.useImports) types.addAll(Arrays.asList(importedClasses(c))); // compute dependencies for (Type type : types) { // skip primitives and type variables, as well as dependencies // on the source class if (type.isPrimitive() || type instanceof WildcardType || type instanceof TypeVariable || c.toString().equals(type.asClassDoc().toString())) continue; // check if the destination is excluded from inference ClassDoc fc = type.asClassDoc(); if (hidden(fc)) continue; // check if source and destination are in the same package and if we are allowed // to infer dependencies between classes in the same package if(!opt.inferDepInPackage && c.containingPackage().equals(fc.containingPackage())) continue; // if source and dest are not already linked, add a dependency RelationPattern rp = getClassInfo(c, true).getRelation(fc.toString()); if (rp == null || rp.matchesOne(new RelationPattern(RelationDirection.OUT))) { relation(opt, RelationType.DEPEND, c, fc, "", "", ""); } } } }
/*
 * NOTE(review): duplicate of the method above with machine-generated
 * "// depends on control dependency: [...]" markers; flattened onto single
 * physical lines, so it is analysis data rather than compilable Java.
 * Left byte-identical below.
 */
public class class_name { public void printInferredDependencies(ClassDoc c) { if (hidden(c)) return; Options opt = optionProvider.getOptionsFor(c); Set<Type> types = new HashSet<Type>(); // harvest method return and parameter types for (MethodDoc method : filterByVisibility(c.methods(false), opt.inferDependencyVisibility)) { types.add(method.returnType()); // depends on control dependency: [for], data = [method] for (Parameter parameter : method.parameters()) { types.add(parameter.type()); // depends on control dependency: [for], data = [parameter] } } // and the field types if (!opt.inferRelationships) { for (FieldDoc field : filterByVisibility(c.fields(false), opt.inferDependencyVisibility)) { types.add(field.type()); // depends on control dependency: [for], data = [field] } } // see if there are some type parameters if (c.asParameterizedType() != null) { ParameterizedType pt = c.asParameterizedType(); types.addAll(Arrays.asList(pt.typeArguments())); // depends on control dependency: [if], data = [none] } // see if type parameters extend something for(TypeVariable tv: c.typeParameters()) { if(tv.bounds().length > 0 ) types.addAll(Arrays.asList(tv.bounds())); } // and finally check for explicitly imported classes (this // assumes there are no unused imports...) 
if (opt.useImports) types.addAll(Arrays.asList(importedClasses(c))); // compute dependencies for (Type type : types) { // skip primitives and type variables, as well as dependencies // on the source class if (type.isPrimitive() || type instanceof WildcardType || type instanceof TypeVariable || c.toString().equals(type.asClassDoc().toString())) continue; // check if the destination is excluded from inference ClassDoc fc = type.asClassDoc(); if (hidden(fc)) continue; // check if source and destination are in the same package and if we are allowed // to infer dependencies between classes in the same package if(!opt.inferDepInPackage && c.containingPackage().equals(fc.containingPackage())) continue; // if source and dest are not already linked, add a dependency RelationPattern rp = getClassInfo(c, true).getRelation(fc.toString()); if (rp == null || rp.matchesOne(new RelationPattern(RelationDirection.OUT))) { relation(opt, RelationType.DEPEND, c, fc, "", "", ""); // depends on control dependency: [if], data = [none] } } } }
public class class_name { public static String[] translatePathName(String path) { path = prettifyPath(path); if (path.indexOf('/') != 0) path = '/' + path; int index = path.lastIndexOf('/'); // remove slash at the end if (index == path.length() - 1) path = path.substring(0, path.length() - 1); index = path.lastIndexOf('/'); String name; if (index == -1) { name = path; path = "/"; } else { name = path.substring(index + 1); path = path.substring(0, index + 1); } return new String[] { path, name }; } }
/*
 * NOTE(review): duplicate of the method above with machine-generated
 * "// depends on control dependency: [...]" markers; flattened onto one line,
 * so it is analysis data rather than compilable Java. Left byte-identical.
 */
public class class_name { public static String[] translatePathName(String path) { path = prettifyPath(path); if (path.indexOf('/') != 0) path = '/' + path; int index = path.lastIndexOf('/'); // remove slash at the end if (index == path.length() - 1) path = path.substring(0, path.length() - 1); index = path.lastIndexOf('/'); String name; if (index == -1) { name = path; // depends on control dependency: [if], data = [none] path = "/"; // depends on control dependency: [if], data = [none] } else { name = path.substring(index + 1); // depends on control dependency: [if], data = [(index] path = path.substring(0, index + 1); // depends on control dependency: [if], data = [none] } return new String[] { path, name }; } }
public class class_name {
    /**
     * Saves this field's value into the screen's command history: pops the top
     * history entry and, when it is the command that opened this window (it
     * mentions the cached record owner's class name), rewrites it with the
     * field's current value before pushing it back.
     *
     * @param recordOwner the component whose parent screen's history is updated;
     *                    ignored if null
     */
    public void saveValue(ComponentParent recordOwner) {
        if (recordOwner != null) {
            BaseField field = this.getOwner();
            // Pop the most recent history entry (not re-pushed by popHistory itself).
            // NOTE(review): the cast below is redundant — recordOwner is already
            // declared as ComponentParent.
            String strCommand = ((ComponentParent)recordOwner).getParentScreen().popHistory(1, false);
            if (m_recordOwnerCache != null)
                if (strCommand != null)
                    if (strCommand.indexOf(m_recordOwnerCache.getClass().getName()) != -1)
            { // Yes this is the command to open this window
                // Rewrite the command URL with this field's current value.
                Map<String,Object> properties = new Hashtable<String,Object>();
                Util.parseArgs(properties, strCommand);
                properties.put(field.getFieldName(), field.toString());
                strCommand = Utility.propertiesToURL(null, properties);
            }
            // Push the (possibly rewritten) command back onto the history.
            ((ComponentParent)recordOwner).getParentScreen().pushHistory(strCommand, false);
        }
    }
}
/*
 * NOTE(review): duplicate of the method above with machine-generated
 * "// depends on control dependency: [...]" markers; flattened onto one line,
 * so it is analysis data rather than compilable Java. Left byte-identical.
 */
public class class_name { public void saveValue(ComponentParent recordOwner) { if (recordOwner != null) { BaseField field = this.getOwner(); String strCommand = ((ComponentParent)recordOwner).getParentScreen().popHistory(1, false); if (m_recordOwnerCache != null) if (strCommand != null) if (strCommand.indexOf(m_recordOwnerCache.getClass().getName()) != -1) { // Yes this is the command to open this window Map<String,Object> properties = new Hashtable<String,Object>(); Util.parseArgs(properties, strCommand); // depends on control dependency: [if], data = [none] properties.put(field.getFieldName(), field.toString()); // depends on control dependency: [if], data = [none] strCommand = Utility.propertiesToURL(null, properties); // depends on control dependency: [if], data = [none] } ((ComponentParent)recordOwner).getParentScreen().pushHistory(strCommand, false); // depends on control dependency: [if], data = [none] } } }
public class class_name {
    /**
     * Ensures medial point 1 is the "lower" of the two medial points along the
     * up axis of the default coordinate system, swapping them if necessary.
     * If neither Z nor Y is the up axis, the points are left untouched.
     */
    protected void ensureAIsLowerPoint() {
        CoordinateSystem3D system = CoordinateSystem3D.getDefaultCoordinateSystem();
        // Compare along whichever axis the coordinate system treats as "up".
        boolean mustSwap;
        if (system.isZOnUp()) {
            mustSwap = this.getMedial1().getZ() > this.getMedial2().getZ();
        } else if (system.isYOnUp()) {
            mustSwap = this.getMedial1().getY() > this.getMedial2().getY();
        } else {
            mustSwap = false;
        }
        if (mustSwap) {
            // Remember the first point's coordinates before overwriting it.
            double oldX = this.getMedial1().getX();
            double oldY = this.getMedial1().getY();
            double oldZ = this.getMedial1().getZ();
            this.getMedial1().set(this.getMedial2());
            this.getMedial2().set(oldX, oldY, oldZ);
        }
    }
}
public class class_name {
    /**
     * Reorders the two medial points so the first is the lower of the pair
     * along the "up" axis of the default 3D coordinate system.
     *
     * <p>Trailing {@code // depends on control dependency} comments are
     * analysis annotations, kept verbatim.</p>
     */
    protected void ensureAIsLowerPoint() {
        CoordinateSystem3D cs = CoordinateSystem3D.getDefaultCoordinateSystem();
        boolean swap = false;
        if (cs.isZOnUp()) {
            swap = (this.getMedial1().getZ() > this.getMedial2().getZ()); // depends on control dependency: [if], data = [none]
        }
        else if (cs.isYOnUp()){
            swap = (this.getMedial1().getY() > this.getMedial2().getY()); // depends on control dependency: [if], data = [none]
        }
        if (swap) {
            // Save the first point before overwriting it.
            double x = this.getMedial1().getX();
            double y = this.getMedial1().getY();
            double z = this.getMedial1().getZ();
            this.getMedial1().set(this.getMedial2()); // depends on control dependency: [if], data = [none]
            this.getMedial2().set(x, y, z); // depends on control dependency: [if], data = [none]
        }
    }
}
public class class_name {
    /**
     * Converts an epoch timestamp in milliseconds to GPS time in seconds.
     *
     * <p>315964800 is the Unix-epoch offset of the GPS epoch (1980-01-06).
     * During a Unix leap second the fractional part is doubled, and the
     * accumulated leap-second count is added via {@code countleaps}.</p>
     *
     * @param dateTimeMillis epoch time in milliseconds
     * @return the corresponding GPS time, in seconds
     */
    public static double dateTime2gps( double dateTimeMillis ) {
        final double unixSeconds = dateTimeMillis / 1000;
        final double wholeSeconds = Math.floor(unixSeconds);
        double fraction = unixSeconds % 1;
        final double gpsSeconds = wholeSeconds - 315964800;
        if (isunixtimeleap(Math.ceil(unixSeconds))) {
            // Within a leap second the fraction spans two real seconds.
            fraction = fraction * 2;
        }
        return gpsSeconds + fraction + countleaps(gpsSeconds, true);
    }
}
public class class_name {
    /**
     * Converts an epoch timestamp in milliseconds to GPS time in seconds.
     * 315964800 is the Unix-epoch offset of the GPS epoch (1980-01-06).
     * Trailing {@code // depends on control dependency} comments are analysis
     * annotations, kept verbatim.
     *
     * @param dateTimeMillis epoch time in milliseconds
     * @return the corresponding GPS time, in seconds
     */
    public static double dateTime2gps( double dateTimeMillis ) {
        dateTimeMillis = dateTimeMillis / 1000;
        double ipart = Math.floor(dateTimeMillis);
        double fpart = dateTimeMillis % 1;
        double gpsTime = ipart - 315964800;
        if (isunixtimeleap(Math.ceil(dateTimeMillis))) {
            // During a leap second the fraction spans two real seconds.
            fpart *= 2; // depends on control dependency: [if], data = [none]
        }
        return gpsTime + fpart + countleaps(gpsTime, true);
    }
}
public class class_name {
    /**
     * Lazily creates the network broadcast receiver and registers it for
     * connectivity and Bluetooth-state changes, provided the weakly referenced
     * context is still alive.
     */
    void registerReceiver() {
        if (receiver == null) {
            receiver = new NetworkReceiver(new Listener() {
                @Override
                public void post(NetworkChangeEvent event) {
                    // Forward only while a listener is attached.
                    if (onNetworkChangedListener == null) {
                        return;
                    }
                    onNetworkChangedListener.onChanged(event);
                }
            });
        }
        final Context context = contextRef.get();
        if (context == null) {
            // Context was garbage-collected; nothing to register against.
            return;
        }
        final IntentFilter intentFilter = new IntentFilter();
        intentFilter.addAction(ConnectivityManager.CONNECTIVITY_ACTION);
        intentFilter.addAction(BluetoothAdapter.ACTION_STATE_CHANGED);
        context.registerReceiver(receiver, intentFilter);
    }
}
public class class_name {
    /**
     * Lazily creates the network broadcast receiver and registers it for
     * connectivity and Bluetooth-state changes when the weakly referenced
     * context is still alive. Trailing {@code // depends on control dependency}
     * comments are analysis annotations, kept verbatim.
     */
    void registerReceiver() {
        if (receiver == null) {
            receiver = new NetworkReceiver(new Listener() {
                @Override
                public void post(NetworkChangeEvent event) {
                    if (onNetworkChangedListener != null) {
                        onNetworkChangedListener.onChanged(event); // depends on control dependency: [if], data = [none]
                    }
                }
            }); // depends on control dependency: [if], data = [none]
        }
        final Context context = contextRef.get();
        if (context != null) {
            IntentFilter filter = new IntentFilter();
            filter.addAction(ConnectivityManager.CONNECTIVITY_ACTION); // depends on control dependency: [if], data = [none]
            filter.addAction(BluetoothAdapter.ACTION_STATE_CHANGED); // depends on control dependency: [if], data = [none]
            context.registerReceiver(receiver, filter); // depends on control dependency: [if], data = [none]
        }
    }
}
public class class_name { public void setData(byte[] data) { // There are always two bytes of payload in the message -- the // slave ID and the run status indicator. if (data == null) { m_length = 2; m_data = new byte[0]; return; } if (data.length > 249) { throw new IllegalArgumentException("data length limit exceeded"); } m_length = data.length + 2; m_data = new byte[data.length]; System.arraycopy(data, 0, m_data, 0, data.length); } }
public class class_name {
    /**
     * Sets the message payload data; the stored length always includes two
     * extra bytes (slave ID and run status indicator). Trailing
     * {@code // depends on control dependency} comments are analysis
     * annotations, kept verbatim.
     *
     * @param data the payload bytes, or {@code null} for an empty payload
     * @throws IllegalArgumentException if {@code data} exceeds 249 bytes
     */
    public void setData(byte[] data) {
        // There are always two bytes of payload in the message -- the
        // slave ID and the run status indicator.
        if (data == null) {
            m_length = 2; // depends on control dependency: [if], data = [none]
            m_data = new byte[0]; // depends on control dependency: [if], data = [none]
            return; // depends on control dependency: [if], data = [none]
        }
        if (data.length > 249) {
            throw new IllegalArgumentException("data length limit exceeded");
        }
        m_length = data.length + 2;
        // Defensive copy of the caller's array.
        m_data = new byte[data.length];
        System.arraycopy(data, 0, m_data, 0, data.length);
    }
}
public class class_name {
    /**
     * Serializes the given {@code ListPipelineExecutionsRequest} through the
     * supplied protocol marshaller.
     *
     * @param listPipelineExecutionsRequest the request to marshall; must not be null
     * @param protocolMarshaller            the marshaller to write fields to
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(ListPipelineExecutionsRequest listPipelineExecutionsRequest, ProtocolMarshaller protocolMarshaller) {
        if (listPipelineExecutionsRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        final ListPipelineExecutionsRequest request = listPipelineExecutionsRequest;
        try {
            protocolMarshaller.marshall(request.getPipelineName(), PIPELINENAME_BINDING);
            protocolMarshaller.marshall(request.getMaxResults(), MAXRESULTS_BINDING);
            protocolMarshaller.marshall(request.getNextToken(), NEXTTOKEN_BINDING);
        } catch (Exception ex) {
            // Wrap any marshalling failure in the SDK's client exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + ex.getMessage(), ex);
        }
    }
}
public class class_name {
    /**
     * Serializes the given {@code ListPipelineExecutionsRequest} through the
     * supplied protocol marshaller. Trailing
     * {@code // depends on control dependency} comments are analysis
     * annotations, kept verbatim.
     *
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(ListPipelineExecutionsRequest listPipelineExecutionsRequest, ProtocolMarshaller protocolMarshaller) {
        if (listPipelineExecutionsRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(listPipelineExecutionsRequest.getPipelineName(), PIPELINENAME_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(listPipelineExecutionsRequest.getMaxResults(), MAXRESULTS_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(listPipelineExecutionsRequest.getNextToken(), NEXTTOKEN_BINDING); // depends on control dependency: [try], data = [none]
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        } // depends on control dependency: [catch], data = [none]
    }
}
public class class_name {
    /**
     * Appends the given account IDs to this request's account-ID list,
     * lazily creating the list on first use.
     *
     * @param accountIds account IDs to append
     * @return this request, for call chaining
     */
    public GetMembersRequest withAccountIds(String... accountIds) {
        if (this.accountIds == null) {
            // Presize for exactly the incoming IDs.
            setAccountIds(new java.util.ArrayList<String>(accountIds.length));
        }
        java.util.Collections.addAll(this.accountIds, accountIds);
        return this;
    }
}
public class class_name {
    /**
     * Appends the given account IDs to this request's account-ID list, lazily
     * creating the list on first use. Trailing
     * {@code // depends on control dependency} comments are analysis
     * annotations, kept verbatim.
     *
     * @param accountIds account IDs to append
     * @return this request, for call chaining
     */
    public GetMembersRequest withAccountIds(String... accountIds) {
        if (this.accountIds == null) {
            setAccountIds(new java.util.ArrayList<String>(accountIds.length)); // depends on control dependency: [if], data = [none]
        }
        for (String ele : accountIds) {
            this.accountIds.add(ele); // depends on control dependency: [for], data = [ele]
        }
        return this;
    }
}
public class class_name {
    /**
     * Inserts one row, described by {@code params} (column name -> value), into
     * the table backing this instance, then re-selects the inserted row and
     * returns it as a JSON object.
     *
     * <p>Column handling: caller-supplied writable columns, auto-increment
     * integer columns (values pre-allocated via
     * {@code ColumnDataUtils.obtainNextIncrementInteger} and used as selectors
     * to re-read the row), and columns with values/variables assigned on
     * insert. Each group is sorted by column name so the generated SQL — and
     * hence the prepared statement — is reusable across calls.</p>
     *
     * <p>NOTE(review): {@code pstmt} is never closed on any path — likely a
     * resource leak; consider try-with-resources. Also
     * {@code new Integer(...)} is deprecated boxing in modern Java.</p>
     *
     * @param params map of column name to string value for writable columns
     * @return the inserted row, re-read from the database
     * @throws Exception if insert access is denied, a parameter names an
     *         unknown/read-only/auto-increment column, the row could not be
     *         re-selected, or the re-select does not return exactly one row
     */
    public JSONObject insert(Map<String, String> params) throws Exception {
        OperationAccess operationAccess = tableSchema.getInsertAccess();
        if( false == operationAccess.isAllowed() ) {
            throw new Exception("Attempting to insert a record while the privilege is not allowed: "+tableSchema.getLogicalName()+" ("+tableSchema.getPhysicalName()+")");
        }
        // Create a list of parameters to retrieve the row after it is
        // inserted
        List<RecordSelector> whereClauses = new Vector<RecordSelector>();
        // Create a list of all writable columns where a value is specified in
        // the parameters
        List<ColumnData> columnsWithParam = new Vector<ColumnData>();
        for(String columnName : params.keySet()) {
            ColumnData columnData = tableSchema.getColumnFromName(columnName);
            // Reject read-only columns.
            if( null != columnData && false == columnData.isWriteable() ) {
                columnData = null;
            }
            // Reject auto-increment columns; their values are generated below.
            if( null != columnData && columnData.isAutoIncrementInteger() ) {
                columnData = null;
            }
            if( null == columnData ) {
                throw new Exception("No write access to column "+columnName+" in table "+tableSchema.getLogicalName()+" ("+tableSchema.getPhysicalName()+")");
            } else {
                columnsWithParam.add(columnData);
            }
        }
        // Get all columns that are auto fill
        List<ColumnData> autoIncrementIntegerColumns = new Vector<ColumnData>();
        for( ColumnData columnData : tableSchema.getColumns() ) {
            if( columnData.isAutoIncrementInteger() ) {
                autoIncrementIntegerColumns.add(columnData);
            }
        }
        // Get all columns that are assigned a value on insert
        List<ColumnData> valueAssignedColumns = new Vector<ColumnData>();
        for( ColumnData columnData : tableSchema.getColumns() ) {
            if( null != columnData.getAssignValueOnInsert() ) {
                valueAssignedColumns.add(columnData);
            } else if( null != columnData.getAssignVariableOnInsert() ) {
                valueAssignedColumns.add(columnData);
            }
        }
        // Sort according to column name. This offers greater reusability
        // of the prepared statement.
        Collections.sort(autoIncrementIntegerColumns,new ColumnDataComparator());
        Collections.sort(columnsWithParam,new ColumnDataComparator());
        Collections.sort(valueAssignedColumns,new ColumnDataComparator());
        // Obtain all auto increment integers
        List<Integer> autoIncrementIntegerValues = new Vector<Integer>();
        for( ColumnData autoIncrementIntegerColumn : autoIncrementIntegerColumns ) {
            int nextValue = ColumnDataUtils.obtainNextIncrementInteger(connection, autoIncrementIntegerColumn);
            Integer value = new Integer(nextValue);
            autoIncrementIntegerValues.add( value );
            // Each generated key doubles as a selector to re-read the row.
            whereClauses.add( new RecordSelectorComparison(
                autoIncrementIntegerColumn.getColumnName()
                ,RecordSelectorComparison.Comparison.EQUAL
                ,new ExpressionConstantImpl(value.toString())
            ) );
        }
        // Create SQL command
        String sqlQuery = null;
        PreparedStatement pstmt = null;
        {
            StringWriter sw = new StringWriter();
            PrintWriter pw = new PrintWriter(sw);
            pw.print("INSERT INTO ");
            pw.print(tableSchema.getPhysicalName());
            pw.print(" (");
            // Column list: auto-increment, then parameter, then assigned columns —
            // the same order used for the VALUES wildcards and bindings below.
            boolean first = true;
            for(ColumnData columnData : autoIncrementIntegerColumns) {
                if( first ) { first = false; } else { pw.print(","); }
                pw.print(columnData.getColumnName());
            }
            for(ColumnData columnData : columnsWithParam) {
                if( first ) { first = false; } else { pw.print(","); }
                pw.print(columnData.getColumnName());
            }
            for(ColumnData columnData : valueAssignedColumns) {
                if( first ) { first = false; } else { pw.print(","); }
                pw.print(columnData.getColumnName());
            }
            pw.print(") VALUES (");
            first = true;
            for(ColumnData columnData : autoIncrementIntegerColumns) {
                if( first ) { first = false; } else { pw.print(","); }
                pw.print(columnData.getInsertWildcard());
            }
            for(ColumnData columnData : columnsWithParam) {
                if( first ) { first = false; } else { pw.print(","); }
                pw.print(columnData.getInsertWildcard());
            }
            for(ColumnData columnData : valueAssignedColumns) {
                if( first ) { first = false; } else { pw.print(","); }
                pw.print(columnData.getInsertWildcard());
            }
            pw.print(");");
            pw.flush();
            sqlQuery = sw.toString();
            pstmt = connection.prepareStatement(sqlQuery);
            // Populate prepared statement
            int index = 1;
            for(Integer integerValue : autoIncrementIntegerValues) {
                pstmt.setInt(index, integerValue.intValue());
                ++index;
            }
            for(ColumnData columnData : columnsWithParam) {
                // Compute value
                String value = params.get(columnData.getColumnName());
                ColumnDataUtils.writeToPreparedStatement(pstmt, index, value, columnData.getColumnType());
                ++index;
            }
            for(ColumnData columnData : valueAssignedColumns) {
                String value = columnData.getAssignValueOnInsert();
                // Fall back to a variable lookup when no fixed value is assigned.
                if( null == value && null != columnData.getAssignVariableOnInsert() ) {
                    value = variables.getVariableValue( columnData.getAssignVariableOnInsert() );
                }
                ColumnDataUtils.writeToPreparedStatement(pstmt, index, value, columnData.getColumnType());
                ++index;
            }
        }
        // If there are no selector, there is no point in inserting the data since
        // we will not be able to retrieve it
        if( whereClauses.size() < 1 ) {
            throw new Exception("Refusing to insert data since it can not be selected: "+sqlQuery);
        }
        // Execute insert
        pstmt.execute();
        // Now, we need to retrieve the object
        JSONArray array = query(whereClauses, null, null,null,null,null);
        // In INSERT, we expect only one element in array
        if( 1 != array.length() ) {
            throw new Exception("Expected only one element returned in an INSERT. Returned size:"+array.length()+" sql: "+sqlQuery);
        }
        return array.getJSONObject(0);
    }
}
public class class_name {
    /**
     * Inserts one row, described by {@code params} (column name -> value), into
     * the table backing this instance, then re-selects the inserted row and
     * returns it as a JSON object. Trailing
     * {@code // depends on control dependency} comments are analysis
     * annotations, kept verbatim.
     *
     * <p>NOTE(review): {@code pstmt} is never closed on any path — likely a
     * resource leak; consider try-with-resources.</p>
     *
     * @param params map of column name to string value for writable columns
     * @return the inserted row, re-read from the database
     * @throws Exception if insert access is denied, a parameter names an
     *         unknown/read-only/auto-increment column, the row could not be
     *         re-selected, or the re-select does not return exactly one row
     */
    public JSONObject insert(Map<String, String> params) throws Exception {
        OperationAccess operationAccess = tableSchema.getInsertAccess();
        if( false == operationAccess.isAllowed() ) {
            throw new Exception("Attempting to insert a record while the privilege is not allowed: "+tableSchema.getLogicalName()+" ("+tableSchema.getPhysicalName()+")");
        }
        // Create a list of parameters to retrieve the row after it is
        // inserted
        List<RecordSelector> whereClauses = new Vector<RecordSelector>();
        // Create a list of all writable columns where a value is specified in
        // the parameters
        List<ColumnData> columnsWithParam = new Vector<ColumnData>();
        for(String columnName : params.keySet()) {
            ColumnData columnData = tableSchema.getColumnFromName(columnName);
            if( null != columnData && false == columnData.isWriteable() ) {
                columnData = null;
            }
            if( null != columnData && columnData.isAutoIncrementInteger() ) {
                columnData = null;
            }
            if( null == columnData ) {
                throw new Exception("No write access to column "+columnName+" in table "+tableSchema.getLogicalName()+" ("+tableSchema.getPhysicalName()+")");
            } else {
                columnsWithParam.add(columnData);
            }
        }
        // Get all columns that are auto fill
        List<ColumnData> autoIncrementIntegerColumns = new Vector<ColumnData>();
        for( ColumnData columnData : tableSchema.getColumns() ) {
            if( columnData.isAutoIncrementInteger() ) {
                autoIncrementIntegerColumns.add(columnData);
            }
        }
        // Get all columns that are assigned a value on insert
        List<ColumnData> valueAssignedColumns = new Vector<ColumnData>();
        for( ColumnData columnData : tableSchema.getColumns() ) {
            if( null != columnData.getAssignValueOnInsert() ) {
                valueAssignedColumns.add(columnData);
            } else if( null != columnData.getAssignVariableOnInsert() ) {
                valueAssignedColumns.add(columnData);
            }
        }
        // Sort according to column name. This offers greater reusability
        // of the prepared statement.
        Collections.sort(autoIncrementIntegerColumns,new ColumnDataComparator());
        Collections.sort(columnsWithParam,new ColumnDataComparator());
        Collections.sort(valueAssignedColumns,new ColumnDataComparator());
        // Obtain all auto increment integers
        List<Integer> autoIncrementIntegerValues = new Vector<Integer>();
        for( ColumnData autoIncrementIntegerColumn : autoIncrementIntegerColumns ) {
            int nextValue = ColumnDataUtils.obtainNextIncrementInteger(connection, autoIncrementIntegerColumn);
            Integer value = new Integer(nextValue);
            autoIncrementIntegerValues.add( value );
            whereClauses.add( new RecordSelectorComparison(
                autoIncrementIntegerColumn.getColumnName()
                ,RecordSelectorComparison.Comparison.EQUAL
                ,new ExpressionConstantImpl(value.toString())
            ) );
        }
        // Create SQL command
        String sqlQuery = null;
        PreparedStatement pstmt = null;
        {
            StringWriter sw = new StringWriter();
            PrintWriter pw = new PrintWriter(sw);
            pw.print("INSERT INTO ");
            pw.print(tableSchema.getPhysicalName());
            pw.print(" (");
            boolean first = true;
            for(ColumnData columnData : autoIncrementIntegerColumns) {
                if( first ) { first = false; } else { pw.print(","); }
                pw.print(columnData.getColumnName());
            }
            for(ColumnData columnData : columnsWithParam) {
                if( first ) { first = false; } else { pw.print(","); }
                pw.print(columnData.getColumnName());
            }
            for(ColumnData columnData : valueAssignedColumns) {
                if( first ) { first = false; } else { pw.print(","); }
                pw.print(columnData.getColumnName());
            }
            pw.print(") VALUES (");
            first = true;
            for(ColumnData columnData : autoIncrementIntegerColumns) {
                if( first ) { first = false; } else { pw.print(","); }
                pw.print(columnData.getInsertWildcard());
            }
            for(ColumnData columnData : columnsWithParam) {
                if( first ) { first = false; } else { pw.print(","); }
                pw.print(columnData.getInsertWildcard());
            }
            for(ColumnData columnData : valueAssignedColumns) {
                if( first ) { first = false; } else { pw.print(","); }
                pw.print(columnData.getInsertWildcard());
            }
            pw.print(");");
            pw.flush();
            sqlQuery = sw.toString();
            pstmt = connection.prepareStatement(sqlQuery);
            // Populate prepared statement
            int index = 1;
            for(Integer integerValue : autoIncrementIntegerValues) {
                pstmt.setInt(index, integerValue.intValue()); // depends on control dependency: [for], data = [integerValue]
                ++index; // depends on control dependency: [for], data = [none]
            }
            for(ColumnData columnData : columnsWithParam) {
                // Compute value
                String value = params.get(columnData.getColumnName());
                ColumnDataUtils.writeToPreparedStatement(pstmt, index, value, columnData.getColumnType()); // depends on control dependency: [for], data = [columnData]
                ++index; // depends on control dependency: [for], data = [none]
            }
            for(ColumnData columnData : valueAssignedColumns) {
                String value = columnData.getAssignValueOnInsert();
                if( null == value && null != columnData.getAssignVariableOnInsert() ) {
                    value = variables.getVariableValue( columnData.getAssignVariableOnInsert() ); // depends on control dependency: [if], data = [none]
                }
                ColumnDataUtils.writeToPreparedStatement(pstmt, index, value, columnData.getColumnType()); // depends on control dependency: [for], data = [columnData]
                ++index; // depends on control dependency: [for], data = [none]
            }
        }
        // If there are no selector, there is no point in inserting the data since
        // we will not be able to retrieve it
        if( whereClauses.size() < 1 ) {
            throw new Exception("Refusing to insert data since it can not be selected: "+sqlQuery);
        }
        // Execute insert
        pstmt.execute();
        // Now, we need to retrieve the object
        JSONArray array = query(whereClauses, null, null,null,null,null);
        // In INSERT, we expect only one element in array
        if( 1 != array.length() ) {
            throw new Exception("Expected only one element returned in an INSERT. Returned size:"+array.length()+" sql: "+sqlQuery);
        }
        return array.getJSONObject(0);
    }
}
public class class_name {
    /**
     * Conditionally adds a map-valued field: the entry is stored only when
     * {@code include} is true and {@code mapValue} is non-null and non-empty.
     *
     * @param fieldName raw field name, resolved via {@code getFieldName}
     * @param include   whether the field should be added at all
     * @param mapValue  the value to store; skipped when null or empty
     * @return this builder, for call chaining
     */
    public MapBuilder add(String fieldName, boolean include, @Nullable Map<String, ?> mapValue) {
        final boolean hasContent = mapValue != null && !mapValue.isEmpty();
        if (include && hasContent) {
            map.put(getFieldName(fieldName), mapValue);
        }
        return this;
    }
}
public class class_name {
    /**
     * Conditionally adds a map-valued field: stored only when {@code include}
     * is true and {@code mapValue} is non-null and non-empty. The trailing
     * {@code // depends on control dependency} comment is an analysis
     * annotation, kept verbatim.
     *
     * @return this builder, for call chaining
     */
    public MapBuilder add(String fieldName, boolean include, @Nullable Map<String, ?> mapValue) {
        if (include && mapValue != null && !mapValue.isEmpty()) {
            map.put(getFieldName(fieldName), mapValue); // depends on control dependency: [if], data = [none]
        }
        return this;
    }
}
public class class_name {
    /**
     * Collapses adjacent mergeable regions into single regions.
     *
     * <p>Walks the input once, keeping a pending region and folding each
     * following region into it while {@code mayMergeWithFollowing} allows.
     * When nothing was merged, the original list is returned unchanged to
     * avoid allocating a copy.</p>
     *
     * @param inputRegions regions in order
     * @return a canonicalized list, or {@code inputRegions} itself if no
     *         merge occurred
     */
    private ImmutableList<CharacterRegion> canonicalize(ImmutableList<CharacterRegion> inputRegions) {
        boolean anyMerged = false;
        final ImmutableList.Builder<CharacterRegion> merged = ImmutableList.builder();
        CharacterRegion pending = null;
        for (final CharacterRegion region : inputRegions) {
            if (pending == null) {
                pending = region;
            } else if (pending.mayMergeWithFollowing(region)) {
                pending = pending.mergeFollowingRegion(region);
                anyMerged = true;
            } else {
                // Pending region can grow no further; emit it and start anew.
                merged.add(pending);
                pending = region;
            }
        }
        if (pending != null) {
            merged.add(pending);
        }
        return anyMerged ? merged.build() : inputRegions;
    }
}
public class class_name {
    /**
     * Collapses adjacent mergeable regions into single regions; returns the
     * original list unchanged when nothing merged. Trailing
     * {@code // depends on control dependency} comments are analysis
     * annotations, kept verbatim.
     */
    private ImmutableList<CharacterRegion> canonicalize(ImmutableList<CharacterRegion> inputRegions) {
        boolean mergedAnything = false;
        final ImmutableList.Builder<CharacterRegion> canonicalizedRegions = ImmutableList.builder();
        CharacterRegion bufferRegion = null;
        for (final CharacterRegion curRegion : inputRegions) {
            if (bufferRegion == null) {
                bufferRegion = curRegion; // depends on control dependency: [if], data = [none]
            } else {
                if (bufferRegion.mayMergeWithFollowing(curRegion)) {
                    mergedAnything = true; // depends on control dependency: [if], data = [none]
                    bufferRegion = bufferRegion.mergeFollowingRegion(curRegion); // depends on control dependency: [if], data = [none]
                } else {
                    canonicalizedRegions.add(bufferRegion); // depends on control dependency: [if], data = [none]
                    bufferRegion = curRegion; // depends on control dependency: [if], data = [none]
                }
            }
        }
        // Flush the last pending region, if any.
        if (bufferRegion != null) {
            canonicalizedRegions.add(bufferRegion); // depends on control dependency: [if], data = [(bufferRegion]
        }
        if (mergedAnything) {
            return canonicalizedRegions.build(); // depends on control dependency: [if], data = [none]
        } else {
            return inputRegions; // depends on control dependency: [if], data = [none]
        }
    }
}
public class class_name {
    /**
     * Loads all {@code rhextension} elements under the document's
     * {@code extensions} node, deploys their bundles when requested, and
     * registers the successfully loaded extensions on the config.
     *
     * <p>Loading is best-effort per extension: a failure is logged and the
     * remaining extensions are still processed. A failure outside the loop is
     * logged via the file's {@code log(...)} helper.</p>
     *
     * @param cs     the server config (currently unused here, kept for the
     *               established call signature)
     * @param config the config the extensions are deployed into
     * @param doc    the configuration document to read from
     * @param log    logger for per-extension failures
     */
    private static void loadExtensionBundles(ConfigServerImpl cs, ConfigImpl config, Document doc, Log log) {
        try {
            Element parent = getChildByName(doc.getDocumentElement(), "extensions");
            Element[] children = getChildren(parent, "rhextension");
            // Presized; the unused locals (strBundles, bfsq) were removed.
            List<RHExtension> extensions = new ArrayList<RHExtension>(children.length);
            for (Element child : children) {
                try {
                    RHExtension rhe = new RHExtension(config, child);
                    if (rhe.getStartBundles()) rhe.deployBundles(config);
                    extensions.add(rhe);
                } catch (Exception e) {
                    // Best-effort: skip this extension and keep loading the rest.
                    log.error("load-extension", e);
                }
            }
            config.setExtensions(extensions.toArray(new RHExtension[extensions.size()]));
        } catch (Exception e) {
            log(config, log, e);
        }
    }
}
public class class_name {
    /**
     * Loads all {@code rhextension} elements under the document's
     * {@code extensions} node, deploys their bundles when requested, and
     * registers the loaded extensions on the config. Per-extension failures
     * are logged and skipped. Trailing
     * {@code // depends on control dependency} comments are analysis
     * annotations, kept verbatim.
     */
    private static void loadExtensionBundles(ConfigServerImpl cs, ConfigImpl config, Document doc, Log log) {
        try {
            Element parent = getChildByName(doc.getDocumentElement(), "extensions");
            Element[] children = getChildren(parent, "rhextension");
            String strBundles;
            List<RHExtension> extensions = new ArrayList<RHExtension>();
            RHExtension rhe;
            for (Element child: children) {
                BundleInfo[] bfsq;
                try {
                    rhe = new RHExtension(config, child); // depends on control dependency: [try], data = [none]
                    if (rhe.getStartBundles()) rhe.deployBundles(config);
                    extensions.add(rhe); // depends on control dependency: [try], data = [none]
                } catch (Exception e) {
                    log.error("load-extension", e);
                    continue;
                } // depends on control dependency: [catch], data = [none]
            }
            config.setExtensions(extensions.toArray(new RHExtension[extensions.size()])); // depends on control dependency: [try], data = [none]
        } catch (Exception e) {
            log(config, log, e);
        } // depends on control dependency: [catch], data = [none]
    }
}
public class class_name {
    /**
     * Registers an attachment on the currently running test and returns the
     * generated source file name the caller should write the content to.
     *
     * <p>The source name is always generated (a random UUID plus the
     * attachment suffix and normalized extension). When no test is running,
     * an error is logged and the generated name is still returned for
     * backward compatibility — no attachment record is created in that case.</p>
     *
     * @param name          attachment display name; blank is stored as null
     * @param type          attachment MIME type; blank is stored as null
     * @param fileExtension file extension, with or without leading dot; may be null
     * @return the generated attachment source file name
     */
    @SuppressWarnings({"PMD.NullAssignment", "PMD.UseObjectForClearerAPI"})
    public String prepareAttachment(final String name, final String type, final String fileExtension) {
        // Normalize the extension: empty -> "", otherwise ensure a leading dot.
        final String extension = Optional.ofNullable(fileExtension)
                .filter(ext -> !ext.isEmpty())
                .map(ext -> ext.charAt(0) == '.' ? ext : "." + ext)
                .orElse("");
        final String source = UUID.randomUUID().toString() + ATTACHMENT_FILE_SUFFIX + extension;
        final Optional<String> current = threadContext.getCurrent();
        if (!current.isPresent()) {
            LOGGER.error("Could not add attachment: no test is running");
            //backward compatibility: return source even if no attachment is going to be written.
            return source;
        }
        final Attachment attachment = new Attachment()
                .setName(isEmpty(name) ? null : name)
                .setType(isEmpty(type) ? null : type)
                .setSource(source);
        final String uuid = current.get();
        // Append to the test's attachment list; the storage lock guards the
        // shared attachments collection against concurrent writers.
        storage.get(uuid, WithAttachments.class).ifPresent(withAttachments -> {
            synchronized (storage) {
                withAttachments.getAttachments().add(attachment);
            }
        });
        return attachment.getSource();
    }
}
public class class_name {
    /**
     * Registers an attachment on the currently running test and returns the
     * generated source file name the caller should write content to. When no
     * test is running, logs an error and still returns the generated name for
     * backward compatibility. Trailing
     * {@code // depends on control dependency} comments are analysis
     * annotations, kept verbatim.
     */
    @SuppressWarnings({"PMD.NullAssignment", "PMD.UseObjectForClearerAPI"})
    public String prepareAttachment(final String name, final String type, final String fileExtension) {
        // Normalize the extension: empty -> "", otherwise ensure a leading dot.
        final String extension = Optional.ofNullable(fileExtension)
                .filter(ext -> !ext.isEmpty())
                .map(ext -> ext.charAt(0) == '.' ? ext : "." + ext)
                .orElse("");
        final String source = UUID.randomUUID().toString() + ATTACHMENT_FILE_SUFFIX + extension;
        final Optional<String> current = threadContext.getCurrent();
        if (!current.isPresent()) {
            LOGGER.error("Could not add attachment: no test is running"); // depends on control dependency: [if], data = [none]
            //backward compatibility: return source even if no attachment is going to be written.
            return source; // depends on control dependency: [if], data = [none]
        }
        final Attachment attachment = new Attachment()
                .setName(isEmpty(name) ? null : name)
                .setType(isEmpty(type) ? null : type)
                .setSource(source);
        final String uuid = current.get();
        storage.get(uuid, WithAttachments.class).ifPresent(withAttachments -> {
            synchronized (storage) {
                withAttachments.getAttachments().add(attachment);
            }
        });
        return attachment.getSource();
    }
}
public class class_name {
    /**
     * One-time scan for component cell controllers/wrappers that are not
     * prototype-scoped, warning about each offender.
     *
     * <p>Synchronizes on the shared {@code HAS_CHECKED_BEAN_DEFINITIONS} flag
     * so the bean-definition scan runs at most once per process; subsequent
     * calls return immediately. Offending bean names are accumulated in the
     * shared {@code BADLY_SCOPED_BEANS} collection.</p>
     */
    protected void findBadlyScopedComponents() {
        synchronized (HAS_CHECKED_BEAN_DEFINITIONS) {
            if (HAS_CHECKED_BEAN_DEFINITIONS.get()) {
                // Scan already performed; nothing to do.
                return;
            }
            final ConfigurableListableBeanFactory beanFactory = applicationContext.getBeanFactory();
            // Collect every bean of the two cell-related types whose declared
            // scope is not "prototype".
            Stream.of(ComponentCellFxmlController.class, ComponentListCell.class)
                    .map(beanFactory::getBeanNamesForType)
                    .flatMap(Arrays::stream)
                    .filter(beanName -> {
                        final String effectiveScope = beanFactory.getBeanDefinition(beanName).getScope();
                        return !ConfigurableBeanFactory.SCOPE_PROTOTYPE.equals(effectiveScope);
                    }).forEach(BADLY_SCOPED_BEANS::add);
            HAS_CHECKED_BEAN_DEFINITIONS.set(true);
            if (BADLY_SCOPED_BEANS.isEmpty()) {
                return;
            }
            final String faulties = String.join(",", BADLY_SCOPED_BEANS);
            LOG.warn(
                    "Custom ListView cells wrappers and controllers "
                            + "should be prototype-scoped bean. "
                            + "See @Scope annotation.\n"
                            + "Faulty beans were : [{}]",
                    faulties
            );
        }
    }
}
public class class_name {
    /**
     * One-time scan for component cell controllers/wrappers that are not
     * prototype-scoped, warning about each offender. Trailing
     * {@code // depends on control dependency} comments are analysis
     * annotations, kept verbatim.
     */
    protected void findBadlyScopedComponents() {
        synchronized (HAS_CHECKED_BEAN_DEFINITIONS) {
            if (HAS_CHECKED_BEAN_DEFINITIONS.get()) {
                return; // depends on control dependency: [if], data = [none]
            }
            final ConfigurableListableBeanFactory beanFactory = applicationContext.getBeanFactory();
            Stream.of(ComponentCellFxmlController.class, ComponentListCell.class)
                    .map(beanFactory::getBeanNamesForType)
                    .flatMap(Arrays::stream)
                    .filter(beanName -> {
                        final String effectiveScope = beanFactory.getBeanDefinition(beanName).getScope();
                        return !ConfigurableBeanFactory.SCOPE_PROTOTYPE.equals(effectiveScope);
                    }).forEach(BADLY_SCOPED_BEANS::add);
            HAS_CHECKED_BEAN_DEFINITIONS.set(true);
            if (BADLY_SCOPED_BEANS.isEmpty()) {
                return; // depends on control dependency: [if], data = [none]
            }
            final String faulties = String.join(",", BADLY_SCOPED_BEANS);
            LOG.warn(
                    "Custom ListView cells wrappers and controllers "
                            + "should be prototype-scoped bean. "
                            + "See @Scope annotation.\n"
                            + "Faulty beans were : [{}]",
                    faulties
            );
        }
    }
}
public class class_name {
    /**
     * Returns a copy of this midnight instant in the given time zone while
     * keeping the local field values (year, month, day) the same.
     *
     * @param newZone the target zone; normalized via {@code DateTimeUtils.getZone}
     * @return this instance when the zone is unchanged, otherwise a new
     *         {@code DateMidnight} with adjusted millis and chronology
     */
    public DateMidnight withZoneRetainFields(DateTimeZone newZone) {
        final DateTimeZone targetZone = DateTimeUtils.getZone(newZone);
        final DateTimeZone currentZone = DateTimeUtils.getZone(getZone());
        if (targetZone == currentZone) {
            // Same zone after normalization: no conversion needed.
            return this;
        }
        final long adjustedMillis = currentZone.getMillisKeepLocal(targetZone, getMillis());
        return new DateMidnight(adjustedMillis, getChronology().withZone(targetZone));
    }
}
public class class_name {
    /**
     * Returns a copy of this midnight instant in the given time zone while
     * keeping the local field values the same. The trailing
     * {@code // depends on control dependency} comment is an analysis
     * annotation, kept verbatim.
     */
    public DateMidnight withZoneRetainFields(DateTimeZone newZone) {
        newZone = DateTimeUtils.getZone(newZone);
        DateTimeZone originalZone = DateTimeUtils.getZone(getZone());
        if (newZone == originalZone) {
            return this; // depends on control dependency: [if], data = [none]
        }
        long millis = originalZone.getMillisKeepLocal(newZone, getMillis());
        return new DateMidnight(millis, getChronology().withZone(newZone));
    }
}
public class class_name { @Override public void close() { if (!this.closed.getAndSet(true)) { closeIndices(null, false); // This will close all registered indices, without cleaning the cache. this.cache.close(); log.info("{}: Closed.", this.traceObjectId); } } }
public class class_name {
    /**
     * Closes this component exactly once; subsequent calls are no-ops.
     * Trailing {@code // depends on control dependency} comments are analysis
     * annotations, kept verbatim.
     */
    @Override
    public void close() {
        if (!this.closed.getAndSet(true)) {
            closeIndices(null, false); // This will close all registered indices, without cleaning the cache. // depends on control dependency: [if], data = [none]
            this.cache.close(); // depends on control dependency: [if], data = [none]
            log.info("{}: Closed.", this.traceObjectId); // depends on control dependency: [if], data = [none]
        }
    }
}
public class class_name {
    /**
     * Wraps a string in double quotes, escaping backslash and double-quote
     * with a backslash, and any control character below space as a
     * {@code \}{@code uXXXX} escape.
     *
     * @param s the string to quote; null or empty yields {@code ""} (two quotes)
     * @return the canonical quoted form
     */
    public static String quoteCanonical(String s) {
        if (s == null || s.length() == 0) {
            return "\"\"";
        }
        final StringBuilder out = new StringBuilder(s.length() + 4);
        out.append('"');
        for (final char ch : s.toCharArray()) {
            if (ch == '\\' || ch == '"') {
                out.append('\\').append(ch);
            } else if (ch < ' ') {
                // Zero-pad the code point to four hex digits.
                final String hex = "000" + Integer.toHexString(ch);
                out.append("\\u").append(hex.substring(hex.length() - 4));
            } else {
                out.append(ch);
            }
        }
        out.append('"');
        return out.toString();
    }
}
public class class_name {
    /**
     * Wraps a string in double quotes, escaping backslash and double-quote
     * with a backslash, and any control character below space as a
     * {@code \}{@code uXXXX} escape. Null or empty input yields two quotes.
     * Trailing {@code // depends on control dependency} comments are analysis
     * annotations, kept verbatim.
     */
    public static String quoteCanonical(String s) {
        if (s == null || s.length() == 0) {
            return "\"\""; // depends on control dependency: [if], data = [none]
        }
        int len = s.length();
        StringBuilder sb = new StringBuilder(len + 4);
        sb.append('"');
        for (int i = 0; i < len; i += 1) {
            char c = s.charAt(i);
            switch (c) {
            case '\\':
            case '"':
                sb.append('\\');
                sb.append(c);
                break;
            default:
                if (c < ' ') {
                    // Zero-pad the code point to four hex digits.
                    String t = "000" + Integer.toHexString(c);
                    sb.append("\\u")
                        .append(t.substring(t.length() - 4)); // depends on control dependency: [if], data = [none]
                } else {
                    sb.append(c); // depends on control dependency: [if], data = [(c]
                }
            }
        }
        sb.append('"');
        return sb.toString();
    }
}
public class class_name {
    /**
     * Looks up a value by key, falling back to the stored defaults when the
     * local key/value map has no entry.
     *
     * <p>Any exception during lookup is deliberately swallowed so the caller
     * receives {@code null} (i.e. "use the default") instead of an error.</p>
     *
     * @param keyName the key to look up
     * @return the stored value, the stored-defaults value, or {@code null}
     */
    private Object _getObject(String keyName) {
        Object result = null;
        try {
            result = keyValues.get( keyName);
            if (result == null) {
                // if failed, check the stored Defaults
                PreferencesExt sd = getStoredDefaults();
                if (sd != null)
                    result = sd.getObjectFromNode( absolutePath(), keyName);
            }
        } catch (Exception e) {
            // Ignoring exception causes default to be returned
        }
        return result;
    }
}
public class class_name {
    /**
     * Looks up a value by key, falling back to the stored defaults when the
     * local key/value map has no entry; any lookup exception is deliberately
     * swallowed so {@code null} is returned. Trailing
     * {@code // depends on control dependency} comments are analysis
     * annotations, kept verbatim.
     *
     * @param keyName the key to look up
     * @return the stored value, the stored-defaults value, or {@code null}
     */
    private Object _getObject(String keyName) {
        Object result = null;
        try {
            result = keyValues.get( keyName); // depends on control dependency: [try], data = [none]
            if (result == null) {
                // if failed, check the stored Defaults
                PreferencesExt sd = getStoredDefaults();
                if (sd != null)
                    result = sd.getObjectFromNode( absolutePath(), keyName);
            }
        } catch (Exception e) {
            // Ignoring exception causes default to be returned
        } // depends on control dependency: [catch], data = [none]
        return result;
    }
}