code
stringlengths
130
281k
code_dependency
stringlengths
182
306k
public class class_name { public boolean addLockedObjectOwner(String owner) { if (_owner == null) { _owner = new String[1]; } else { int size = _owner.length; String[] newLockObjectOwner = new String[size + 1]; // check if the owner is already here (that should actually not // happen) for (int i = 0; i < size; i++) { if (_owner[i].equals(owner)) { return false; } } System.arraycopy(_owner, 0, newLockObjectOwner, 0, size); _owner = newLockObjectOwner; } _owner[_owner.length - 1] = owner; return true; } }
public class class_name { public boolean addLockedObjectOwner(String owner) { if (_owner == null) { _owner = new String[1]; // depends on control dependency: [if], data = [none] } else { int size = _owner.length; String[] newLockObjectOwner = new String[size + 1]; // check if the owner is already here (that should actually not // happen) for (int i = 0; i < size; i++) { if (_owner[i].equals(owner)) { return false; // depends on control dependency: [if], data = [none] } } System.arraycopy(_owner, 0, newLockObjectOwner, 0, size); // depends on control dependency: [if], data = [(_owner] _owner = newLockObjectOwner; // depends on control dependency: [if], data = [none] } _owner[_owner.length - 1] = owner; return true; } }
public class class_name { public HandyDate date(Object expression, Object pattern, Object locale) { if (!(expression instanceof String)) { String msg = "First argument as three arguments should be String(expression): " + expression; throw new TemplateProcessingException(msg); } if (!(pattern instanceof String)) { String msg = "Second argument as three arguments should be TimeZone or String(pattern): " + pattern; throw new TemplateProcessingException(msg); } if (!(locale instanceof Locale)) { String msg = "Third argument as three arguments should be Locale: " + locale; throw new TemplateProcessingException(msg); } return create((String) expression, (String) pattern, (Locale) locale); } }
public class class_name { public HandyDate date(Object expression, Object pattern, Object locale) { if (!(expression instanceof String)) { String msg = "First argument as three arguments should be String(expression): " + expression; // depends on control dependency: [if], data = [none] throw new TemplateProcessingException(msg); } if (!(pattern instanceof String)) { String msg = "Second argument as three arguments should be TimeZone or String(pattern): " + pattern; // depends on control dependency: [if], data = [none] throw new TemplateProcessingException(msg); } if (!(locale instanceof Locale)) { String msg = "Third argument as three arguments should be Locale: " + locale; throw new TemplateProcessingException(msg); } return create((String) expression, (String) pattern, (Locale) locale); } }
public class class_name { public HttpResponse executeCommand(HttpUriRequest request, boolean apiHeaders) { if (apiHeaders) { setApiHeaders(request); } try { CloseableHttpResponse response = client.getHttpClient().execute(request); try { checkResponseStatus(response); return response; } finally { response.close(); } } catch (IOException e) { throw new UploadcareNetworkException(e); } } }
public class class_name { public HttpResponse executeCommand(HttpUriRequest request, boolean apiHeaders) { if (apiHeaders) { setApiHeaders(request); // depends on control dependency: [if], data = [none] } try { CloseableHttpResponse response = client.getHttpClient().execute(request); try { checkResponseStatus(response); // depends on control dependency: [try], data = [none] return response; // depends on control dependency: [try], data = [none] } finally { response.close(); } } catch (IOException e) { throw new UploadcareNetworkException(e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { @Override public Realm removeRealmApplicationId(String realmName, ApplicationId appId) { RealmSet set = this.realmNameToRealmSet.get(realmName); if (set != null) { Realm r = set.getRealm(appId); set.removeRealm(appId); if (set.size() == 0 && !realmName.equals(this.localRealmName)) { this.realmNameToRealmSet.remove(realmName); this.allRealmsSet.remove(realmName); } return r; } return null; } }
public class class_name { @Override public Realm removeRealmApplicationId(String realmName, ApplicationId appId) { RealmSet set = this.realmNameToRealmSet.get(realmName); if (set != null) { Realm r = set.getRealm(appId); set.removeRealm(appId); // depends on control dependency: [if], data = [none] if (set.size() == 0 && !realmName.equals(this.localRealmName)) { this.realmNameToRealmSet.remove(realmName); // depends on control dependency: [if], data = [none] this.allRealmsSet.remove(realmName); // depends on control dependency: [if], data = [none] } return r; // depends on control dependency: [if], data = [none] } return null; } }
public class class_name { static int clamp(int value, int upperLimit) { if (value < 0) { return value + (-1 * (int) Math.floor(value / (float) upperLimit)) * upperLimit; } else { return value % upperLimit; } } }
public class class_name { static int clamp(int value, int upperLimit) { if (value < 0) { return value + (-1 * (int) Math.floor(value / (float) upperLimit)) * upperLimit; // depends on control dependency: [if], data = [(value] } else { return value % upperLimit; // depends on control dependency: [if], data = [none] } } }
public class class_name { public static Cookie parseSetCookieHeader(final String headerValue) { String key = null; CookieImpl cookie = null; int state = 0; int current = 0; for (int i = 0; i < headerValue.length(); ++i) { char c = headerValue.charAt(i); switch (state) { case 0: { //reading key if (c == '=') { key = headerValue.substring(current, i); current = i + 1; state = 1; } else if ((c == ';' || c == ' ') && current == i) { current++; } else if (c == ';') { if (cookie == null) { throw UndertowMessages.MESSAGES.couldNotParseCookie(headerValue); } else { handleValue(cookie, headerValue.substring(current, i), null); } current = i + 1; } break; } case 1: { if (c == ';') { if (cookie == null) { cookie = new CookieImpl(key, headerValue.substring(current, i)); } else { handleValue(cookie, key, headerValue.substring(current, i)); } state = 0; current = i + 1; key = null; } else if (c == '"' && current == i) { current++; state = 2; } break; } case 2: { if (c == '"') { if (cookie == null) { cookie = new CookieImpl(key, headerValue.substring(current, i)); } else { handleValue(cookie, key, headerValue.substring(current, i)); } state = 0; current = i + 1; key = null; } break; } } } if (key == null) { if (current != headerValue.length()) { handleValue(cookie, headerValue.substring(current, headerValue.length()), null); } } else { if (current != headerValue.length()) { if(cookie == null) { cookie = new CookieImpl(key, headerValue.substring(current, headerValue.length())); } else { handleValue(cookie, key, headerValue.substring(current, headerValue.length())); } } else { handleValue(cookie, key, null); } } return cookie; } }
public class class_name { public static Cookie parseSetCookieHeader(final String headerValue) { String key = null; CookieImpl cookie = null; int state = 0; int current = 0; for (int i = 0; i < headerValue.length(); ++i) { char c = headerValue.charAt(i); switch (state) { case 0: { //reading key if (c == '=') { key = headerValue.substring(current, i); // depends on control dependency: [if], data = [(c] current = i + 1; // depends on control dependency: [if], data = [none] state = 1; // depends on control dependency: [if], data = [none] } else if ((c == ';' || c == ' ') && current == i) { current++; // depends on control dependency: [if], data = [none] } else if (c == ';') { if (cookie == null) { throw UndertowMessages.MESSAGES.couldNotParseCookie(headerValue); } else { handleValue(cookie, headerValue.substring(current, i), null); // depends on control dependency: [if], data = [(cookie] } current = i + 1; // depends on control dependency: [if], data = [none] } break; } case 1: { if (c == ';') { if (cookie == null) { cookie = new CookieImpl(key, headerValue.substring(current, i)); // depends on control dependency: [if], data = [none] } else { handleValue(cookie, key, headerValue.substring(current, i)); // depends on control dependency: [if], data = [(cookie] } state = 0; // depends on control dependency: [if], data = [none] current = i + 1; // depends on control dependency: [if], data = [none] key = null; // depends on control dependency: [if], data = [none] } else if (c == '"' && current == i) { current++; // depends on control dependency: [if], data = [none] state = 2; // depends on control dependency: [if], data = [none] } break; } case 2: { if (c == '"') { if (cookie == null) { cookie = new CookieImpl(key, headerValue.substring(current, i)); // depends on control dependency: [if], data = [none] } else { handleValue(cookie, key, headerValue.substring(current, i)); // depends on control dependency: [if], data = [(cookie] } state = 0; // depends on control dependency: 
[if], data = [none] current = i + 1; // depends on control dependency: [if], data = [none] key = null; // depends on control dependency: [if], data = [none] } break; } } } if (key == null) { if (current != headerValue.length()) { handleValue(cookie, headerValue.substring(current, headerValue.length()), null); // depends on control dependency: [if], data = [(current] } } else { if (current != headerValue.length()) { if(cookie == null) { cookie = new CookieImpl(key, headerValue.substring(current, headerValue.length())); // depends on control dependency: [if], data = [none] } else { handleValue(cookie, key, headerValue.substring(current, headerValue.length())); // depends on control dependency: [if], data = [(cookie] } } else { handleValue(cookie, key, null); // depends on control dependency: [if], data = [none] } } return cookie; } }
public class class_name { @Override public Object deserialize(Writable blob) { HiveBitcoinBlock result = null; if (blob instanceof BitcoinBlock) { result = convertToHiveBitcoinBlock((BitcoinBlock) blob); } return result; } }
public class class_name { @Override public Object deserialize(Writable blob) { HiveBitcoinBlock result = null; if (blob instanceof BitcoinBlock) { result = convertToHiveBitcoinBlock((BitcoinBlock) blob); // depends on control dependency: [if], data = [none] } return result; } }
public class class_name { public static void reverse(int[] a) { int i = 0, j = a.length - 1; while (i < j) { SortUtils.swap(a, i++, j--); // code for swap not shown, but easy enough } } }
public class class_name { public static void reverse(int[] a) { int i = 0, j = a.length - 1; while (i < j) { SortUtils.swap(a, i++, j--); // code for swap not shown, but easy enough // depends on control dependency: [while], data = [none] } } }
public class class_name { public String getUrl(String htmlLink) { String result = htmlLink; if (htmlLink != null) { Matcher linkMatcher = LINKPATTERN.matcher(htmlLink); Matcher imgMatcher = IMAGEPATTERN.matcher(htmlLink); if (linkMatcher.matches()) { String href = linkMatcher.group(2); href = StringEscapeUtils.unescapeHtml4(href); result = href + linkMatcher.group(4); } else if (imgMatcher.matches()) { String src = imgMatcher.group(2); result = StringEscapeUtils.unescapeHtml4(src); } } return result; } }
public class class_name { public String getUrl(String htmlLink) { String result = htmlLink; if (htmlLink != null) { Matcher linkMatcher = LINKPATTERN.matcher(htmlLink); Matcher imgMatcher = IMAGEPATTERN.matcher(htmlLink); if (linkMatcher.matches()) { String href = linkMatcher.group(2); href = StringEscapeUtils.unescapeHtml4(href); // depends on control dependency: [if], data = [none] result = href + linkMatcher.group(4); // depends on control dependency: [if], data = [none] } else if (imgMatcher.matches()) { String src = imgMatcher.group(2); result = StringEscapeUtils.unescapeHtml4(src); // depends on control dependency: [if], data = [none] } } return result; } }
public class class_name { private Archive openArchive(File zipFileName, boolean useOptimizedZip) throws IOException { File origZipFileName = zipFileName; if (symbolFileEnabled && locations.isDefaultBootClassPathRtJar(zipFileName)) { File file = zipFileName.getParentFile().getParentFile(); // ${java.home} if (new File(file.getName()).equals(new File("jre"))) file = file.getParentFile(); // file == ${jdk.home} for (String name : symbolFileLocation) file = new File(file, name); // file == ${jdk.home}/lib/ct.sym if (file.exists()) zipFileName = file; } Archive archive; try { ZipFile zdir = null; boolean usePreindexedCache = false; String preindexCacheLocation = null; if (!useOptimizedZip) { zdir = new ZipFile(zipFileName); } else { usePreindexedCache = options.isSet("usezipindex"); preindexCacheLocation = options.get("java.io.tmpdir"); String optCacheLoc = options.get("cachezipindexdir"); if (optCacheLoc != null && optCacheLoc.length() != 0) { if (optCacheLoc.startsWith("\"")) { if (optCacheLoc.endsWith("\"")) { optCacheLoc = optCacheLoc.substring(1, optCacheLoc.length() - 1); } else { optCacheLoc = optCacheLoc.substring(1); } } File cacheDir = new File(optCacheLoc); if (cacheDir.exists() && cacheDir.canWrite()) { preindexCacheLocation = optCacheLoc; if (!preindexCacheLocation.endsWith("/") && !preindexCacheLocation.endsWith(File.separator)) { preindexCacheLocation += File.separator; } } } } if (origZipFileName == zipFileName) { if (!useOptimizedZip) { archive = new ZipArchive(this, zdir); } else { archive = new ZipFileIndexArchive(this, zipFileIndexCache.getZipFileIndex(zipFileName, null, usePreindexedCache, preindexCacheLocation, options.isSet("writezipindexfiles"))); } } else { if (!useOptimizedZip) { archive = new SymbolArchive(this, origZipFileName, zdir, symbolFilePrefix); } else { archive = new ZipFileIndexArchive(this, zipFileIndexCache.getZipFileIndex(zipFileName, symbolFilePrefix, usePreindexedCache, preindexCacheLocation, 
options.isSet("writezipindexfiles"))); } } } catch (FileNotFoundException ex) { archive = new MissingArchive(zipFileName); } catch (ZipFileIndex.ZipFormatException zfe) { throw zfe; } catch (IOException ex) { if (zipFileName.exists()) log.error("error.reading.file", zipFileName, getMessage(ex)); archive = new MissingArchive(zipFileName); } archives.put(origZipFileName, archive); return archive; } }
public class class_name { private Archive openArchive(File zipFileName, boolean useOptimizedZip) throws IOException { File origZipFileName = zipFileName; if (symbolFileEnabled && locations.isDefaultBootClassPathRtJar(zipFileName)) { File file = zipFileName.getParentFile().getParentFile(); // ${java.home} if (new File(file.getName()).equals(new File("jre"))) file = file.getParentFile(); // file == ${jdk.home} for (String name : symbolFileLocation) file = new File(file, name); // file == ${jdk.home}/lib/ct.sym if (file.exists()) zipFileName = file; } Archive archive; try { ZipFile zdir = null; boolean usePreindexedCache = false; String preindexCacheLocation = null; if (!useOptimizedZip) { zdir = new ZipFile(zipFileName); // depends on control dependency: [if], data = [none] } else { usePreindexedCache = options.isSet("usezipindex"); // depends on control dependency: [if], data = [none] preindexCacheLocation = options.get("java.io.tmpdir"); // depends on control dependency: [if], data = [none] String optCacheLoc = options.get("cachezipindexdir"); if (optCacheLoc != null && optCacheLoc.length() != 0) { if (optCacheLoc.startsWith("\"")) { if (optCacheLoc.endsWith("\"")) { optCacheLoc = optCacheLoc.substring(1, optCacheLoc.length() - 1); // depends on control dependency: [if], data = [none] } else { optCacheLoc = optCacheLoc.substring(1); // depends on control dependency: [if], data = [none] } } File cacheDir = new File(optCacheLoc); if (cacheDir.exists() && cacheDir.canWrite()) { preindexCacheLocation = optCacheLoc; // depends on control dependency: [if], data = [none] if (!preindexCacheLocation.endsWith("/") && !preindexCacheLocation.endsWith(File.separator)) { preindexCacheLocation += File.separator; // depends on control dependency: [if], data = [none] } } } } if (origZipFileName == zipFileName) { if (!useOptimizedZip) { archive = new ZipArchive(this, zdir); // depends on control dependency: [if], data = [none] } else { archive = new ZipFileIndexArchive(this, 
zipFileIndexCache.getZipFileIndex(zipFileName, null, usePreindexedCache, preindexCacheLocation, options.isSet("writezipindexfiles"))); // depends on control dependency: [if], data = [none] } } else { if (!useOptimizedZip) { archive = new SymbolArchive(this, origZipFileName, zdir, symbolFilePrefix); // depends on control dependency: [if], data = [none] } else { archive = new ZipFileIndexArchive(this, zipFileIndexCache.getZipFileIndex(zipFileName, symbolFilePrefix, usePreindexedCache, preindexCacheLocation, options.isSet("writezipindexfiles"))); // depends on control dependency: [if], data = [none] } } } catch (FileNotFoundException ex) { archive = new MissingArchive(zipFileName); } catch (ZipFileIndex.ZipFormatException zfe) { throw zfe; } catch (IOException ex) { if (zipFileName.exists()) log.error("error.reading.file", zipFileName, getMessage(ex)); archive = new MissingArchive(zipFileName); } archives.put(origZipFileName, archive); return archive; } }
public class class_name { public static String exampleOrderBy(Class<?> entityClass) { StringBuilder sql = new StringBuilder(); sql.append("<if test=\"orderByClause != null\">"); sql.append("order by ${orderByClause}"); sql.append("</if>"); String orderByClause = EntityHelper.getOrderByClause(entityClass); if (orderByClause.length() > 0) { sql.append("<if test=\"orderByClause == null\">"); sql.append("ORDER BY " + orderByClause); sql.append("</if>"); } return sql.toString(); } }
public class class_name { public static String exampleOrderBy(Class<?> entityClass) { StringBuilder sql = new StringBuilder(); sql.append("<if test=\"orderByClause != null\">"); sql.append("order by ${orderByClause}"); sql.append("</if>"); String orderByClause = EntityHelper.getOrderByClause(entityClass); if (orderByClause.length() > 0) { sql.append("<if test=\"orderByClause == null\">"); // depends on control dependency: [if], data = [none] sql.append("ORDER BY " + orderByClause); // depends on control dependency: [if], data = [none] sql.append("</if>"); // depends on control dependency: [if], data = [none] } return sql.toString(); } }
public class class_name { private void toggleDrawer() { if (drawerLayout.isDrawerVisible(GravityCompat.START)) { drawerLayout.closeDrawer(GravityCompat.START); } else { drawerLayout.openDrawer(GravityCompat.START); } } }
public class class_name { private void toggleDrawer() { if (drawerLayout.isDrawerVisible(GravityCompat.START)) { drawerLayout.closeDrawer(GravityCompat.START); // depends on control dependency: [if], data = [none] } else { drawerLayout.openDrawer(GravityCompat.START); // depends on control dependency: [if], data = [none] } } }
public class class_name { public static IntToDoubleFunction intToDoubleFunction(CheckedIntToDoubleFunction function, Consumer<Throwable> handler) { return t -> { try { return function.applyAsDouble(t); } catch (Throwable e) { handler.accept(e); throw new IllegalStateException("Exception handler must throw a RuntimeException", e); } }; } }
public class class_name { public static IntToDoubleFunction intToDoubleFunction(CheckedIntToDoubleFunction function, Consumer<Throwable> handler) { return t -> { try { return function.applyAsDouble(t); // depends on control dependency: [try], data = [none] } catch (Throwable e) { handler.accept(e); throw new IllegalStateException("Exception handler must throw a RuntimeException", e); } // depends on control dependency: [catch], data = [none] }; } }
public class class_name { private static void softmax(double[] prob) { double max = Double.NEGATIVE_INFINITY; for (int i = 0; i < prob.length; i++) { if (prob[i] > max) { max = prob[i]; } } double Z = 0.0; for (int i = 0; i < prob.length; i++) { double p = Math.exp(prob[i] - max); prob[i] = p; Z += p; } for (int i = 0; i < prob.length; i++) { prob[i] /= Z; } } }
public class class_name { private static void softmax(double[] prob) { double max = Double.NEGATIVE_INFINITY; for (int i = 0; i < prob.length; i++) { if (prob[i] > max) { max = prob[i]; // depends on control dependency: [if], data = [none] } } double Z = 0.0; for (int i = 0; i < prob.length; i++) { double p = Math.exp(prob[i] - max); prob[i] = p; // depends on control dependency: [for], data = [i] Z += p; // depends on control dependency: [for], data = [none] } for (int i = 0; i < prob.length; i++) { prob[i] /= Z; // depends on control dependency: [for], data = [i] } } }
public class class_name { private IDLProxyObject doSetFieldValue(String fullField, String field, Object value, Object object, boolean useCache, Map<String, ReflectInfo> cachedFields) { Field f; // check cache if (useCache) { ReflectInfo info = cachedFields.get(fullField); if (info != null) { setField(value, info.target, info.field); return this; } } int index = field.indexOf('.'); if (index != -1) { String parent = field.substring(0, index); String sub = field.substring(index + 1); try { f = FieldUtils.findField(object.getClass(), parent); if (f == null) { throw new RuntimeException( "No field '" + parent + "' found at class " + object.getClass().getName()); } Class<?> type = f.getType(); f.setAccessible(true); Object o = f.get(object); if (o == null) { boolean memberClass = type.isMemberClass(); if (memberClass && Modifier.isStatic(type.getModifiers())) { Constructor<?> constructor = type.getConstructor(new Class[0]); constructor.setAccessible(true); o = constructor.newInstance(new Object[0]); } else if (memberClass) { Constructor<?> constructor = type.getConstructor(new Class[] { object.getClass() }); constructor.setAccessible(true); o = constructor.newInstance(new Object[] { object }); } else { o = type.newInstance(); } f.set(object, o); } return put(fullField, sub, value, o); } catch (Exception e) { throw new RuntimeException(e.getMessage(), e); } } f = FieldUtils.findField(object.getClass(), field); if (f == null) { throw new RuntimeException("No field '" + field + "' found at class " + object.getClass().getName()); } if (useCache && !cachedFields.containsKey(fullField)) { cachedFields.put(fullField, new ReflectInfo(f, object)); } setField(value, object, f); return this; } }
public class class_name { private IDLProxyObject doSetFieldValue(String fullField, String field, Object value, Object object, boolean useCache, Map<String, ReflectInfo> cachedFields) { Field f; // check cache if (useCache) { ReflectInfo info = cachedFields.get(fullField); if (info != null) { setField(value, info.target, info.field); // depends on control dependency: [if], data = [none] return this; // depends on control dependency: [if], data = [none] } } int index = field.indexOf('.'); if (index != -1) { String parent = field.substring(0, index); String sub = field.substring(index + 1); try { f = FieldUtils.findField(object.getClass(), parent); // depends on control dependency: [try], data = [none] if (f == null) { throw new RuntimeException( "No field '" + parent + "' found at class " + object.getClass().getName()); } Class<?> type = f.getType(); f.setAccessible(true); // depends on control dependency: [try], data = [none] Object o = f.get(object); if (o == null) { boolean memberClass = type.isMemberClass(); if (memberClass && Modifier.isStatic(type.getModifiers())) { Constructor<?> constructor = type.getConstructor(new Class[0]); constructor.setAccessible(true); // depends on control dependency: [if], data = [none] o = constructor.newInstance(new Object[0]); // depends on control dependency: [if], data = [none] } else if (memberClass) { Constructor<?> constructor = type.getConstructor(new Class[] { object.getClass() }); constructor.setAccessible(true); // depends on control dependency: [if], data = [none] o = constructor.newInstance(new Object[] { object }); // depends on control dependency: [if], data = [none] } else { o = type.newInstance(); // depends on control dependency: [if], data = [none] } f.set(object, o); // depends on control dependency: [if], data = [(o] } return put(fullField, sub, value, o); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new RuntimeException(e.getMessage(), e); } // depends on control 
dependency: [catch], data = [none] } f = FieldUtils.findField(object.getClass(), field); if (f == null) { throw new RuntimeException("No field '" + field + "' found at class " + object.getClass().getName()); } if (useCache && !cachedFields.containsKey(fullField)) { cachedFields.put(fullField, new ReflectInfo(f, object)); // depends on control dependency: [if], data = [none] } setField(value, object, f); return this; } }
public class class_name { protected static URL normalizeToEndWithSlash(URL url) { if(url.getPath().endsWith("/")) return url; // normalize String q = url.getQuery(); q = q!=null?('?'+q):""; try { return new URL(url,url.getPath()+'/'+q); } catch (MalformedURLException e) { // impossible throw new Error(e); } } }
public class class_name { protected static URL normalizeToEndWithSlash(URL url) { if(url.getPath().endsWith("/")) return url; // normalize String q = url.getQuery(); q = q!=null?('?'+q):""; try { return new URL(url,url.getPath()+'/'+q); // depends on control dependency: [try], data = [none] } catch (MalformedURLException e) { // impossible throw new Error(e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { @Override public void alarm(DeployService2Impl<I> deploy, Result<I> result) { LifecycleState state = deploy.getState(); if (! state.isActive()) { result.ok(deploy.get()); } else if (deploy.isModifiedNow()) { // baratine/801g deploy.logModified(deploy.getLog()); deploy.restartImpl(result); } else { result.ok(deploy.get()); } } }
public class class_name { @Override public void alarm(DeployService2Impl<I> deploy, Result<I> result) { LifecycleState state = deploy.getState(); if (! state.isActive()) { result.ok(deploy.get()); // depends on control dependency: [if], data = [none] } else if (deploy.isModifiedNow()) { // baratine/801g deploy.logModified(deploy.getLog()); // depends on control dependency: [if], data = [none] deploy.restartImpl(result); // depends on control dependency: [if], data = [none] } else { result.ok(deploy.get()); // depends on control dependency: [if], data = [none] } } }
public class class_name { private void processResource(final Group group, final Resource resource) { final UriLocator uriLocator = locatorFactory.getInstance(resource.getUri()); if (uriLocator instanceof WildcardUriLocatorSupport) { final WildcardStreamLocator wildcardStreamLocator = ((WildcardUriLocatorSupport) uriLocator).getWildcardStreamLocator(); // TODO should we probably handle the situation when wildcard is present, but the implementation is not // expandedHandledAware? if (wildcardStreamLocator.hasWildcard(resource.getUri()) && wildcardStreamLocator instanceof WildcardExpanderHandlerAware) { final WildcardExpanderHandlerAware expandedHandler = (WildcardExpanderHandlerAware) wildcardStreamLocator; LOG.debug("Expanding resource: {}", resource.getUri()); final String baseNameFolder = computeBaseNameFolder(resource, uriLocator, expandedHandler); LOG.debug("baseNameFolder: {}", baseNameFolder); expandedHandler.setWildcardExpanderHandler(createExpanderHandler(group, resource, baseNameFolder)); try { // trigger the wildcard replacement uriLocator.locate(resource.getUri()); } catch (final IOException e) { // log only LOG.debug("[FAIL] problem while trying to expand wildcard for the following resource uri: {}", resource.getUri()); } finally { // remove the handler, it is not needed anymore expandedHandler.setWildcardExpanderHandler(null); } } } } }
public class class_name { private void processResource(final Group group, final Resource resource) { final UriLocator uriLocator = locatorFactory.getInstance(resource.getUri()); if (uriLocator instanceof WildcardUriLocatorSupport) { final WildcardStreamLocator wildcardStreamLocator = ((WildcardUriLocatorSupport) uriLocator).getWildcardStreamLocator(); // TODO should we probably handle the situation when wildcard is present, but the implementation is not // expandedHandledAware? if (wildcardStreamLocator.hasWildcard(resource.getUri()) && wildcardStreamLocator instanceof WildcardExpanderHandlerAware) { final WildcardExpanderHandlerAware expandedHandler = (WildcardExpanderHandlerAware) wildcardStreamLocator; LOG.debug("Expanding resource: {}", resource.getUri()); // depends on control dependency: [if], data = [none] final String baseNameFolder = computeBaseNameFolder(resource, uriLocator, expandedHandler); LOG.debug("baseNameFolder: {}", baseNameFolder); // depends on control dependency: [if], data = [none] expandedHandler.setWildcardExpanderHandler(createExpanderHandler(group, resource, baseNameFolder)); // depends on control dependency: [if], data = [none] try { // trigger the wildcard replacement uriLocator.locate(resource.getUri()); // depends on control dependency: [try], data = [none] } catch (final IOException e) { // log only LOG.debug("[FAIL] problem while trying to expand wildcard for the following resource uri: {}", resource.getUri()); } finally { // depends on control dependency: [catch], data = [none] // remove the handler, it is not needed anymore expandedHandler.setWildcardExpanderHandler(null); } } } } }
public class class_name { static final DocumentFragment createContent(Document doc, String text) { // [#150] Text might hold XML content, which can be leniently identified by the presence // of either < or & characters (other entities, like >, ", ' are not stricly XML content) if (text != null && (text.contains("<") || text.contains("&"))) { DocumentBuilder builder = JOOX.builder(); try { // [#128] Trimming will get rid of leading and trailing whitespace, which would // otherwise cause a HIERARCHY_REQUEST_ERR raised by the parser text = text.trim(); // There is a processing instruction. We can safely assume // valid XML and parse it as such if (text.startsWith("<?xml")) { Document parsed = builder.parse(new InputSource(new StringReader(text))); DocumentFragment fragment = parsed.createDocumentFragment(); fragment.appendChild(parsed.getDocumentElement()); return (DocumentFragment) doc.importNode(fragment, true); } // Any XML document fragment. To be on the safe side, fragments // are wrapped in a dummy root node else { String wrapped = "<dummy>" + text + "</dummy>"; Document parsed = builder.parse(new InputSource(new StringReader(wrapped))); DocumentFragment fragment = parsed.createDocumentFragment(); NodeList children = parsed.getDocumentElement().getChildNodes(); // appendChild removes children also from NodeList! while (children.getLength() > 0) { fragment.appendChild(children.item(0)); } return (DocumentFragment) doc.importNode(fragment, true); } } // This does not occur catch (IOException ignore) {} // The XML content is invalid catch (SAXException ignore) {} } // Plain text or invalid XML return null; } }
public class class_name { static final DocumentFragment createContent(Document doc, String text) { // [#150] Text might hold XML content, which can be leniently identified by the presence // of either < or & characters (other entities, like >, ", ' are not stricly XML content) if (text != null && (text.contains("<") || text.contains("&"))) { DocumentBuilder builder = JOOX.builder(); try { // [#128] Trimming will get rid of leading and trailing whitespace, which would // otherwise cause a HIERARCHY_REQUEST_ERR raised by the parser text = text.trim(); // depends on control dependency: [try], data = [none] // There is a processing instruction. We can safely assume // valid XML and parse it as such if (text.startsWith("<?xml")) { Document parsed = builder.parse(new InputSource(new StringReader(text))); DocumentFragment fragment = parsed.createDocumentFragment(); fragment.appendChild(parsed.getDocumentElement()); // depends on control dependency: [if], data = [none] return (DocumentFragment) doc.importNode(fragment, true); // depends on control dependency: [if], data = [none] } // Any XML document fragment. To be on the safe side, fragments // are wrapped in a dummy root node else { String wrapped = "<dummy>" + text + "</dummy>"; Document parsed = builder.parse(new InputSource(new StringReader(wrapped))); DocumentFragment fragment = parsed.createDocumentFragment(); NodeList children = parsed.getDocumentElement().getChildNodes(); // appendChild removes children also from NodeList! 
while (children.getLength() > 0) { fragment.appendChild(children.item(0)); // depends on control dependency: [while], data = [0)] } return (DocumentFragment) doc.importNode(fragment, true); // depends on control dependency: [if], data = [none] } } // This does not occur catch (IOException ignore) {} // depends on control dependency: [catch], data = [none] // The XML content is invalid catch (SAXException ignore) {} // depends on control dependency: [catch], data = [none] } // Plain text or invalid XML return null; } }
public class class_name { protected void updatePluginContainer(PluginContainer value, String xmlTag, Counter counter, Element element) { boolean shouldExist = value != null; Element root = updateElement(counter, element, xmlTag, shouldExist); if (shouldExist) { Counter innerCount = new Counter(counter.getDepth() + 1); iteratePlugin(innerCount, root, value.getPlugins(), "plugins", "plugin"); } } }
public class class_name { protected void updatePluginContainer(PluginContainer value, String xmlTag, Counter counter, Element element) { boolean shouldExist = value != null; Element root = updateElement(counter, element, xmlTag, shouldExist); if (shouldExist) { Counter innerCount = new Counter(counter.getDepth() + 1); iteratePlugin(innerCount, root, value.getPlugins(), "plugins", "plugin"); // depends on control dependency: [if], data = [none] } } }
public class class_name { public void map(final UserResource user, final Principal p, final Credential credential) { user.setUserName(p.getId()); if (credential instanceof UsernamePasswordCredential) { user.setPassword(UsernamePasswordCredential.class.cast(credential).getPassword()); } user.setActive(Boolean.TRUE); user.setNickName(getPrincipalAttributeValue(p, "nickName")); user.setDisplayName(getPrincipalAttributeValue(p, "displayName")); val name = new Name(getPrincipalAttributeValue(p, "formattedName"), getPrincipalAttributeValue(p, "familyName"), getPrincipalAttributeValue(p, "middleName"), getPrincipalAttributeValue(p, "givenName"), getPrincipalAttributeValue(p, "honorificPrefix"), getPrincipalAttributeValue(p, "honorificSuffix")); user.setName(name); val entry = new Entry(getPrincipalAttributeValue(p, "mail"), "primary"); user.setEmails(CollectionUtils.wrap(entry)); val entry2 = new Entry(getPrincipalAttributeValue(p, "phoneNumber"), "primary"); user.setPhoneNumbers(CollectionUtils.wrap(entry2)); } }
public class class_name { public void map(final UserResource user, final Principal p, final Credential credential) { user.setUserName(p.getId()); if (credential instanceof UsernamePasswordCredential) { user.setPassword(UsernamePasswordCredential.class.cast(credential).getPassword()); // depends on control dependency: [if], data = [none] } user.setActive(Boolean.TRUE); user.setNickName(getPrincipalAttributeValue(p, "nickName")); user.setDisplayName(getPrincipalAttributeValue(p, "displayName")); val name = new Name(getPrincipalAttributeValue(p, "formattedName"), getPrincipalAttributeValue(p, "familyName"), getPrincipalAttributeValue(p, "middleName"), getPrincipalAttributeValue(p, "givenName"), getPrincipalAttributeValue(p, "honorificPrefix"), getPrincipalAttributeValue(p, "honorificSuffix")); user.setName(name); val entry = new Entry(getPrincipalAttributeValue(p, "mail"), "primary"); user.setEmails(CollectionUtils.wrap(entry)); val entry2 = new Entry(getPrincipalAttributeValue(p, "phoneNumber"), "primary"); user.setPhoneNumbers(CollectionUtils.wrap(entry2)); } }
public class class_name { private JSONArray executeStatementToJson( PreparedStatement stmt, List<SelectedColumn> selectFields) throws Exception { //logger.info("about to execute: " + stmt.toString()); if( stmt.execute() ) { // There's a ResultSet to be had ResultSet rs = stmt.getResultSet(); JSONArray array = new JSONArray(); try { Map<Integer,JSONObject> contributorMap = new HashMap<Integer,JSONObject>(); while (rs.next()) { JSONObject obj = new JSONObject(); int index = 1; for(int loop=0; loop<selectFields.size(); ++loop) { SelectedColumn selCol = selectFields.get(loop); //logger.info("field: " + selCol.getName() + " (" + selCol.getType() + ")"); if (! "contributor_id".equalsIgnoreCase(selCol.getName())) { if( SelectedColumn.Type.INTEGER == selCol.getType() ) { obj.put(selCol.getName(),rs.getInt(index)); ++index; } else if( SelectedColumn.Type.STRING == selCol.getType() ) { obj.put(selCol.getName(),rs.getString(index)); ++index; } else if ( SelectedColumn.Type.DATE == selCol.getType() ) { Date date = rs.getDate(index); if (null != date) { String dateString = dateFormatter.format(date); obj.put(selCol.getName(), dateString); } ++index; } else { throw new Exception("Unkown selected column type"); } } else { // Convert contributor id into user info int contribId = rs.getInt(index); JSONObject userInfo = fetchContributorFromIdWithCache( contribId, contributorMap); if (null != userInfo) { obj.put("contributor", userInfo); } ++index; } } array.put(obj); } } catch (Exception je) { throw new ServletException("Error while executing statement",je); } return array; } else { // indicates an update count or no results - this must be no results throw new Exception("Query returned no results"); } } }
public class class_name { private JSONArray executeStatementToJson( PreparedStatement stmt, List<SelectedColumn> selectFields) throws Exception { //logger.info("about to execute: " + stmt.toString()); if( stmt.execute() ) { // There's a ResultSet to be had ResultSet rs = stmt.getResultSet(); JSONArray array = new JSONArray(); try { Map<Integer,JSONObject> contributorMap = new HashMap<Integer,JSONObject>(); while (rs.next()) { JSONObject obj = new JSONObject(); int index = 1; for(int loop=0; loop<selectFields.size(); ++loop) { SelectedColumn selCol = selectFields.get(loop); //logger.info("field: " + selCol.getName() + " (" + selCol.getType() + ")"); if (! "contributor_id".equalsIgnoreCase(selCol.getName())) { if( SelectedColumn.Type.INTEGER == selCol.getType() ) { obj.put(selCol.getName(),rs.getInt(index)); // depends on control dependency: [if], data = [none] ++index; // depends on control dependency: [if], data = [none] } else if( SelectedColumn.Type.STRING == selCol.getType() ) { obj.put(selCol.getName(),rs.getString(index)); // depends on control dependency: [if], data = [none] ++index; // depends on control dependency: [if], data = [none] } else if ( SelectedColumn.Type.DATE == selCol.getType() ) { Date date = rs.getDate(index); if (null != date) { String dateString = dateFormatter.format(date); obj.put(selCol.getName(), dateString); // depends on control dependency: [if], data = [none] } ++index; // depends on control dependency: [if], data = [none] } else { throw new Exception("Unkown selected column type"); } } else { // Convert contributor id into user info int contribId = rs.getInt(index); JSONObject userInfo = fetchContributorFromIdWithCache( contribId, contributorMap); if (null != userInfo) { obj.put("contributor", userInfo); // depends on control dependency: [if], data = [userInfo)] } ++index; // depends on control dependency: [if], data = [none] } } array.put(obj); // depends on control dependency: [while], data = [none] } } catch (Exception je) { 
throw new ServletException("Error while executing statement",je); } return array; } else { // indicates an update count or no results - this must be no results throw new Exception("Query returned no results"); } } }
public class class_name { public String get(byte b) { Object o = m_infos.get(key(b)); String o2 = null; if (m_bookInfos != null) o2 = m_bookInfos.get(b); if ((o == null) && (o2 == null)) return null; else { String ret = ""; if (o != null) { ret += (String)o; if (o2 != null) ret += lineSeparator; } if (o2 != null) ret += o2; return ret; } } }
public class class_name { public String get(byte b) { Object o = m_infos.get(key(b)); String o2 = null; if (m_bookInfos != null) o2 = m_bookInfos.get(b); if ((o == null) && (o2 == null)) return null; else { String ret = ""; if (o != null) { ret += (String)o; // depends on control dependency: [if], data = [none] if (o2 != null) ret += lineSeparator; } if (o2 != null) ret += o2; return ret; // depends on control dependency: [if], data = [none] } } }
public class class_name { public ValidationResult validate(X509Certificate[] certChain) { certChainSanityChecks(certChain); String certFingerprint = null; try { certFingerprint = FingerprintHelper .getFingerprint(certChain[certChain.length - 1]); } catch (Throwable t) { String errorMsg = String.format("Error computing fingerprint for " + "certificate: %s. Cause: %s", CertificateUtils.format(certChain[0], FormatMode.COMPACT_ONE_LINE), t.getMessage()); throw new VOMSError(errorMsg, t); } ValidationResult res = getCachedResult(certFingerprint); if (res == null) { res = validator.validate(certChain); validationResultsCache.putIfAbsent(certFingerprint, new CachedValidationResult(certFingerprint, res)); } return res; } }
public class class_name { public ValidationResult validate(X509Certificate[] certChain) { certChainSanityChecks(certChain); String certFingerprint = null; try { certFingerprint = FingerprintHelper .getFingerprint(certChain[certChain.length - 1]); // depends on control dependency: [try], data = [none] } catch (Throwable t) { String errorMsg = String.format("Error computing fingerprint for " + "certificate: %s. Cause: %s", CertificateUtils.format(certChain[0], FormatMode.COMPACT_ONE_LINE), t.getMessage()); throw new VOMSError(errorMsg, t); } // depends on control dependency: [catch], data = [none] ValidationResult res = getCachedResult(certFingerprint); if (res == null) { res = validator.validate(certChain); // depends on control dependency: [if], data = [none] validationResultsCache.putIfAbsent(certFingerprint, new CachedValidationResult(certFingerprint, res)); // depends on control dependency: [if], data = [none] } return res; } }
public class class_name { public Proxy setPort( int port ) { if ( this.port == port ) { return this; } return new Proxy( type, host, port, auth ); } }
public class class_name { public Proxy setPort( int port ) { if ( this.port == port ) { return this; // depends on control dependency: [if], data = [none] } return new Proxy( type, host, port, auth ); } }
public class class_name { @Override public CommerceWishList fetchByPrimaryKey(Serializable primaryKey) { Serializable serializable = entityCache.getResult(CommerceWishListModelImpl.ENTITY_CACHE_ENABLED, CommerceWishListImpl.class, primaryKey); if (serializable == nullModel) { return null; } CommerceWishList commerceWishList = (CommerceWishList)serializable; if (commerceWishList == null) { Session session = null; try { session = openSession(); commerceWishList = (CommerceWishList)session.get(CommerceWishListImpl.class, primaryKey); if (commerceWishList != null) { cacheResult(commerceWishList); } else { entityCache.putResult(CommerceWishListModelImpl.ENTITY_CACHE_ENABLED, CommerceWishListImpl.class, primaryKey, nullModel); } } catch (Exception e) { entityCache.removeResult(CommerceWishListModelImpl.ENTITY_CACHE_ENABLED, CommerceWishListImpl.class, primaryKey); throw processException(e); } finally { closeSession(session); } } return commerceWishList; } }
public class class_name { @Override public CommerceWishList fetchByPrimaryKey(Serializable primaryKey) { Serializable serializable = entityCache.getResult(CommerceWishListModelImpl.ENTITY_CACHE_ENABLED, CommerceWishListImpl.class, primaryKey); if (serializable == nullModel) { return null; // depends on control dependency: [if], data = [none] } CommerceWishList commerceWishList = (CommerceWishList)serializable; if (commerceWishList == null) { Session session = null; try { session = openSession(); // depends on control dependency: [try], data = [none] commerceWishList = (CommerceWishList)session.get(CommerceWishListImpl.class, primaryKey); // depends on control dependency: [try], data = [none] if (commerceWishList != null) { cacheResult(commerceWishList); // depends on control dependency: [if], data = [(commerceWishList] } else { entityCache.putResult(CommerceWishListModelImpl.ENTITY_CACHE_ENABLED, CommerceWishListImpl.class, primaryKey, nullModel); // depends on control dependency: [if], data = [none] } } catch (Exception e) { entityCache.removeResult(CommerceWishListModelImpl.ENTITY_CACHE_ENABLED, CommerceWishListImpl.class, primaryKey); throw processException(e); } // depends on control dependency: [catch], data = [none] finally { closeSession(session); } } return commerceWishList; } }
public class class_name { public static List<LogLine> setConsumer(Consumer<LogLine> subscriber) { // Switch to the new subscriber LOG_SUBSCRIBER = subscriber; // If there are queued messages (if this is the first consumer to register) then we should take a copy for them if (QUEUE != null) { List<LogLine> queued = new ArrayList<>(QUEUE); QUEUE = null; return queued; } else { return Collections.emptyList(); } } }
public class class_name { public static List<LogLine> setConsumer(Consumer<LogLine> subscriber) { // Switch to the new subscriber LOG_SUBSCRIBER = subscriber; // If there are queued messages (if this is the first consumer to register) then we should take a copy for them if (QUEUE != null) { List<LogLine> queued = new ArrayList<>(QUEUE); QUEUE = null; // depends on control dependency: [if], data = [none] return queued; // depends on control dependency: [if], data = [none] } else { return Collections.emptyList(); // depends on control dependency: [if], data = [none] } } }
public class class_name { @Override public String getBrowserVersion() { if (wd != null) { Capabilities capabilities = ((HasCapabilities) wd) .getCapabilities(); if (capabilities != null) { return capabilities.getVersion(); } return null; } return null; } }
public class class_name { @Override public String getBrowserVersion() { if (wd != null) { Capabilities capabilities = ((HasCapabilities) wd) .getCapabilities(); if (capabilities != null) { return capabilities.getVersion(); // depends on control dependency: [if], data = [none] } return null; // depends on control dependency: [if], data = [none] } return null; } }
public class class_name { @Override public EClass getIfcSlippageConnectionCondition() { if (ifcSlippageConnectionConditionEClass == null) { ifcSlippageConnectionConditionEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI) .getEClassifiers().get(606); } return ifcSlippageConnectionConditionEClass; } }
public class class_name { @Override public EClass getIfcSlippageConnectionCondition() { if (ifcSlippageConnectionConditionEClass == null) { ifcSlippageConnectionConditionEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI) .getEClassifiers().get(606); // depends on control dependency: [if], data = [none] } return ifcSlippageConnectionConditionEClass; } }
public class class_name { private synchronized void onContextReady(final ContextStatusPOJO contextStatus, final boolean notifyClientOnNewActiveContext) { assert ContextState.READY == contextStatus.getContextState(); final String contextID = contextStatus.getContextId(); // This could be the first message we get from that context if (this.isUnknownContextId(contextID)) { this.onNewContext(contextStatus, notifyClientOnNewActiveContext); } // Dispatch the messages to the application, if there are any. for (final ContextMessagePOJO contextMessage : contextStatus.getContextMessageList()) { final byte[] theMessage = contextMessage.getMessage(); final String sourceID = contextMessage.getSourceId(); final long sequenceNumber = contextMessage.getSequenceNumber(); this.messageDispatcher.onContextMessage(new ContextMessageImpl(theMessage, contextID, sourceID, sequenceNumber)); } } }
public class class_name { private synchronized void onContextReady(final ContextStatusPOJO contextStatus, final boolean notifyClientOnNewActiveContext) { assert ContextState.READY == contextStatus.getContextState(); final String contextID = contextStatus.getContextId(); // This could be the first message we get from that context if (this.isUnknownContextId(contextID)) { this.onNewContext(contextStatus, notifyClientOnNewActiveContext); // depends on control dependency: [if], data = [none] } // Dispatch the messages to the application, if there are any. for (final ContextMessagePOJO contextMessage : contextStatus.getContextMessageList()) { final byte[] theMessage = contextMessage.getMessage(); final String sourceID = contextMessage.getSourceId(); final long sequenceNumber = contextMessage.getSequenceNumber(); this.messageDispatcher.onContextMessage(new ContextMessageImpl(theMessage, contextID, sourceID, sequenceNumber)); // depends on control dependency: [for], data = [none] } } }
public class class_name { public void adjustToParentHeight() { Element tableElt = getElement(); Element wrapper = null; Element scrollBody = null; Element elt = tableElt.getParentElement(); while (elt != null) { if (scrollBody == null) { if (JQMCommon.hasStyle(elt, SCROLL_BODY)) { scrollBody = elt; } } else if (wrapper == null) { if (JQMCommon.hasStyle(elt, WRAPPER)) { wrapper = elt; break; } } elt = elt.getParentElement(); } if (wrapper != null && scrollBody != null) { Element wrapParent = wrapper.getParentElement(); if (wrapParent != null) { int h = wrapParent.getClientHeight(); int wrapH = wrapper.getOffsetHeight(); String s = scrollBody.getStyle().getHeight(); s = StrUtils.getDigitsOnly(s); if (!Empty.is(s)) { int scrollBodyH = Integer.parseInt(s); int newH = (h - wrapH) + scrollBodyH - 1; if (newH < 0) newH = 0; if (scrollYnum > 0 && newH < scrollYnum) newH = scrollYnum; scrollBody.getStyle().setHeight(newH, Unit.PX); } } } } }
public class class_name { public void adjustToParentHeight() { Element tableElt = getElement(); Element wrapper = null; Element scrollBody = null; Element elt = tableElt.getParentElement(); while (elt != null) { if (scrollBody == null) { if (JQMCommon.hasStyle(elt, SCROLL_BODY)) { scrollBody = elt; // depends on control dependency: [if], data = [none] } } else if (wrapper == null) { if (JQMCommon.hasStyle(elt, WRAPPER)) { wrapper = elt; // depends on control dependency: [if], data = [none] break; } } elt = elt.getParentElement(); // depends on control dependency: [while], data = [none] } if (wrapper != null && scrollBody != null) { Element wrapParent = wrapper.getParentElement(); if (wrapParent != null) { int h = wrapParent.getClientHeight(); int wrapH = wrapper.getOffsetHeight(); String s = scrollBody.getStyle().getHeight(); s = StrUtils.getDigitsOnly(s); // depends on control dependency: [if], data = [none] if (!Empty.is(s)) { int scrollBodyH = Integer.parseInt(s); int newH = (h - wrapH) + scrollBodyH - 1; if (newH < 0) newH = 0; if (scrollYnum > 0 && newH < scrollYnum) newH = scrollYnum; scrollBody.getStyle().setHeight(newH, Unit.PX); // depends on control dependency: [if], data = [none] } } } } }
public class class_name { public static Object runGetter(Object object, Field field) throws IllegalAccessException, IllegalArgumentException, InvocationTargetException { final Class<?> clazz = object.getClass(); final String fieldName = field.getName(); Map<String, Method> fieldMethodMap = METHOD_FIELD_MAP_CACHE.get(object.getClass().getCanonicalName()); if (fieldMethodMap != null) { final Method fieldMethod = fieldMethodMap.get(fieldName); if (fieldMethod != null) { return fieldMethod.invoke(object); } } else { fieldMethodMap = new HashMap<>(); } try { final Method method = clazz.getMethod(JStringUtils.GET + JStringUtils.upperCaseFirst(fieldName), null); fieldMethodMap.put(fieldName, method); METHOD_FIELD_MAP_CACHE.put(object.getClass().getCanonicalName(), fieldMethodMap); return method.invoke(object); } catch (Exception e) { // Swallow exception so that we loop through the rest. } for (Method method : clazz.getMethods()) { final String methodName = method.getName(); if (((methodName.startsWith(JStringUtils.GET)) && (methodName.length() == (fieldName.length() + JStringUtils.GET.length()))) || ((methodName.startsWith(JStringUtils.IS)) && (methodName.length() == (fieldName.length() + JStringUtils.IS.length())))) { if (methodName.toLowerCase().endsWith(fieldName.toLowerCase())) { fieldMethodMap.put(fieldName, method); METHOD_FIELD_MAP_CACHE.put(object.getClass().getCanonicalName(), fieldMethodMap); return method.invoke(object); } } } return null; } }
public class class_name { public static Object runGetter(Object object, Field field) throws IllegalAccessException, IllegalArgumentException, InvocationTargetException { final Class<?> clazz = object.getClass(); final String fieldName = field.getName(); Map<String, Method> fieldMethodMap = METHOD_FIELD_MAP_CACHE.get(object.getClass().getCanonicalName()); if (fieldMethodMap != null) { final Method fieldMethod = fieldMethodMap.get(fieldName); if (fieldMethod != null) { return fieldMethod.invoke(object); } } else { fieldMethodMap = new HashMap<>(); } try { final Method method = clazz.getMethod(JStringUtils.GET + JStringUtils.upperCaseFirst(fieldName), null); fieldMethodMap.put(fieldName, method); METHOD_FIELD_MAP_CACHE.put(object.getClass().getCanonicalName(), fieldMethodMap); return method.invoke(object); } catch (Exception e) { // Swallow exception so that we loop through the rest. } for (Method method : clazz.getMethods()) { final String methodName = method.getName(); if (((methodName.startsWith(JStringUtils.GET)) && (methodName.length() == (fieldName.length() + JStringUtils.GET.length()))) || ((methodName.startsWith(JStringUtils.IS)) && (methodName.length() == (fieldName.length() + JStringUtils.IS.length())))) { if (methodName.toLowerCase().endsWith(fieldName.toLowerCase())) { fieldMethodMap.put(fieldName, method); // depends on control dependency: [if], data = [none] METHOD_FIELD_MAP_CACHE.put(object.getClass().getCanonicalName(), fieldMethodMap); // depends on control dependency: [if], data = [none] return method.invoke(object); // depends on control dependency: [if], data = [none] } } } return null; } }
public class class_name { public List<String> getPeers() { final URI uri = createURI("/peers"); final HttpRequestBuilder httpRequestBuilder = RequestUtils .getHttpRequestBuilder(null, null, RequestOptions.BLANK, ""); final HTTP.Response httpResponse = HTTP.getResponse(uri + "?" + httpRequestBuilder.paramString()); if (httpResponse.code() != 200) { die("Unable to get the peers", uri, httpResponse.code(), httpResponse.body()); } return fromJsonArray(httpResponse.body(), String.class); } }
public class class_name { public List<String> getPeers() { final URI uri = createURI("/peers"); final HttpRequestBuilder httpRequestBuilder = RequestUtils .getHttpRequestBuilder(null, null, RequestOptions.BLANK, ""); final HTTP.Response httpResponse = HTTP.getResponse(uri + "?" + httpRequestBuilder.paramString()); if (httpResponse.code() != 200) { die("Unable to get the peers", uri, httpResponse.code(), httpResponse.body()); // depends on control dependency: [if], data = [none] } return fromJsonArray(httpResponse.body(), String.class); } }
public class class_name { private void unhookFromTrigger(final T trigger) { // Unhook from trigger final TriggerListener triggerAdapter = triggersToTriggerAdapters.get(trigger); trigger.removeTriggerListener(triggerAdapter); // Check if trigger was added several times if (!triggersToTriggerAdapters.containsKey(trigger)) { // All occurrences of the same trigger have been removed triggersToTriggerAdapters.remove(trigger); } } }
public class class_name { private void unhookFromTrigger(final T trigger) { // Unhook from trigger final TriggerListener triggerAdapter = triggersToTriggerAdapters.get(trigger); trigger.removeTriggerListener(triggerAdapter); // Check if trigger was added several times if (!triggersToTriggerAdapters.containsKey(trigger)) { // All occurrences of the same trigger have been removed triggersToTriggerAdapters.remove(trigger); // depends on control dependency: [if], data = [none] } } }
public class class_name { public static String getChecksum(String url, ResourceReaderHandler rsReader, JawrConfig jawrConfig) throws IOException, ResourceNotFoundException { String checksum = null; InputStream is = null; boolean generatedBinaryResource = jawrConfig.getGeneratorRegistry().isGeneratedBinaryResource(url); try { if (!generatedBinaryResource) { url = PathNormalizer.asPath(url); } is = rsReader.getResourceAsStream(url); if (is != null) { checksum = CheckSumUtils.getChecksum(is, jawrConfig.getBinaryHashAlgorithm()); } else { throw new ResourceNotFoundException(url); } } catch (FileNotFoundException e) { throw new ResourceNotFoundException(url); } finally { IOUtils.close(is); } return checksum; } }
public class class_name { public static String getChecksum(String url, ResourceReaderHandler rsReader, JawrConfig jawrConfig) throws IOException, ResourceNotFoundException { String checksum = null; InputStream is = null; boolean generatedBinaryResource = jawrConfig.getGeneratorRegistry().isGeneratedBinaryResource(url); try { if (!generatedBinaryResource) { url = PathNormalizer.asPath(url); // depends on control dependency: [if], data = [none] } is = rsReader.getResourceAsStream(url); if (is != null) { checksum = CheckSumUtils.getChecksum(is, jawrConfig.getBinaryHashAlgorithm()); // depends on control dependency: [if], data = [(is] } else { throw new ResourceNotFoundException(url); } } catch (FileNotFoundException e) { throw new ResourceNotFoundException(url); } finally { IOUtils.close(is); } return checksum; } }
public class class_name { public void writeFields (Object object) { Class type = object.getClass(); Object[] defaultValues = getDefaultValues(type); OrderedMap<String, FieldMetadata> fields = getFields(type); int i = 0; for (FieldMetadata metadata : new OrderedMapValues<FieldMetadata>(fields)) { Field field = metadata.field; try { Object value = field.get(object); if (defaultValues != null) { Object defaultValue = defaultValues[i++]; if (value == null && defaultValue == null) continue; if (value != null && defaultValue != null) { if (value.equals(defaultValue)) continue; if (value.getClass().isArray() && defaultValue.getClass().isArray()) { equals1[0] = value; equals2[0] = defaultValue; if (Arrays.deepEquals(equals1, equals2)) continue; } } } if (debug) System.out.println("Writing field: " + field.getName() + " (" + type.getName() + ")"); writer.name(field.getName()); writeValue(value, field.getType(), metadata.elementType); } catch (IllegalAccessException ex) { throw new JsonException("Error accessing field: " + field.getName() + " (" + type.getName() + ")", ex); } catch (JsonException ex) { ex.addTrace(field + " (" + type.getName() + ")"); throw ex; } catch (Exception runtimeEx) { JsonException ex = new JsonException(runtimeEx); ex.addTrace(field + " (" + type.getName() + ")"); throw ex; } } } }
public class class_name { public void writeFields (Object object) { Class type = object.getClass(); Object[] defaultValues = getDefaultValues(type); OrderedMap<String, FieldMetadata> fields = getFields(type); int i = 0; for (FieldMetadata metadata : new OrderedMapValues<FieldMetadata>(fields)) { Field field = metadata.field; try { Object value = field.get(object); if (defaultValues != null) { Object defaultValue = defaultValues[i++]; if (value == null && defaultValue == null) continue; if (value != null && defaultValue != null) { if (value.equals(defaultValue)) continue; if (value.getClass().isArray() && defaultValue.getClass().isArray()) { equals1[0] = value; // depends on control dependency: [if], data = [none] equals2[0] = defaultValue; // depends on control dependency: [if], data = [none] if (Arrays.deepEquals(equals1, equals2)) continue; } } } if (debug) System.out.println("Writing field: " + field.getName() + " (" + type.getName() + ")"); writer.name(field.getName()); writeValue(value, field.getType(), metadata.elementType); } catch (IllegalAccessException ex) { throw new JsonException("Error accessing field: " + field.getName() + " (" + type.getName() + ")", ex); } catch (JsonException ex) { ex.addTrace(field + " (" + type.getName() + ")"); throw ex; } catch (Exception runtimeEx) { JsonException ex = new JsonException(runtimeEx); ex.addTrace(field + " (" + type.getName() + ")"); throw ex; } } } }
public class class_name { protected void populateTasksByStealer(List<StealerBasedRebalanceTask> sbTaskList) { // Setup mapping of stealers to work for this run. for(StealerBasedRebalanceTask task: sbTaskList) { if(task.getStealInfos().size() != 1) { throw new VoldemortException("StealerBasedRebalanceTasks should have a list of RebalancePartitionsInfo of length 1."); } RebalanceTaskInfo stealInfo = task.getStealInfos().get(0); int stealerId = stealInfo.getStealerId(); if(!this.tasksByStealer.containsKey(stealerId)) { this.tasksByStealer.put(stealerId, new ArrayList<StealerBasedRebalanceTask>()); } this.tasksByStealer.get(stealerId).add(task); } if(tasksByStealer.isEmpty()) { return; } // Shuffle order of each stealer's work list. This randomization // helps to get rid of any "patterns" in how rebalancing tasks were // added to the task list passed in. for(List<StealerBasedRebalanceTask> taskList: tasksByStealer.values()) { Collections.shuffle(taskList); } } }
public class class_name { protected void populateTasksByStealer(List<StealerBasedRebalanceTask> sbTaskList) { // Setup mapping of stealers to work for this run. for(StealerBasedRebalanceTask task: sbTaskList) { if(task.getStealInfos().size() != 1) { throw new VoldemortException("StealerBasedRebalanceTasks should have a list of RebalancePartitionsInfo of length 1."); } RebalanceTaskInfo stealInfo = task.getStealInfos().get(0); int stealerId = stealInfo.getStealerId(); if(!this.tasksByStealer.containsKey(stealerId)) { this.tasksByStealer.put(stealerId, new ArrayList<StealerBasedRebalanceTask>()); // depends on control dependency: [if], data = [none] } this.tasksByStealer.get(stealerId).add(task); // depends on control dependency: [for], data = [task] } if(tasksByStealer.isEmpty()) { return; // depends on control dependency: [if], data = [none] } // Shuffle order of each stealer's work list. This randomization // helps to get rid of any "patterns" in how rebalancing tasks were // added to the task list passed in. for(List<StealerBasedRebalanceTask> taskList: tasksByStealer.values()) { Collections.shuffle(taskList); // depends on control dependency: [for], data = [taskList] } } }
public class class_name { static Schema createPublicSchema(SqlgGraph sqlgGraph, Topology topology, String publicSchemaName) { Schema schema = new Schema(topology, publicSchemaName); if (!existPublicSchema(sqlgGraph)) { schema.createSchemaOnDb(); } schema.committed = false; return schema; } }
public class class_name { static Schema createPublicSchema(SqlgGraph sqlgGraph, Topology topology, String publicSchemaName) { Schema schema = new Schema(topology, publicSchemaName); if (!existPublicSchema(sqlgGraph)) { schema.createSchemaOnDb(); // depends on control dependency: [if], data = [none] } schema.committed = false; return schema; } }
public class class_name { @Override public IReactionSet initiate(IAtomContainerSet reactants, IAtomContainerSet agents) throws CDKException { logger.debug("initiate reaction: SharingLonePairReaction"); if (reactants.getAtomContainerCount() != 1) { throw new CDKException("SharingLonePairReaction only expects one reactant"); } if (agents != null) { throw new CDKException("SharingLonePairReaction don't expects agents"); } IReactionSet setOfReactions = reactants.getBuilder().newInstance(IReactionSet.class); IAtomContainer reactant = reactants.getAtomContainer(0); /* * if the parameter hasActiveCenter is not fixed yet, set the active * centers */ IParameterReact ipr = super.getParameterClass(SetReactionCenter.class); if (ipr != null && !ipr.isSetParameter()) setActiveCenters(reactant); Iterator<IAtom> atomis = reactant.atoms().iterator(); while (atomis.hasNext()) { IAtom atomi = atomis.next(); if (atomi.getFlag(CDKConstants.REACTIVE_CENTER) && atomi.getFormalCharge() == 0 && reactant.getConnectedSingleElectronsCount(atomi) == 0 && reactant.getConnectedLonePairsList(atomi).size() > 0) { Iterator<IBond> bondis = reactant.getConnectedBondsList(atomi).iterator(); while (bondis.hasNext()) { IBond bondi = bondis.next(); if (bondi.getFlag(CDKConstants.REACTIVE_CENTER) && bondi.getOrder() == IBond.Order.SINGLE) { IAtom atomj = bondi.getOther(atomi); if (atomj.getFlag(CDKConstants.REACTIVE_CENTER) && atomj.getFormalCharge() == 1 && reactant.getConnectedSingleElectronsCount(atomj) == 0) { ArrayList<IAtom> atomList = new ArrayList<IAtom>(); atomList.add(atomi); atomList.add(atomj); ArrayList<IBond> bondList = new ArrayList<IBond>(); bondList.add(bondi); IAtomContainerSet moleculeSet = reactant.getBuilder().newInstance(IAtomContainerSet.class); moleculeSet.addAtomContainer(reactant); IReaction reaction = mechanism.initiate(moleculeSet, atomList, bondList); if (reaction == null) continue; else setOfReactions.addReaction(reaction); } } } } } return setOfReactions; } }
public class class_name { @Override public IReactionSet initiate(IAtomContainerSet reactants, IAtomContainerSet agents) throws CDKException { logger.debug("initiate reaction: SharingLonePairReaction"); if (reactants.getAtomContainerCount() != 1) { throw new CDKException("SharingLonePairReaction only expects one reactant"); } if (agents != null) { throw new CDKException("SharingLonePairReaction don't expects agents"); } IReactionSet setOfReactions = reactants.getBuilder().newInstance(IReactionSet.class); IAtomContainer reactant = reactants.getAtomContainer(0); /* * if the parameter hasActiveCenter is not fixed yet, set the active * centers */ IParameterReact ipr = super.getParameterClass(SetReactionCenter.class); if (ipr != null && !ipr.isSetParameter()) setActiveCenters(reactant); Iterator<IAtom> atomis = reactant.atoms().iterator(); while (atomis.hasNext()) { IAtom atomi = atomis.next(); if (atomi.getFlag(CDKConstants.REACTIVE_CENTER) && atomi.getFormalCharge() == 0 && reactant.getConnectedSingleElectronsCount(atomi) == 0 && reactant.getConnectedLonePairsList(atomi).size() > 0) { Iterator<IBond> bondis = reactant.getConnectedBondsList(atomi).iterator(); while (bondis.hasNext()) { IBond bondi = bondis.next(); if (bondi.getFlag(CDKConstants.REACTIVE_CENTER) && bondi.getOrder() == IBond.Order.SINGLE) { IAtom atomj = bondi.getOther(atomi); if (atomj.getFlag(CDKConstants.REACTIVE_CENTER) && atomj.getFormalCharge() == 1 && reactant.getConnectedSingleElectronsCount(atomj) == 0) { ArrayList<IAtom> atomList = new ArrayList<IAtom>(); atomList.add(atomi); // depends on control dependency: [if], data = [none] atomList.add(atomj); // depends on control dependency: [if], data = [none] ArrayList<IBond> bondList = new ArrayList<IBond>(); bondList.add(bondi); // depends on control dependency: [if], data = [none] IAtomContainerSet moleculeSet = reactant.getBuilder().newInstance(IAtomContainerSet.class); moleculeSet.addAtomContainer(reactant); // depends on control dependency: [if], 
data = [none] IReaction reaction = mechanism.initiate(moleculeSet, atomList, bondList); if (reaction == null) continue; else setOfReactions.addReaction(reaction); } } } } } return setOfReactions; } }
public class class_name { public static void visitStatistics (@Nonnull final IStatisticsVisitorCallback aCallback) { ValueEnforcer.notNull (aCallback, "Callback"); // For all cache handler ICommonsList <String> aHandlers = StatisticsManager.getAllCacheHandler ().getSorted (Comparator.naturalOrder ()); for (final String sName : aHandlers) { final IStatisticsHandlerCache aHandler = StatisticsManager.getCacheHandler (sName); aCallback.onCache (sName, aHandler); } // For all timer handler aHandlers = StatisticsManager.getAllTimerHandler ().getSorted (Comparator.naturalOrder ()); for (final String sName : aHandlers) { final IStatisticsHandlerTimer aHandler = StatisticsManager.getTimerHandler (sName); aCallback.onTimer (sName, aHandler); } // For all keyed timer handler aHandlers = StatisticsManager.getAllKeyedTimerHandler ().getSorted (Comparator.naturalOrder ()); for (final String sName : aHandlers) { final IStatisticsHandlerKeyedTimer aHandler = StatisticsManager.getKeyedTimerHandler (sName); aCallback.onKeyedTimer (sName, aHandler); } // For all size handler aHandlers = StatisticsManager.getAllSizeHandler ().getSorted (Comparator.naturalOrder ()); for (final String sName : aHandlers) { final IStatisticsHandlerSize aHandler = StatisticsManager.getSizeHandler (sName); aCallback.onSize (sName, aHandler); } // For all keyed size handler aHandlers = StatisticsManager.getAllKeyedSizeHandler ().getSorted (Comparator.naturalOrder ()); for (final String sName : aHandlers) { final IStatisticsHandlerKeyedSize aHandler = StatisticsManager.getKeyedSizeHandler (sName); aCallback.onKeyedSize (sName, aHandler); } // For all counter handler aHandlers = StatisticsManager.getAllCounterHandler ().getSorted (Comparator.naturalOrder ()); for (final String sName : aHandlers) { final IStatisticsHandlerCounter aHandler = StatisticsManager.getCounterHandler (sName); aCallback.onCounter (sName, aHandler); } // For all keyed counter handler aHandlers = 
StatisticsManager.getAllKeyedCounterHandler ().getSorted (Comparator.naturalOrder ()); for (final String sName : aHandlers) { final IStatisticsHandlerKeyedCounter aHandler = StatisticsManager.getKeyedCounterHandler (sName); aCallback.onKeyedCounter (sName, aHandler); } } }
public class class_name { public static void visitStatistics (@Nonnull final IStatisticsVisitorCallback aCallback) { ValueEnforcer.notNull (aCallback, "Callback"); // For all cache handler ICommonsList <String> aHandlers = StatisticsManager.getAllCacheHandler ().getSorted (Comparator.naturalOrder ()); for (final String sName : aHandlers) { final IStatisticsHandlerCache aHandler = StatisticsManager.getCacheHandler (sName); aCallback.onCache (sName, aHandler); // depends on control dependency: [for], data = [sName] } // For all timer handler aHandlers = StatisticsManager.getAllTimerHandler ().getSorted (Comparator.naturalOrder ()); for (final String sName : aHandlers) { final IStatisticsHandlerTimer aHandler = StatisticsManager.getTimerHandler (sName); aCallback.onTimer (sName, aHandler); // depends on control dependency: [for], data = [sName] } // For all keyed timer handler aHandlers = StatisticsManager.getAllKeyedTimerHandler ().getSorted (Comparator.naturalOrder ()); for (final String sName : aHandlers) { final IStatisticsHandlerKeyedTimer aHandler = StatisticsManager.getKeyedTimerHandler (sName); aCallback.onKeyedTimer (sName, aHandler); // depends on control dependency: [for], data = [sName] } // For all size handler aHandlers = StatisticsManager.getAllSizeHandler ().getSorted (Comparator.naturalOrder ()); for (final String sName : aHandlers) { final IStatisticsHandlerSize aHandler = StatisticsManager.getSizeHandler (sName); aCallback.onSize (sName, aHandler); // depends on control dependency: [for], data = [sName] } // For all keyed size handler aHandlers = StatisticsManager.getAllKeyedSizeHandler ().getSorted (Comparator.naturalOrder ()); for (final String sName : aHandlers) { final IStatisticsHandlerKeyedSize aHandler = StatisticsManager.getKeyedSizeHandler (sName); aCallback.onKeyedSize (sName, aHandler); // depends on control dependency: [for], data = [sName] } // For all counter handler aHandlers = StatisticsManager.getAllCounterHandler ().getSorted 
(Comparator.naturalOrder ()); for (final String sName : aHandlers) { final IStatisticsHandlerCounter aHandler = StatisticsManager.getCounterHandler (sName); aCallback.onCounter (sName, aHandler); // depends on control dependency: [for], data = [sName] } // For all keyed counter handler aHandlers = StatisticsManager.getAllKeyedCounterHandler ().getSorted (Comparator.naturalOrder ()); for (final String sName : aHandlers) { final IStatisticsHandlerKeyedCounter aHandler = StatisticsManager.getKeyedCounterHandler (sName); aCallback.onKeyedCounter (sName, aHandler); // depends on control dependency: [for], data = [sName] } } }
public class class_name { Reference getInitializingReferenceForConstants() { int size = references.size(); for (int i = 0; i < size; i++) { if (isInitializingDeclarationAt(i) || isInitializingAssignmentAt(i)) { return references.get(i); } } return null; } }
public class class_name { Reference getInitializingReferenceForConstants() { int size = references.size(); for (int i = 0; i < size; i++) { if (isInitializingDeclarationAt(i) || isInitializingAssignmentAt(i)) { return references.get(i); // depends on control dependency: [if], data = [none] } } return null; } }
public class class_name { public static void fire(final HasRenderedHandlers source) { if (TYPE != null) { RenderedEvent event = new RenderedEvent(); source.fireEvent(event); } } }
public class class_name { public static void fire(final HasRenderedHandlers source) { if (TYPE != null) { RenderedEvent event = new RenderedEvent(); source.fireEvent(event); // depends on control dependency: [if], data = [none] } } }
public class class_name { @Override public String getNamingContextId() { boolean append = isAppendID(); if (!append) { // Check if this is the top level name context NamingContextable top = WebUtilities.getParentNamingContext(this); if (top != null) { // Not top context, so always append append = true; } } if (append) { return getId(); } else { return ""; } } }
public class class_name { @Override public String getNamingContextId() { boolean append = isAppendID(); if (!append) { // Check if this is the top level name context NamingContextable top = WebUtilities.getParentNamingContext(this); if (top != null) { // Not top context, so always append append = true; // depends on control dependency: [if], data = [none] } } if (append) { return getId(); // depends on control dependency: [if], data = [none] } else { return ""; // depends on control dependency: [if], data = [none] } } }
public class class_name { @Override public void showShareDialog(final Activity context, final Entity entity, int options, final SocialNetworkShareListener socialNetworkListener, final ShareDialogListener dialogListener) { shareDialogFactory.show(context, entity, socialNetworkListener, new ShareDialogListener() { SharePanelView dialogView; @Override public void onCancel(Dialog dialog) { if(dialogListener != null) { dialogListener.onCancel(dialog); } } @Override public void onShow(Dialog dialog, SharePanelView dialogView) { if(dialogListener != null) { dialogListener.onShow(dialog, dialogView); } this.dialogView = dialogView; } @Override public void onSimpleShare(ShareType type) { if(dialogListener != null) { dialogListener.onSimpleShare(type); } } @Override public void onFlowInterrupted(DialogFlowController controller) { // Will not be called. } @Override public boolean onContinue(final Dialog dialog, boolean remember, final SocialNetwork... networks) { boolean consumed = false; if(dialogListener != null) { consumed = dialogListener.onContinue(dialog, remember, networks); } if(!consumed) { doShare(dialog, context, entity, socialNetworkListener, networks); } else { dialogListener.onFlowInterrupted(new DialogFlowController() { @Override public void onContinue(String text) { ShareOptions options = new ShareOptions(); options.setText(text); doShare(dialog, context, entity, socialNetworkListener, options, networks); } @Override public void onCancel() { if(dialogView != null) { dialogView.getContinueButton().setEnabled(true); } dialogListener.onCancel(dialog); } }); } return false; } }, options); } }
public class class_name { @Override public void showShareDialog(final Activity context, final Entity entity, int options, final SocialNetworkShareListener socialNetworkListener, final ShareDialogListener dialogListener) { shareDialogFactory.show(context, entity, socialNetworkListener, new ShareDialogListener() { SharePanelView dialogView; @Override public void onCancel(Dialog dialog) { if(dialogListener != null) { dialogListener.onCancel(dialog); // depends on control dependency: [if], data = [none] } } @Override public void onShow(Dialog dialog, SharePanelView dialogView) { if(dialogListener != null) { dialogListener.onShow(dialog, dialogView); // depends on control dependency: [if], data = [none] } this.dialogView = dialogView; } @Override public void onSimpleShare(ShareType type) { if(dialogListener != null) { dialogListener.onSimpleShare(type); // depends on control dependency: [if], data = [none] } } @Override public void onFlowInterrupted(DialogFlowController controller) { // Will not be called. } @Override public boolean onContinue(final Dialog dialog, boolean remember, final SocialNetwork... networks) { boolean consumed = false; if(dialogListener != null) { consumed = dialogListener.onContinue(dialog, remember, networks); // depends on control dependency: [if], data = [none] } if(!consumed) { doShare(dialog, context, entity, socialNetworkListener, networks); // depends on control dependency: [if], data = [none] } else { dialogListener.onFlowInterrupted(new DialogFlowController() { @Override public void onContinue(String text) { ShareOptions options = new ShareOptions(); options.setText(text); doShare(dialog, context, entity, socialNetworkListener, options, networks); } @Override public void onCancel() { if(dialogView != null) { dialogView.getContinueButton().setEnabled(true); // depends on control dependency: [if], data = [none] } dialogListener.onCancel(dialog); } }); // depends on control dependency: [if], data = [none] } return false; } }, options); } }
public class class_name { @Override public JSONObject handleEvent(SlackEvent event) throws ServiceException { if (event.getCallbackId() != null && event.getCallbackId().indexOf('_') > 0 && event.getCallbackId().indexOf('/') > 0 && event.getTs() != null) { Long instanceId = Long.parseLong(event.getCallbackId().substring(event.getCallbackId().lastIndexOf('/') + 1)); new Thread(() -> { Map<String,String> indexes = new HashMap<>(); logger.debug("Saving slack:message_ts=" + event.getTs() + " for task " + instanceId); indexes.put("slack:message_ts", event.getTs()); try { ServiceLocator.getTaskServices().updateIndexes(instanceId, indexes); } catch (Exception ex) { logger.severeException("Error updating indexes for task " + instanceId + ": " + ex, ex); } }).start(); } else if (event.getThreadTs() != null && event.getUser() != null) { new Thread(() -> { try { TaskServices taskServices = ServiceLocator.getTaskServices(); Query query = new Query(); query.setFilter("index", "slack:message_ts=" + event.getThreadTs()); List<TaskInstance> instances = taskServices.getTasks(query).getTasks(); for (TaskInstance instance : instances) { // add a corresponding note Comment comment = new Comment(); comment.setCreated(Date.from(Instant.now())); // TODO: lookup (or cache) users comment.setCreateUser(event.getUser().equals("U4V5SG5PU") ? "Donald Oakes" : event.getUser()); comment.setContent(event.getText()); comment.setOwnerType(OwnerType.TASK_INSTANCE); comment.setOwnerId(instance.getTaskInstanceId()); comment.setName("slack_message"); ServiceLocator.getCollaborationServices().createComment(comment); } } catch (Exception ex) { logger.severeException(ex.getMessage(), ex); } }).start(); } return null; } }
public class class_name { @Override public JSONObject handleEvent(SlackEvent event) throws ServiceException { if (event.getCallbackId() != null && event.getCallbackId().indexOf('_') > 0 && event.getCallbackId().indexOf('/') > 0 && event.getTs() != null) { Long instanceId = Long.parseLong(event.getCallbackId().substring(event.getCallbackId().lastIndexOf('/') + 1)); new Thread(() -> { Map<String,String> indexes = new HashMap<>(); logger.debug("Saving slack:message_ts=" + event.getTs() + " for task " + instanceId); indexes.put("slack:message_ts", event.getTs()); try { ServiceLocator.getTaskServices().updateIndexes(instanceId, indexes); // depends on control dependency: [try], data = [none] } catch (Exception ex) { logger.severeException("Error updating indexes for task " + instanceId + ": " + ex, ex); } // depends on control dependency: [catch], data = [none] }).start(); } else if (event.getThreadTs() != null && event.getUser() != null) { new Thread(() -> { try { TaskServices taskServices = ServiceLocator.getTaskServices(); Query query = new Query(); query.setFilter("index", "slack:message_ts=" + event.getThreadTs()); List<TaskInstance> instances = taskServices.getTasks(query).getTasks(); for (TaskInstance instance : instances) { // add a corresponding note Comment comment = new Comment(); comment.setCreated(Date.from(Instant.now())); // TODO: lookup (or cache) users comment.setCreateUser(event.getUser().equals("U4V5SG5PU") ? "Donald Oakes" : event.getUser()); comment.setContent(event.getText()); comment.setOwnerType(OwnerType.TASK_INSTANCE); comment.setOwnerId(instance.getTaskInstanceId()); comment.setName("slack_message"); ServiceLocator.getCollaborationServices().createComment(comment); } } catch (Exception ex) { logger.severeException(ex.getMessage(), ex); } }).start(); } return null; } }
public class class_name { protected void doExport() { // Get archive final Archive<?> archive = getArchive(); if (log.isLoggable(Level.FINE)) { log.fine("Exporting archive - " + archive.getName()); } // Obtain the root final Node rootNode = archive.get(ArchivePaths.root()); // Recursively process the root children for (Node child : rootNode.getChildren()) { processNode(child); } } }
public class class_name { protected void doExport() { // Get archive final Archive<?> archive = getArchive(); if (log.isLoggable(Level.FINE)) { log.fine("Exporting archive - " + archive.getName()); // depends on control dependency: [if], data = [none] } // Obtain the root final Node rootNode = archive.get(ArchivePaths.root()); // Recursively process the root children for (Node child : rootNode.getChildren()) { processNode(child); // depends on control dependency: [for], data = [child] } } }
public class class_name { static public double rastrigin(double[] x) { double sum = 0.0; for (int i = 0; i < x.length; i++) { sum += (x[i] * x[i]) - (10.0 * Math.cos(PIx2 * x[i])) + 10.0; } return (sum); } }
public class class_name { static public double rastrigin(double[] x) { double sum = 0.0; for (int i = 0; i < x.length; i++) { sum += (x[i] * x[i]) - (10.0 * Math.cos(PIx2 * x[i])) + 10.0; // depends on control dependency: [for], data = [i] } return (sum); } }
public class class_name { static boolean isMatchSymbol(final Symbol symbol) { if (!(symbol instanceof BasisSymbol)) { return false; } BasisSymbol basisSymbol = (BasisSymbol) symbol; Set<Symbol> uniqueSymbols = new HashSet<Symbol>(); for (Object o : basisSymbol.getSymbols()) { Symbol s = (Symbol) o; if (isGapSymbol(s)) { return false; } uniqueSymbols.add((Symbol) o); } return (uniqueSymbols.size() == 1); } }
public class class_name { static boolean isMatchSymbol(final Symbol symbol) { if (!(symbol instanceof BasisSymbol)) { return false; // depends on control dependency: [if], data = [none] } BasisSymbol basisSymbol = (BasisSymbol) symbol; Set<Symbol> uniqueSymbols = new HashSet<Symbol>(); for (Object o : basisSymbol.getSymbols()) { Symbol s = (Symbol) o; if (isGapSymbol(s)) { return false; // depends on control dependency: [if], data = [none] } uniqueSymbols.add((Symbol) o); // depends on control dependency: [for], data = [o] } return (uniqueSymbols.size() == 1); } }
public class class_name { public long padData(File inFile, File outFile, File padFile, long offset) { RandomAccessFile raf = null; InputStream is = null; OutputStream os = null; try { raf = new RandomAccessFile(padFile, zeroFill ? "rw" : "r"); raf.seek(offset); is = new BufferedInputStream(new FileInputStream(inFile), ioBufferSize); os = new BufferedOutputStream(new FileOutputStream(outFile), ioBufferSize); byte[] padBytes = new byte[workBufferSize]; byte[] bytes = new byte[workBufferSize]; int nPadBytes; int nBytes; while(true) { nBytes = is.read(bytes); nPadBytes = raf.read(padBytes); if(nBytes > 0) { if(nPadBytes >= nBytes) { for(int i = 0; i < nBytes; i++) { // Work the magic bytes[i] = (byte) (bytes[i] ^ padBytes[i]); } os.write(bytes, 0, nBytes); if(zeroFill) { // Perform zero-filling raf.seek(offset); for(int i = 0; i < nBytes; i++) { raf.write(0); } } offset += nBytes; } else { throw new IOException("Not enough pad bytes"); } } else { break; } } os.flush(); return offset; } catch(IOException e) { throw new RuntimeException(e); } finally { if(raf != null) { try { raf.close(); } catch (IOException e) { e.printStackTrace(); } } if(is != null) { try { is.close(); } catch (IOException e) { e.printStackTrace(); } } if(os != null) { try { os.close(); } catch (IOException e) { e.printStackTrace(); } } } } }
public class class_name { public long padData(File inFile, File outFile, File padFile, long offset) { RandomAccessFile raf = null; InputStream is = null; OutputStream os = null; try { raf = new RandomAccessFile(padFile, zeroFill ? "rw" : "r"); // depends on control dependency: [try], data = [none] raf.seek(offset); // depends on control dependency: [try], data = [none] is = new BufferedInputStream(new FileInputStream(inFile), ioBufferSize); // depends on control dependency: [try], data = [none] os = new BufferedOutputStream(new FileOutputStream(outFile), ioBufferSize); // depends on control dependency: [try], data = [none] byte[] padBytes = new byte[workBufferSize]; byte[] bytes = new byte[workBufferSize]; int nPadBytes; int nBytes; while(true) { nBytes = is.read(bytes); // depends on control dependency: [while], data = [none] nPadBytes = raf.read(padBytes); // depends on control dependency: [while], data = [none] if(nBytes > 0) { if(nPadBytes >= nBytes) { for(int i = 0; i < nBytes; i++) { // Work the magic bytes[i] = (byte) (bytes[i] ^ padBytes[i]); // depends on control dependency: [for], data = [i] } os.write(bytes, 0, nBytes); // depends on control dependency: [if], data = [none] if(zeroFill) { // Perform zero-filling raf.seek(offset); // depends on control dependency: [if], data = [none] for(int i = 0; i < nBytes; i++) { raf.write(0); // depends on control dependency: [for], data = [none] } } offset += nBytes; // depends on control dependency: [if], data = [none] } else { throw new IOException("Not enough pad bytes"); } } else { break; } } os.flush(); // depends on control dependency: [try], data = [none] return offset; // depends on control dependency: [try], data = [none] } catch(IOException e) { throw new RuntimeException(e); } finally { // depends on control dependency: [catch], data = [none] if(raf != null) { try { raf.close(); // depends on control dependency: [try], data = [none] } catch (IOException e) { e.printStackTrace(); } // depends on control 
dependency: [catch], data = [none] } if(is != null) { try { is.close(); // depends on control dependency: [try], data = [none] } catch (IOException e) { e.printStackTrace(); } // depends on control dependency: [catch], data = [none] } if(os != null) { try { os.close(); // depends on control dependency: [try], data = [none] } catch (IOException e) { e.printStackTrace(); } // depends on control dependency: [catch], data = [none] } } } }
public class class_name { @Override public void clearCached(ICacheManager mgr) { Map<String, CacheEntry> moduleBuilds; synchronized (this) { moduleBuilds = _moduleBuilds; _moduleBuilds = null; } if (moduleBuilds != null) { for (Map.Entry<String, CacheEntry> entry : moduleBuilds.entrySet()) { entry.getValue().delete(mgr); } moduleBuilds.clear(); } } }
public class class_name { @Override public void clearCached(ICacheManager mgr) { Map<String, CacheEntry> moduleBuilds; synchronized (this) { moduleBuilds = _moduleBuilds; _moduleBuilds = null; } if (moduleBuilds != null) { for (Map.Entry<String, CacheEntry> entry : moduleBuilds.entrySet()) { entry.getValue().delete(mgr); // depends on control dependency: [for], data = [entry] } moduleBuilds.clear(); // depends on control dependency: [if], data = [none] } } }
public class class_name { public static int[][] validate2x2NonNegative(int[][] data, String paramName){ for(int[] part : data) validateNonNegative(part, paramName); return validate2x2(data, paramName); } /** * Reformats the input array to a 2x2 array. * * If the array is 2x1 ([[a], [b]]), returns [[a, a], [b, b]] * If the array is 1x2 ([[a, b]]), returns [[a, b], [a, b]] * If the array is 2x2, returns the array * * @param data An array * @param paramName The param name, for error reporting * @return An int array of length 2 that represents the input */ public static int[][] validate2x2(int[][] data, String paramName){ if(data == null) { return null; } Preconditions.checkArgument( (data.length == 1 && data[0].length == 2) || (data.length == 2 && (data[0].length == 1 || data[0].length == 2) && (data[1].length == 1 || data[1].length == 2) && data[0].length == data[1].length ), "Value for %s must have shape 2x1, 1x2, or 2x2, got %sx%s shaped array: %s", paramName, data.length, data[0].length, data); if(data.length == 1) { return new int[][]{ data[0], data[0] }; } else if(data[0].length == 1){ return new int[][]{ new int[]{data[0][0], data[0][0]}, new int[]{data[1][0], data[1][0]} }; } else { return data; } } }
public class class_name { public static int[][] validate2x2NonNegative(int[][] data, String paramName){ for(int[] part : data) validateNonNegative(part, paramName); return validate2x2(data, paramName); } /** * Reformats the input array to a 2x2 array. * * If the array is 2x1 ([[a], [b]]), returns [[a, a], [b, b]] * If the array is 1x2 ([[a, b]]), returns [[a, b], [a, b]] * If the array is 2x2, returns the array * * @param data An array * @param paramName The param name, for error reporting * @return An int array of length 2 that represents the input */ public static int[][] validate2x2(int[][] data, String paramName){ if(data == null) { return null; // depends on control dependency: [if], data = [none] } Preconditions.checkArgument( (data.length == 1 && data[0].length == 2) || (data.length == 2 && (data[0].length == 1 || data[0].length == 2) && (data[1].length == 1 || data[1].length == 2) && data[0].length == data[1].length ), "Value for %s must have shape 2x1, 1x2, or 2x2, got %sx%s shaped array: %s", paramName, data.length, data[0].length, data); if(data.length == 1) { return new int[][]{ data[0], data[0] }; // depends on control dependency: [if], data = [none] } else if(data[0].length == 1){ return new int[][]{ new int[]{data[0][0], data[0][0]}, new int[]{data[1][0], data[1][0]} }; // depends on control dependency: [if], data = [none] } else { return data; // depends on control dependency: [if], data = [none] } } }
public class class_name { public static <T extends Enum<T>> Option<T> get(Class<T> enumClass, String value) { try { return Option.some(Enum.valueOf(enumClass, value)); } catch (IllegalArgumentException iae) { return Option.none(); } } }
public class class_name { public static <T extends Enum<T>> Option<T> get(Class<T> enumClass, String value) { try { return Option.some(Enum.valueOf(enumClass, value)); // depends on control dependency: [try], data = [none] } catch (IllegalArgumentException iae) { return Option.none(); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public void removeScripType(ScriptType type) { ScriptType scriptType = typeMap.remove(type.getName()); if (scriptType != null) { getTreeModel().removeType(scriptType); } } }
public class class_name { public void removeScripType(ScriptType type) { ScriptType scriptType = typeMap.remove(type.getName()); if (scriptType != null) { getTreeModel().removeType(scriptType); // depends on control dependency: [if], data = [(scriptType] } } }
public class class_name { private long search_ml(int request_size) throws IOException { Block best_candidate = null; Block best_candidate_pred = null; Block block = ql_heads[last_ql_index].first_block; long block_addr; boolean found = false; Block prev_block = null; int ql_index; int ql_size; int rem; int rem2; large_requests++; while (block != null) { ml_blocks_searched++; if (block.size >= request_size) { rem = block.size - request_size; if (rem <= acceptable_waste) { // allocate the block without splitting if (prev_block != null) { prev_block.next = block.next; } else { ql_heads[last_ql_index].first_block = block.next; } ql_heads[last_ql_index].length--; if (ql_heads[last_ql_index].length == 0) { nonempty_lists--; } allocated_blocks++; free_blocks--; ml_hits++; allocated_words += block.size; free_words -= block.size; seek_and_count(block.address); writeInt(- block.size); return(block.address + HDR_SIZE); } else { if (best_candidate == null) { best_candidate = block; best_candidate_pred = prev_block; } else { if (best_candidate.size >= block.size) { best_candidate = block; best_candidate_pred = prev_block; } } } } prev_block = block; block = block.next; } if (best_candidate == null) { // have to allocate from tail return allocate_from_tail(request_size); } else { // we have to split a block rem = best_candidate.size - request_size; block_addr = best_candidate.address; if (rem <= last_quick_size_block) { rem2 = rem % grain_size; rem -= rem2; ql_index = calculate_list_index_for_dealloc(rem); request_size += rem2; if (best_candidate_pred != null) { best_candidate_pred.next = best_candidate.next; } else { ql_heads[last_ql_index].first_block = best_candidate.next; } add_block_to_freelist(best_candidate, ql_index); } best_candidate.size = rem; best_candidate.address += request_size; return split_block(block_addr, request_size, rem); } } }
public class class_name { private long search_ml(int request_size) throws IOException { Block best_candidate = null; Block best_candidate_pred = null; Block block = ql_heads[last_ql_index].first_block; long block_addr; boolean found = false; Block prev_block = null; int ql_index; int ql_size; int rem; int rem2; large_requests++; while (block != null) { ml_blocks_searched++; if (block.size >= request_size) { rem = block.size - request_size; if (rem <= acceptable_waste) { // allocate the block without splitting if (prev_block != null) { prev_block.next = block.next; // depends on control dependency: [if], data = [none] } else { ql_heads[last_ql_index].first_block = block.next; // depends on control dependency: [if], data = [none] } ql_heads[last_ql_index].length--; // depends on control dependency: [if], data = [none] if (ql_heads[last_ql_index].length == 0) { nonempty_lists--; // depends on control dependency: [if], data = [none] } allocated_blocks++; // depends on control dependency: [if], data = [none] free_blocks--; // depends on control dependency: [if], data = [none] ml_hits++; // depends on control dependency: [if], data = [none] allocated_words += block.size; // depends on control dependency: [if], data = [none] free_words -= block.size; // depends on control dependency: [if], data = [none] seek_and_count(block.address); // depends on control dependency: [if], data = [none] writeInt(- block.size); // depends on control dependency: [if], data = [none] return(block.address + HDR_SIZE); // depends on control dependency: [if], data = [none] } else { if (best_candidate == null) { best_candidate = block; // depends on control dependency: [if], data = [none] best_candidate_pred = prev_block; // depends on control dependency: [if], data = [none] } else { if (best_candidate.size >= block.size) { best_candidate = block; // depends on control dependency: [if], data = [none] best_candidate_pred = prev_block; // depends on control dependency: [if], data = [none] } } } } 
prev_block = block; block = block.next; } if (best_candidate == null) { // have to allocate from tail return allocate_from_tail(request_size); } else { // we have to split a block rem = best_candidate.size - request_size; block_addr = best_candidate.address; if (rem <= last_quick_size_block) { rem2 = rem % grain_size; rem -= rem2; ql_index = calculate_list_index_for_dealloc(rem); request_size += rem2; if (best_candidate_pred != null) { best_candidate_pred.next = best_candidate.next; } else { ql_heads[last_ql_index].first_block = best_candidate.next; } add_block_to_freelist(best_candidate, ql_index); } best_candidate.size = rem; best_candidate.address += request_size; return split_block(block_addr, request_size, rem); } } }
public class class_name { private boolean isTableMarkedVolatile(Connection con) { if (com.ibm.websphere.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINE)) { LoggingUtil.SESSION_LOGGER_WAS.entering(methodClassName, methodNames[IS_TABLE_MARKED_VOLATILE]); } boolean isMarkedVolatile = false; String tblName = tableName, qualifierName = null, sqlQuery = null; PreparedStatement ps = null; ResultSet rs = null; tblName = tblName.toUpperCase(); if (_smc.isUsingCustomSchemaName()) { //PM27191 qualifierName = qualifierNameWhenCustomSchemaIsSet; } else if (dbid != null) { qualifierName = dbid.toUpperCase(); } sqlQuery = "select 1 from syscat.tables " + "where TabName = '" + tblName + "' and Volatile = 'C' "; if (qualifierName != null) sqlQuery += " and tabschema = '" + qualifierName + "'"; sqlQuery += " for read only"; if (com.ibm.websphere.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINE)) { LoggingUtil.SESSION_LOGGER_WAS.logp(Level.FINE, methodClassName, methodNames[CREATE_TABLE], "Sql: " + sqlQuery); } try { ps = con.prepareStatement(sqlQuery); rs = ps.executeQuery(); if (rs.next()) { isMarkedVolatile = true; } } catch (Throwable th) { LoggingUtil.SESSION_LOGGER_WAS.logp(Level.SEVERE, methodClassName, methodNames[CREATE_TABLE], "CommonMessage.exception", th); } finally { if (rs != null) { closeResultSet(rs); } if (ps != null) { closeStatement(ps); } } if (com.ibm.websphere.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINE)) { LoggingUtil.SESSION_LOGGER_WAS.exiting(methodClassName, methodNames[IS_TABLE_MARKED_VOLATILE], isMarkedVolatile); } return isMarkedVolatile; } }
public class class_name { private boolean isTableMarkedVolatile(Connection con) { if (com.ibm.websphere.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINE)) { LoggingUtil.SESSION_LOGGER_WAS.entering(methodClassName, methodNames[IS_TABLE_MARKED_VOLATILE]); // depends on control dependency: [if], data = [none] } boolean isMarkedVolatile = false; String tblName = tableName, qualifierName = null, sqlQuery = null; PreparedStatement ps = null; ResultSet rs = null; tblName = tblName.toUpperCase(); if (_smc.isUsingCustomSchemaName()) { //PM27191 qualifierName = qualifierNameWhenCustomSchemaIsSet; // depends on control dependency: [if], data = [none] } else if (dbid != null) { qualifierName = dbid.toUpperCase(); // depends on control dependency: [if], data = [none] } sqlQuery = "select 1 from syscat.tables " + "where TabName = '" + tblName + "' and Volatile = 'C' "; if (qualifierName != null) sqlQuery += " and tabschema = '" + qualifierName + "'"; sqlQuery += " for read only"; if (com.ibm.websphere.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINE)) { LoggingUtil.SESSION_LOGGER_WAS.logp(Level.FINE, methodClassName, methodNames[CREATE_TABLE], "Sql: " + sqlQuery); // depends on control dependency: [if], data = [none] } try { ps = con.prepareStatement(sqlQuery); // depends on control dependency: [try], data = [none] rs = ps.executeQuery(); // depends on control dependency: [try], data = [none] if (rs.next()) { isMarkedVolatile = true; // depends on control dependency: [if], data = [none] } } catch (Throwable th) { LoggingUtil.SESSION_LOGGER_WAS.logp(Level.SEVERE, methodClassName, methodNames[CREATE_TABLE], "CommonMessage.exception", th); } finally { // depends on control dependency: [catch], data = [none] if (rs != null) { closeResultSet(rs); // depends on control dependency: [if], data = [(rs] } if (ps != null) { closeStatement(ps); // depends on control dependency: [if], data = [(ps] 
} } if (com.ibm.websphere.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINE)) { LoggingUtil.SESSION_LOGGER_WAS.exiting(methodClassName, methodNames[IS_TABLE_MARKED_VOLATILE], isMarkedVolatile); // depends on control dependency: [if], data = [none] } return isMarkedVolatile; } }
public class class_name { public void unmarkCompacting(Iterable<SSTableReader> unmark) { boolean isValid = cfstore.isValid(); if (!isValid) { // The CF has been dropped. We don't know if the original compaction suceeded or failed, // which makes it difficult to know if the sstable reference has already been released. // A "good enough" approach is to mark the sstables involved obsolete, which if compaction succeeded // is harmlessly redundant, and if it failed ensures that at least the sstable will get deleted on restart. for (SSTableReader sstable : unmark) sstable.markObsolete(); } View currentView, newView; do { currentView = view.get(); newView = currentView.unmarkCompacting(unmark); } while (!view.compareAndSet(currentView, newView)); if (!isValid) { // when the CFS is invalidated, it will call unreferenceSSTables(). However, unreferenceSSTables only deals // with sstables that aren't currently being compacted. If there are ongoing compactions that finish or are // interrupted after the CFS is invalidated, those sstables need to be unreferenced as well, so we do that here. unreferenceSSTables(); } } }
public class class_name { public void unmarkCompacting(Iterable<SSTableReader> unmark) { boolean isValid = cfstore.isValid(); if (!isValid) { // The CF has been dropped. We don't know if the original compaction suceeded or failed, // which makes it difficult to know if the sstable reference has already been released. // A "good enough" approach is to mark the sstables involved obsolete, which if compaction succeeded // is harmlessly redundant, and if it failed ensures that at least the sstable will get deleted on restart. for (SSTableReader sstable : unmark) sstable.markObsolete(); } View currentView, newView; do { currentView = view.get(); newView = currentView.unmarkCompacting(unmark); } while (!view.compareAndSet(currentView, newView)); if (!isValid) { // when the CFS is invalidated, it will call unreferenceSSTables(). However, unreferenceSSTables only deals // with sstables that aren't currently being compacted. If there are ongoing compactions that finish or are // interrupted after the CFS is invalidated, those sstables need to be unreferenced as well, so we do that here. unreferenceSSTables(); // depends on control dependency: [if], data = [none] } } }
public class class_name { public static void closeAndSwallowIOExceptions(Closeable c) { if (c == null) { return; } try { c.close(); } catch (IOException e) { LOG.warn("Encountered exception closing closeable", e); } } }
public class class_name { public static void closeAndSwallowIOExceptions(Closeable c) { if (c == null) { return; } // depends on control dependency: [if], data = [none] try { c.close(); // depends on control dependency: [try], data = [none] } catch (IOException e) { LOG.warn("Encountered exception closing closeable", e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { @Inject public void setSamplerConfiguration(@Nullable Configuration.SamplerConfiguration samplerConfiguration) { if (samplerConfiguration != null) { configuration.withSampler(samplerConfiguration); } } }
public class class_name { @Inject public void setSamplerConfiguration(@Nullable Configuration.SamplerConfiguration samplerConfiguration) { if (samplerConfiguration != null) { configuration.withSampler(samplerConfiguration); // depends on control dependency: [if], data = [(samplerConfiguration] } } }
public class class_name { public static long getSnapshotRecordingId(final CountersReader counters, final int counterId, final int serviceId) { final DirectBuffer buffer = counters.metaDataBuffer(); if (counters.getCounterState(counterId) == RECORD_ALLOCATED) { final int recordOffset = CountersReader.metaDataOffset(counterId); if (buffer.getInt(recordOffset + TYPE_ID_OFFSET) == RECOVERY_STATE_TYPE_ID) { final int serviceCount = buffer.getInt(recordOffset + KEY_OFFSET + SERVICE_COUNT_OFFSET); if (serviceId < 0 || serviceId >= serviceCount) { throw new ClusterException("invalid serviceId " + serviceId + " for count of " + serviceCount); } return buffer.getLong( recordOffset + KEY_OFFSET + SNAPSHOT_RECORDING_IDS_OFFSET + (serviceId * SIZE_OF_LONG)); } } throw new ClusterException("Active counter not found " + counterId); } }
public class class_name { public static long getSnapshotRecordingId(final CountersReader counters, final int counterId, final int serviceId) { final DirectBuffer buffer = counters.metaDataBuffer(); if (counters.getCounterState(counterId) == RECORD_ALLOCATED) { final int recordOffset = CountersReader.metaDataOffset(counterId); if (buffer.getInt(recordOffset + TYPE_ID_OFFSET) == RECOVERY_STATE_TYPE_ID) { final int serviceCount = buffer.getInt(recordOffset + KEY_OFFSET + SERVICE_COUNT_OFFSET); if (serviceId < 0 || serviceId >= serviceCount) { throw new ClusterException("invalid serviceId " + serviceId + " for count of " + serviceCount); } return buffer.getLong( recordOffset + KEY_OFFSET + SNAPSHOT_RECORDING_IDS_OFFSET + (serviceId * SIZE_OF_LONG)); // depends on control dependency: [if], data = [none] } } throw new ClusterException("Active counter not found " + counterId); } }
public class class_name { public java.util.List<String> getIds() { if (ids == null) { ids = new com.amazonaws.internal.SdkInternalList<String>(); } return ids; } }
public class class_name { public java.util.List<String> getIds() { if (ids == null) { ids = new com.amazonaws.internal.SdkInternalList<String>(); // depends on control dependency: [if], data = [none] } return ids; } }
public class class_name { public boolean isSet() { if (isUsageMode()) { return true; } if (getJobFile() != null) { return true; } if (getListType() != null) { return true; } return false; } }
public class class_name { public boolean isSet() { if (isUsageMode()) { return true; // depends on control dependency: [if], data = [none] } if (getJobFile() != null) { return true; // depends on control dependency: [if], data = [none] } if (getListType() != null) { return true; // depends on control dependency: [if], data = [none] } return false; } }
public class class_name { public static String format(Object obj, String mask, double round) { if (obj == null) { return ""; } if (obj instanceof Date) { return formatDate((Date) obj, mask); } if (obj instanceof Number) { return formatNumber((Number) obj, mask, round); } return obj.toString(); } }
public class class_name { public static String format(Object obj, String mask, double round) { if (obj == null) { return ""; // depends on control dependency: [if], data = [none] } if (obj instanceof Date) { return formatDate((Date) obj, mask); // depends on control dependency: [if], data = [none] } if (obj instanceof Number) { return formatNumber((Number) obj, mask, round); // depends on control dependency: [if], data = [none] } return obj.toString(); } }
public class class_name { private void loginUser(final String userId, final String password, final BooleanCallback callback) { LoginRequest request = new LoginRequest(); request.setLogin(userId); request.setPassword(password); GwtCommand command = new GwtCommand(loginCommandName); command.setCommandRequest(request); GwtCommandDispatcher.getInstance().execute(command, new AbstractCommandCallback<LoginResponse>() { public void execute(LoginResponse loginResponse) { if (loginResponse.getToken() == null) { if (callback != null) { callback.execute(false); } manager.fireEvent(new LoginFailureEvent(loginResponse.getErrorMessages())); } else { userToken = loginResponse.getToken(); Authentication.this.userId = userId; UserDetail userDetail = GwtCommandDispatcher.getInstance().getUserDetail(); userDetail.setUserId(loginResponse.getUserId()); userDetail.setUserName(loginResponse.getUserName()); userDetail.setUserOrganization(loginResponse.getUserOrganization()); userDetail.setUserDivision(loginResponse.getUserDivision()); userDetail.setUserLocale(loginResponse.getUserLocale()); if (callback != null) { callback.execute(true); } manager.fireEvent(new LoginSuccessEvent(userToken)); } } }); } }
public class class_name { private void loginUser(final String userId, final String password, final BooleanCallback callback) { LoginRequest request = new LoginRequest(); request.setLogin(userId); request.setPassword(password); GwtCommand command = new GwtCommand(loginCommandName); command.setCommandRequest(request); GwtCommandDispatcher.getInstance().execute(command, new AbstractCommandCallback<LoginResponse>() { public void execute(LoginResponse loginResponse) { if (loginResponse.getToken() == null) { if (callback != null) { callback.execute(false); // depends on control dependency: [if], data = [none] } manager.fireEvent(new LoginFailureEvent(loginResponse.getErrorMessages())); // depends on control dependency: [if], data = [none] } else { userToken = loginResponse.getToken(); // depends on control dependency: [if], data = [none] Authentication.this.userId = userId; // depends on control dependency: [if], data = [none] UserDetail userDetail = GwtCommandDispatcher.getInstance().getUserDetail(); userDetail.setUserId(loginResponse.getUserId()); // depends on control dependency: [if], data = [none] userDetail.setUserName(loginResponse.getUserName()); // depends on control dependency: [if], data = [none] userDetail.setUserOrganization(loginResponse.getUserOrganization()); // depends on control dependency: [if], data = [none] userDetail.setUserDivision(loginResponse.getUserDivision()); // depends on control dependency: [if], data = [none] userDetail.setUserLocale(loginResponse.getUserLocale()); // depends on control dependency: [if], data = [none] if (callback != null) { callback.execute(true); // depends on control dependency: [if], data = [none] } manager.fireEvent(new LoginSuccessEvent(userToken)); // depends on control dependency: [if], data = [none] } } }); } }
public class class_name { @NotNull public Set<String> getNormalForms(@NotNull String word, @NotNull Language language) { com.fnklabs.metrics.Timer timer = MetricsFactory.getMetrics().getTimer(MetricsType.TEXT_UTILS_GET_NORMAL_FORMS.name()); Set<String> normalForms = new HashSet<>(); try { Morphology morphology = MorphologyFactory.getMorphology(language); List<String> normalForms1 = morphology.getNormalForms(word); normalForms.addAll(normalForms1); } catch (WrongCharaterException e) { LOGGER.warn("Can't get normal form of word", e); } timer.stop(); return normalForms; } }
public class class_name { @NotNull public Set<String> getNormalForms(@NotNull String word, @NotNull Language language) { com.fnklabs.metrics.Timer timer = MetricsFactory.getMetrics().getTimer(MetricsType.TEXT_UTILS_GET_NORMAL_FORMS.name()); Set<String> normalForms = new HashSet<>(); try { Morphology morphology = MorphologyFactory.getMorphology(language); List<String> normalForms1 = morphology.getNormalForms(word); normalForms.addAll(normalForms1); // depends on control dependency: [try], data = [none] } catch (WrongCharaterException e) { LOGGER.warn("Can't get normal form of word", e); } // depends on control dependency: [catch], data = [none] timer.stop(); return normalForms; } }
public class class_name { protected Collection createCollectionPath(String collectionPath, Collection rootCollection) throws PolicyIndexException { try { if (rootCollection.getParentCollection() != null) { throw new PolicyIndexException("Collection supplied is not a root collection"); } String rootCollectionName = rootCollection.getName(); if (!collectionPath.startsWith(rootCollectionName)) { throw new PolicyIndexException("Collection path " + collectionPath + " does not start from root collection - " + rootCollectionName ); } // strip root collection from path, obtain each individual collection name in the path String pathToCreate = collectionPath.substring(rootCollectionName.length()); String[] collections = pathToCreate.split("/"); // iterate each and create as necessary Collection nextCollection = rootCollection; for (String collectionName : collections ) { Collection childCollection = nextCollection.getChildCollection(collectionName); if (childCollection != null) { // child exists childCollection = nextCollection.getChildCollection(collectionName); } else { // does not exist, create it CollectionManagementService mgtService = (CollectionManagementService) nextCollection.getService("CollectionManagementService", "1.0"); childCollection = mgtService.createCollection(collectionName); log.debug("Created collection " + collectionName); } if (nextCollection.isOpen()) { nextCollection.close(); } nextCollection = childCollection; } return nextCollection; } catch (XMLDBException e) { log.error("Error creating collections from path " + e.getMessage(), e); throw new PolicyIndexException("Error creating collections from path " + e.getMessage(), e); } } }
public class class_name { protected Collection createCollectionPath(String collectionPath, Collection rootCollection) throws PolicyIndexException { try { if (rootCollection.getParentCollection() != null) { throw new PolicyIndexException("Collection supplied is not a root collection"); } String rootCollectionName = rootCollection.getName(); if (!collectionPath.startsWith(rootCollectionName)) { throw new PolicyIndexException("Collection path " + collectionPath + " does not start from root collection - " + rootCollectionName ); } // strip root collection from path, obtain each individual collection name in the path String pathToCreate = collectionPath.substring(rootCollectionName.length()); String[] collections = pathToCreate.split("/"); // iterate each and create as necessary Collection nextCollection = rootCollection; for (String collectionName : collections ) { Collection childCollection = nextCollection.getChildCollection(collectionName); if (childCollection != null) { // child exists childCollection = nextCollection.getChildCollection(collectionName); // depends on control dependency: [if], data = [none] } else { // does not exist, create it CollectionManagementService mgtService = (CollectionManagementService) nextCollection.getService("CollectionManagementService", "1.0"); childCollection = mgtService.createCollection(collectionName); // depends on control dependency: [if], data = [none] log.debug("Created collection " + collectionName); // depends on control dependency: [if], data = [none] } if (nextCollection.isOpen()) { nextCollection.close(); // depends on control dependency: [if], data = [none] } nextCollection = childCollection; } return nextCollection; } catch (XMLDBException e) { log.error("Error creating collections from path " + e.getMessage(), e); throw new PolicyIndexException("Error creating collections from path " + e.getMessage(), e); } } }
public class class_name { protected final void notifyConnectionError(final Exception exception) { if (!isConnected()) { LOGGER.log(Level.INFO, "Connection was already disconnected when attempting to handle " + exception, exception); return; } ASYNC_BUT_ORDERED.performAsyncButOrdered(this, () -> { currentConnectionException = exception; for (StanzaCollector collector : collectors) { collector.notifyConnectionError(exception); } SmackWrappedException smackWrappedException = new SmackWrappedException(exception); tlsHandled.reportGenericFailure(smackWrappedException); saslFeatureReceived.reportGenericFailure(smackWrappedException); lastFeaturesReceived.reportGenericFailure(smackWrappedException); // TODO From XMPPTCPConnection. Was called in Smack 4.3 where notifyConnectionError() was part of // XMPPTCPConnection. Create delegation method? // maybeCompressFeaturesReceived.reportGenericFailure(smackWrappedException); synchronized (AbstractXMPPConnection.this) { notifyAll(); // Closes the connection temporary. A if the connection supports stream management, then a reconnection is // possible. Note that a connection listener of e.g. XMPPTCPConnection will drop the SM state in // case the Exception is a StreamErrorException. instantShutdown(); } Async.go(() -> { // Notify connection listeners of the error. callConnectionClosedOnErrorListener(exception); }, AbstractXMPPConnection.this + " callConnectionClosedOnErrorListener()"); }); } }
public class class_name { protected final void notifyConnectionError(final Exception exception) { if (!isConnected()) { LOGGER.log(Level.INFO, "Connection was already disconnected when attempting to handle " + exception, exception); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } ASYNC_BUT_ORDERED.performAsyncButOrdered(this, () -> { currentConnectionException = exception; for (StanzaCollector collector : collectors) { collector.notifyConnectionError(exception); } SmackWrappedException smackWrappedException = new SmackWrappedException(exception); tlsHandled.reportGenericFailure(smackWrappedException); saslFeatureReceived.reportGenericFailure(smackWrappedException); lastFeaturesReceived.reportGenericFailure(smackWrappedException); // TODO From XMPPTCPConnection. Was called in Smack 4.3 where notifyConnectionError() was part of // XMPPTCPConnection. Create delegation method? // maybeCompressFeaturesReceived.reportGenericFailure(smackWrappedException); synchronized (AbstractXMPPConnection.this) { notifyAll(); // Closes the connection temporary. A if the connection supports stream management, then a reconnection is // possible. Note that a connection listener of e.g. XMPPTCPConnection will drop the SM state in // case the Exception is a StreamErrorException. instantShutdown(); } Async.go(() -> { // Notify connection listeners of the error. callConnectionClosedOnErrorListener(exception); }, AbstractXMPPConnection.this + " callConnectionClosedOnErrorListener()"); }); } }
public class class_name { @Override public Clustering<MeanModel> run(Database database, Relation<V> relation) { // Database objects to process final DBIDs ids = relation.getDBIDs(); // Choose initial means double[][] means = initializer.chooseInitialMeans(database, relation, k, getDistanceFunction()); // Setup cluster assignment store List<ModifiableDBIDs> clusters = new ArrayList<>(); for(int i = 0; i < k; i++) { clusters.add(DBIDUtil.newHashSet(relation.size() / k + 2)); } // Meta data storage final WritableDataStore<Meta> metas = initializeMeta(relation, means); // Perform the initial assignment ArrayModifiableDBIDs tids = initialAssignment(clusters, metas, ids); // Recompute the means after the initial assignment means = means(clusters, means, relation); // Refine the result via k-means like iterations means = refineResult(relation, means, clusters, metas, tids); // Wrap result Clustering<MeanModel> result = new Clustering<>("k-Means Samesize Clustering", "kmeans-samesize-clustering"); for(int i = 0; i < clusters.size(); i++) { result.addToplevelCluster(new Cluster<>(clusters.get(i), new MeanModel(means[i]))); } return result; } }
public class class_name { @Override public Clustering<MeanModel> run(Database database, Relation<V> relation) { // Database objects to process final DBIDs ids = relation.getDBIDs(); // Choose initial means double[][] means = initializer.chooseInitialMeans(database, relation, k, getDistanceFunction()); // Setup cluster assignment store List<ModifiableDBIDs> clusters = new ArrayList<>(); for(int i = 0; i < k; i++) { clusters.add(DBIDUtil.newHashSet(relation.size() / k + 2)); // depends on control dependency: [for], data = [none] } // Meta data storage final WritableDataStore<Meta> metas = initializeMeta(relation, means); // Perform the initial assignment ArrayModifiableDBIDs tids = initialAssignment(clusters, metas, ids); // Recompute the means after the initial assignment means = means(clusters, means, relation); // Refine the result via k-means like iterations means = refineResult(relation, means, clusters, metas, tids); // Wrap result Clustering<MeanModel> result = new Clustering<>("k-Means Samesize Clustering", "kmeans-samesize-clustering"); for(int i = 0; i < clusters.size(); i++) { result.addToplevelCluster(new Cluster<>(clusters.get(i), new MeanModel(means[i]))); // depends on control dependency: [for], data = [i] } return result; } }
public class class_name { public org.modeshape.jcr.api.Problems backupRepository( File backupDirectory, BackupOptions options ) throws RepositoryException { // Create the activity ... final BackupActivity backupActivity = createBackupActivity(backupDirectory, options); //suspend any existing transactions try { if (runningState.suspendExistingUserTransaction()) { LOGGER.debug("Suspended existing active user transaction before the backup operation starts"); } try { // Run the backup and return the problems ... return new JcrProblems(backupActivity.execute()); } finally { runningState.resumeExistingUserTransaction(); } } catch (SystemException e) { throw new RuntimeException(e); } } }
public class class_name { public org.modeshape.jcr.api.Problems backupRepository( File backupDirectory, BackupOptions options ) throws RepositoryException { // Create the activity ... final BackupActivity backupActivity = createBackupActivity(backupDirectory, options); //suspend any existing transactions try { if (runningState.suspendExistingUserTransaction()) { LOGGER.debug("Suspended existing active user transaction before the backup operation starts"); // depends on control dependency: [if], data = [none] } try { // Run the backup and return the problems ... return new JcrProblems(backupActivity.execute()); // depends on control dependency: [try], data = [none] } finally { runningState.resumeExistingUserTransaction(); } } catch (SystemException e) { throw new RuntimeException(e); } } }
public class class_name { public I_CmsResourceType getResourceType() { if (m_resourceType == null) { m_resourceType = OpenCms.getResourceManager().getResourceType(m_resource); } return m_resourceType; } }
public class class_name { public I_CmsResourceType getResourceType() { if (m_resourceType == null) { m_resourceType = OpenCms.getResourceManager().getResourceType(m_resource); // depends on control dependency: [if], data = [none] } return m_resourceType; } }
public class class_name { private static WildcardType canonicalizeWildcardType( TypeVariable<?> declaration, WildcardType type) { Type[] declared = declaration.getBounds(); List<Type> upperBounds = new ArrayList<>(); for (Type bound : type.getUpperBounds()) { if (!any(declared).isSubtypeOf(bound)) { upperBounds.add(canonicalizeWildcardsInType(bound)); } } return new Types.WildcardTypeImpl(type.getLowerBounds(), upperBounds.toArray(new Type[0])); } }
public class class_name { private static WildcardType canonicalizeWildcardType( TypeVariable<?> declaration, WildcardType type) { Type[] declared = declaration.getBounds(); List<Type> upperBounds = new ArrayList<>(); for (Type bound : type.getUpperBounds()) { if (!any(declared).isSubtypeOf(bound)) { upperBounds.add(canonicalizeWildcardsInType(bound)); // depends on control dependency: [if], data = [none] } } return new Types.WildcardTypeImpl(type.getLowerBounds(), upperBounds.toArray(new Type[0])); } }
public class class_name { public JvmIdentifiableElement getVisibleType(JvmMember context, String name) { if (context == null) return null; Map<String, JvmIdentifiableElement> map = visibleElements.get(context); if (map == null) { map = create(context); } return map.get(name); } }
public class class_name { public JvmIdentifiableElement getVisibleType(JvmMember context, String name) { if (context == null) return null; Map<String, JvmIdentifiableElement> map = visibleElements.get(context); if (map == null) { map = create(context); // depends on control dependency: [if], data = [none] } return map.get(name); } }
public class class_name { @Override public org.apache.spark.unsafe.KVIterator<UnsafeRow, UnsafeRow> rowIterator() { return new org.apache.spark.unsafe.KVIterator<UnsafeRow, UnsafeRow>() { private final UnsafeRow key = new UnsafeRow(keySchema.length()); private final UnsafeRow value = new UnsafeRow(valueSchema.length()); private long offsetInPage = 0; private int recordsInPage = 0; private int currentklen; private int currentvlen; private int totalLength; private boolean initialized = false; private void init() { if (page != null) { offsetInPage = page.getBaseOffset(); recordsInPage = numRows; } initialized = true; } @Override public boolean next() { if (!initialized) init(); //searching for the next non empty page is records is now zero if (recordsInPage == 0) { freeCurrentPage(); return false; } totalLength = Platform.getInt(base, offsetInPage) - 4; currentklen = Platform.getInt(base, offsetInPage + 4); currentvlen = totalLength - currentklen; key.pointTo(base, offsetInPage + 8, currentklen); value.pointTo(base, offsetInPage + 8 + currentklen, currentvlen); offsetInPage += 8 + totalLength + 8; recordsInPage -= 1; return true; } @Override public UnsafeRow getKey() { return key; } @Override public UnsafeRow getValue() { return value; } @Override public void close() { // do nothing } private void freeCurrentPage() { if (page != null) { freePage(page); page = null; } } }; } }
public class class_name { @Override public org.apache.spark.unsafe.KVIterator<UnsafeRow, UnsafeRow> rowIterator() { return new org.apache.spark.unsafe.KVIterator<UnsafeRow, UnsafeRow>() { private final UnsafeRow key = new UnsafeRow(keySchema.length()); private final UnsafeRow value = new UnsafeRow(valueSchema.length()); private long offsetInPage = 0; private int recordsInPage = 0; private int currentklen; private int currentvlen; private int totalLength; private boolean initialized = false; private void init() { if (page != null) { offsetInPage = page.getBaseOffset(); // depends on control dependency: [if], data = [none] recordsInPage = numRows; // depends on control dependency: [if], data = [none] } initialized = true; } @Override public boolean next() { if (!initialized) init(); //searching for the next non empty page is records is now zero if (recordsInPage == 0) { freeCurrentPage(); // depends on control dependency: [if], data = [none] return false; // depends on control dependency: [if], data = [none] } totalLength = Platform.getInt(base, offsetInPage) - 4; currentklen = Platform.getInt(base, offsetInPage + 4); currentvlen = totalLength - currentklen; key.pointTo(base, offsetInPage + 8, currentklen); value.pointTo(base, offsetInPage + 8 + currentklen, currentvlen); offsetInPage += 8 + totalLength + 8; recordsInPage -= 1; return true; } @Override public UnsafeRow getKey() { return key; } @Override public UnsafeRow getValue() { return value; } @Override public void close() { // do nothing } private void freeCurrentPage() { if (page != null) { freePage(page); // depends on control dependency: [if], data = [(page] page = null; // depends on control dependency: [if], data = [none] } } }; } }
public class class_name { public final ZMonitor verbose(boolean verbose) { if (started) { System.out.println("ZMonitor: Unable to change verbosity while already started."); return this; } agent.send(VERBOSE, true); agent.send(Boolean.toString(verbose)); agent.recv(); return this; } }
public class class_name { public final ZMonitor verbose(boolean verbose) { if (started) { System.out.println("ZMonitor: Unable to change verbosity while already started."); // depends on control dependency: [if], data = [none] return this; // depends on control dependency: [if], data = [none] } agent.send(VERBOSE, true); agent.send(Boolean.toString(verbose)); agent.recv(); return this; } }
public class class_name { protected ViewPropertyAnimatorCompat animateMoveImpl(final ViewHolder holder, int fromX, int fromY, int toX, int toY) { final View view = holder.itemView; final int deltaX = toX - fromX; final int deltaY = toY - fromY; ViewCompat.animate(view).cancel(); if (deltaX != 0) { ViewCompat.animate(view).translationX(0); } if (deltaY != 0) { ViewCompat.animate(view).translationY(0); } // TODO: make EndActions end listeners instead, since end actions aren't called when // vpas are canceled (and can't end them. why?) // need listener functionality in VPACompat for this. Ick. return ViewCompat.animate(view).setInterpolator(null).setDuration(getMoveDuration()); } }
public class class_name { protected ViewPropertyAnimatorCompat animateMoveImpl(final ViewHolder holder, int fromX, int fromY, int toX, int toY) { final View view = holder.itemView; final int deltaX = toX - fromX; final int deltaY = toY - fromY; ViewCompat.animate(view).cancel(); if (deltaX != 0) { ViewCompat.animate(view).translationX(0); // depends on control dependency: [if], data = [0)] } if (deltaY != 0) { ViewCompat.animate(view).translationY(0); // depends on control dependency: [if], data = [0)] } // TODO: make EndActions end listeners instead, since end actions aren't called when // vpas are canceled (and can't end them. why?) // need listener functionality in VPACompat for this. Ick. return ViewCompat.animate(view).setInterpolator(null).setDuration(getMoveDuration()); } }
public class class_name { public List<String> getAllKeys(final int limit) { final List<String> keyList = new ArrayList<String>(limit); byte[] array = new byte[EVCacheConfig.getInstance().getDynamicIntProperty(appName + ".all.keys.reader.buffer.size.bytes", 4*1024*1024).get()]; final int waitInSec = EVCacheConfig.getInstance().getDynamicIntProperty(appName + ".all.keys.reader.wait.duration.sec", 60).get(); for(InetSocketAddress address : memcachedNodesInZone) { //final List<String> keyList = new ArrayList<String>(limit); Socket socket = null; PrintWriter printWriter = null; BufferedInputStream bufferedReader = null; try { socket = new Socket(address.getHostName(), address.getPort()); printWriter = new PrintWriter(socket.getOutputStream(), true); printWriter.print("lru_crawler metadump all \r\n"); printWriter.print("quit \r\n"); printWriter.flush(); bufferedReader = new BufferedInputStream(socket.getInputStream()); while(isDataAvailableForRead(bufferedReader, waitInSec, TimeUnit.SECONDS, socket)) { int read = bufferedReader.read(array); if (log.isDebugEnabled()) log.debug("Number of bytes read = " +read); if(read > 0) { StringBuilder b = new StringBuilder(); boolean start = true; for (int i = 0; i < read; i++) { if(array[i] == ' ') { start = false; if(b.length() > 4) keyList.add(URLDecoder.decode(b.substring(4))); b = new StringBuilder(); } if(start) b.append((char)array[i]); if(array[i] == '\n') { start = true; } if(keyList.size() >= limit) { if (log.isDebugEnabled()) log.debug("Record Limit reached. 
Will break and return"); return keyList; } } } else if (read < 0 ){ break; } } } catch (Exception e) { if(socket != null) { try { socket.close(); } catch (IOException e1) { log.error("Error closing socket", e1); } } log.error("Exception", e); } finally { if(bufferedReader != null) { try { bufferedReader.close(); } catch (IOException e1) { log.error("Error closing bufferedReader", e1); } } if(printWriter != null) { try { printWriter.close(); } catch (Exception e1) { log.error("Error closing socket", e1); } } if(socket != null) { try { socket.close(); } catch (IOException e) { if (log.isDebugEnabled()) log.debug("Error closing socket", e); } } } } return keyList; } }
public class class_name { public List<String> getAllKeys(final int limit) { final List<String> keyList = new ArrayList<String>(limit); byte[] array = new byte[EVCacheConfig.getInstance().getDynamicIntProperty(appName + ".all.keys.reader.buffer.size.bytes", 4*1024*1024).get()]; final int waitInSec = EVCacheConfig.getInstance().getDynamicIntProperty(appName + ".all.keys.reader.wait.duration.sec", 60).get(); for(InetSocketAddress address : memcachedNodesInZone) { //final List<String> keyList = new ArrayList<String>(limit); Socket socket = null; PrintWriter printWriter = null; BufferedInputStream bufferedReader = null; try { socket = new Socket(address.getHostName(), address.getPort()); // depends on control dependency: [try], data = [none] printWriter = new PrintWriter(socket.getOutputStream(), true); // depends on control dependency: [try], data = [none] printWriter.print("lru_crawler metadump all \r\n"); // depends on control dependency: [try], data = [none] printWriter.print("quit \r\n"); // depends on control dependency: [try], data = [none] printWriter.flush(); // depends on control dependency: [try], data = [none] bufferedReader = new BufferedInputStream(socket.getInputStream()); // depends on control dependency: [try], data = [none] while(isDataAvailableForRead(bufferedReader, waitInSec, TimeUnit.SECONDS, socket)) { int read = bufferedReader.read(array); if (log.isDebugEnabled()) log.debug("Number of bytes read = " +read); if(read > 0) { StringBuilder b = new StringBuilder(); boolean start = true; for (int i = 0; i < read; i++) { if(array[i] == ' ') { start = false; // depends on control dependency: [if], data = [none] if(b.length() > 4) keyList.add(URLDecoder.decode(b.substring(4))); b = new StringBuilder(); // depends on control dependency: [if], data = [none] } if(start) b.append((char)array[i]); if(array[i] == '\n') { start = true; // depends on control dependency: [if], data = [none] } if(keyList.size() >= limit) { if (log.isDebugEnabled()) 
log.debug("Record Limit reached. Will break and return"); return keyList; // depends on control dependency: [if], data = [none] } } } else if (read < 0 ){ break; } } } catch (Exception e) { if(socket != null) { try { socket.close(); // depends on control dependency: [try], data = [none] } catch (IOException e1) { log.error("Error closing socket", e1); } // depends on control dependency: [catch], data = [none] } log.error("Exception", e); } // depends on control dependency: [catch], data = [none] finally { if(bufferedReader != null) { try { bufferedReader.close(); // depends on control dependency: [try], data = [none] } catch (IOException e1) { log.error("Error closing bufferedReader", e1); } // depends on control dependency: [catch], data = [none] } if(printWriter != null) { try { printWriter.close(); // depends on control dependency: [try], data = [none] } catch (Exception e1) { log.error("Error closing socket", e1); } // depends on control dependency: [catch], data = [none] } if(socket != null) { try { socket.close(); // depends on control dependency: [try], data = [none] } catch (IOException e) { if (log.isDebugEnabled()) log.debug("Error closing socket", e); } // depends on control dependency: [catch], data = [none] } } } return keyList; } }
public class class_name { public String buildUrl() { StringBuilder sb = new StringBuilder(); boolean includePort = true; if (null != scheme) { sb.append(scheme).append("://"); includePort = (port != (scheme.equals("http") ? 80 : 443)); } if (null != serverName) { sb.append(serverName); if (includePort && port > 0) { sb.append(':').append(port); } } if (!Objects.equals(contextPath, "/")) { sb.append(contextPath); } sb.append(buildRequestUrl()); return sb.toString(); } }
public class class_name { public String buildUrl() { StringBuilder sb = new StringBuilder(); boolean includePort = true; if (null != scheme) { sb.append(scheme).append("://"); includePort = (port != (scheme.equals("http") ? 80 : 443)); // depends on control dependency: [if], data = [scheme)] } if (null != serverName) { sb.append(serverName); // depends on control dependency: [if], data = [serverName)] if (includePort && port > 0) { sb.append(':').append(port); // depends on control dependency: [if], data = [none] } } if (!Objects.equals(contextPath, "/")) { sb.append(contextPath); // depends on control dependency: [if], data = [none] } sb.append(buildRequestUrl()); return sb.toString(); } }
public class class_name { public void exportData(CmsExportParameters parameters) throws CmsImportExportException { m_parameters = parameters; m_exportCount = 0; // clear all caches getReport().println(Messages.get().container(Messages.RPT_CLEARCACHE_0), I_CmsReport.FORMAT_NOTE); OpenCms.fireCmsEvent(new CmsEvent(I_CmsEventListener.EVENT_CLEAR_CACHES, new HashMap<String, Object>(0))); try { Element exportNode = openExportFile(parameters.getExportMode()); if (m_parameters.getModuleInfo() != null) { // add the module element exportNode.add(m_parameters.getModuleInfo()); // write the XML digestElement(exportNode, m_parameters.getModuleInfo()); } // export account data only if selected if (m_parameters.isExportAccountData()) { Element accountsElement = exportNode.addElement(CmsImportVersion10.N_ACCOUNTS); getSaxWriter().writeOpen(accountsElement); exportOrgUnits(accountsElement); getSaxWriter().writeClose(accountsElement); exportNode.remove(accountsElement); } // export resource data only if selected if (m_parameters.isExportResourceData()) { exportAllResources(exportNode, m_parameters.getResources()); } // export project data only if selected if (m_parameters.isExportProjectData()) { Element projectsElement = exportNode.addElement(CmsImportVersion10.N_PROJECTS); getSaxWriter().writeOpen(projectsElement); exportProjects(projectsElement); getSaxWriter().writeClose(projectsElement); exportNode.remove(projectsElement); } closeExportFile(exportNode); } catch (SAXException se) { getReport().println(se); CmsMessageContainer message = Messages.get().container( Messages.ERR_IMPORTEXPORT_ERROR_EXPORTING_TO_FILE_1, getExportFileName()); if (LOG.isDebugEnabled()) { LOG.debug(message.key(), se); } throw new CmsImportExportException(message, se); } catch (IOException ioe) { getReport().println(ioe); CmsMessageContainer message = Messages.get().container( Messages.ERR_IMPORTEXPORT_ERROR_EXPORTING_TO_FILE_1, getExportFileName()); if (LOG.isDebugEnabled()) { LOG.debug(message.key(), 
ioe); } throw new CmsImportExportException(message, ioe); } } }
public class class_name { public void exportData(CmsExportParameters parameters) throws CmsImportExportException { m_parameters = parameters; m_exportCount = 0; // clear all caches getReport().println(Messages.get().container(Messages.RPT_CLEARCACHE_0), I_CmsReport.FORMAT_NOTE); OpenCms.fireCmsEvent(new CmsEvent(I_CmsEventListener.EVENT_CLEAR_CACHES, new HashMap<String, Object>(0))); try { Element exportNode = openExportFile(parameters.getExportMode()); if (m_parameters.getModuleInfo() != null) { // add the module element exportNode.add(m_parameters.getModuleInfo()); // depends on control dependency: [if], data = [(m_parameters.getModuleInfo()] // write the XML digestElement(exportNode, m_parameters.getModuleInfo()); // depends on control dependency: [if], data = [none] } // export account data only if selected if (m_parameters.isExportAccountData()) { Element accountsElement = exportNode.addElement(CmsImportVersion10.N_ACCOUNTS); getSaxWriter().writeOpen(accountsElement); // depends on control dependency: [if], data = [none] exportOrgUnits(accountsElement); // depends on control dependency: [if], data = [none] getSaxWriter().writeClose(accountsElement); // depends on control dependency: [if], data = [none] exportNode.remove(accountsElement); // depends on control dependency: [if], data = [none] } // export resource data only if selected if (m_parameters.isExportResourceData()) { exportAllResources(exportNode, m_parameters.getResources()); // depends on control dependency: [if], data = [none] } // export project data only if selected if (m_parameters.isExportProjectData()) { Element projectsElement = exportNode.addElement(CmsImportVersion10.N_PROJECTS); getSaxWriter().writeOpen(projectsElement); // depends on control dependency: [if], data = [none] exportProjects(projectsElement); // depends on control dependency: [if], data = [none] getSaxWriter().writeClose(projectsElement); // depends on control dependency: [if], data = [none] exportNode.remove(projectsElement); 
// depends on control dependency: [if], data = [none] } closeExportFile(exportNode); } catch (SAXException se) { getReport().println(se); CmsMessageContainer message = Messages.get().container( Messages.ERR_IMPORTEXPORT_ERROR_EXPORTING_TO_FILE_1, getExportFileName()); if (LOG.isDebugEnabled()) { LOG.debug(message.key(), se); // depends on control dependency: [if], data = [none] } throw new CmsImportExportException(message, se); } catch (IOException ioe) { getReport().println(ioe); CmsMessageContainer message = Messages.get().container( Messages.ERR_IMPORTEXPORT_ERROR_EXPORTING_TO_FILE_1, getExportFileName()); if (LOG.isDebugEnabled()) { LOG.debug(message.key(), ioe); // depends on control dependency: [if], data = [none] } throw new CmsImportExportException(message, ioe); } } }
public class class_name { public Result run(Database db, Relation<O> relation) { DistanceQuery<O> dq = db.getDistanceQuery(relation, getDistanceFunction()); ArrayDBIDs ids = DBIDUtil.ensureArray(relation.getDBIDs()); final int size = ids.size(); if(size > 0x10000) { throw new AbortException("This implementation does not scale to data sets larger than " + 0x10000 + " instances (~17 GB RAM), which results in an integer overflow."); } if(Linkage.SINGLE.equals(linkage)) { LOG.verbose("Notice: SLINK is a much faster algorithm for single-linkage clustering!"); } // Compute the initial (lower triangular) distance matrix. double[] scratch = new double[triangleSize(size)]; DBIDArrayIter ix = ids.iter(), iy = ids.iter(); // Position counter - must agree with computeOffset! int pos = 0; boolean square = Linkage.WARD.equals(linkage) && !getDistanceFunction().isSquared(); for(int x = 0; ix.valid(); x++, ix.advance()) { iy.seek(0); for(int y = 0; y < x; y++, iy.advance()) { scratch[pos] = dq.distance(ix, iy); // Ward uses variances -- i.e. squared values if(square) { scratch[pos] *= scratch[pos]; } pos++; } } // Initialize space for result: double[] height = new double[size]; Arrays.fill(height, Double.POSITIVE_INFINITY); // Parent node, to track merges // have every object point to itself initially ArrayModifiableDBIDs parent = DBIDUtil.newArray(ids); // Active clusters, when not trivial. Int2ReferenceMap<ModifiableDBIDs> clusters = new Int2ReferenceOpenHashMap<>(); // Repeat until everything merged, except the desired number of clusters: final int stop = size - numclusters; FiniteProgress prog = LOG.isVerbose() ? 
new FiniteProgress("Agglomerative clustering", stop, LOG) : null; for(int i = 0; i < stop; i++) { double min = Double.POSITIVE_INFINITY; int minx = -1, miny = -1; for(int x = 0; x < size; x++) { if(height[x] < Double.POSITIVE_INFINITY) { continue; } final int xbase = triangleSize(x); for(int y = 0; y < x; y++) { if(height[y] < Double.POSITIVE_INFINITY) { continue; } final int idx = xbase + y; if(scratch[idx] < min) { min = scratch[idx]; minx = x; miny = y; } } } assert (minx >= 0 && miny >= 0); // Avoid allocating memory, by reusing existing iterators: ix.seek(minx); iy.seek(miny); // Perform merge in data structure: x -> y // Since y < x, prefer keeping y, dropping x. height[minx] = min; parent.set(minx, iy); // Merge into cluster ModifiableDBIDs cx = clusters.get(minx); ModifiableDBIDs cy = clusters.get(miny); int sizex = 1, sizey = 1; // cluster sizes, for averaging if(cy == null) { cy = DBIDUtil.newHashSet(); cy.add(iy); } else { sizey = cy.size(); } if(cx == null) { cy.add(ix); } else { sizex = cx.size(); cy.addDBIDs(cx); clusters.remove(minx); } clusters.put(miny, cy); // Update distance matrix. Note: miny < minx // Implementation note: most will not need sizej, and could save the // hashmap lookup. final int xbase = triangleSize(minx), ybase = triangleSize(miny); // Write to (y, j), with j < y for(int j = 0; j < miny; j++) { if(height[j] < Double.POSITIVE_INFINITY) { continue; } final DBIDs idsj = clusters.get(j); final int sizej = (idsj == null) ? 1 : idsj.size(); scratch[ybase + j] = linkage.combine(sizex, scratch[xbase + j], sizey, scratch[ybase + j], sizej, min); } // Write to (j, y), with y < j < x for(int j = miny + 1; j < minx; j++) { if(height[j] < Double.POSITIVE_INFINITY) { continue; } final int jbase = triangleSize(j); final DBIDs idsj = clusters.get(j); final int sizej = (idsj == null) ? 
1 : idsj.size(); scratch[jbase + miny] = linkage.combine(sizex, scratch[xbase + j], sizey, scratch[jbase + miny], sizej, min); } // Write to (j, y), with y < x < j for(int j = minx + 1; j < size; j++) { if(height[j] < Double.POSITIVE_INFINITY) { continue; } final DBIDs idsj = clusters.get(j); final int sizej = (idsj == null) ? 1 : idsj.size(); final int jbase = triangleSize(j); scratch[jbase + miny] = linkage.combine(sizex, scratch[jbase + minx], sizey, scratch[jbase + miny], sizej, min); } LOG.incrementProcessed(prog); } LOG.ensureCompleted(prog); // Build the clustering result final Clustering<Model> dendrogram = new Clustering<>("Hierarchical-Clustering", "hierarchical-clustering"); for(int x = 0; x < size; x++) { if(height[x] < Double.POSITIVE_INFINITY) { DBIDs cids = clusters.get(x); if(cids == null) { ix.seek(x); cids = DBIDUtil.deref(ix); } Cluster<Model> cluster = new Cluster<>("Cluster", cids); dendrogram.addToplevelCluster(cluster); } } return dendrogram; } }
public class class_name { public Result run(Database db, Relation<O> relation) { DistanceQuery<O> dq = db.getDistanceQuery(relation, getDistanceFunction()); ArrayDBIDs ids = DBIDUtil.ensureArray(relation.getDBIDs()); final int size = ids.size(); if(size > 0x10000) { throw new AbortException("This implementation does not scale to data sets larger than " + 0x10000 + " instances (~17 GB RAM), which results in an integer overflow."); } if(Linkage.SINGLE.equals(linkage)) { LOG.verbose("Notice: SLINK is a much faster algorithm for single-linkage clustering!"); // depends on control dependency: [if], data = [none] } // Compute the initial (lower triangular) distance matrix. double[] scratch = new double[triangleSize(size)]; DBIDArrayIter ix = ids.iter(), iy = ids.iter(); // Position counter - must agree with computeOffset! int pos = 0; boolean square = Linkage.WARD.equals(linkage) && !getDistanceFunction().isSquared(); for(int x = 0; ix.valid(); x++, ix.advance()) { iy.seek(0); // depends on control dependency: [for], data = [none] for(int y = 0; y < x; y++, iy.advance()) { scratch[pos] = dq.distance(ix, iy); // depends on control dependency: [for], data = [none] // Ward uses variances -- i.e. squared values if(square) { scratch[pos] *= scratch[pos]; // depends on control dependency: [if], data = [none] } pos++; // depends on control dependency: [for], data = [none] } } // Initialize space for result: double[] height = new double[size]; Arrays.fill(height, Double.POSITIVE_INFINITY); // Parent node, to track merges // have every object point to itself initially ArrayModifiableDBIDs parent = DBIDUtil.newArray(ids); // Active clusters, when not trivial. Int2ReferenceMap<ModifiableDBIDs> clusters = new Int2ReferenceOpenHashMap<>(); // Repeat until everything merged, except the desired number of clusters: final int stop = size - numclusters; FiniteProgress prog = LOG.isVerbose() ? 
new FiniteProgress("Agglomerative clustering", stop, LOG) : null; for(int i = 0; i < stop; i++) { double min = Double.POSITIVE_INFINITY; int minx = -1, miny = -1; for(int x = 0; x < size; x++) { if(height[x] < Double.POSITIVE_INFINITY) { continue; } final int xbase = triangleSize(x); for(int y = 0; y < x; y++) { if(height[y] < Double.POSITIVE_INFINITY) { continue; } final int idx = xbase + y; if(scratch[idx] < min) { min = scratch[idx]; // depends on control dependency: [if], data = [none] minx = x; // depends on control dependency: [if], data = [none] miny = y; // depends on control dependency: [if], data = [none] } } } assert (minx >= 0 && miny >= 0); // depends on control dependency: [for], data = [none] // Avoid allocating memory, by reusing existing iterators: ix.seek(minx); // depends on control dependency: [for], data = [none] iy.seek(miny); // depends on control dependency: [for], data = [none] // Perform merge in data structure: x -> y // Since y < x, prefer keeping y, dropping x. height[minx] = min; // depends on control dependency: [for], data = [none] parent.set(minx, iy); // depends on control dependency: [for], data = [none] // Merge into cluster ModifiableDBIDs cx = clusters.get(minx); ModifiableDBIDs cy = clusters.get(miny); int sizex = 1, sizey = 1; // cluster sizes, for averaging if(cy == null) { cy = DBIDUtil.newHashSet(); // depends on control dependency: [if], data = [none] cy.add(iy); // depends on control dependency: [if], data = [none] } else { sizey = cy.size(); // depends on control dependency: [if], data = [none] } if(cx == null) { cy.add(ix); // depends on control dependency: [if], data = [none] } else { sizex = cx.size(); // depends on control dependency: [if], data = [none] cy.addDBIDs(cx); // depends on control dependency: [if], data = [(cx] clusters.remove(minx); // depends on control dependency: [if], data = [none] } clusters.put(miny, cy); // depends on control dependency: [for], data = [none] // Update distance matrix. 
Note: miny < minx // Implementation note: most will not need sizej, and could save the // hashmap lookup. final int xbase = triangleSize(minx), ybase = triangleSize(miny); // Write to (y, j), with j < y for(int j = 0; j < miny; j++) { if(height[j] < Double.POSITIVE_INFINITY) { continue; } final DBIDs idsj = clusters.get(j); final int sizej = (idsj == null) ? 1 : idsj.size(); scratch[ybase + j] = linkage.combine(sizex, scratch[xbase + j], sizey, scratch[ybase + j], sizej, min); // depends on control dependency: [for], data = [j] } // Write to (j, y), with y < j < x for(int j = miny + 1; j < minx; j++) { if(height[j] < Double.POSITIVE_INFINITY) { continue; } final int jbase = triangleSize(j); final DBIDs idsj = clusters.get(j); final int sizej = (idsj == null) ? 1 : idsj.size(); scratch[jbase + miny] = linkage.combine(sizex, scratch[xbase + j], sizey, scratch[jbase + miny], sizej, min); // depends on control dependency: [for], data = [j] } // Write to (j, y), with y < x < j for(int j = minx + 1; j < size; j++) { if(height[j] < Double.POSITIVE_INFINITY) { continue; } final DBIDs idsj = clusters.get(j); final int sizej = (idsj == null) ? 
1 : idsj.size(); final int jbase = triangleSize(j); scratch[jbase + miny] = linkage.combine(sizex, scratch[jbase + minx], sizey, scratch[jbase + miny], sizej, min); // depends on control dependency: [for], data = [none] } LOG.incrementProcessed(prog); // depends on control dependency: [for], data = [none] } LOG.ensureCompleted(prog); // Build the clustering result final Clustering<Model> dendrogram = new Clustering<>("Hierarchical-Clustering", "hierarchical-clustering"); for(int x = 0; x < size; x++) { if(height[x] < Double.POSITIVE_INFINITY) { DBIDs cids = clusters.get(x); if(cids == null) { ix.seek(x); // depends on control dependency: [if], data = [none] cids = DBIDUtil.deref(ix); // depends on control dependency: [if], data = [none] } Cluster<Model> cluster = new Cluster<>("Cluster", cids); dendrogram.addToplevelCluster(cluster); // depends on control dependency: [if], data = [none] } } return dendrogram; } }
public class class_name { public static String compressNumber(String value, CompressionLevel compressionLevel) { value = value.replaceAll("([0-9])0+$", "$1"); if (compressionLevel.equals(CompressionLevel.NORMAL)) { value = value.replaceAll("\\.0+$", ".0"); } else if (compressionLevel.equals(CompressionLevel.WITHOUT_TRAILING_ZEROS)) { value = value.replaceAll("\\.0+$", ""); } return value; } }
public class class_name { public static String compressNumber(String value, CompressionLevel compressionLevel) { value = value.replaceAll("([0-9])0+$", "$1"); if (compressionLevel.equals(CompressionLevel.NORMAL)) { value = value.replaceAll("\\.0+$", ".0"); // depends on control dependency: [if], data = [none] } else if (compressionLevel.equals(CompressionLevel.WITHOUT_TRAILING_ZEROS)) { value = value.replaceAll("\\.0+$", ""); // depends on control dependency: [if], data = [none] } return value; } }
public class class_name { public static long sum( InterleavedS64 img ) { if( BoofConcurrency.USE_CONCURRENT ) { return ImplImageStatistics_MT.sum(img); } else { return ImplImageStatistics.sum(img); } } }
public class class_name { public static long sum( InterleavedS64 img ) { if( BoofConcurrency.USE_CONCURRENT ) { return ImplImageStatistics_MT.sum(img); // depends on control dependency: [if], data = [none] } else { return ImplImageStatistics.sum(img); // depends on control dependency: [if], data = [none] } } }
public class class_name { public static String expand(String filename) { if (filename.startsWith("~" + File.separator)) { try { String home = System.getProperty("user.home"); if (home != null) { return home + filename.substring(1); } } catch (SecurityException e) { // ignore } } return filename; } }
public class class_name { public static String expand(String filename) { if (filename.startsWith("~" + File.separator)) { try { String home = System.getProperty("user.home"); if (home != null) { return home + filename.substring(1); // depends on control dependency: [if], data = [none] } } catch (SecurityException e) { // ignore } // depends on control dependency: [catch], data = [none] } return filename; } }
public class class_name { public static int charAt(CharSequence source, int offset16) { char single = source.charAt(offset16); if (single < UTF16.LEAD_SURROGATE_MIN_VALUE) { return single; } return _charAt(source, offset16, single); } }
public class class_name { public static int charAt(CharSequence source, int offset16) { char single = source.charAt(offset16); if (single < UTF16.LEAD_SURROGATE_MIN_VALUE) { return single; // depends on control dependency: [if], data = [none] } return _charAt(source, offset16, single); } }
public class class_name { private IOException checkRequest(long numBytes, boolean async) { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) { Tr.entry(tc, "checkRequest"); } IOException exception = null; if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { Tr.debug(tc, "numBytes=" + numBytes + " jitsize=" + getJITAllocateSize() + " buffers=" + SSLUtils.getBufferTraceInfo(getBuffers())); } // Extract the buffers provided by the calling channel. WsByteBuffer callerBuffers[] = getBuffers(); if (callerBuffers == null || callerBuffers.length == 0) { // Found null caller buffers. Check allocation size set by caller. if (getJITAllocateSize() <= 0 || getJITAllocateSize() < numBytes) { exception = new IOException("No buffer(s) provided for reading data into."); } } else if (numBytes == 0) { // zero byte read is allowed for sync only if (async) { // Can't do a read of zero in async mode. exception = new IOException("Number of bytes requested, " + numBytes + " is less than minimum allowed (async)."); } } else if (numBytes < 0) { // NumBytes requested must be zero or positive exception = new IOException("Number of bytes requested, " + numBytes + " is less than minimum allowed."); } else { // Ensure buffer provided by caller is big enough to contain the // number of bytes requested. int bytesAvail = SSLUtils.lengthOf(callerBuffers, 0); if (bytesAvail < numBytes) { exception = new IOException("Number of bytes requested, " + numBytes + " exceeds space remaining in the buffers provided: " + bytesAvail); } } if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) { Tr.exit(tc, "checkRequest: " + exception); } return exception; } }
public class class_name { private IOException checkRequest(long numBytes, boolean async) { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) { Tr.entry(tc, "checkRequest"); // depends on control dependency: [if], data = [none] } IOException exception = null; if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { Tr.debug(tc, "numBytes=" + numBytes + " jitsize=" + getJITAllocateSize() + " buffers=" + SSLUtils.getBufferTraceInfo(getBuffers())); // depends on control dependency: [if], data = [none] } // Extract the buffers provided by the calling channel. WsByteBuffer callerBuffers[] = getBuffers(); if (callerBuffers == null || callerBuffers.length == 0) { // Found null caller buffers. Check allocation size set by caller. if (getJITAllocateSize() <= 0 || getJITAllocateSize() < numBytes) { exception = new IOException("No buffer(s) provided for reading data into."); // depends on control dependency: [if], data = [none] } } else if (numBytes == 0) { // zero byte read is allowed for sync only if (async) { // Can't do a read of zero in async mode. exception = new IOException("Number of bytes requested, " + numBytes + " is less than minimum allowed (async)."); // depends on control dependency: [if], data = [none] } } else if (numBytes < 0) { // NumBytes requested must be zero or positive exception = new IOException("Number of bytes requested, " + numBytes + " is less than minimum allowed."); // depends on control dependency: [if], data = [none] } else { // Ensure buffer provided by caller is big enough to contain the // number of bytes requested. 
int bytesAvail = SSLUtils.lengthOf(callerBuffers, 0); if (bytesAvail < numBytes) { exception = new IOException("Number of bytes requested, " + numBytes + " exceeds space remaining in the buffers provided: " + bytesAvail); // depends on control dependency: [if], data = [none] } } if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) { Tr.exit(tc, "checkRequest: " + exception); // depends on control dependency: [if], data = [none] } return exception; } }
public class class_name { public void setUnprocessedNamedQueryIds(java.util.Collection<UnprocessedNamedQueryId> unprocessedNamedQueryIds) { if (unprocessedNamedQueryIds == null) { this.unprocessedNamedQueryIds = null; return; } this.unprocessedNamedQueryIds = new java.util.ArrayList<UnprocessedNamedQueryId>(unprocessedNamedQueryIds); } }
public class class_name { public void setUnprocessedNamedQueryIds(java.util.Collection<UnprocessedNamedQueryId> unprocessedNamedQueryIds) { if (unprocessedNamedQueryIds == null) { this.unprocessedNamedQueryIds = null; // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } this.unprocessedNamedQueryIds = new java.util.ArrayList<UnprocessedNamedQueryId>(unprocessedNamedQueryIds); } }
public class class_name { private static float[] arcToBeziers(double angleStart, double angleExtent) { int numSegments = (int) Math.ceil(Math.abs(angleExtent) * 2.0 / Math.PI); // (angleExtent / 90deg) double angleIncrement = angleExtent / numSegments; // The length of each control point vector is given by the following formula. double controlLength = 4.0 / 3.0 * Math.sin(angleIncrement / 2.0) / (1.0 + Math.cos(angleIncrement / 2.0)); float[] coords = new float[numSegments * 6]; int pos = 0; for (int i=0; i<numSegments; i++) { double angle = angleStart + i * angleIncrement; // Calculate the control vector at this angle double dx = Math.cos(angle); double dy = Math.sin(angle); // First control point coords[pos++] = (float) (dx - controlLength * dy); coords[pos++] = (float) (dy + controlLength * dx); // Second control point angle += angleIncrement; dx = Math.cos(angle); dy = Math.sin(angle); coords[pos++] = (float) (dx + controlLength * dy); coords[pos++] = (float) (dy - controlLength * dx); // Endpoint of bezier coords[pos++] = (float) dx; coords[pos++] = (float) dy; } return coords; } }
// NOTE(review): this copy carries "// depends on control dependency" dataset
// annotations; they are preserved verbatim below.
public class class_name {
    // Approximates a unit-circle arc with cubic beziers; 6 floats per segment
    // (two control points plus the end point).
    private static float[] arcToBeziers(double angleStart, double angleExtent) {
        int numSegments = (int) Math.ceil(Math.abs(angleExtent) * 2.0 / Math.PI);  // (angleExtent / 90deg)
        double angleIncrement = angleExtent / numSegments;
        // The length of each control point vector is given by the following formula.
        double controlLength = 4.0 / 3.0 * Math.sin(angleIncrement / 2.0) / (1.0 + Math.cos(angleIncrement / 2.0));
        float[] coords = new float[numSegments * 6];
        int pos = 0;
        for (int i=0; i<numSegments; i++) {
            double angle = angleStart + i * angleIncrement;
            // Calculate the control vector at this angle
            double dx = Math.cos(angle);
            double dy = Math.sin(angle);
            // First control point
            coords[pos++] = (float) (dx - controlLength * dy); // depends on control dependency: [for], data = [none]
            coords[pos++] = (float) (dy + controlLength * dx); // depends on control dependency: [for], data = [none]
            // Second control point
            angle += angleIncrement; // depends on control dependency: [for], data = [none]
            dx = Math.cos(angle); // depends on control dependency: [for], data = [none]
            dy = Math.sin(angle); // depends on control dependency: [for], data = [none]
            coords[pos++] = (float) (dx + controlLength * dy); // depends on control dependency: [for], data = [none]
            coords[pos++] = (float) (dy - controlLength * dx); // depends on control dependency: [for], data = [none]
            // Endpoint of bezier
            coords[pos++] = (float) dx; // depends on control dependency: [for], data = [none]
            coords[pos++] = (float) dy; // depends on control dependency: [for], data = [none]
        }
        return coords;
    }
}
public class class_name {
    /**
     * Creates the session table — and, unless disabled, its unique index —
     * for whichever database vendor is configured, skipping creation when the
     * table already exists. SQL errors are FFDC'd and logged rather than
     * propagated; the statement and connection are always released in finally.
     */
    void createTable() {
        Connection con = null;
        java.sql.Statement s = null;
        //PK55900 We can pass in true to the getConnection method since this method,
        //createTable, is only called from the initDBSettings method.
        con = getConnection(true);
        if (con == null) {
            return;
        }
        try {
            try {
                s = con.createStatement();
                // Only create the table when it does not already exist.
                if (!getTableDefinition(con)) {
                    if (usingOracle) {
                        if (_smc.isUseOracleBlob()) { // cmd LI1963 start
                            s.executeUpdate("create table " + tableName + " (id varchar(128) not null, propid varchar(128) not null, appname varchar(128) not null, listenercnt smallint, lastaccess integer, creationtime integer, maxinactivetime integer, username varchar(256), small raw(" + SMALLCOL_SIZE_ORACLE + "), medium BLOB, large raw(1))");
                        } else {
                            s.executeUpdate("create table " + tableName + " (id varchar(128) not null, propid varchar(128) not null, appname varchar(128) not null, listenercnt smallint, lastaccess integer, creationtime integer, maxinactivetime integer, username varchar(256), small raw(" + SMALLCOL_SIZE_ORACLE + "), medium long raw, large raw(1))");
                        } // cmd LI1963 end
                    } else if (usingAS400DB2) {
                        try { //if using AS400, create collection first
                            s.executeUpdate("CREATE COLLECTION " + collectionName);
                        } catch (Exception e) {
                            com.ibm.ws.ffdc.FFDCFilter.processException(e, "com.ibm.ws.session.store.db.DatabaseHashMap.createTable", "470", con);
                        }
                        s.executeUpdate("create table " + tableName + " (id varchar(128) not null, propid varchar(128) not null, appname varchar(128) not null, listenercnt smallint, lastaccess bigint, creationtime bigint, maxinactivetime integer, username varchar(256), small varchar(" + SMALLCOL_SIZE_DB2 + ") for bit data, medium long varchar for bit data, large BLOB(2M))");
                    } else if (usingSybase) {
                        s.executeUpdate("create table " + tableName + " (id varchar(128) not null, propid varchar(128) not null, appname varchar(128) not null, listenercnt smallint null, lastaccess numeric(21,0) null, creationtime numeric(21,0) null, maxinactivetime numeric(10,0) null, username varchar(255) null, small image null, medium image null, large image null)");
                    } else if (usingSQLServer) {
                        s.executeUpdate("create table " + tableName + " (id varchar(128) not null, propid varchar(128) not null, appname varchar(128) not null, listenercnt smallint null, lastaccess decimal(21,0) null, creationtime decimal(21,0) null, maxinactivetime integer null, username varchar(255) null, small image null, medium image null, large image null)");
                    } else if (usingInformix) {
                        s.executeUpdate("create table " + tableName + " (id varchar(128) not null, propid varchar(128) not null, appname varchar(128) not null, listenercnt smallint, lastaccess int8, creationtime int8, maxinactivetime integer, username varchar(255), small BYTE, medium BYTE, large BYTE)");
                    } else if (usingCloudScape) {
                        s.executeUpdate("create table " + tableName + " (id varchar(128) not null, propid varchar(128) not null, appname varchar(128) not null, listenercnt smallint, lastaccess bigint, creationtime bigint, maxinactivetime integer, username varchar(255), small LONG VARBINARY , medium char(1) , large char(1))");
                    } else if (dbHandler != null) {
                        // Pluggable handler takes over table creation entirely.
                        dbHandler.createTable(s, tableName);
                    } else if (!usingDB2Connect && !usingDB2zOS) { // LIDB2775.25 zOS
                        String tableSpaceName = " "; // a little overkill for this test
                        String configTableSpaceName = _smc.getTableSpaceName();
                        if (configTableSpaceName != null && !configTableSpaceName.equals("") && configTableSpaceName.length() != 0)
                            tableSpaceName = " in " + configTableSpaceName;
                        if (usingSolidDB)
                            s.executeUpdate("create table " + tableName + " (id varchar(128) not null, propid varchar(128) not null, appname varchar(128) not null, listenercnt smallint, lastaccess bigint, creationtime bigint, maxinactivetime integer, username varchar(256), small varchar(" + smallColSize + "), medium long varchar, large BLOB(2M)) " + tableSpaceName);
                        else
                            s.executeUpdate("create table " + tableName + " (id varchar(128) not null, propid varchar(128) not null, appname varchar(128) not null, listenercnt smallint, lastaccess bigint, creationtime bigint, maxinactivetime integer, username varchar(256), small varchar(" + smallColSize + ") for bit data, medium long varchar for bit data, large BLOB(2M)) " + tableSpaceName);
                    }
                }
                // } catch (com.ibm.ejs.cm.portability.TableAlreadyExistsException eee) {
                // // Do nothing since all is well!!
                // com.ibm.ws.ffdc.FFDCFilter.processException(eee, "com.ibm.ws.session.store.db.DatabaseHashMap.createTable", "495", this);
            } catch (SQLException err) {
                com.ibm.ws.ffdc.FFDCFilter.processException(err, "com.ibm.ws.session.store.db.DatabaseHashMap.createTable", "497", this);
                if (!usingCloudScape) {
                    LoggingUtil.SESSION_LOGGER_WAS.logp(Level.SEVERE, methodClassName, methodNames[CREATE_TABLE], "CommonMessage.exception", err);
                } else {
                    // Error code 30000 is tolerated for CloudScape; others are logged.
                    if (err.getErrorCode() != 30000)
                        LoggingUtil.SESSION_LOGGER_WAS.logp(Level.SEVERE, methodClassName, methodNames[CREATE_TABLE], "CommonMessage.exception", err);
                }
            }
            if (!_smc.isSessionTableSkipIndexCreation()) { // PM37139
                //Creating index in a seperate step as we have seen customers missing this
                //step when they create the table manually
                try {
                    if (s == null)
                        s = con.createStatement();
                    if (usingSybase) {
                        s.executeUpdate("create unique index sess_index on " + tableName + " (id, propid, appname)");
                        s.executeUpdate("alter table sessions lock datarows");
                    } else if (usingSolidDB) {
                        s.executeUpdate("create unique index sess_index on " + tableName + " (id, propid, appname)");
                    } else if (usingAS400DB2) {
                        mediumColSize = mediumColSize - 2; //d154211
                        //PK56991: If multiple cluster members startup at the same time the
                        // combination of these DDLs can lock up for a substantial duration.
                        // We now execute these only if they do not exist.
                        if (!doesIndexExists(con, "sess_index")) {
                            s.executeUpdate("create unique index " + collectionName + ".sess_index on " + tableName + " (id,propid,appname)"); //tableName is already in the form of "collectionName.tableName"
                            //PK56991 comment out marking table as volatile. The DB2 chief architect on iSeries
                            //team informed that VOLATILE clause is only compatible and improve performance on LUW
                            //s.executeUpdate("alter table " + tableName + " volatile");
                        } else if (com.ibm.websphere.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINE)) {
                            LoggingUtil.SESSION_LOGGER_WAS.logp(Level.FINE, methodClassName, methodNames[CREATE_TABLE], "Skip index creation");
                        }
                    } else if (dbHandler != null) {
                        dbHandler.createIndex(con, s, tableName);
                    } else if (!usingDB2Connect && !usingDB2zOS) { // LIDB2775.25
                        //PK56991: Refer to detailed comment above
                        if (!doesIndexExists(con, "sess_index")) {
                            s.executeUpdate("create unique index sess_index on " + tableName + " (id, propid, appname)");//PK86373: changed sesscmd_index to sess_index
                        } else if (com.ibm.websphere.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINE)) {
                            LoggingUtil.SESSION_LOGGER_WAS.logp(Level.FINE, methodClassName, methodNames[CREATE_TABLE], "Skip index creation");
                        }
                        //PK56991: We are marking the the session table as volatile if it is not
                        //done yet.
                        if (usingDB2 && !isTableMarkedVolatile(con)) {
                            s.executeUpdate("alter table " + tableName + " volatile");
                        } else if (com.ibm.websphere.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINE)) {
                            LoggingUtil.SESSION_LOGGER_WAS.logp(Level.FINE, methodClassName, methodNames[CREATE_TABLE], "Skip marking table volatile");
                        }
                    }
                } catch (SQLException err) {
                    //Don't do any thing if it is only index already exists
                    //err.printStackTrace();
                    com.ibm.ws.ffdc.FFDCFilter.processException(err, "com.ibm.ws.session.store.db.DatabaseHashMap.createTable", "526", con);
                }
            } // END PM37139
        } finally {
            if (s != null)
                closeStatement(s);
            closeConnection(con); //findbugs for 106329
        }
    }
}
// NOTE(review): this copy of createTable() carries "// depends on control dependency"
// dataset annotations; they are preserved verbatim below.
public class class_name {
    // Creates the session table (and index) for the configured database
    // vendor when it does not exist; errors are FFDC'd/logged, not thrown.
    void createTable() {
        Connection con = null;
        java.sql.Statement s = null;
        //PK55900 We can pass in true to the getConnection method since this method,
        //createTable, is only called from the initDBSettings method.
        con = getConnection(true);
        if (con == null) {
            return; // depends on control dependency: [if], data = [none]
        }
        try {
            try {
                s = con.createStatement(); // depends on control dependency: [try], data = [none]
                if (!getTableDefinition(con)) {
                    if (usingOracle) {
                        if (_smc.isUseOracleBlob()) { // cmd LI1963 start
                            s.executeUpdate("create table " + tableName + " (id varchar(128) not null, propid varchar(128) not null, appname varchar(128) not null, listenercnt smallint, lastaccess integer, creationtime integer, maxinactivetime integer, username varchar(256), small raw(" + SMALLCOL_SIZE_ORACLE + "), medium BLOB, large raw(1))"); // depends on control dependency: [if], data = [none]
                        } else {
                            s.executeUpdate("create table " + tableName + " (id varchar(128) not null, propid varchar(128) not null, appname varchar(128) not null, listenercnt smallint, lastaccess integer, creationtime integer, maxinactivetime integer, username varchar(256), small raw(" + SMALLCOL_SIZE_ORACLE + "), medium long raw, large raw(1))"); // depends on control dependency: [if], data = [none]
                        } // cmd LI1963 end
                    } else if (usingAS400DB2) {
                        try { //if using AS400, create collection first
                            s.executeUpdate("CREATE COLLECTION " + collectionName); // depends on control dependency: [try], data = [none]
                        } catch (Exception e) {
                            com.ibm.ws.ffdc.FFDCFilter.processException(e, "com.ibm.ws.session.store.db.DatabaseHashMap.createTable", "470", con);
                        } // depends on control dependency: [catch], data = [none]
                        s.executeUpdate("create table " + tableName + " (id varchar(128) not null, propid varchar(128) not null, appname varchar(128) not null, listenercnt smallint, lastaccess bigint, creationtime bigint, maxinactivetime integer, username varchar(256), small varchar(" + SMALLCOL_SIZE_DB2 + ") for bit data, medium long varchar for bit data, large BLOB(2M))"); // depends on control dependency: [if], data = [none]
                    } else if (usingSybase) {
                        s.executeUpdate("create table " + tableName + " (id varchar(128) not null, propid varchar(128) not null, appname varchar(128) not null, listenercnt smallint null, lastaccess numeric(21,0) null, creationtime numeric(21,0) null, maxinactivetime numeric(10,0) null, username varchar(255) null, small image null, medium image null, large image null)"); // depends on control dependency: [if], data = [none]
                    } else if (usingSQLServer) {
                        s.executeUpdate("create table " + tableName + " (id varchar(128) not null, propid varchar(128) not null, appname varchar(128) not null, listenercnt smallint null, lastaccess decimal(21,0) null, creationtime decimal(21,0) null, maxinactivetime integer null, username varchar(255) null, small image null, medium image null, large image null)"); // depends on control dependency: [if], data = [none]
                    } else if (usingInformix) {
                        s.executeUpdate("create table " + tableName + " (id varchar(128) not null, propid varchar(128) not null, appname varchar(128) not null, listenercnt smallint, lastaccess int8, creationtime int8, maxinactivetime integer, username varchar(255), small BYTE, medium BYTE, large BYTE)");
                    } else if (usingCloudScape) {
                        s.executeUpdate("create table " + tableName + " (id varchar(128) not null, propid varchar(128) not null, appname varchar(128) not null, listenercnt smallint, lastaccess bigint, creationtime bigint, maxinactivetime integer, username varchar(255), small LONG VARBINARY , medium char(1) , large char(1))"); // depends on control dependency: [if], data = [none]
                    } else if (dbHandler != null) {
                        dbHandler.createTable(s, tableName); // depends on control dependency: [if], data = [none]
                    } else if (!usingDB2Connect && !usingDB2zOS) { // LIDB2775.25 zOS
                        String tableSpaceName = " "; // a little overkill for this test
                        String configTableSpaceName = _smc.getTableSpaceName();
                        if (configTableSpaceName != null && !configTableSpaceName.equals("") && configTableSpaceName.length() != 0)
                            tableSpaceName = " in " + configTableSpaceName;
                        if (usingSolidDB)
                            s.executeUpdate("create table " + tableName + " (id varchar(128) not null, propid varchar(128) not null, appname varchar(128) not null, listenercnt smallint, lastaccess bigint, creationtime bigint, maxinactivetime integer, username varchar(256), small varchar(" + smallColSize + "), medium long varchar, large BLOB(2M)) " + tableSpaceName);
                        else
                            s.executeUpdate("create table " + tableName + " (id varchar(128) not null, propid varchar(128) not null, appname varchar(128) not null, listenercnt smallint, lastaccess bigint, creationtime bigint, maxinactivetime integer, username varchar(256), small varchar(" + smallColSize + ") for bit data, medium long varchar for bit data, large BLOB(2M)) " + tableSpaceName);
                    }
                }
                // } catch (com.ibm.ejs.cm.portability.TableAlreadyExistsException eee) {
                // // Do nothing since all is well!!
                // com.ibm.ws.ffdc.FFDCFilter.processException(eee, "com.ibm.ws.session.store.db.DatabaseHashMap.createTable", "495", this);
            } catch (SQLException err) {
                com.ibm.ws.ffdc.FFDCFilter.processException(err, "com.ibm.ws.session.store.db.DatabaseHashMap.createTable", "497", this);
                if (!usingCloudScape) {
                    LoggingUtil.SESSION_LOGGER_WAS.logp(Level.SEVERE, methodClassName, methodNames[CREATE_TABLE], "CommonMessage.exception", err); // depends on control dependency: [if], data = [none]
                } else {
                    if (err.getErrorCode() != 30000)
                        LoggingUtil.SESSION_LOGGER_WAS.logp(Level.SEVERE, methodClassName, methodNames[CREATE_TABLE], "CommonMessage.exception", err);
                }
            } // depends on control dependency: [catch], data = [none]
            if (!_smc.isSessionTableSkipIndexCreation()) { // PM37139
                //Creating index in a seperate step as we have seen customers missing this
                //step when they create the table manually
                try {
                    if (s == null)
                        s = con.createStatement();
                    if (usingSybase) {
                        s.executeUpdate("create unique index sess_index on " + tableName + " (id, propid, appname)"); // depends on control dependency: [if], data = [none]
                        s.executeUpdate("alter table sessions lock datarows"); // depends on control dependency: [if], data = [none]
                    } else if (usingSolidDB) {
                        s.executeUpdate("create unique index sess_index on " + tableName + " (id, propid, appname)"); // depends on control dependency: [if], data = [none]
                    } else if (usingAS400DB2) {
                        mediumColSize = mediumColSize - 2; //d154211 // depends on control dependency: [if], data = [none]
                        //PK56991: If multiple cluster members startup at the same time the
                        // combination of these DDLs can lock up for a substantial duration.
                        // We now execute these only if they do not exist.
                        if (!doesIndexExists(con, "sess_index")) {
                            s.executeUpdate("create unique index " + collectionName + ".sess_index on " + tableName + " (id,propid,appname)"); //tableName is already in the form of "collectionName.tableName" // depends on control dependency: [if], data = [none]
                            //PK56991 comment out marking table as volatile. The DB2 chief architect on iSeries
                            //team informed that VOLATILE clause is only compatible and improve performance on LUW
                            //s.executeUpdate("alter table " + tableName + " volatile");
                        } else if (com.ibm.websphere.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINE)) {
                            LoggingUtil.SESSION_LOGGER_WAS.logp(Level.FINE, methodClassName, methodNames[CREATE_TABLE], "Skip index creation"); // depends on control dependency: [if], data = [none]
                        }
                    } else if (dbHandler != null) {
                        dbHandler.createIndex(con, s, tableName); // depends on control dependency: [if], data = [none]
                    } else if (!usingDB2Connect && !usingDB2zOS) { // LIDB2775.25
                        //PK56991: Refer to detailed comment above
                        if (!doesIndexExists(con, "sess_index")) {
                            s.executeUpdate("create unique index sess_index on " + tableName + " (id, propid, appname)");//PK86373: changed sesscmd_index to sess_index // depends on control dependency: [if], data = [none]
                        } else if (com.ibm.websphere.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINE)) {
                            LoggingUtil.SESSION_LOGGER_WAS.logp(Level.FINE, methodClassName, methodNames[CREATE_TABLE], "Skip index creation"); // depends on control dependency: [if], data = [none]
                        }
                        //PK56991: We are marking the the session table as volatile if it is not
                        //done yet.
                        if (usingDB2 && !isTableMarkedVolatile(con)) {
                            s.executeUpdate("alter table " + tableName + " volatile"); // depends on control dependency: [if], data = [none]
                        } else if (com.ibm.websphere.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINE)) {
                            LoggingUtil.SESSION_LOGGER_WAS.logp(Level.FINE, methodClassName, methodNames[CREATE_TABLE], "Skip marking table volatile"); // depends on control dependency: [if], data = [none]
                        }
                    }
                } catch (SQLException err) {
                    //Don't do any thing if it is only index already exists
                    //err.printStackTrace();
                    com.ibm.ws.ffdc.FFDCFilter.processException(err, "com.ibm.ws.session.store.db.DatabaseHashMap.createTable", "526", con);
                } // depends on control dependency: [catch], data = [none]
            } // END PM37139
        } finally {
            if (s != null)
                closeStatement(s);
            closeConnection(con); //findbugs for 106329
        }
    }
}
public class class_name {
    /**
     * Installs a listener on the main record that automatically logs the user
     * in after a user-info record is successfully added or updated.
     */
    public void addAutoLoginHandler() {
        this.getMainRecord().addListener(new FileListener(null) {
            // Runs after each record change; on a successful add/update of the
            // user record, performs an application login with its credentials.
            public int doRecordChange(FieldInfo field, int iChangeType, boolean bDisplayOption) {
                // Return an error to stop the change
                int iErrorCode = super.doRecordChange(field, iChangeType, bDisplayOption);
                if ((iChangeType == DBConstants.AFTER_ADD_TYPE) || (iChangeType == DBConstants.AFTER_UPDATE_TYPE))
                    if (iErrorCode == DBConstants.NORMAL_RETURN) {
                        Record recUserInfo = this.getOwner();
                        Task task = recUserInfo.getTask();
                        String strUserName = recUserInfo.getField(UserInfo.ID).toString();
                        // Fall back to the bookmark handle when no ID is present.
                        if ((strUserName == null) || (strUserName.length() == 0))
                            strUserName = recUserInfo.getLastModified(DBConstants.BOOKMARK_HANDLE).toString();
                        String strPassword = recUserInfo.getField(UserInfo.PASSWORD).toString();
                        iErrorCode = task.getApplication().login(task, strUserName, strPassword, task.getProperty(DBParams.DOMAIN)); // Always okay
                    }
                return iErrorCode;
            }
        });
    }
}
// NOTE(review): this copy carries "// depends on control dependency" dataset
// annotations; they are preserved verbatim below.
public class class_name {
    // Registers a listener that auto-logs-in after a successful add/update
    // of the user-info record.
    public void addAutoLoginHandler() {
        this.getMainRecord().addListener(new FileListener(null) {
            public int doRecordChange(FieldInfo field, int iChangeType, boolean bDisplayOption) {
                // Return an error to stop the change
                int iErrorCode = super.doRecordChange(field, iChangeType, bDisplayOption);
                if ((iChangeType == DBConstants.AFTER_ADD_TYPE) || (iChangeType == DBConstants.AFTER_UPDATE_TYPE))
                    if (iErrorCode == DBConstants.NORMAL_RETURN) {
                        Record recUserInfo = this.getOwner();
                        Task task = recUserInfo.getTask();
                        String strUserName = recUserInfo.getField(UserInfo.ID).toString();
                        if ((strUserName == null) || (strUserName.length() == 0))
                            strUserName = recUserInfo.getLastModified(DBConstants.BOOKMARK_HANDLE).toString();
                        String strPassword = recUserInfo.getField(UserInfo.PASSWORD).toString();
                        iErrorCode = task.getApplication().login(task, strUserName, strPassword, task.getProperty(DBParams.DOMAIN)); // Always okay // depends on control dependency: [if], data = [none]
                    }
                return iErrorCode;
            }
        });
    }
}
public class class_name {
    /**
     * Returns the size in bytes of the bitmap's backing allocation, using the
     * most precise API available at the running platform's SDK level.
     *
     * @param bitmap the bitmap to measure
     * @return allocation size in bytes
     */
    public static int byteSizeOf(Bitmap bitmap) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
            // KitKat+: reports the full allocation, not just the used portion.
            return bitmap.getAllocationByteCount();
        }
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB_MR1) {
            return bitmap.getByteCount();
        }
        // Pre-Honeycomb fallback: derive the size from the row stride.
        return bitmap.getRowBytes() * bitmap.getHeight();
    }
}
// NOTE(review): this copy carries "// depends on control dependency" dataset
// annotations; they are preserved verbatim below.
public class class_name {
    // Returns the byte size of a bitmap's backing allocation, picking the
    // most accurate API available at the current SDK level.
    public static int byteSizeOf(Bitmap bitmap) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
            return bitmap.getAllocationByteCount(); // depends on control dependency: [if], data = [none]
        } else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB_MR1) {
            return bitmap.getByteCount(); // depends on control dependency: [if], data = [none]
        } else {
            return bitmap.getRowBytes() * bitmap.getHeight(); // depends on control dependency: [if], data = [none]
        }
    }
}
public class class_name {
    /**
     * Registers collectors for a slave node's java.util.logging output:
     * (1) an async task that gathers rotated log files from the node's
     * support directory on disk, and (2) a content entry that dumps the
     * node's in-memory log-record buffer.
     *
     * @param result     container the in-memory buffer content is added to
     * @param tasks      receives the callable that fetches on-disk log files
     * @param node       the slave node whose logs are collected
     * @param logFetcher used to retrieve log files from the node
     */
    private void addSlaveJulLogRecords(Container result, List<java.util.concurrent.Callable<List<FileContent>>> tasks, final Node node, final SmartLogFetcher logFetcher) {
        final FilePath rootPath = node.getRootPath();
        if (rootPath != null) {
            // rotated log files stored on the disk
            tasks.add(new java.util.concurrent.Callable<List<FileContent>>(){
                public List<FileContent> call() throws Exception {
                    List<FileContent> result = new ArrayList<FileContent>();
                    FilePath supportPath = rootPath.child(SUPPORT_DIRECTORY_NAME);
                    if (supportPath.isDirectory()) {
                        final Map<String, File> logFiles = logFetcher.forNode(node).getLogFiles(supportPath);
                        for (Map.Entry<String, File> entry : logFiles.entrySet()) {
                            result.add(new FileContent(
                                    "nodes/slave/{0}/logs/{1}", new String[]{node.getNodeName(), entry.getKey()}, entry.getValue())
                            );
                        }
                    }
                    return result;
                }
            });
        }
        // this file captures the most recent of those that are still kept around in memory.
        // this overlaps with Jenkins.logRecords, and also overlaps with what's written in files,
        // but added nonetheless just in case.
        //
        // should be ignorable.
        result.add(new LogRecordContent("nodes/slave/{0}/logs/all_memory_buffer.log", node.getNodeName()) {
            @Override
            public Iterable<LogRecord> getLogRecords() throws IOException {
                try {
                    return SupportPlugin.getInstance().getAllLogRecords(node);
                } catch (InterruptedException e) {
                    // Surface the interruption as an IOException, keeping the cause.
                    throw (IOException)new InterruptedIOException().initCause(e);
                }
            }
        });
    }
}
// NOTE(review): this copy carries "// depends on control dependency" dataset
// annotations; they are preserved verbatim below.
public class class_name {
    // Registers collectors for a slave node's JUL logs: on-disk rotated files
    // (via an async callable) plus the in-memory log-record buffer.
    private void addSlaveJulLogRecords(Container result, List<java.util.concurrent.Callable<List<FileContent>>> tasks, final Node node, final SmartLogFetcher logFetcher) {
        final FilePath rootPath = node.getRootPath();
        if (rootPath != null) {
            // rotated log files stored on the disk
            tasks.add(new java.util.concurrent.Callable<List<FileContent>>(){
                public List<FileContent> call() throws Exception {
                    List<FileContent> result = new ArrayList<FileContent>();
                    FilePath supportPath = rootPath.child(SUPPORT_DIRECTORY_NAME);
                    if (supportPath.isDirectory()) {
                        final Map<String, File> logFiles = logFetcher.forNode(node).getLogFiles(supportPath);
                        for (Map.Entry<String, File> entry : logFiles.entrySet()) {
                            result.add(new FileContent(
                                    "nodes/slave/{0}/logs/{1}", new String[]{node.getNodeName(), entry.getKey()}, entry.getValue())
                            );
                        }
                    }
                    return result;
                }
            }); // depends on control dependency: [if], data = [none]
        }
        // this file captures the most recent of those that are still kept around in memory.
        // this overlaps with Jenkins.logRecords, and also overlaps with what's written in files,
        // but added nonetheless just in case.
        //
        // should be ignorable.
        result.add(new LogRecordContent("nodes/slave/{0}/logs/all_memory_buffer.log", node.getNodeName()) {
            @Override
            public Iterable<LogRecord> getLogRecords() throws IOException {
                try {
                    return SupportPlugin.getInstance().getAllLogRecords(node); // depends on control dependency: [try], data = [none]
                } catch (InterruptedException e) {
                    throw (IOException)new InterruptedIOException().initCause(e);
                } // depends on control dependency: [catch], data = [none]
            }
        });
    }
}
public class class_name { public void flushDataStoreEvents() { DataStoreEvent e; switch(currentDataStoreEventType){ case INSERT: e = DataStoreEvent.insertionEvent(dataStoreObjects); break; case REMOVE: e = DataStoreEvent.removalEvent(dataStoreObjects); break; case UPDATE: e = DataStoreEvent.updateEvent(dataStoreObjects); break; default: return; } for(int i = dataListenerList.size(); --i >= 0;) { dataListenerList.get(i).contentChanged(e); } // reset accumulateDataStoreEvents = false; currentDataStoreEventType = null; dataStoreObjects = null; } }
// NOTE(review): this copy carries "// depends on control dependency" dataset
// annotations; they are preserved verbatim below.
public class class_name {
    // Fires one batched DataStoreEvent matching the accumulated type, then
    // clears the accumulation state. NOTE(review): the switch throws NPE when
    // currentDataStoreEventType is null (e.g. if flushed twice).
    public void flushDataStoreEvents() {
        DataStoreEvent e;
        switch(currentDataStoreEventType){
            case INSERT:
                e = DataStoreEvent.insertionEvent(dataStoreObjects);
                break;
            case REMOVE:
                e = DataStoreEvent.removalEvent(dataStoreObjects);
                break;
            case UPDATE:
                e = DataStoreEvent.updateEvent(dataStoreObjects);
                break;
            default:
                return;
        }
        // Iterate the listener list backwards (last index down to 0).
        for(int i = dataListenerList.size(); --i >= 0;) {
            dataListenerList.get(i).contentChanged(e); // depends on control dependency: [for], data = [i]
        }
        // reset
        accumulateDataStoreEvents = false;
        currentDataStoreEventType = null;
        dataStoreObjects = null;
    }
}
public class class_name {
    /**
     * Logs and persists a security {@code Response} as a point in the
     * responses measurement of InfluxDB, then notifies registered listeners.
     * The full response object is also serialized to JSON into a field.
     */
    @Override
    public void addResponse(Response response) {
        logger.warn("Security response " + response.getAction() + " triggered for user: " + response.getUser().getUsername());
        Point.Builder builder = Point.measurement(Utils.RESPONSES)
                .time(DateUtils.fromString(response.getTimestamp()).getMillis(), TimeUnit.MILLISECONDS)
                .field(Utils.RESPONSE_ACTION, response.getAction())
                .tag(Utils.USERNAME, response.getUser().getUsername())
                .tag(Utils.TIMESTAMP, response.getTimestamp())
                .tag(Utils.DETECTION_SYSTEM, response.getDetectionSystem().getDetectionSystemId());
        // The interval is optional; record its duration and unit when present.
        if(response.getInterval() != null) {
            builder = builder
                    .tag(Utils.RESPONSE_INTERVAL_DURATION, String.valueOf(response.getInterval().getDuration()))
                    .tag(Utils.RESPONSE_INTERVAL_UNIT, response.getInterval().getUnit());
        }
        // NOTE(review): RESPONSE_ACTION is written both as a field (above) and
        // as a tag here — confirm this duplication is intentional.
        Point point = builder.tag(Utils.RESPONSE_ACTION, response.getAction())
                .field(Utils.JSON_CONTENT, gson.toJson(response))
                .build();
        influxDB.write(Utils.DATABASE, "default", point);
        super.notifyListeners(response);
    }
}
// NOTE(review): this copy carries "// depends on control dependency" dataset
// annotations; they are preserved verbatim below.
public class class_name {
    // Persists a security Response as an InfluxDB point and notifies listeners.
    @Override
    public void addResponse(Response response) {
        logger.warn("Security response " + response.getAction() + " triggered for user: " + response.getUser().getUsername());
        Point.Builder builder = Point.measurement(Utils.RESPONSES)
                .time(DateUtils.fromString(response.getTimestamp()).getMillis(), TimeUnit.MILLISECONDS)
                .field(Utils.RESPONSE_ACTION, response.getAction())
                .tag(Utils.USERNAME, response.getUser().getUsername())
                .tag(Utils.TIMESTAMP, response.getTimestamp())
                .tag(Utils.DETECTION_SYSTEM, response.getDetectionSystem().getDetectionSystemId());
        if(response.getInterval() != null) {
            builder = builder
                    .tag(Utils.RESPONSE_INTERVAL_DURATION, String.valueOf(response.getInterval().getDuration()))
                    .tag(Utils.RESPONSE_INTERVAL_UNIT, response.getInterval().getUnit()); // depends on control dependency: [if], data = [none]
        }
        Point point = builder.tag(Utils.RESPONSE_ACTION, response.getAction())
                .field(Utils.JSON_CONTENT, gson.toJson(response))
                .build();
        influxDB.write(Utils.DATABASE, "default", point);
        super.notifyListeners(response);
    }
}
public class class_name { public void write(Object object, Object value) { if(object == null) { String msg = "Can not update the identifier \"" + _identifier + "\" on a null value object."; LOGGER.error(msg); throw new RuntimeException(msg); } if(TRACE_ENABLED) LOGGER.trace("Update property named \"" + _identifier + "\" on object of type: \"" + object.getClass().getName() + "\""); if(object instanceof Map) mapUpdate((Map)object, _identifier, value); else if(object instanceof List) { int i = parseIndex(_identifier); listUpdate((List)object, i, value); } else if(object.getClass().isArray()) { int i = parseIndex(_identifier); arrayUpdate(object, i, value); } else beanUpdate(object, _identifier, value); } }
// NOTE(review): this copy carries "// depends on control dependency" dataset
// annotations; they are preserved verbatim below.
public class class_name {
    // Dispatches a property write to the map/list/array/bean update helper
    // matching the target's runtime type; rejects null targets.
    public void write(Object object, Object value) {
        if(object == null) {
            String msg = "Can not update the identifier \"" + _identifier + "\" on a null value object."; // depends on control dependency: [if], data = [none]
            LOGGER.error(msg); // depends on control dependency: [if], data = [none]
            throw new RuntimeException(msg);
        }
        if(TRACE_ENABLED)
            LOGGER.trace("Update property named \"" + _identifier + "\" on object of type: \"" + object.getClass().getName() + "\"");
        if(object instanceof Map)
            mapUpdate((Map)object, _identifier, value);
        else if(object instanceof List) {
            int i = parseIndex(_identifier);
            listUpdate((List)object, i, value); // depends on control dependency: [if], data = [none]
        }
        else if(object.getClass().isArray()) {
            int i = parseIndex(_identifier);
            arrayUpdate(object, i, value); // depends on control dependency: [if], data = [none]
        }
        else
            beanUpdate(object, _identifier, value);
    }
}
public class class_name {
    /**
     * Recompiles {@code source} when it is newer than {@code oldClass}, or
     * when there is no old class at all; otherwise returns {@code oldClass}
     * unchanged. The stale cache entry is removed under the
     * {@code sourceCache} lock before parsing. File URLs are parsed from the
     * file (using the configured source encoding); if the URL cannot be
     * converted to a URI, or the source is not a file, the class is parsed
     * from the URL's stream instead.
     *
     * NOTE(review): {@code className} is not used in this body — presumably
     * kept for the overridable signature; confirm.
     */
    protected Class recompile(URL source, String className, Class oldClass) throws CompilationFailedException, IOException {
        if (source != null) {
            // found a source, compile it if newer
            if ((oldClass != null && isSourceNewer(source, oldClass)) || (oldClass == null)) {
                synchronized (sourceCache) {
                    String name = source.toExternalForm();
                    sourceCache.remove(name);
                    if (isFile(source)) {
                        try {
                            return parseClass(new GroovyCodeSource(new File(source.toURI()), config.getSourceEncoding()));
                        } catch (URISyntaxException e) {
                            // do nothing and fall back to the other version
                        }
                    }
                    return parseClass(source.openStream(), name);
                }
            }
        }
        return oldClass;
    }
}
// NOTE(review): this copy carries "// depends on control dependency" dataset
// annotations; they are preserved verbatim below.
public class class_name {
    // Recompiles the class from source when newer than oldClass (or when no
    // old class exists); otherwise returns oldClass.
    protected Class recompile(URL source, String className, Class oldClass) throws CompilationFailedException, IOException {
        if (source != null) {
            // found a source, compile it if newer
            if ((oldClass != null && isSourceNewer(source, oldClass)) || (oldClass == null)) {
                synchronized (sourceCache) {
                    String name = source.toExternalForm();
                    sourceCache.remove(name);
                    if (isFile(source)) {
                        try {
                            return parseClass(new GroovyCodeSource(new File(source.toURI()), config.getSourceEncoding())); // depends on control dependency: [try], data = [none]
                        } catch (URISyntaxException e) {
                            // do nothing and fall back to the other version
                        } // depends on control dependency: [catch], data = [none]
                    }
                    return parseClass(source.openStream(), name);
                }
            }
        }
        return oldClass;
    }
}
public class class_name {
    /**
     * Builds a {@code MapExpression} from a MAP AST node. An absent entry
     * list (the literal [:]) yields an empty map. Each child of the ELIST
     * must be a LABELED_ARG or SPREAD_MAP_ARG; other node types trip an
     * assertion chosen to produce a helpful error message.
     */
    protected Expression mapExpression(AST mapNode) {
        List expressions = new ArrayList();
        AST elist = mapNode.getFirstChild();
        if (elist != null) {  // totally empty in the case of [:]
            assertNodeType(ELIST, elist);
            for (AST node = elist.getFirstChild(); node != null; node = node.getNextSibling()) {
                switch (node.getType()) {
                    case LABELED_ARG:
                    case SPREAD_MAP_ARG:
                        break; // legal cases
                    case SPREAD_ARG:
                        // Plain spread in map position: report it as the map form.
                        assertNodeType(SPREAD_MAP_ARG, node);
                        break; // helpful error
                    default:
                        assertNodeType(LABELED_ARG, node);
                        break; // helpful error
                }
                expressions.add(mapEntryExpression(node));
            }
        }
        MapExpression mapExpression = new MapExpression(expressions);
        configureAST(mapExpression, mapNode);
        return mapExpression;
    }
}
// NOTE(review): this copy carries "// depends on control dependency" dataset
// annotations; they are preserved verbatim below.
public class class_name {
    // Builds a MapExpression from a MAP AST node; [:] yields an empty map.
    protected Expression mapExpression(AST mapNode) {
        List expressions = new ArrayList();
        AST elist = mapNode.getFirstChild();
        if (elist != null) {  // totally empty in the case of [:]
            assertNodeType(ELIST, elist); // depends on control dependency: [if], data = [none]
            for (AST node = elist.getFirstChild(); node != null; node = node.getNextSibling()) {
                switch (node.getType()) {
                    case LABELED_ARG:
                    case SPREAD_MAP_ARG:
                        break; // legal cases
                    case SPREAD_ARG:
                        assertNodeType(SPREAD_MAP_ARG, node);
                        break; // helpful error
                    default:
                        assertNodeType(LABELED_ARG, node);
                        break; // helpful error
                }
                expressions.add(mapEntryExpression(node)); // depends on control dependency: [for], data = [node]
            }
        }
        MapExpression mapExpression = new MapExpression(expressions);
        configureAST(mapExpression, mapNode);
        return mapExpression;
    }
}
public class class_name { static void save(KAFDocument kaf, String filename) { try { File file = new File(filename); Writer out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file), "UTF8")); out.write(kafToStr(kaf)); out.flush(); } catch (Exception e) { System.out.println("Error writing to file"); } } }
public class class_name { static void save(KAFDocument kaf, String filename) { try { File file = new File(filename); Writer out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file), "UTF8")); out.write(kafToStr(kaf)); // depends on control dependency: [try], data = [none] out.flush(); // depends on control dependency: [try], data = [none] } catch (Exception e) { System.out.println("Error writing to file"); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public Session currentSession() throws HibernateException { SessionHolder sessionHolder = SessionUtils.currentSession(this.sessionFactory); Session session = sessionHolder.getSession(); // TODO what time enter into the code? if (TransactionSynchronizationManager.isSynchronizationActive() && !sessionHolder.isSynchronizedWithTransaction()) { TransactionSynchronizationManager.registerSynchronization(new SessionSynchronization(sessionHolder, this.sessionFactory)); sessionHolder.setSynchronizedWithTransaction(true); // Switch to FlushMode.AUTO, as we have to assume a thread-bound Session // with FlushMode.MANUAL, which needs to allow flushing within the transaction. FlushMode flushMode = session.getFlushMode(); if (FlushMode.isManualFlushMode(flushMode) && !TransactionSynchronizationManager.isCurrentTransactionReadOnly()) { session.setFlushMode(FlushMode.AUTO); sessionHolder.setPreviousFlushMode(flushMode); } } return session; } }
public class class_name { public Session currentSession() throws HibernateException { SessionHolder sessionHolder = SessionUtils.currentSession(this.sessionFactory); Session session = sessionHolder.getSession(); // TODO what time enter into the code? if (TransactionSynchronizationManager.isSynchronizationActive() && !sessionHolder.isSynchronizedWithTransaction()) { TransactionSynchronizationManager.registerSynchronization(new SessionSynchronization(sessionHolder, this.sessionFactory)); sessionHolder.setSynchronizedWithTransaction(true); // Switch to FlushMode.AUTO, as we have to assume a thread-bound Session // with FlushMode.MANUAL, which needs to allow flushing within the transaction. FlushMode flushMode = session.getFlushMode(); if (FlushMode.isManualFlushMode(flushMode) && !TransactionSynchronizationManager.isCurrentTransactionReadOnly()) { session.setFlushMode(FlushMode.AUTO); // depends on control dependency: [if], data = [none] sessionHolder.setPreviousFlushMode(flushMode); // depends on control dependency: [if], data = [none] } } return session; } }
public class class_name { public JMFMessage copy() { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) JmfTr.entry(this, tc, "copy"); JMFMessage copy; synchronized (getMessageLockArtefact()) { if (map == null) { // If there is no map, then this is already become just a delegator, so // the copy does not need to include the compatibility wrapper. copy = ((JMFMessage)encoding).copy(); } else { // In the usual case, we maintain a compatibility layer copy = new JSCompatibleMessageImpl(this); } } if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) JmfTr.exit(this, tc, "copy", copy); return copy; } }
public class class_name { public JMFMessage copy() { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) JmfTr.entry(this, tc, "copy"); JMFMessage copy; synchronized (getMessageLockArtefact()) { if (map == null) { // If there is no map, then this is already become just a delegator, so // the copy does not need to include the compatibility wrapper. copy = ((JMFMessage)encoding).copy(); // depends on control dependency: [if], data = [none] } else { // In the usual case, we maintain a compatibility layer copy = new JSCompatibleMessageImpl(this); // depends on control dependency: [if], data = [none] } } if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) JmfTr.exit(this, tc, "copy", copy); return copy; } }
public class class_name { private final void push() { if (ptr >= stack.length) { // doubling here is probably overkill, but anything that needs to double more than // once (32 levels deep) is very atypical anyway. byte[] newstack = new byte[stack.length<<1]; System.arraycopy(stack,0,newstack,0,stack.length); stack = newstack; } stack[ptr++] = state; } }
public class class_name { private final void push() { if (ptr >= stack.length) { // doubling here is probably overkill, but anything that needs to double more than // once (32 levels deep) is very atypical anyway. byte[] newstack = new byte[stack.length<<1]; System.arraycopy(stack,0,newstack,0,stack.length); // depends on control dependency: [if], data = [none] stack = newstack; // depends on control dependency: [if], data = [none] } stack[ptr++] = state; } }
public class class_name { public static final <T> T dropTableIfExists( JdbcOperations jdbcOperations, final String table, final Function<JdbcOperations, T> preDropCallback) { LOGGER.info("Dropping table: " + table); final boolean tableExists = doesTableExist(jdbcOperations, table); if (tableExists) { final T ret = preDropCallback.apply(jdbcOperations); jdbcOperations.execute("DROP TABLE " + table); return ret; } return null; } }
public class class_name { public static final <T> T dropTableIfExists( JdbcOperations jdbcOperations, final String table, final Function<JdbcOperations, T> preDropCallback) { LOGGER.info("Dropping table: " + table); final boolean tableExists = doesTableExist(jdbcOperations, table); if (tableExists) { final T ret = preDropCallback.apply(jdbcOperations); jdbcOperations.execute("DROP TABLE " + table); // depends on control dependency: [if], data = [none] return ret; // depends on control dependency: [if], data = [none] } return null; } }
public class class_name { @Override public void notificationReady(final CTInAppNotification inAppNotification){ if(Looper.myLooper() != Looper.getMainLooper()){ getHandlerUsingMainLooper().post(new Runnable() { @Override public void run() { notificationReady(inAppNotification); } }); return; } if(inAppNotification.getError() != null){ getConfigLogger().debug(getAccountId(),"Unable to process inapp notification " + inAppNotification.getError()); return; } getConfigLogger().debug(getAccountId(),"Notification ready: "+inAppNotification.getJsonDescription()); displayNotification(inAppNotification); } }
public class class_name { @Override public void notificationReady(final CTInAppNotification inAppNotification){ if(Looper.myLooper() != Looper.getMainLooper()){ getHandlerUsingMainLooper().post(new Runnable() { @Override public void run() { notificationReady(inAppNotification); } }); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } if(inAppNotification.getError() != null){ getConfigLogger().debug(getAccountId(),"Unable to process inapp notification " + inAppNotification.getError()); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } getConfigLogger().debug(getAccountId(),"Notification ready: "+inAppNotification.getJsonDescription()); displayNotification(inAppNotification); } }