code
stringlengths
130
281k
code_dependency
stringlengths
182
306k
public class class_name { private double calculateMutualInformation(String phrase, List<Term> terms) { int size = terms.size(); if (size == 1) { return -Math.log(getFrequency(terms.get(0).getRealName()) / totalTerm); } double product = 1.; for (Term term : terms) { product *= getFrequency(term.getRealName()); } return Math.log(occurrenceMap.get(phrase).getFrequency() * Math.pow(totalTerm, size - 1)) - Math.log(product); } }
public class class_name { private double calculateMutualInformation(String phrase, List<Term> terms) { int size = terms.size(); if (size == 1) { return -Math.log(getFrequency(terms.get(0).getRealName()) / totalTerm); // depends on control dependency: [if], data = [none] } double product = 1.; for (Term term : terms) { product *= getFrequency(term.getRealName()); // depends on control dependency: [for], data = [term] } return Math.log(occurrenceMap.get(phrase).getFrequency() * Math.pow(totalTerm, size - 1)) - Math.log(product); } }
public class class_name { protected static void adjustUpLeft(byte[][] matrix, byte[][] tempMatrix, int maxX, int maxY) { int currentMinX = Integer.MAX_VALUE, currentMinY = Integer.MAX_VALUE; for(int i = 0; i < maxX; i++) { for(int j = 0; j < maxY; j++) { if(matrix[i][j] == 1) { if(i < currentMinX) { currentMinX = i; } if(j < currentMinY) { currentMinY = j; } } } } for(int i = currentMinX; i < maxX; i++) { for(int j = currentMinY; j < maxY; j++) { tempMatrix[i - currentMinX][j - currentMinY] = matrix[i][j]; } } for(int i = 0; i < maxX; i++) { for(int j = 0; j < maxY; j++) { matrix[i][j] = tempMatrix[i][j]; } } } }
public class class_name { protected static void adjustUpLeft(byte[][] matrix, byte[][] tempMatrix, int maxX, int maxY) { int currentMinX = Integer.MAX_VALUE, currentMinY = Integer.MAX_VALUE; for(int i = 0; i < maxX; i++) { for(int j = 0; j < maxY; j++) { if(matrix[i][j] == 1) { if(i < currentMinX) { currentMinX = i; // depends on control dependency: [if], data = [none] } if(j < currentMinY) { currentMinY = j; // depends on control dependency: [if], data = [none] } } } } for(int i = currentMinX; i < maxX; i++) { for(int j = currentMinY; j < maxY; j++) { tempMatrix[i - currentMinX][j - currentMinY] = matrix[i][j]; // depends on control dependency: [for], data = [j] } } for(int i = 0; i < maxX; i++) { for(int j = 0; j < maxY; j++) { matrix[i][j] = tempMatrix[i][j]; // depends on control dependency: [for], data = [j] } } } }
public class class_name { public void executeBatchAsync(FutureCallback<ResultSet> callback, Statement... statements) throws ExceedMaxAsyncJobsException { if (!asyncSemaphore.tryAcquire()) { if (callback == null) { throw new ExceedMaxAsyncJobsException(maxSyncJobs); } else { callback.onFailure(new ExceedMaxAsyncJobsException(maxSyncJobs)); } } else { try { ResultSetFuture rsf = CqlUtils.executeBatchAsync(getSession(), statements); if (callback != null) { Futures.addCallback(rsf, wrapCallbackResultSet(callback), asyncExecutor); } } catch (Exception e) { asyncSemaphore.release(); LOGGER.error(e.getMessage(), e); } } } }
public class class_name { public void executeBatchAsync(FutureCallback<ResultSet> callback, Statement... statements) throws ExceedMaxAsyncJobsException { if (!asyncSemaphore.tryAcquire()) { if (callback == null) { throw new ExceedMaxAsyncJobsException(maxSyncJobs); } else { callback.onFailure(new ExceedMaxAsyncJobsException(maxSyncJobs)); // depends on control dependency: [if], data = [none] } } else { try { ResultSetFuture rsf = CqlUtils.executeBatchAsync(getSession(), statements); if (callback != null) { Futures.addCallback(rsf, wrapCallbackResultSet(callback), asyncExecutor); // depends on control dependency: [if], data = [(callback] } } catch (Exception e) { asyncSemaphore.release(); LOGGER.error(e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } } }
public class class_name { public static WsLogRecord createWsLogRecord(TraceComponent tc, Level level, String msg, Object[] msgParms) { WsLogRecord retMe = new WsLogRecord(level, msg); retMe.setLoggerName(tc.getName()); retMe.setParameters(msgParms); retMe.setTraceClass(tc.getTraceClass()); retMe.setResourceBundleName(tc.getResourceBundleName()); if (level.intValue() >= Level.INFO.intValue()) { retMe.setLocalizable(REQUIRES_LOCALIZATION); } else { retMe.setLocalizable(REQUIRES_NO_LOCALIZATION); } return retMe; } }
public class class_name { public static WsLogRecord createWsLogRecord(TraceComponent tc, Level level, String msg, Object[] msgParms) { WsLogRecord retMe = new WsLogRecord(level, msg); retMe.setLoggerName(tc.getName()); retMe.setParameters(msgParms); retMe.setTraceClass(tc.getTraceClass()); retMe.setResourceBundleName(tc.getResourceBundleName()); if (level.intValue() >= Level.INFO.intValue()) { retMe.setLocalizable(REQUIRES_LOCALIZATION); // depends on control dependency: [if], data = [none] } else { retMe.setLocalizable(REQUIRES_NO_LOCALIZATION); // depends on control dependency: [if], data = [none] } return retMe; } }
public class class_name { @RequestMapping(value = "changelog/{uuid}/revisions", method = RequestMethod.GET) public SVNChangeLogRevisions changeLogRevisions(@PathVariable String uuid) { // Gets the change log SVNChangeLog changeLog = getChangeLog(uuid); // Cached? SVNChangeLogRevisions revisions = changeLog.getRevisions(); if (revisions != null) { return revisions; } // Loads the revisions revisions = changeLogService.getChangeLogRevisions(changeLog); // Stores in cache logCache.put(uuid, changeLog.withRevisions(revisions)); // OK return revisions; } }
public class class_name { @RequestMapping(value = "changelog/{uuid}/revisions", method = RequestMethod.GET) public SVNChangeLogRevisions changeLogRevisions(@PathVariable String uuid) { // Gets the change log SVNChangeLog changeLog = getChangeLog(uuid); // Cached? SVNChangeLogRevisions revisions = changeLog.getRevisions(); if (revisions != null) { return revisions; // depends on control dependency: [if], data = [none] } // Loads the revisions revisions = changeLogService.getChangeLogRevisions(changeLog); // Stores in cache logCache.put(uuid, changeLog.withRevisions(revisions)); // OK return revisions; } }
public class class_name { protected String getPropertyName(String methodName, String prefix, String suffix) { if (methodName.startsWith(prefix) && methodName.endsWith(suffix)) { return getPropertyName(methodName, prefix.length(), suffix.length()); } return null; } }
public class class_name { protected String getPropertyName(String methodName, String prefix, String suffix) { if (methodName.startsWith(prefix) && methodName.endsWith(suffix)) { return getPropertyName(methodName, prefix.length(), suffix.length()); // depends on control dependency: [if], data = [none] } return null; } }
public class class_name { public static FileInfo parseFileName(File file) { // Owned filenames have the form: // dbname/CURRENT // dbname/LOCK // dbname/LOG // dbname/LOG.old // dbname/MANIFEST-[0-9]+ // dbname/[0-9]+.(log|sst|dbtmp) String fileName = file.getName(); if ("CURRENT".equals(fileName)) { return new FileInfo(FileType.CURRENT); } else if ("LOCK".equals(fileName)) { return new FileInfo(FileType.DB_LOCK); } else if ("LOG".equals(fileName)) { return new FileInfo(FileType.INFO_LOG); } else if ("LOG.old".equals(fileName)) { return new FileInfo(FileType.INFO_LOG); } else if (fileName.startsWith("MANIFEST-")) { long fileNumber = Long.parseLong(removePrefix(fileName, "MANIFEST-")); return new FileInfo(FileType.DESCRIPTOR, fileNumber); } else if (fileName.endsWith(".log")) { long fileNumber = Long.parseLong(removeSuffix(fileName, ".log")); return new FileInfo(FileType.LOG, fileNumber); } else if (fileName.endsWith(".sst")) { long fileNumber = Long.parseLong(removeSuffix(fileName, ".sst")); return new FileInfo(FileType.TABLE, fileNumber); } else if (fileName.endsWith(".dbtmp")) { long fileNumber = Long.parseLong(removeSuffix(fileName, ".dbtmp")); return new FileInfo(FileType.TEMP, fileNumber); } return null; } }
public class class_name { public static FileInfo parseFileName(File file) { // Owned filenames have the form: // dbname/CURRENT // dbname/LOCK // dbname/LOG // dbname/LOG.old // dbname/MANIFEST-[0-9]+ // dbname/[0-9]+.(log|sst|dbtmp) String fileName = file.getName(); if ("CURRENT".equals(fileName)) { return new FileInfo(FileType.CURRENT); // depends on control dependency: [if], data = [none] } else if ("LOCK".equals(fileName)) { return new FileInfo(FileType.DB_LOCK); // depends on control dependency: [if], data = [none] } else if ("LOG".equals(fileName)) { return new FileInfo(FileType.INFO_LOG); // depends on control dependency: [if], data = [none] } else if ("LOG.old".equals(fileName)) { return new FileInfo(FileType.INFO_LOG); // depends on control dependency: [if], data = [none] } else if (fileName.startsWith("MANIFEST-")) { long fileNumber = Long.parseLong(removePrefix(fileName, "MANIFEST-")); return new FileInfo(FileType.DESCRIPTOR, fileNumber); // depends on control dependency: [if], data = [none] } else if (fileName.endsWith(".log")) { long fileNumber = Long.parseLong(removeSuffix(fileName, ".log")); return new FileInfo(FileType.LOG, fileNumber); // depends on control dependency: [if], data = [none] } else if (fileName.endsWith(".sst")) { long fileNumber = Long.parseLong(removeSuffix(fileName, ".sst")); return new FileInfo(FileType.TABLE, fileNumber); // depends on control dependency: [if], data = [none] } else if (fileName.endsWith(".dbtmp")) { long fileNumber = Long.parseLong(removeSuffix(fileName, ".dbtmp")); return new FileInfo(FileType.TEMP, fileNumber); // depends on control dependency: [if], data = [none] } return null; } }
public class class_name { public boolean similar(Object other) { try { if (!(other instanceof JSONObject)) { return false; } Set<String> set = this.keySet(); if (!set.equals(((JSONObject)other).keySet())) { return false; } Iterator<String> iterator = set.iterator(); while (iterator.hasNext()) { String name = iterator.next(); Object valueThis = this.get(name); Object valueOther = ((JSONObject)other).get(name); if (valueThis instanceof JSONObject) { if (!((JSONObject)valueThis).similar(valueOther)) { return false; } } else if (valueThis instanceof JSONArray) { if (!((JSONArray)valueThis).similar(valueOther)) { return false; } } else if (!valueThis.equals(valueOther)) { return false; } } return true; } catch (Throwable exception) { return false; } } }
public class class_name { public boolean similar(Object other) { try { if (!(other instanceof JSONObject)) { return false; // depends on control dependency: [if], data = [none] } Set<String> set = this.keySet(); if (!set.equals(((JSONObject)other).keySet())) { return false; // depends on control dependency: [if], data = [none] } Iterator<String> iterator = set.iterator(); while (iterator.hasNext()) { String name = iterator.next(); Object valueThis = this.get(name); Object valueOther = ((JSONObject)other).get(name); if (valueThis instanceof JSONObject) { if (!((JSONObject)valueThis).similar(valueOther)) { return false; // depends on control dependency: [if], data = [none] } } else if (valueThis instanceof JSONArray) { if (!((JSONArray)valueThis).similar(valueOther)) { return false; // depends on control dependency: [if], data = [none] } } else if (!valueThis.equals(valueOther)) { return false; // depends on control dependency: [if], data = [none] } } return true; // depends on control dependency: [try], data = [none] } catch (Throwable exception) { return false; } // depends on control dependency: [catch], data = [none] } }
public class class_name { public static double geometricCdf(int k, double p) { if(k<=0 || p<0) { throw new IllegalArgumentException("All the parameters must be positive."); } double probabilitySum = 0.0; for(int i=1;i<=k;++i) { probabilitySum += geometric(i, p); } return probabilitySum; } }
public class class_name { public static double geometricCdf(int k, double p) { if(k<=0 || p<0) { throw new IllegalArgumentException("All the parameters must be positive."); } double probabilitySum = 0.0; for(int i=1;i<=k;++i) { probabilitySum += geometric(i, p); // depends on control dependency: [for], data = [i] } return probabilitySum; } }
public class class_name { private void resolveImplicitLevels(int start, int limit, byte level, byte sor, byte eor) { if ((level & 1) == 0) { // even level for (int i = start; i < limit; ++i) { byte t = resultTypes[i]; // Rule I1. if (t == L ) { // no change } else if (t == R) { resultLevels[i] += 1; } else { // t == AN || t == EN resultLevels[i] += 2; } } } else { // odd level for (int i = start; i < limit; ++i) { byte t = resultTypes[i]; // Rule I2. if (t == R) { // no change } else { // t == L || t == AN || t == EN resultLevels[i] += 1; } } } } }
public class class_name { private void resolveImplicitLevels(int start, int limit, byte level, byte sor, byte eor) { if ((level & 1) == 0) { // even level for (int i = start; i < limit; ++i) { byte t = resultTypes[i]; // Rule I1. if (t == L ) { // no change } else if (t == R) { resultLevels[i] += 1; // depends on control dependency: [if], data = [none] } else { // t == AN || t == EN resultLevels[i] += 2; // depends on control dependency: [if], data = [none] } } } else { // odd level for (int i = start; i < limit; ++i) { byte t = resultTypes[i]; // Rule I2. if (t == R) { // no change } else { // t == L || t == AN || t == EN resultLevels[i] += 1; // depends on control dependency: [if], data = [none] } } } } }
public class class_name { public <T> T deserialize(final ByteBuffer buffer) { ByteBufferInput in = new ByteBufferInput(buffer); Kryo kryo = borrow(); try { @SuppressWarnings("unchecked") T obj = (T) kryo.readClassAndObject(in); return obj; } finally { release(kryo); } } }
public class class_name { public <T> T deserialize(final ByteBuffer buffer) { ByteBufferInput in = new ByteBufferInput(buffer); Kryo kryo = borrow(); try { @SuppressWarnings("unchecked") T obj = (T) kryo.readClassAndObject(in); return obj; // depends on control dependency: [try], data = [none] } finally { release(kryo); } } }
public class class_name { @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <R> Observable<R> flatMap(Function<? super T, ? extends ObservableSource<? extends R>> mapper, boolean delayErrors, int maxConcurrency, int bufferSize) { ObjectHelper.requireNonNull(mapper, "mapper is null"); ObjectHelper.verifyPositive(maxConcurrency, "maxConcurrency"); ObjectHelper.verifyPositive(bufferSize, "bufferSize"); if (this instanceof ScalarCallable) { @SuppressWarnings("unchecked") T v = ((ScalarCallable<T>)this).call(); if (v == null) { return empty(); } return ObservableScalarXMap.scalarXMap(v, mapper); } return RxJavaPlugins.onAssembly(new ObservableFlatMap<T, R>(this, mapper, delayErrors, maxConcurrency, bufferSize)); } }
public class class_name { @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <R> Observable<R> flatMap(Function<? super T, ? extends ObservableSource<? extends R>> mapper, boolean delayErrors, int maxConcurrency, int bufferSize) { ObjectHelper.requireNonNull(mapper, "mapper is null"); ObjectHelper.verifyPositive(maxConcurrency, "maxConcurrency"); ObjectHelper.verifyPositive(bufferSize, "bufferSize"); if (this instanceof ScalarCallable) { @SuppressWarnings("unchecked") T v = ((ScalarCallable<T>)this).call(); if (v == null) { return empty(); // depends on control dependency: [if], data = [none] } return ObservableScalarXMap.scalarXMap(v, mapper); // depends on control dependency: [if], data = [none] } return RxJavaPlugins.onAssembly(new ObservableFlatMap<T, R>(this, mapper, delayErrors, maxConcurrency, bufferSize)); } }
public class class_name { public void marshall(GetRequestValidatorsRequest getRequestValidatorsRequest, ProtocolMarshaller protocolMarshaller) { if (getRequestValidatorsRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(getRequestValidatorsRequest.getRestApiId(), RESTAPIID_BINDING); protocolMarshaller.marshall(getRequestValidatorsRequest.getPosition(), POSITION_BINDING); protocolMarshaller.marshall(getRequestValidatorsRequest.getLimit(), LIMIT_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(GetRequestValidatorsRequest getRequestValidatorsRequest, ProtocolMarshaller protocolMarshaller) { if (getRequestValidatorsRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(getRequestValidatorsRequest.getRestApiId(), RESTAPIID_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(getRequestValidatorsRequest.getPosition(), POSITION_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(getRequestValidatorsRequest.getLimit(), LIMIT_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { private static void vectorSwap(PagesIndex pagesIndex, int from, int l, int s) { for (int i = 0; i < s; i++, from++, l++) { pagesIndex.swap(from, l); } } }
public class class_name { private static void vectorSwap(PagesIndex pagesIndex, int from, int l, int s) { for (int i = 0; i < s; i++, from++, l++) { pagesIndex.swap(from, l); // depends on control dependency: [for], data = [none] } } }
public class class_name { public DescribeByoipCidrsResult withByoipCidrs(ByoipCidr... byoipCidrs) { if (this.byoipCidrs == null) { setByoipCidrs(new com.amazonaws.internal.SdkInternalList<ByoipCidr>(byoipCidrs.length)); } for (ByoipCidr ele : byoipCidrs) { this.byoipCidrs.add(ele); } return this; } }
public class class_name { public DescribeByoipCidrsResult withByoipCidrs(ByoipCidr... byoipCidrs) { if (this.byoipCidrs == null) { setByoipCidrs(new com.amazonaws.internal.SdkInternalList<ByoipCidr>(byoipCidrs.length)); // depends on control dependency: [if], data = [none] } for (ByoipCidr ele : byoipCidrs) { this.byoipCidrs.add(ele); // depends on control dependency: [for], data = [ele] } return this; } }
public class class_name { @Override protected final T create(Object... args) { if (this.creating.get()) { throw new IllegalStateException( "Singleton creator has been called again while creation is in progress, this is indicative of a creation loop in a single thread"); } this.creating.set(true); try { final T instance = this.createSingleton(args); this.instance = instance; this.created.set(true); return instance; } finally { this.creating.set(false); } } }
public class class_name { @Override protected final T create(Object... args) { if (this.creating.get()) { throw new IllegalStateException( "Singleton creator has been called again while creation is in progress, this is indicative of a creation loop in a single thread"); } this.creating.set(true); try { final T instance = this.createSingleton(args); this.instance = instance; // depends on control dependency: [try], data = [none] this.created.set(true); // depends on control dependency: [try], data = [none] return instance; // depends on control dependency: [try], data = [none] } finally { this.creating.set(false); } } }
public class class_name { public String readString(String attribute) { final String value = getValue(attribute); if (NULL.equals(value)) { return null; } return value; } }
public class class_name { public String readString(String attribute) { final String value = getValue(attribute); if (NULL.equals(value)) { return null; // depends on control dependency: [if], data = [none] } return value; } }
public class class_name { public void trace( String format, Object[] argArray ) { if( m_delegate.isTraceEnabled() ) { FormattingTuple tuple = MessageFormatter.arrayFormat( format, argArray ); m_delegate.trace( tuple.getMessage(), tuple.getThrowable() ); } } }
public class class_name { public void trace( String format, Object[] argArray ) { if( m_delegate.isTraceEnabled() ) { FormattingTuple tuple = MessageFormatter.arrayFormat( format, argArray ); m_delegate.trace( tuple.getMessage(), tuple.getThrowable() ); // depends on control dependency: [if], data = [none] } } }
public class class_name { private int reselectSelector(int degrees, boolean isInnerCircle, boolean forceToVisibleValue, boolean forceDrawDot) { if (degrees == -1) { return -1; } int currentShowing = getCurrentItemShowing(); int stepSize; boolean allowFineGrained = !forceToVisibleValue && (currentShowing == MINUTE_INDEX); if (allowFineGrained) { degrees = snapPrefer30s(degrees); } else { degrees = snapOnly30s(degrees, 0); } RadialSelectorView radialSelectorView; if (currentShowing == HOUR_INDEX) { radialSelectorView = mHourRadialSelectorView; stepSize = HOUR_VALUE_TO_DEGREES_STEP_SIZE; } else { radialSelectorView = mMinuteRadialSelectorView; stepSize = MINUTE_VALUE_TO_DEGREES_STEP_SIZE; } radialSelectorView.setSelection(degrees, isInnerCircle, forceDrawDot); radialSelectorView.invalidate(); if (currentShowing == HOUR_INDEX) { if (mIs24HourMode) { if (degrees == 0 && isInnerCircle) { degrees = 360; } else if (degrees == 360 && !isInnerCircle) { degrees = 0; } } else if (degrees == 0) { degrees = 360; } } else if (degrees == 360 && currentShowing == MINUTE_INDEX) { degrees = 0; } int value = degrees / stepSize; if (currentShowing == HOUR_INDEX && mIs24HourMode && !isInnerCircle && degrees != 0) { value += 12; } return value; } }
public class class_name { private int reselectSelector(int degrees, boolean isInnerCircle, boolean forceToVisibleValue, boolean forceDrawDot) { if (degrees == -1) { return -1; // depends on control dependency: [if], data = [none] } int currentShowing = getCurrentItemShowing(); int stepSize; boolean allowFineGrained = !forceToVisibleValue && (currentShowing == MINUTE_INDEX); if (allowFineGrained) { degrees = snapPrefer30s(degrees); // depends on control dependency: [if], data = [none] } else { degrees = snapOnly30s(degrees, 0); // depends on control dependency: [if], data = [none] } RadialSelectorView radialSelectorView; if (currentShowing == HOUR_INDEX) { radialSelectorView = mHourRadialSelectorView; // depends on control dependency: [if], data = [none] stepSize = HOUR_VALUE_TO_DEGREES_STEP_SIZE; // depends on control dependency: [if], data = [none] } else { radialSelectorView = mMinuteRadialSelectorView; // depends on control dependency: [if], data = [none] stepSize = MINUTE_VALUE_TO_DEGREES_STEP_SIZE; // depends on control dependency: [if], data = [none] } radialSelectorView.setSelection(degrees, isInnerCircle, forceDrawDot); radialSelectorView.invalidate(); if (currentShowing == HOUR_INDEX) { if (mIs24HourMode) { if (degrees == 0 && isInnerCircle) { degrees = 360; // depends on control dependency: [if], data = [none] } else if (degrees == 360 && !isInnerCircle) { degrees = 0; // depends on control dependency: [if], data = [none] } } else if (degrees == 0) { degrees = 360; // depends on control dependency: [if], data = [none] } } else if (degrees == 360 && currentShowing == MINUTE_INDEX) { degrees = 0; // depends on control dependency: [if], data = [none] } int value = degrees / stepSize; if (currentShowing == HOUR_INDEX && mIs24HourMode && !isInnerCircle && degrees != 0) { value += 12; // depends on control dependency: [if], data = [none] } return value; } }
public class class_name { protected void parseComment() { int commentEndPos = sql.indexOf("*/", position); int commentEndPos2 = sql.indexOf("*#", position); if (0 < commentEndPos2 && commentEndPos2 < commentEndPos) { commentEndPos = commentEndPos2; } if (commentEndPos < 0) { throw new TwoWaySQLException(String.format( "%s is not closed with %s.", sql.substring(position), "*/")); } token = sql.substring(position, commentEndPos); nextTokenType = TokenType.SQL; position = commentEndPos + 2; tokenType = TokenType.COMMENT; } }
public class class_name { protected void parseComment() { int commentEndPos = sql.indexOf("*/", position); int commentEndPos2 = sql.indexOf("*#", position); if (0 < commentEndPos2 && commentEndPos2 < commentEndPos) { commentEndPos = commentEndPos2; // depends on control dependency: [if], data = [none] } if (commentEndPos < 0) { throw new TwoWaySQLException(String.format( "%s is not closed with %s.", sql.substring(position), "*/")); } token = sql.substring(position, commentEndPos); nextTokenType = TokenType.SQL; position = commentEndPos + 2; tokenType = TokenType.COMMENT; } }
public class class_name { public ListImagesResponse listImages(ListImagesRequest request) { checkNotNull(request, "request should not be null."); InternalRequest internalRequest = this.createRequest(request, HttpMethodName.GET, IMAGE_PREFIX); if (!Strings.isNullOrEmpty(request.getMarker())) { internalRequest.addParameter("marker", request.getMarker()); } if (request.getMaxKeys() > 0) { internalRequest.addParameter("maxKeys", String.valueOf(request.getMaxKeys())); } if (!Strings.isNullOrEmpty(request.getImageType())) { internalRequest.addParameter("imageType", request.getImageType()); } return invokeHttpClient(internalRequest, ListImagesResponse.class); } }
public class class_name { public ListImagesResponse listImages(ListImagesRequest request) { checkNotNull(request, "request should not be null."); InternalRequest internalRequest = this.createRequest(request, HttpMethodName.GET, IMAGE_PREFIX); if (!Strings.isNullOrEmpty(request.getMarker())) { internalRequest.addParameter("marker", request.getMarker()); // depends on control dependency: [if], data = [none] } if (request.getMaxKeys() > 0) { internalRequest.addParameter("maxKeys", String.valueOf(request.getMaxKeys())); // depends on control dependency: [if], data = [(request.getMaxKeys()] } if (!Strings.isNullOrEmpty(request.getImageType())) { internalRequest.addParameter("imageType", request.getImageType()); // depends on control dependency: [if], data = [none] } return invokeHttpClient(internalRequest, ListImagesResponse.class); } }
public class class_name { public static ApnsSigningKey loadFromInputStream(final InputStream inputStream, final String teamId, final String keyId) throws IOException, NoSuchAlgorithmException, InvalidKeyException { final ECPrivateKey signingKey; { final String base64EncodedPrivateKey; { final StringBuilder privateKeyBuilder = new StringBuilder(); final BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream)); boolean haveReadHeader = false; boolean haveReadFooter = false; for (String line; (line = reader.readLine()) != null; ) { if (!haveReadHeader) { if (line.contains("BEGIN PRIVATE KEY")) { haveReadHeader = true; } } else { if (line.contains("END PRIVATE KEY")) { haveReadFooter = true; break; } else { privateKeyBuilder.append(line); } } } if (!(haveReadHeader && haveReadFooter)) { throw new IOException("Could not find private key header/footer"); } base64EncodedPrivateKey = privateKeyBuilder.toString(); } final byte[] keyBytes = decodeBase64EncodedString(base64EncodedPrivateKey); final PKCS8EncodedKeySpec keySpec = new PKCS8EncodedKeySpec(keyBytes); final KeyFactory keyFactory = KeyFactory.getInstance("EC"); try { signingKey = (ECPrivateKey) keyFactory.generatePrivate(keySpec); } catch (InvalidKeySpecException e) { throw new InvalidKeyException(e); } } return new ApnsSigningKey(keyId, teamId, signingKey); } }
public class class_name { public static ApnsSigningKey loadFromInputStream(final InputStream inputStream, final String teamId, final String keyId) throws IOException, NoSuchAlgorithmException, InvalidKeyException { final ECPrivateKey signingKey; { final String base64EncodedPrivateKey; { final StringBuilder privateKeyBuilder = new StringBuilder(); final BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream)); boolean haveReadHeader = false; boolean haveReadFooter = false; for (String line; (line = reader.readLine()) != null; ) { if (!haveReadHeader) { if (line.contains("BEGIN PRIVATE KEY")) { haveReadHeader = true; // depends on control dependency: [if], data = [none] } } else { if (line.contains("END PRIVATE KEY")) { haveReadFooter = true; // depends on control dependency: [if], data = [none] break; } else { privateKeyBuilder.append(line); // depends on control dependency: [if], data = [none] } } } if (!(haveReadHeader && haveReadFooter)) { throw new IOException("Could not find private key header/footer"); } base64EncodedPrivateKey = privateKeyBuilder.toString(); } final byte[] keyBytes = decodeBase64EncodedString(base64EncodedPrivateKey); final PKCS8EncodedKeySpec keySpec = new PKCS8EncodedKeySpec(keyBytes); final KeyFactory keyFactory = KeyFactory.getInstance("EC"); try { signingKey = (ECPrivateKey) keyFactory.generatePrivate(keySpec); // depends on control dependency: [try], data = [none] } catch (InvalidKeySpecException e) { throw new InvalidKeyException(e); } // depends on control dependency: [catch], data = [none] } return new ApnsSigningKey(keyId, teamId, signingKey); } }
public class class_name { public Map<String, Map<String, List<CmsJspContentAccessValueWrapper>>> getLocaleValueList() { if (m_localeValueList == null) { m_localeValueList = CmsCollectionsGenericWrapper.createLazyMap(new CmsLocaleValueListTransformer()); } return m_localeValueList; } }
public class class_name { public Map<String, Map<String, List<CmsJspContentAccessValueWrapper>>> getLocaleValueList() { if (m_localeValueList == null) { m_localeValueList = CmsCollectionsGenericWrapper.createLazyMap(new CmsLocaleValueListTransformer()); // depends on control dependency: [if], data = [none] } return m_localeValueList; } }
public class class_name { public static void doBatchEnqueue(final Jedis jedis, final String namespace, final String queue, final List<String> jobJsons) { Pipeline pipelined = jedis.pipelined(); pipelined.sadd(JesqueUtils.createKey(namespace, QUEUES), queue); for (String jobJson : jobJsons) { pipelined.rpush(JesqueUtils.createKey(namespace, QUEUE, queue), jobJson); } pipelined.sync(); } }
public class class_name { public static void doBatchEnqueue(final Jedis jedis, final String namespace, final String queue, final List<String> jobJsons) { Pipeline pipelined = jedis.pipelined(); pipelined.sadd(JesqueUtils.createKey(namespace, QUEUES), queue); for (String jobJson : jobJsons) { pipelined.rpush(JesqueUtils.createKey(namespace, QUEUE, queue), jobJson); // depends on control dependency: [for], data = [jobJson] } pipelined.sync(); } }
public class class_name { public Map<String, ConfigurationMetadataProperty> find(final boolean strict, final Pattern propertyPattern) { val results = new HashMap<String, ConfigurationMetadataProperty>(); val repository = new CasConfigurationMetadataRepository(); val props = repository.getRepository().getAllProperties(); props.forEach((k, v) -> { val matched = StreamSupport.stream(RelaxedPropertyNames.forCamelCase(k).spliterator(), false) .map(Object::toString) .anyMatch(name -> strict ? RegexUtils.matches(propertyPattern, name) : RegexUtils.find(propertyPattern, name)); if (matched) { results.put(k, v); } }); return results; } }
public class class_name { public Map<String, ConfigurationMetadataProperty> find(final boolean strict, final Pattern propertyPattern) { val results = new HashMap<String, ConfigurationMetadataProperty>(); val repository = new CasConfigurationMetadataRepository(); val props = repository.getRepository().getAllProperties(); props.forEach((k, v) -> { val matched = StreamSupport.stream(RelaxedPropertyNames.forCamelCase(k).spliterator(), false) .map(Object::toString) .anyMatch(name -> strict ? RegexUtils.matches(propertyPattern, name) : RegexUtils.find(propertyPattern, name)); if (matched) { results.put(k, v); // depends on control dependency: [if], data = [none] } }); return results; } }
public class class_name { public <T extends OccupantInfo> boolean updateOccupantInfo ( int bodyOid, OccupantInfo.Updater<T> updater) { @SuppressWarnings("unchecked") T info = (T)_occInfo.get(bodyOid); if (info == null || !updater.update(info)) { return false; } // update the canonical copy _occInfo.put(info.getBodyOid(), info); // clone the canonical copy and send an event updating the distributed set with that clone _plobj.updateOccupantInfo(info.clone()); return true; } }
public class class_name { public <T extends OccupantInfo> boolean updateOccupantInfo ( int bodyOid, OccupantInfo.Updater<T> updater) { @SuppressWarnings("unchecked") T info = (T)_occInfo.get(bodyOid); if (info == null || !updater.update(info)) { return false; // depends on control dependency: [if], data = [none] } // update the canonical copy _occInfo.put(info.getBodyOid(), info); // clone the canonical copy and send an event updating the distributed set with that clone _plobj.updateOccupantInfo(info.clone()); return true; } }
public class class_name { public static ImmutableList<ImmutableMap<String, String>> applyAllSuggestedFixChoicesToCode( Iterable<SuggestedFix> fixChoices, Map<String, String> fileNameToCodeMap) { if (Iterables.isEmpty(fixChoices)) { return ImmutableList.of(ImmutableMap.of()); } int alternativeCount = Iterables.getFirst(fixChoices, null).getAlternatives().size(); Preconditions.checkArgument( Streams.stream(fixChoices) .map(f -> f.getAlternatives().size()) .allMatch(Predicate.isEqual(alternativeCount)), "All SuggestedFixAlternatives must offer an equal number of choices for this " + "utility to make sense"); return IntStream.range(0, alternativeCount) .mapToObj(i -> applySuggestedFixChoicesToCode(fixChoices, i, fileNameToCodeMap)) .collect(ImmutableList.toImmutableList()); } }
public class class_name { public static ImmutableList<ImmutableMap<String, String>> applyAllSuggestedFixChoicesToCode( Iterable<SuggestedFix> fixChoices, Map<String, String> fileNameToCodeMap) { if (Iterables.isEmpty(fixChoices)) { return ImmutableList.of(ImmutableMap.of()); // depends on control dependency: [if], data = [none] } int alternativeCount = Iterables.getFirst(fixChoices, null).getAlternatives().size(); Preconditions.checkArgument( Streams.stream(fixChoices) .map(f -> f.getAlternatives().size()) .allMatch(Predicate.isEqual(alternativeCount)), "All SuggestedFixAlternatives must offer an equal number of choices for this " + "utility to make sense"); return IntStream.range(0, alternativeCount) .mapToObj(i -> applySuggestedFixChoicesToCode(fixChoices, i, fileNameToCodeMap)) .collect(ImmutableList.toImmutableList()); } }
public class class_name { @Override public Map<String, Assignment> assign(final Cluster metadata, final Map<String, Subscription> subscriptions) { // construct the client metadata from the decoded subscription info final Map<UUID, ClientMetadata> clientsMetadata = new HashMap<>(); final Set<String> futureConsumers = new HashSet<>(); int minReceivedMetadataVersion = SubscriptionInfo.LATEST_SUPPORTED_VERSION; supportedVersions.clear(); int futureMetadataVersion = UNKNOWN; for (final Map.Entry<String, Subscription> entry : subscriptions.entrySet()) { final String consumerId = entry.getKey(); final Subscription subscription = entry.getValue(); final SubscriptionInfo info = SubscriptionInfo.decode(subscription.userData()); final int usedVersion = info.version(); supportedVersions.add(info.latestSupportedVersion()); if (usedVersion > SubscriptionInfo.LATEST_SUPPORTED_VERSION) { futureMetadataVersion = usedVersion; futureConsumers.add(consumerId); continue; } if (usedVersion < minReceivedMetadataVersion) { minReceivedMetadataVersion = usedVersion; } // create the new client metadata if necessary ClientMetadata clientMetadata = clientsMetadata.get(info.processId()); if (clientMetadata == null) { clientMetadata = new ClientMetadata(info.userEndPoint()); clientsMetadata.put(info.processId(), clientMetadata); } // add the consumer to the client clientMetadata.addConsumer(consumerId, info); } final boolean versionProbing; if (futureMetadataVersion != UNKNOWN) { if (minReceivedMetadataVersion >= EARLIEST_PROBEABLE_VERSION) { log.info("Received a future (version probing) subscription (version: {}). Sending empty assignment back (with supported version {}).", futureMetadataVersion, SubscriptionInfo.LATEST_SUPPORTED_VERSION); versionProbing = true; } else { throw new IllegalStateException("Received a future (version probing) subscription (version: " + futureMetadataVersion + ") and an incompatible pre Kafka 2.0 subscription (version: " + minReceivedMetadataVersion + ") at the same time."); } } else { versionProbing = false; } if (minReceivedMetadataVersion < SubscriptionInfo.LATEST_SUPPORTED_VERSION) { log.info("Downgrading metadata to version {}. Latest supported version is {}.", minReceivedMetadataVersion, SubscriptionInfo.LATEST_SUPPORTED_VERSION); } log.debug("Constructed client metadata {} from the member subscriptions.", clientsMetadata); // ---------------- Step Zero ---------------- // // parse the topology to determine the repartition source topics, // making sure they are created with the number of partitions as // the maximum of the depending sub-topologies source topics' number of partitions final Map<Integer, InternalTopologyBuilder.TopicsInfo> topicGroups = taskManager.builder().topicGroups(); final Map<String, InternalTopicMetadata> repartitionTopicMetadata = new HashMap<>(); for (final InternalTopologyBuilder.TopicsInfo topicsInfo : topicGroups.values()) { for (final String topic : topicsInfo.sourceTopics) { if (!topicsInfo.repartitionSourceTopics.keySet().contains(topic) && !metadata.topics().contains(topic)) { return errorAssignment(clientsMetadata, topic, Error.INCOMPLETE_SOURCE_TOPIC_METADATA.code); } } for (final InternalTopicConfig topic: topicsInfo.repartitionSourceTopics.values()) { repartitionTopicMetadata.put(topic.name(), new InternalTopicMetadata(topic)); } } boolean numPartitionsNeeded; do { numPartitionsNeeded = false; for (final InternalTopologyBuilder.TopicsInfo topicsInfo : topicGroups.values()) { for (final String topicName : topicsInfo.repartitionSourceTopics.keySet()) { int numPartitions = repartitionTopicMetadata.get(topicName).numPartitions; // try set the number of partitions for this repartition topic if it is not set yet if (numPartitions == UNKNOWN) { for (final InternalTopologyBuilder.TopicsInfo otherTopicsInfo : topicGroups.values()) { final Set<String> otherSinkTopics = otherTopicsInfo.sinkTopics; if (otherSinkTopics.contains(topicName)) { // if this topic is one of the sink topics of this topology, // use the maximum of all its source topic partitions as the number of partitions for (final String sourceTopicName : otherTopicsInfo.sourceTopics) { final Integer numPartitionsCandidate; // It is possible the sourceTopic is another internal topic, i.e, // map().join().join(map()) if (repartitionTopicMetadata.containsKey(sourceTopicName)) { numPartitionsCandidate = repartitionTopicMetadata.get(sourceTopicName).numPartitions; } else { numPartitionsCandidate = metadata.partitionCountForTopic(sourceTopicName); } if (numPartitionsCandidate > numPartitions) { numPartitions = numPartitionsCandidate; } } } } // if we still have not find the right number of partitions, // another iteration is needed if (numPartitions == UNKNOWN) { numPartitionsNeeded = true; } else { repartitionTopicMetadata.get(topicName).numPartitions = numPartitions; } } } } } while (numPartitionsNeeded); // ensure the co-partitioning topics within the group have the same number of partitions, // and enforce the number of partitions for those repartition topics to be the same if they // are co-partitioned as well. ensureCopartitioning(taskManager.builder().copartitionGroups(), repartitionTopicMetadata, metadata); // make sure the repartition source topics exist with the right number of partitions, // create these topics if necessary prepareTopic(repartitionTopicMetadata); // augment the metadata with the newly computed number of partitions for all the // repartition source topics final Map<TopicPartition, PartitionInfo> allRepartitionTopicPartitions = new HashMap<>(); for (final Map.Entry<String, InternalTopicMetadata> entry : repartitionTopicMetadata.entrySet()) { final String topic = entry.getKey(); final int numPartitions = entry.getValue().numPartitions; for (int partition = 0; partition < numPartitions; partition++) { allRepartitionTopicPartitions.put(new TopicPartition(topic, partition), new PartitionInfo(topic, partition, null, new Node[0], new Node[0])); } } final Cluster fullMetadata = metadata.withPartitions(allRepartitionTopicPartitions); taskManager.setClusterMetadata(fullMetadata); log.debug("Created repartition topics {} from the parsed topology.", allRepartitionTopicPartitions.values()); // ---------------- Step One ---------------- // // get the tasks as partition groups from the partition grouper final Set<String> allSourceTopics = new HashSet<>(); final Map<Integer, Set<String>> sourceTopicsByGroup = new HashMap<>(); for (final Map.Entry<Integer, InternalTopologyBuilder.TopicsInfo> entry : topicGroups.entrySet()) { allSourceTopics.addAll(entry.getValue().sourceTopics); sourceTopicsByGroup.put(entry.getKey(), entry.getValue().sourceTopics); } final Map<TaskId, Set<TopicPartition>> partitionsForTask = partitionGrouper.partitionGroups(sourceTopicsByGroup, fullMetadata); // check if all partitions are assigned, and there are no duplicates of partitions in multiple tasks final Set<TopicPartition> allAssignedPartitions = new HashSet<>(); final Map<Integer, Set<TaskId>> tasksByTopicGroup = new HashMap<>(); for (final Map.Entry<TaskId, Set<TopicPartition>> entry : partitionsForTask.entrySet()) { final Set<TopicPartition> partitions = entry.getValue(); for (final TopicPartition partition : partitions) { if (allAssignedPartitions.contains(partition)) { log.warn("Partition {} is assigned to more than one tasks: {}", partition, partitionsForTask); } } allAssignedPartitions.addAll(partitions); final TaskId id = entry.getKey(); tasksByTopicGroup.computeIfAbsent(id.topicGroupId, k -> new HashSet<>()).add(id); } for (final String topic : allSourceTopics) { final List<PartitionInfo> partitionInfoList = fullMetadata.partitionsForTopic(topic); if (!partitionInfoList.isEmpty()) { for (final PartitionInfo partitionInfo : partitionInfoList) { final TopicPartition partition = new TopicPartition(partitionInfo.topic(), partitionInfo.partition()); if (!allAssignedPartitions.contains(partition)) { log.warn("Partition {} is not assigned to any tasks: {}" + " Possible causes of a partition not getting assigned" + " is that another topic defined in the topology has not been" + " created when starting your streams application," + " resulting in no tasks created for this topology at all.", partition, partitionsForTask); } } } else { log.warn("No partitions found for topic {}", topic); } } // add tasks to state change log topic subscribers final Map<String, InternalTopicMetadata> changelogTopicMetadata = new HashMap<>(); for (final Map.Entry<Integer, InternalTopologyBuilder.TopicsInfo> entry : topicGroups.entrySet()) { final int topicGroupId = entry.getKey(); final Map<String, InternalTopicConfig> stateChangelogTopics = entry.getValue().stateChangelogTopics; for (final InternalTopicConfig topicConfig : stateChangelogTopics.values()) { // the expected number of partitions is the max value of TaskId.partition + 1 int numPartitions = UNKNOWN; if (tasksByTopicGroup.get(topicGroupId) != null) { for (final TaskId task : tasksByTopicGroup.get(topicGroupId)) { if (numPartitions < task.partition + 1) numPartitions = task.partition + 1; } final InternalTopicMetadata topicMetadata = new InternalTopicMetadata(topicConfig); topicMetadata.numPartitions = numPartitions; changelogTopicMetadata.put(topicConfig.name(), topicMetadata); } else { log.debug("No tasks found for topic group {}", topicGroupId); } } } prepareTopic(changelogTopicMetadata); log.debug("Created state changelog topics {} from the parsed topology.", changelogTopicMetadata.values()); // ---------------- Step Two ---------------- // // assign tasks to clients final Map<UUID, ClientState> states = new HashMap<>(); for (final Map.Entry<UUID, ClientMetadata> entry : clientsMetadata.entrySet()) { states.put(entry.getKey(), entry.getValue().state); } log.debug("Assigning tasks {} to clients {} with number of replicas {}", partitionsForTask.keySet(), states, numStandbyReplicas); //TWITTER CHANGED //final StickyTaskAssignor<UUID> taskAssignor = new StickyTaskAssignor<>(states, partitionsForTask.keySet()); final TaskAssignor<UUID, TaskId> taskAssignor = createTaskAssignor(partitionsForTask, states, clientsMetadata); taskAssignor.assign(numStandbyReplicas); log.info("Assigned tasks to clients as {}.", states); // ---------------- Step Three ---------------- // // construct the global partition assignment per host map final Map<HostInfo, Set<TopicPartition>> partitionsByHostState = new HashMap<>(); if (minReceivedMetadataVersion >= 2) { for (final Map.Entry<UUID, ClientMetadata> entry : clientsMetadata.entrySet()) { final HostInfo hostInfo = entry.getValue().hostInfo; if (hostInfo != null) { final Set<TopicPartition> topicPartitions = new HashSet<>(); final ClientState state = entry.getValue().state; for (final TaskId id : state.activeTasks()) { topicPartitions.addAll(partitionsForTask.get(id)); } partitionsByHostState.put(hostInfo, topicPartitions); } } } taskManager.setPartitionsByHostState(partitionsByHostState); final Map<String, Assignment> assignment; if (versionProbing) { assignment = versionProbingAssignment(clientsMetadata, partitionsForTask, partitionsByHostState, futureConsumers, minReceivedMetadataVersion); } else { assignment = computeNewAssignment(clientsMetadata, partitionsForTask, partitionsByHostState, minReceivedMetadataVersion); } return assignment; } }
public class class_name { @Override public Map<String, Assignment> assign(final Cluster metadata, final Map<String, Subscription> subscriptions) { // construct the client metadata from the decoded subscription info final Map<UUID, ClientMetadata> clientsMetadata = new HashMap<>(); final Set<String> futureConsumers = new HashSet<>(); int minReceivedMetadataVersion = SubscriptionInfo.LATEST_SUPPORTED_VERSION; supportedVersions.clear(); int futureMetadataVersion = UNKNOWN; for (final Map.Entry<String, Subscription> entry : subscriptions.entrySet()) { final String consumerId = entry.getKey(); final Subscription subscription = entry.getValue(); final SubscriptionInfo info = SubscriptionInfo.decode(subscription.userData()); final int usedVersion = info.version(); supportedVersions.add(info.latestSupportedVersion()); // depends on control dependency: [for], data = [none] if (usedVersion > SubscriptionInfo.LATEST_SUPPORTED_VERSION) { futureMetadataVersion = usedVersion; // depends on control dependency: [if], data = [none] futureConsumers.add(consumerId); // depends on control dependency: [if], data = [none] continue; } if (usedVersion < minReceivedMetadataVersion) { minReceivedMetadataVersion = usedVersion; // depends on control dependency: [if], data = [none] } // create the new client metadata if necessary ClientMetadata clientMetadata = clientsMetadata.get(info.processId()); if (clientMetadata == null) { clientMetadata = new ClientMetadata(info.userEndPoint()); // depends on control dependency: [if], data = [none] clientsMetadata.put(info.processId(), clientMetadata); // depends on control dependency: [if], data = [none] } // add the consumer to the client clientMetadata.addConsumer(consumerId, info); // depends on control dependency: [for], data = [none] } final boolean versionProbing; if (futureMetadataVersion != UNKNOWN) { if (minReceivedMetadataVersion >= EARLIEST_PROBEABLE_VERSION) { log.info("Received a future (version probing) subscription (version: {}). Sending empty assignment back (with supported version {}).", futureMetadataVersion, SubscriptionInfo.LATEST_SUPPORTED_VERSION); // depends on control dependency: [if], data = [none] versionProbing = true; // depends on control dependency: [if], data = [none] } else { throw new IllegalStateException("Received a future (version probing) subscription (version: " + futureMetadataVersion + ") and an incompatible pre Kafka 2.0 subscription (version: " + minReceivedMetadataVersion + ") at the same time."); } } else { versionProbing = false; } if (minReceivedMetadataVersion < SubscriptionInfo.LATEST_SUPPORTED_VERSION) { log.info("Downgrading metadata to version {}. Latest supported version is {}.", minReceivedMetadataVersion, SubscriptionInfo.LATEST_SUPPORTED_VERSION); } log.debug("Constructed client metadata {} from the member subscriptions.", clientsMetadata); // ---------------- Step Zero ---------------- // // parse the topology to determine the repartition source topics, // making sure they are created with the number of partitions as // the maximum of the depending sub-topologies source topics' number of partitions final Map<Integer, InternalTopologyBuilder.TopicsInfo> topicGroups = taskManager.builder().topicGroups(); final Map<String, InternalTopicMetadata> repartitionTopicMetadata = new HashMap<>(); for (final InternalTopologyBuilder.TopicsInfo topicsInfo : topicGroups.values()) { for (final String topic : topicsInfo.sourceTopics) { if (!topicsInfo.repartitionSourceTopics.keySet().contains(topic) && !metadata.topics().contains(topic)) { return errorAssignment(clientsMetadata, topic, Error.INCOMPLETE_SOURCE_TOPIC_METADATA.code); } } for (final InternalTopicConfig topic: topicsInfo.repartitionSourceTopics.values()) { repartitionTopicMetadata.put(topic.name(), new InternalTopicMetadata(topic)); // depends on control dependency: [for], data = [topic] } } boolean numPartitionsNeeded; do { numPartitionsNeeded = false; for (final InternalTopologyBuilder.TopicsInfo topicsInfo : topicGroups.values()) { for (final String topicName : topicsInfo.repartitionSourceTopics.keySet()) { int numPartitions = repartitionTopicMetadata.get(topicName).numPartitions; // try set the number of partitions for this repartition topic if it is not set yet if (numPartitions == UNKNOWN) { for (final InternalTopologyBuilder.TopicsInfo otherTopicsInfo : topicGroups.values()) { final Set<String> otherSinkTopics = otherTopicsInfo.sinkTopics; if (otherSinkTopics.contains(topicName)) { // if this topic is one of the sink topics of this topology, // use the maximum of all its source topic partitions as the number of partitions for (final String sourceTopicName : otherTopicsInfo.sourceTopics) { final Integer numPartitionsCandidate; // It is possible the sourceTopic is another internal topic, i.e, // map().join().join(map()) if (repartitionTopicMetadata.containsKey(sourceTopicName)) { numPartitionsCandidate = repartitionTopicMetadata.get(sourceTopicName).numPartitions; // depends on control dependency: [if], data = [none] } else { numPartitionsCandidate = metadata.partitionCountForTopic(sourceTopicName); // depends on control dependency: [if], data = [none] } if (numPartitionsCandidate > numPartitions) { numPartitions = numPartitionsCandidate; // depends on control dependency: [if], data = [none] } } } } // if we still have not find the right number of partitions, // another iteration is needed if (numPartitions == UNKNOWN) { numPartitionsNeeded = true; // depends on control dependency: [if], data = [none] } else { repartitionTopicMetadata.get(topicName).numPartitions = numPartitions; // depends on control dependency: [if], data = [none] } } } } } while (numPartitionsNeeded); // ensure the co-partitioning topics within the group have the same number of partitions, // and enforce the number of partitions for those repartition topics to be the same if they // are co-partitioned as well. ensureCopartitioning(taskManager.builder().copartitionGroups(), repartitionTopicMetadata, metadata); // make sure the repartition source topics exist with the right number of partitions, // create these topics if necessary prepareTopic(repartitionTopicMetadata); // augment the metadata with the newly computed number of partitions for all the // repartition source topics final Map<TopicPartition, PartitionInfo> allRepartitionTopicPartitions = new HashMap<>(); for (final Map.Entry<String, InternalTopicMetadata> entry : repartitionTopicMetadata.entrySet()) { final String topic = entry.getKey(); final int numPartitions = entry.getValue().numPartitions; for (int partition = 0; partition < numPartitions; partition++) { allRepartitionTopicPartitions.put(new TopicPartition(topic, partition), new PartitionInfo(topic, partition, null, new Node[0], new Node[0])); // depends on control dependency: [for], data = [partition] } } final Cluster fullMetadata = metadata.withPartitions(allRepartitionTopicPartitions); taskManager.setClusterMetadata(fullMetadata); log.debug("Created repartition topics {} from the parsed topology.", allRepartitionTopicPartitions.values()); // ---------------- Step One ---------------- // // get the tasks as partition groups from the partition grouper final Set<String> allSourceTopics = new HashSet<>(); final Map<Integer, Set<String>> sourceTopicsByGroup = new HashMap<>(); for (final Map.Entry<Integer, InternalTopologyBuilder.TopicsInfo> entry : topicGroups.entrySet()) { allSourceTopics.addAll(entry.getValue().sourceTopics); // depends on control dependency: [for], data = [entry] sourceTopicsByGroup.put(entry.getKey(), entry.getValue().sourceTopics); // depends on control dependency: [for], data = [entry] } final Map<TaskId, Set<TopicPartition>> partitionsForTask = partitionGrouper.partitionGroups(sourceTopicsByGroup, fullMetadata); // check if all partitions are assigned, and there are no duplicates of partitions in multiple tasks final Set<TopicPartition> allAssignedPartitions = new HashSet<>(); final Map<Integer, Set<TaskId>> tasksByTopicGroup = new HashMap<>(); for (final Map.Entry<TaskId, Set<TopicPartition>> entry : partitionsForTask.entrySet()) { final Set<TopicPartition> partitions = entry.getValue(); for (final TopicPartition partition : partitions) { if (allAssignedPartitions.contains(partition)) { log.warn("Partition {} is assigned to more than one tasks: {}", partition, partitionsForTask); // depends on control dependency: [if], data = [none] } } allAssignedPartitions.addAll(partitions); // depends on control dependency: [for], data = [none] final TaskId id = entry.getKey(); tasksByTopicGroup.computeIfAbsent(id.topicGroupId, k -> new HashSet<>()).add(id); // depends on control dependency: [for], data = [none] } for (final String topic : allSourceTopics) { final List<PartitionInfo> partitionInfoList = fullMetadata.partitionsForTopic(topic); if (!partitionInfoList.isEmpty()) { for (final PartitionInfo partitionInfo : partitionInfoList) { final TopicPartition partition = new TopicPartition(partitionInfo.topic(), partitionInfo.partition()); if (!allAssignedPartitions.contains(partition)) { log.warn("Partition {} is not assigned to any tasks: {}" + " Possible causes of a partition not getting assigned" + " is that another topic defined in the topology has not been" + " created when starting your streams application," + " resulting in no tasks created for this topology at all.", partition, partitionsForTask); // depends on control dependency: [if], data = [none] } } } else { log.warn("No partitions found for topic {}", topic); // depends on control dependency: [if], data = [none] } } // add tasks to state change log topic subscribers final Map<String, InternalTopicMetadata> changelogTopicMetadata = new HashMap<>(); for (final Map.Entry<Integer, InternalTopologyBuilder.TopicsInfo> entry : topicGroups.entrySet()) { final int topicGroupId = entry.getKey(); final Map<String, InternalTopicConfig> stateChangelogTopics = entry.getValue().stateChangelogTopics; for (final InternalTopicConfig topicConfig : stateChangelogTopics.values()) { // the expected number of partitions is the max value of TaskId.partition + 1 int numPartitions = UNKNOWN; if (tasksByTopicGroup.get(topicGroupId) != null) { for (final TaskId task : tasksByTopicGroup.get(topicGroupId)) { if (numPartitions < task.partition + 1) numPartitions = task.partition + 1; } final InternalTopicMetadata topicMetadata = new InternalTopicMetadata(topicConfig); topicMetadata.numPartitions = numPartitions; // depends on control dependency: [if], data = [none] changelogTopicMetadata.put(topicConfig.name(), topicMetadata); // depends on control dependency: [if], data = [none] } else { log.debug("No tasks found for topic group {}", topicGroupId); // depends on control dependency: [if], data = [none] } } } prepareTopic(changelogTopicMetadata); log.debug("Created state changelog topics {} from the parsed topology.", changelogTopicMetadata.values()); // ---------------- Step Two ---------------- // // assign tasks to clients final Map<UUID, ClientState> states = new HashMap<>(); for (final Map.Entry<UUID, ClientMetadata> entry : clientsMetadata.entrySet()) { states.put(entry.getKey(), entry.getValue().state); // depends on control dependency: [for], data = [entry] } log.debug("Assigning tasks {} to clients {} with number of replicas {}", partitionsForTask.keySet(), states, numStandbyReplicas); //TWITTER CHANGED //final StickyTaskAssignor<UUID> taskAssignor = new StickyTaskAssignor<>(states, partitionsForTask.keySet()); final TaskAssignor<UUID, TaskId> taskAssignor = createTaskAssignor(partitionsForTask, states, clientsMetadata); taskAssignor.assign(numStandbyReplicas); log.info("Assigned tasks to clients as {}.", states); // ---------------- Step Three ---------------- // // construct the global partition assignment per host map final Map<HostInfo, Set<TopicPartition>> partitionsByHostState = new HashMap<>(); if (minReceivedMetadataVersion >= 2) { for (final Map.Entry<UUID, ClientMetadata> entry : clientsMetadata.entrySet()) { final HostInfo hostInfo = entry.getValue().hostInfo; if (hostInfo != null) { final Set<TopicPartition> topicPartitions = new HashSet<>(); final ClientState state = entry.getValue().state; for (final TaskId id : state.activeTasks()) { topicPartitions.addAll(partitionsForTask.get(id)); // depends on control dependency: [for], data = [id] } partitionsByHostState.put(hostInfo, topicPartitions); // depends on control dependency: [if], data = [(hostInfo] } } } taskManager.setPartitionsByHostState(partitionsByHostState); final Map<String, Assignment> assignment; if (versionProbing) { assignment = versionProbingAssignment(clientsMetadata, partitionsForTask, partitionsByHostState, futureConsumers, minReceivedMetadataVersion); // depends on control dependency: [if], data = [none] } else { assignment = computeNewAssignment(clientsMetadata, partitionsForTask, partitionsByHostState, minReceivedMetadataVersion); // depends on control dependency: [if], data = [none] } return assignment; } }
public class class_name { @Override public boolean isIdle() { if (ioSession != null) { if (log.isDebugEnabled()) { log.debug("Connection idle - read: {} write: {}", ioSession.isReaderIdle(), ioSession.isWriterIdle()); } return super.isIdle() && ioSession.isBothIdle(); } return super.isIdle(); } }
public class class_name { @Override public boolean isIdle() { if (ioSession != null) { if (log.isDebugEnabled()) { log.debug("Connection idle - read: {} write: {}", ioSession.isReaderIdle(), ioSession.isWriterIdle()); // depends on control dependency: [if], data = [none] } return super.isIdle() && ioSession.isBothIdle(); // depends on control dependency: [if], data = [none] } return super.isIdle(); } }
public class class_name { public boolean setImage(Image image, GraphicsConfiguration config, int w, int h, Object... args) { if (!isImageCachable(w, h)) return false; int hash = hash(config, w, h, args); lock.writeLock().lock(); try { PixelCountSoftReference ref = map.get(hash); // check if currently in map if (ref != null && ref.get() == image) { return true; } // clear out old if (ref != null) { currentPixelCount -= ref.pixelCount; map.remove(hash); } // add new image to pixel count int newPixelCount = image.getWidth(null) * image.getHeight(null); currentPixelCount += newPixelCount; // clean out lost references if not enough space if (currentPixelCount > maxPixelCount) { while ((ref = (PixelCountSoftReference) referenceQueue.poll()) != null) { // reference lost map.remove(ref.hash); currentPixelCount -= ref.pixelCount; } } // remove old items till there is enough free space if (currentPixelCount > maxPixelCount) { Iterator<Map.Entry<Integer, PixelCountSoftReference>> mapIter = map.entrySet().iterator(); while ((currentPixelCount > maxPixelCount) && mapIter.hasNext()) { Map.Entry<Integer, PixelCountSoftReference> entry = mapIter.next(); mapIter.remove(); Image img = entry.getValue().get(); if (img != null) img.flush(); currentPixelCount -= entry.getValue().pixelCount; } } // finaly put new in map map.put(hash, new PixelCountSoftReference(image, referenceQueue, newPixelCount, hash, config, w, h, args)); return true; } finally { lock.writeLock().unlock(); } } }
public class class_name { public boolean setImage(Image image, GraphicsConfiguration config, int w, int h, Object... args) { if (!isImageCachable(w, h)) return false; int hash = hash(config, w, h, args); lock.writeLock().lock(); try { PixelCountSoftReference ref = map.get(hash); // check if currently in map if (ref != null && ref.get() == image) { return true; // depends on control dependency: [if], data = [none] } // clear out old if (ref != null) { currentPixelCount -= ref.pixelCount; // depends on control dependency: [if], data = [none] map.remove(hash); // depends on control dependency: [if], data = [none] } // add new image to pixel count int newPixelCount = image.getWidth(null) * image.getHeight(null); currentPixelCount += newPixelCount; // depends on control dependency: [try], data = [none] // clean out lost references if not enough space if (currentPixelCount > maxPixelCount) { while ((ref = (PixelCountSoftReference) referenceQueue.poll()) != null) { // reference lost map.remove(ref.hash); // depends on control dependency: [while], data = [none] currentPixelCount -= ref.pixelCount; // depends on control dependency: [while], data = [none] } } // remove old items till there is enough free space if (currentPixelCount > maxPixelCount) { Iterator<Map.Entry<Integer, PixelCountSoftReference>> mapIter = map.entrySet().iterator(); while ((currentPixelCount > maxPixelCount) && mapIter.hasNext()) { Map.Entry<Integer, PixelCountSoftReference> entry = mapIter.next(); mapIter.remove(); // depends on control dependency: [while], data = [none] Image img = entry.getValue().get(); if (img != null) img.flush(); currentPixelCount -= entry.getValue().pixelCount; // depends on control dependency: [while], data = [none] } } // finaly put new in map map.put(hash, new PixelCountSoftReference(image, referenceQueue, newPixelCount, hash, config, w, h, args)); // depends on control dependency: [try], data = [none] return true; // depends on control dependency: [try], data = [none] } finally { lock.writeLock().unlock(); } } }
public class class_name { @Override public synchronized void deactivate() throws InterruptedException, CouldNotPerformException { manageLock.lockWrite(this); try { try { validateInitialization(); } catch (InvalidStateException ex) { // was never initialized! return; } // skip initial data sync if still running if (initialDataSyncFuture != null && !initialDataSyncFuture.isDone()) { initialDataSyncFuture.cancel(true); } logger.debug("Deactivate AbstractControllerServer for: " + this); // The order is important: The informer publishes a zero event when the availabilityState is set to deactivating which leads remotes to disconnect // The remotes try to reconnect again and start a requestData. If the server is still active it will respond // and the remotes will think that the server is still there.. if (serverWatchDog != null) { serverWatchDog.deactivate(); } // inform remotes about deactivation setAvailabilityState(DEACTIVATING); if (informerWatchDog != null) { informerWatchDog.deactivate(); } setAvailabilityState(OFFLINE); } finally { manageLock.unlockWrite(this); } } }
public class class_name { @Override public synchronized void deactivate() throws InterruptedException, CouldNotPerformException { manageLock.lockWrite(this); try { try { validateInitialization(); // depends on control dependency: [try], data = [none] } catch (InvalidStateException ex) { // was never initialized! return; } // depends on control dependency: [catch], data = [none] // skip initial data sync if still running if (initialDataSyncFuture != null && !initialDataSyncFuture.isDone()) { initialDataSyncFuture.cancel(true); // depends on control dependency: [if], data = [none] } logger.debug("Deactivate AbstractControllerServer for: " + this); // The order is important: The informer publishes a zero event when the availabilityState is set to deactivating which leads remotes to disconnect // The remotes try to reconnect again and start a requestData. If the server is still active it will respond // and the remotes will think that the server is still there.. if (serverWatchDog != null) { serverWatchDog.deactivate(); // depends on control dependency: [if], data = [none] } // inform remotes about deactivation setAvailabilityState(DEACTIVATING); if (informerWatchDog != null) { informerWatchDog.deactivate(); // depends on control dependency: [if], data = [none] } setAvailabilityState(OFFLINE); } finally { manageLock.unlockWrite(this); } } }
public class class_name { @Override public AbstractIoBufferEx putString(CharSequence val, int fieldSize, CharsetEncoder encoder) throws CharacterCodingException { checkFieldSize(fieldSize); if (fieldSize == 0) { return this; } autoExpand(fieldSize); boolean utf16 = encoder.charset().name().startsWith("UTF-16"); if (utf16 && (fieldSize & 1) != 0) { throw new IllegalArgumentException("fieldSize is not even."); } int oldLimit = limit(); int end = position() + fieldSize; if (oldLimit < end) { throw new BufferOverflowException(); } if (val.length() == 0) { if (!utf16) { put((byte) 0x00); } else { put((byte) 0x00); put((byte) 0x00); } position(end); return this; } CharBuffer in = CharBuffer.wrap(val); limit(end); encoder.reset(); for (;;) { CoderResult cr; if (in.hasRemaining()) { cr = encoder.encode(in, buf(), true); } else { cr = encoder.flush(buf()); } if (cr.isUnderflow() || cr.isOverflow()) { break; } cr.throwException(); } limit(oldLimit); if (position() < end) { if (!utf16) { put((byte) 0x00); } else { put((byte) 0x00); put((byte) 0x00); } } position(end); return this; } }
public class class_name { @Override public AbstractIoBufferEx putString(CharSequence val, int fieldSize, CharsetEncoder encoder) throws CharacterCodingException { checkFieldSize(fieldSize); if (fieldSize == 0) { return this; } autoExpand(fieldSize); boolean utf16 = encoder.charset().name().startsWith("UTF-16"); if (utf16 && (fieldSize & 1) != 0) { throw new IllegalArgumentException("fieldSize is not even."); } int oldLimit = limit(); int end = position() + fieldSize; if (oldLimit < end) { throw new BufferOverflowException(); } if (val.length() == 0) { if (!utf16) { put((byte) 0x00); // depends on control dependency: [if], data = [none] } else { put((byte) 0x00); // depends on control dependency: [if], data = [none] put((byte) 0x00); // depends on control dependency: [if], data = [none] } position(end); return this; } CharBuffer in = CharBuffer.wrap(val); limit(end); encoder.reset(); for (;;) { CoderResult cr; if (in.hasRemaining()) { cr = encoder.encode(in, buf(), true); // depends on control dependency: [if], data = [none] } else { cr = encoder.flush(buf()); // depends on control dependency: [if], data = [none] } if (cr.isUnderflow() || cr.isOverflow()) { break; } cr.throwException(); } limit(oldLimit); if (position() < end) { if (!utf16) { put((byte) 0x00); } else { put((byte) 0x00); put((byte) 0x00); } } position(end); return this; } }
public class class_name { public boolean hasNextDouble() { setRadix(10); boolean result = hasNext(floatPattern()); if (result) { // Cache it try { String s = processFloatToken(hasNextResult); typeCache = Double.valueOf(Double.parseDouble(s)); } catch (NumberFormatException nfe) { result = false; } } return result; } }
public class class_name { public boolean hasNextDouble() { setRadix(10); boolean result = hasNext(floatPattern()); if (result) { // Cache it try { String s = processFloatToken(hasNextResult); typeCache = Double.valueOf(Double.parseDouble(s)); // depends on control dependency: [try], data = [none] } catch (NumberFormatException nfe) { result = false; } // depends on control dependency: [catch], data = [none] } return result; } }
public class class_name { public static SPARQLQueryDefinition bindObject(SPARQLQueryDefinition qdef, String variableName, Node objectNode) { SPARQLBindings bindings = qdef.getBindings(); if (objectNode.isURI()) { bindings.bind(variableName, objectNode.getURI()); } else if (objectNode.isLiteral()) { if (! "".equals(objectNode.getLiteralLanguage())) { String languageTag = objectNode.getLiteralLanguage(); bindings.bind(variableName, objectNode.getLiteralLexicalForm(), Locale.forLanguageTag(languageTag)); } else if (objectNode.getLiteralDatatype() != null) { try { String xsdType = objectNode.getLiteralDatatypeURI(); String fragment = new URI(xsdType).getFragment(); bindings.bind(variableName, objectNode.getLiteralLexicalForm(), RDFTypes.valueOf(fragment.toUpperCase())); } catch (URISyntaxException e) { throw new MarkLogicJenaException( "Unrecognized binding type. Use XSD only.", e); } } else { // is this a hole, no type string? bindings.bind(variableName, objectNode.getLiteralLexicalForm(), RDFTypes.STRING); } } qdef.setBindings(bindings); return qdef; } }
public class class_name { public static SPARQLQueryDefinition bindObject(SPARQLQueryDefinition qdef, String variableName, Node objectNode) { SPARQLBindings bindings = qdef.getBindings(); if (objectNode.isURI()) { bindings.bind(variableName, objectNode.getURI()); // depends on control dependency: [if], data = [none] } else if (objectNode.isLiteral()) { if (! "".equals(objectNode.getLiteralLanguage())) { String languageTag = objectNode.getLiteralLanguage(); bindings.bind(variableName, objectNode.getLiteralLexicalForm(), Locale.forLanguageTag(languageTag)); // depends on control dependency: [if], data = [none] } else if (objectNode.getLiteralDatatype() != null) { try { String xsdType = objectNode.getLiteralDatatypeURI(); String fragment = new URI(xsdType).getFragment(); bindings.bind(variableName, objectNode.getLiteralLexicalForm(), RDFTypes.valueOf(fragment.toUpperCase())); // depends on control dependency: [try], data = [none] } catch (URISyntaxException e) { throw new MarkLogicJenaException( "Unrecognized binding type. Use XSD only.", e); } // depends on control dependency: [catch], data = [none] } else { // is this a hole, no type string? bindings.bind(variableName, objectNode.getLiteralLexicalForm(), RDFTypes.STRING); // depends on control dependency: [if], data = [none] } } qdef.setBindings(bindings); return qdef; } }
public class class_name { public void setItems(java.util.Collection<Route> items) { if (items == null) { this.items = null; return; } this.items = new java.util.ArrayList<Route>(items); } }
public class class_name { public void setItems(java.util.Collection<Route> items) { if (items == null) { this.items = null; // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } this.items = new java.util.ArrayList<Route>(items); } }
public class class_name { public static CharSequence getDataReloadJS(final UIGrid _uiGrid) throws EFapsException { final StringBuilder js = new StringBuilder() .append("var grid = registry.byId('grid');\n") .append("var items = ").append(GridXComponent.getDataJS(_uiGrid)); final StringBuilder dialogJs = new StringBuilder(); if (!_uiGrid.isColumnsUpToDate()) { // lazy setting of data type when first time data _uiGrid.setColumnsUpToDate(true); js.append("array.forEach(grid.structure, function(entry){\n"); for (final GridColumn column : _uiGrid.getColumns()) { if (column.getDataType() != null) { js.append("if ('").append(column.getField().getId()).append("'== entry.id) {\n") .append("entry.dataType='").append(column.getDataType()).append("';\n") .append("entry.comparator = grid.comparators.").append(column.getDataType()).append(";\n") .append("}\n"); } if (_uiGrid.getFilterList().stream() .filter(filter -> filter.getFieldId() == column.getField().getId()) .findFirst().isPresent()) { // to prevent jumping of the modal filter dialog, close and open it final String varName = RandomUtil.randomAlphabetic(4); dialogJs.append("var ").append(varName) .append(" = registry.byId('").append("fttd_" + column.getField().getId()).append("');\n") .append("if (").append(varName).append(" && !(").append(varName) .append(".domNode.offsetHeight == 0 && ") .append(varName).append(".domNode.offsetWidth == 0)) {\n") .append(varName).append(".onBlur();\n") .append("var nl = query(\".gridxHeaderMenuBtn\", dom.byId('grid-") .append(column.getField().getId()).append("'));\n") .append("nl[0].click();\n") .append("}\n"); } } js.append("});\n") .append("grid.setColumns(grid.structure);\n") .append(dialogJs); } js.append("grid.model.clearCache();\n") .append("grid.model.store.setData(items);\n") .append("grid.body.refresh();\n"); return DojoWrapper.require(js, DojoClasses.registry, DojoClasses.array, DojoClasses.dom, DojoClasses.query); } }
public class class_name { public static CharSequence getDataReloadJS(final UIGrid _uiGrid) throws EFapsException { final StringBuilder js = new StringBuilder() .append("var grid = registry.byId('grid');\n") .append("var items = ").append(GridXComponent.getDataJS(_uiGrid)); final StringBuilder dialogJs = new StringBuilder(); if (!_uiGrid.isColumnsUpToDate()) { // lazy setting of data type when first time data _uiGrid.setColumnsUpToDate(true); js.append("array.forEach(grid.structure, function(entry){\n"); for (final GridColumn column : _uiGrid.getColumns()) { if (column.getDataType() != null) { js.append("if ('").append(column.getField().getId()).append("'== entry.id) {\n") .append("entry.dataType='").append(column.getDataType()).append("';\n") .append("entry.comparator = grid.comparators.").append(column.getDataType()).append(";\n") .append("}\n"); // depends on control dependency: [if], data = [none] } if (_uiGrid.getFilterList().stream() .filter(filter -> filter.getFieldId() == column.getField().getId()) .findFirst().isPresent()) { // to prevent jumping of the modal filter dialog, close and open it final String varName = RandomUtil.randomAlphabetic(4); dialogJs.append("var ").append(varName) .append(" = registry.byId('").append("fttd_" + column.getField().getId()).append("');\n") // depends on control dependency: [if], data = [none] .append("if (").append(varName).append(" && !(").append(varName) .append(".domNode.offsetHeight == 0 && ") .append(varName).append(".domNode.offsetWidth == 0)) {\n") .append(varName).append(".onBlur();\n") // depends on control dependency: [if], data = [none] .append("var nl = query(\".gridxHeaderMenuBtn\", dom.byId('grid-") .append(column.getField().getId()).append("'));\n") // depends on control dependency: [if], data = [none] .append("nl[0].click();\n") .append("}\n"); // depends on control dependency: [if], data = [none] } } js.append("});\n") // depends on control dependency: [for], data = [none] .append("grid.setColumns(grid.structure);\n") .append(dialogJs); // depends on control dependency: [for], data = [none] } js.append("grid.model.clearCache();\n") .append("grid.model.store.setData(items);\n") .append("grid.body.refresh();\n"); return DojoWrapper.require(js, DojoClasses.registry, DojoClasses.array, DojoClasses.dom, DojoClasses.query); } }
public class class_name { protected AbstractTokenizer generateTokenizer() { Class<? extends AbstractTokenizer> tokenizer = parameters.getTokenizer(); if(tokenizer==null) { return null; } try { return tokenizer.newInstance(); } catch (InstantiationException | IllegalAccessException ex) { throw new RuntimeException(ex); } } }
public class class_name { protected AbstractTokenizer generateTokenizer() { Class<? extends AbstractTokenizer> tokenizer = parameters.getTokenizer(); if(tokenizer==null) { return null; // depends on control dependency: [if], data = [none] } try { return tokenizer.newInstance(); // depends on control dependency: [try], data = [none] } catch (InstantiationException | IllegalAccessException ex) { throw new RuntimeException(ex); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public DiffNode dispatch(final DiffNode parentNode, final Instances parentInstances, final Accessor accessor) { Assert.notNull(parentInstances, "parentInstances"); Assert.notNull(accessor, "accessor"); final DiffNode node = compare(parentNode, parentInstances, accessor); if (parentNode != null && isReturnableResolver.isReturnable(node)) { parentNode.addChild(node); } if (node != null) { node.addCategories(categoryResolver.resolveCategories(node)); } return node; } }
public class class_name { public DiffNode dispatch(final DiffNode parentNode, final Instances parentInstances, final Accessor accessor) { Assert.notNull(parentInstances, "parentInstances"); Assert.notNull(accessor, "accessor"); final DiffNode node = compare(parentNode, parentInstances, accessor); if (parentNode != null && isReturnableResolver.isReturnable(node)) { parentNode.addChild(node); // depends on control dependency: [if], data = [none] } if (node != null) { node.addCategories(categoryResolver.resolveCategories(node)); // depends on control dependency: [if], data = [(node] } return node; } }
public class class_name { public <E, F> List<UnifiedDiffBlock<E, F>> unified(List<E> previous, List<E> next, UnifiedDiffConfiguration<E, F> config) { setError(null); try { return this.unifiedDiffDisplayer.display(this.diffManager.diff(previous, next, null), config); } catch (DiffException e) { setError(e); return null; } } }
public class class_name { public <E, F> List<UnifiedDiffBlock<E, F>> unified(List<E> previous, List<E> next, UnifiedDiffConfiguration<E, F> config) { setError(null); try { return this.unifiedDiffDisplayer.display(this.diffManager.diff(previous, next, null), config); // depends on control dependency: [try], data = [none] } catch (DiffException e) { setError(e); return null; } // depends on control dependency: [catch], data = [none] } }
public class class_name { public boolean isDirectory() { if (sftp.getVersion() > 3) { return type == SSH_FILEXFER_TYPE_DIRECTORY; } else if (permissions != null && (permissions.longValue() & SftpFileAttributes.S_IFDIR) == SftpFileAttributes.S_IFDIR) { return true; } else { return false; } } }
public class class_name { public boolean isDirectory() { if (sftp.getVersion() > 3) { return type == SSH_FILEXFER_TYPE_DIRECTORY; // depends on control dependency: [if], data = [none] } else if (permissions != null && (permissions.longValue() & SftpFileAttributes.S_IFDIR) == SftpFileAttributes.S_IFDIR) { return true; // depends on control dependency: [if], data = [none] } else { return false; // depends on control dependency: [if], data = [none] } } }
public class class_name { public GetCorsPolicyResult withCorsPolicy(CorsRule... corsPolicy) { if (this.corsPolicy == null) { setCorsPolicy(new java.util.ArrayList<CorsRule>(corsPolicy.length)); } for (CorsRule ele : corsPolicy) { this.corsPolicy.add(ele); } return this; } }
public class class_name { public GetCorsPolicyResult withCorsPolicy(CorsRule... corsPolicy) { if (this.corsPolicy == null) { setCorsPolicy(new java.util.ArrayList<CorsRule>(corsPolicy.length)); // depends on control dependency: [if], data = [none] } for (CorsRule ele : corsPolicy) { this.corsPolicy.add(ele); // depends on control dependency: [for], data = [ele] } return this; } }
public class class_name { @Override public Kam getKam(KamInfo kamInfo, KamFilter kamFilter) { if (kamInfo == null) throw new InvalidArgument(DEFAULT_MSG); if (!exists(kamInfo)) return null; try { return getKam(kamStoreDao(kamInfo), kamInfo, kamFilter); } catch (SQLException e) { final String msg = "error getting KAM"; throw new KAMStoreException(msg, e); } } }
public class class_name { @Override public Kam getKam(KamInfo kamInfo, KamFilter kamFilter) { if (kamInfo == null) throw new InvalidArgument(DEFAULT_MSG); if (!exists(kamInfo)) return null; try { return getKam(kamStoreDao(kamInfo), kamInfo, kamFilter); // depends on control dependency: [try], data = [none] } catch (SQLException e) { final String msg = "error getting KAM"; throw new KAMStoreException(msg, e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public void validatePath() throws CertificateVerificationException { Security.addProvider(new BouncyCastleProvider()); CollectionCertStoreParameters params = new CollectionCertStoreParameters(fullCertChain); try { CertStore store = CertStore.getInstance("Collection", params, Constants.BOUNCY_CASTLE_PROVIDER); // create certificate path CertificateFactory fact = CertificateFactory .getInstance(Constants.X_509, Constants.BOUNCY_CASTLE_PROVIDER); CertPath certPath = fact.generateCertPath(certChain); TrustAnchor trustAnchor = new TrustAnchor(fullCertChain.get(fullCertChain.size() - 1), null); Set<TrustAnchor> trust = Collections.singleton(trustAnchor); // perform validation CertPathValidator validator = CertPathValidator .getInstance(Constants.ALGORITHM, Constants.BOUNCY_CASTLE_PROVIDER); PKIXParameters param = new PKIXParameters(trust); param.addCertPathChecker(pathChecker); param.setRevocationEnabled(false); param.addCertStore(store); param.setDate(new Date()); validator.validate(certPath, param); if (LOG.isInfoEnabled()) { LOG.info("Certificate path validated"); } } catch (CertPathValidatorException e) { throw new CertificateVerificationException( "Certificate path validation failed on certificate number " + e.getIndex() + ", details: " + e .getMessage(), e); } catch (Exception e) { throw new CertificateVerificationException("Certificate path validation failed", e); } } }
public class class_name { public void validatePath() throws CertificateVerificationException { Security.addProvider(new BouncyCastleProvider()); CollectionCertStoreParameters params = new CollectionCertStoreParameters(fullCertChain); try { CertStore store = CertStore.getInstance("Collection", params, Constants.BOUNCY_CASTLE_PROVIDER); // create certificate path CertificateFactory fact = CertificateFactory .getInstance(Constants.X_509, Constants.BOUNCY_CASTLE_PROVIDER); CertPath certPath = fact.generateCertPath(certChain); TrustAnchor trustAnchor = new TrustAnchor(fullCertChain.get(fullCertChain.size() - 1), null); Set<TrustAnchor> trust = Collections.singleton(trustAnchor); // perform validation CertPathValidator validator = CertPathValidator .getInstance(Constants.ALGORITHM, Constants.BOUNCY_CASTLE_PROVIDER); PKIXParameters param = new PKIXParameters(trust); param.addCertPathChecker(pathChecker); param.setRevocationEnabled(false); param.addCertStore(store); param.setDate(new Date()); validator.validate(certPath, param); if (LOG.isInfoEnabled()) { LOG.info("Certificate path validated"); // depends on control dependency: [if], data = [none] } } catch (CertPathValidatorException e) { throw new CertificateVerificationException( "Certificate path validation failed on certificate number " + e.getIndex() + ", details: " + e .getMessage(), e); } catch (Exception e) { throw new CertificateVerificationException("Certificate path validation failed", e); } } }
public class class_name { private void configureInputFormat(CsvInputFormat<?> format) { format.setCharset(this.charset); format.setDelimiter(this.lineDelimiter); format.setFieldDelimiter(this.fieldDelimiter); format.setCommentPrefix(this.commentPrefix); format.setSkipFirstLineAsHeader(skipFirstLineAsHeader); format.setLenient(ignoreInvalidLines); if (this.parseQuotedStrings) { format.enableQuotedStringParsing(this.quoteCharacter); } } }
public class class_name { private void configureInputFormat(CsvInputFormat<?> format) { format.setCharset(this.charset); format.setDelimiter(this.lineDelimiter); format.setFieldDelimiter(this.fieldDelimiter); format.setCommentPrefix(this.commentPrefix); format.setSkipFirstLineAsHeader(skipFirstLineAsHeader); format.setLenient(ignoreInvalidLines); if (this.parseQuotedStrings) { format.enableQuotedStringParsing(this.quoteCharacter); // depends on control dependency: [if], data = [none] } } }
public class class_name { public <T> ExpectedCondition<T> getConditionIgnoringStaleElement(final ExpectedCondition<T> condition) { return d -> { try { return condition.apply(webDriver); } catch (WebDriverException e) { if (isStaleElementException(e)) { return null; } else { throw e; } } }; } }
public class class_name { public <T> ExpectedCondition<T> getConditionIgnoringStaleElement(final ExpectedCondition<T> condition) { return d -> { try { return condition.apply(webDriver); // depends on control dependency: [try], data = [none] } catch (WebDriverException e) { if (isStaleElementException(e)) { return null; // depends on control dependency: [if], data = [none] } else { throw e; } } // depends on control dependency: [catch], data = [none] }; } }
public class class_name { @Override public AFPChain align(Atom[] ca1, Atom[] ca2, Object param) throws StructureException{ if ( ! (param instanceof CECPParameters)) throw new IllegalArgumentException("CE algorithm needs an object of call CeParameters as argument."); CECPParameters cpparams = (CECPParameters) param; this.params = cpparams; boolean duplicateRight; switch( cpparams.getDuplicationHint() ) { case LEFT: duplicateRight = false; break; case RIGHT: duplicateRight = true; break; case SHORTER: duplicateRight = ca1.length >= ca2.length; break; default: duplicateRight = true; } if( duplicateRight ) { return alignRight(ca1, ca2, cpparams); } else { if(debug) { System.out.println("Swapping alignment order."); } AFPChain afpChain = this.alignRight(ca2, ca1, cpparams); return invertAlignment(afpChain); } } }
public class class_name { @Override public AFPChain align(Atom[] ca1, Atom[] ca2, Object param) throws StructureException{ if ( ! (param instanceof CECPParameters)) throw new IllegalArgumentException("CE algorithm needs an object of call CeParameters as argument."); CECPParameters cpparams = (CECPParameters) param; this.params = cpparams; boolean duplicateRight; switch( cpparams.getDuplicationHint() ) { case LEFT: duplicateRight = false; break; case RIGHT: duplicateRight = true; break; case SHORTER: duplicateRight = ca1.length >= ca2.length; break; default: duplicateRight = true; } if( duplicateRight ) { return alignRight(ca1, ca2, cpparams); } else { if(debug) { System.out.println("Swapping alignment order."); // depends on control dependency: [if], data = [none] } AFPChain afpChain = this.alignRight(ca2, ca1, cpparams); return invertAlignment(afpChain); } } }
public class class_name { protected Attachment getFileMessage(MessageEnvelope envelope) { if (envelope != null && envelope.getMessage() != null && envelope.getMessage().getAttachments() != null && envelope.getMessage().getAttachments().get(0) != null && envelope.getMessage().getAttachments().get(0).getType() == AttachmentType.FILE) { return envelope.getMessage().getAttachments().get(0); } return null; } }
public class class_name { protected Attachment getFileMessage(MessageEnvelope envelope) { if (envelope != null && envelope.getMessage() != null && envelope.getMessage().getAttachments() != null && envelope.getMessage().getAttachments().get(0) != null && envelope.getMessage().getAttachments().get(0).getType() == AttachmentType.FILE) { return envelope.getMessage().getAttachments().get(0); // depends on control dependency: [if], data = [none] } return null; } }
public class class_name { protected Record next(Writer writer) { Value value = recordIte.next(); if (valuesLabels != null) { int i = 0; for (ValueWritable vw : value.getValues()) { vw.getLabel().set(valuesLabels[i++]); } } currentRecord.setValue(value); writer.setDefaultRecord(currentRecord); return currentRecord; } }
public class class_name { protected Record next(Writer writer) { Value value = recordIte.next(); if (valuesLabels != null) { int i = 0; for (ValueWritable vw : value.getValues()) { vw.getLabel().set(valuesLabels[i++]); // depends on control dependency: [for], data = [vw] } } currentRecord.setValue(value); writer.setDefaultRecord(currentRecord); return currentRecord; } }
public class class_name { @Override protected void preparePaintComponent(final Request request) { if (!isInitialised()) { // Defaults rbsSelect.setSelected(WTable.SelectMode.NONE); rbsSelectAll.setSelected(WTable.SelectAllType.NONE); rbsExpand.setSelected(WTable.ExpandMode.NONE); rbsPaging.setSelected(WTable.PaginationMode.NONE); rbsStriping.setSelected(WTable.StripingType.NONE); rbsSeparator.setSelected(WTable.SeparatorType.NONE); rbsSorting.setSelected(WTable.SortMode.NONE); showColHeaders.setSelected(true); paginationControlsLocation.setSelected(WTable.PaginationLocation.AUTO); applySettings(); // Set the data used by the tables setBean(ExampleDataUtil.createExampleData()); setInitialised(true); } displaySelected(); } }
public class class_name { @Override protected void preparePaintComponent(final Request request) { if (!isInitialised()) { // Defaults rbsSelect.setSelected(WTable.SelectMode.NONE); // depends on control dependency: [if], data = [none] rbsSelectAll.setSelected(WTable.SelectAllType.NONE); // depends on control dependency: [if], data = [none] rbsExpand.setSelected(WTable.ExpandMode.NONE); // depends on control dependency: [if], data = [none] rbsPaging.setSelected(WTable.PaginationMode.NONE); // depends on control dependency: [if], data = [none] rbsStriping.setSelected(WTable.StripingType.NONE); // depends on control dependency: [if], data = [none] rbsSeparator.setSelected(WTable.SeparatorType.NONE); // depends on control dependency: [if], data = [none] rbsSorting.setSelected(WTable.SortMode.NONE); // depends on control dependency: [if], data = [none] showColHeaders.setSelected(true); // depends on control dependency: [if], data = [none] paginationControlsLocation.setSelected(WTable.PaginationLocation.AUTO); // depends on control dependency: [if], data = [none] applySettings(); // depends on control dependency: [if], data = [none] // Set the data used by the tables setBean(ExampleDataUtil.createExampleData()); // depends on control dependency: [if], data = [none] setInitialised(true); // depends on control dependency: [if], data = [none] } displaySelected(); } }
public class class_name { public static List<String> getDeclaredFields(Class<?> baseClass) { Method[] methods = baseClass.getMethods(); List<String> fields = new ArrayList<>(); for (Method fm : methods) { if ((!fm.getName().startsWith(GET_METHOD_PREFIX) && !fm.getName().startsWith(IS_METHOD_PREFIX)) || fm.getParameterCount() != 0) { continue; } String field = getFieldNameFromMethod(fm); fields.add(field); } return fields; } }
public class class_name { public static List<String> getDeclaredFields(Class<?> baseClass) { Method[] methods = baseClass.getMethods(); List<String> fields = new ArrayList<>(); for (Method fm : methods) { if ((!fm.getName().startsWith(GET_METHOD_PREFIX) && !fm.getName().startsWith(IS_METHOD_PREFIX)) || fm.getParameterCount() != 0) { continue; } String field = getFieldNameFromMethod(fm); fields.add(field); // depends on control dependency: [for], data = [none] } return fields; } }
public class class_name { private String getManagedBeansInternalEJBName(ClassInfo classInfo, AnnotationInfo managedBeanAnn) { String name = getStringValue(managedBeanAnn, "value"); if (name == null) { name = '$' + classInfo.getName(); } return name; } }
public class class_name { private String getManagedBeansInternalEJBName(ClassInfo classInfo, AnnotationInfo managedBeanAnn) { String name = getStringValue(managedBeanAnn, "value"); if (name == null) { name = '$' + classInfo.getName(); // depends on control dependency: [if], data = [none] } return name; } }
public class class_name { public Chronology withZone(DateTimeZone zone) { if (zone == null) { zone = DateTimeZone.getDefault(); } if (zone == getZone()) { return this; } if (zone == DateTimeZone.UTC && iWithUTC != null) { return iWithUTC; } DateTime lowerLimit = iLowerLimit; if (lowerLimit != null) { MutableDateTime mdt = lowerLimit.toMutableDateTime(); mdt.setZoneRetainFields(zone); lowerLimit = mdt.toDateTime(); } DateTime upperLimit = iUpperLimit; if (upperLimit != null) { MutableDateTime mdt = upperLimit.toMutableDateTime(); mdt.setZoneRetainFields(zone); upperLimit = mdt.toDateTime(); } LimitChronology chrono = getInstance (getBase().withZone(zone), lowerLimit, upperLimit); if (zone == DateTimeZone.UTC) { iWithUTC = chrono; } return chrono; } }
public class class_name { public Chronology withZone(DateTimeZone zone) { if (zone == null) { zone = DateTimeZone.getDefault(); // depends on control dependency: [if], data = [none] } if (zone == getZone()) { return this; // depends on control dependency: [if], data = [none] } if (zone == DateTimeZone.UTC && iWithUTC != null) { return iWithUTC; // depends on control dependency: [if], data = [none] } DateTime lowerLimit = iLowerLimit; if (lowerLimit != null) { MutableDateTime mdt = lowerLimit.toMutableDateTime(); mdt.setZoneRetainFields(zone); // depends on control dependency: [if], data = [none] lowerLimit = mdt.toDateTime(); // depends on control dependency: [if], data = [none] } DateTime upperLimit = iUpperLimit; if (upperLimit != null) { MutableDateTime mdt = upperLimit.toMutableDateTime(); mdt.setZoneRetainFields(zone); // depends on control dependency: [if], data = [none] upperLimit = mdt.toDateTime(); // depends on control dependency: [if], data = [none] } LimitChronology chrono = getInstance (getBase().withZone(zone), lowerLimit, upperLimit); if (zone == DateTimeZone.UTC) { iWithUTC = chrono; // depends on control dependency: [if], data = [none] } return chrono; } }
public class class_name { Number readNumber() throws IOException { StringBuilder intPart = null; // ###.xxxExxx StringBuilder fraPart = null; // xxx.###Exxx StringBuilder expPart = null; // xxx.xxxE### boolean hasFraPart = false; boolean hasExpPart = false; char ch = reader.peek(); boolean minusSign = ch == '-'; boolean expMinusSign = false; if (minusSign) { reader.next(); } int status = READ_NUMBER_INT_PART; for (;;) { if (reader.hasMore()) { ch = reader.peek(); } else { status = READ_NUMBER_END; } switch (status) { case READ_NUMBER_INT_PART: if (ch >= '0' && ch <= '9') { if (intPart == null) { intPart = new StringBuilder(10); } intPart.append(reader.next()); } else if (ch == '.') { if (intPart == null) { throw new JsonParseException("Unexpected char: " + ch, reader.readed); } reader.next(); hasFraPart = true; status = READ_NUMBER_FRA_PART; } else if (ch == 'e' || ch == 'E') { reader.next(); hasExpPart = true; // try to determin exp part's sign: char signChar = reader.peek(); if (signChar == '-' || signChar == '+') { expMinusSign = signChar == '-'; reader.next(); } status = READ_NUMBER_EXP_PART; } else { if (intPart == null) { throw new JsonParseException("Unexpected char: " + reader.next(), reader.readed); } // end of number: status = READ_NUMBER_END; } continue; case READ_NUMBER_FRA_PART: if (ch >= '0' && ch <= '9') { if (fraPart == null) { fraPart = new StringBuilder(10); } fraPart.append(reader.next()); } else if (ch == 'e' || ch == 'E') { reader.next(); hasExpPart = true; // try to determin exp part's sign: char signChar = reader.peek(); if (signChar == '-' || signChar == '+') { expMinusSign = signChar == '-'; reader.next(); } status = READ_NUMBER_EXP_PART; } else { if (fraPart == null) { throw new JsonParseException("Unexpected char: " + reader.next(), reader.readed); } // end of number: status = READ_NUMBER_END; } continue; case READ_NUMBER_EXP_PART: if (ch >= '0' && ch <= '9') { if (expPart == null) { expPart = new StringBuilder(10); } expPart.append(reader.next()); } else { if (expPart == null) { throw new JsonParseException("Unexpected char: " + reader.next(), reader.readed); } // end of number: status = READ_NUMBER_END; } continue; case READ_NUMBER_END: // build parsed number: int readed = reader.readed; if (intPart==null) { throw new JsonParseException("Missing integer part of number.", readed); } long lInt = minusSign ? -string2Long(intPart, readed) : string2Long(intPart, readed); if (!hasFraPart && !hasExpPart) { return new Long(lInt); } if (hasFraPart && fraPart == null) { throw new JsonParseException("Missing fraction part of number.", readed); } double dFraPart = hasFraPart ? (minusSign ? -string2Fraction(fraPart, readed) : string2Fraction(fraPart, readed)) : 0.0; double number = hasExpPart ? (lInt + dFraPart) * Math.pow(10, expMinusSign ? -string2Long(expPart, readed) : string2Long(expPart, readed)) : (lInt + dFraPart); if (number > MAX_SAFE_DOUBLE) { throw new NumberFormatException( "Exceeded maximum value: 1.7976931348623157e+308"); } return new Double(number); } continue; } } }
public class class_name { Number readNumber() throws IOException { StringBuilder intPart = null; // ###.xxxExxx StringBuilder fraPart = null; // xxx.###Exxx StringBuilder expPart = null; // xxx.xxxE### boolean hasFraPart = false; boolean hasExpPart = false; char ch = reader.peek(); boolean minusSign = ch == '-'; boolean expMinusSign = false; if (minusSign) { reader.next(); } int status = READ_NUMBER_INT_PART; for (;;) { if (reader.hasMore()) { ch = reader.peek(); // depends on control dependency: [if], data = [none] } else { status = READ_NUMBER_END; // depends on control dependency: [if], data = [none] } switch (status) { case READ_NUMBER_INT_PART: if (ch >= '0' && ch <= '9') { if (intPart == null) { intPart = new StringBuilder(10); // depends on control dependency: [if], data = [none] } intPart.append(reader.next()); // depends on control dependency: [if], data = [none] } else if (ch == '.') { if (intPart == null) { throw new JsonParseException("Unexpected char: " + ch, reader.readed); } reader.next(); // depends on control dependency: [if], data = [none] hasFraPart = true; // depends on control dependency: [if], data = [none] status = READ_NUMBER_FRA_PART; // depends on control dependency: [if], data = [none] } else if (ch == 'e' || ch == 'E') { reader.next(); // depends on control dependency: [if], data = [none] hasExpPart = true; // depends on control dependency: [if], data = [none] // try to determin exp part's sign: char signChar = reader.peek(); if (signChar == '-' || signChar == '+') { expMinusSign = signChar == '-'; // depends on control dependency: [if], data = [none] reader.next(); // depends on control dependency: [if], data = [none] } status = READ_NUMBER_EXP_PART; // depends on control dependency: [if], data = [none] } else { if (intPart == null) { throw new JsonParseException("Unexpected char: " + reader.next(), reader.readed); } // end of number: status = READ_NUMBER_END; // depends on control dependency: [if], data = [none] } continue; case READ_NUMBER_FRA_PART: if (ch >= '0' && ch <= '9') { if (fraPart == null) { fraPart = new StringBuilder(10); // depends on control dependency: [if], data = [none] } fraPart.append(reader.next()); // depends on control dependency: [if], data = [none] } else if (ch == 'e' || ch == 'E') { reader.next(); // depends on control dependency: [if], data = [none] hasExpPart = true; // depends on control dependency: [if], data = [none] // try to determin exp part's sign: char signChar = reader.peek(); if (signChar == '-' || signChar == '+') { expMinusSign = signChar == '-'; // depends on control dependency: [if], data = [none] reader.next(); // depends on control dependency: [if], data = [none] } status = READ_NUMBER_EXP_PART; // depends on control dependency: [if], data = [none] } else { if (fraPart == null) { throw new JsonParseException("Unexpected char: " + reader.next(), reader.readed); } // end of number: status = READ_NUMBER_END; // depends on control dependency: [if], data = [none] } continue; case READ_NUMBER_EXP_PART: if (ch >= '0' && ch <= '9') { if (expPart == null) { expPart = new StringBuilder(10); // depends on control dependency: [if], data = [none] } expPart.append(reader.next()); // depends on control dependency: [if], data = [none] } else { if (expPart == null) { throw new JsonParseException("Unexpected char: " + reader.next(), reader.readed); } // end of number: status = READ_NUMBER_END; // depends on control dependency: [if], data = [none] } continue; case READ_NUMBER_END: // build parsed number: int readed = reader.readed; if (intPart==null) { throw new JsonParseException("Missing integer part of number.", readed); } long lInt = minusSign ? -string2Long(intPart, readed) : string2Long(intPart, readed); if (!hasFraPart && !hasExpPart) { return new Long(lInt); // depends on control dependency: [if], data = [none] } if (hasFraPart && fraPart == null) { throw new JsonParseException("Missing fraction part of number.", readed); } double dFraPart = hasFraPart ? (minusSign ? -string2Fraction(fraPart, readed) : string2Fraction(fraPart, readed)) : 0.0; double number = hasExpPart ? (lInt + dFraPart) * Math.pow(10, expMinusSign ? -string2Long(expPart, readed) : string2Long(expPart, readed)) : (lInt + dFraPart); if (number > MAX_SAFE_DOUBLE) { throw new NumberFormatException( "Exceeded maximum value: 1.7976931348623157e+308"); } return new Double(number); } continue; } } }
public class class_name { public OtpConnection accept() throws IOException, OtpAuthException { OtpTransport newsock = null; while (true) { try { newsock = sock.accept(); return new OtpConnection(this, newsock); } catch (final IOException e) { try { if (newsock != null) { newsock.close(); } } catch (final IOException f) {/* ignore close errors */ } throw e; } } } }
public class class_name { public OtpConnection accept() throws IOException, OtpAuthException { OtpTransport newsock = null; while (true) { try { newsock = sock.accept(); // depends on control dependency: [try], data = [none] return new OtpConnection(this, newsock); // depends on control dependency: [try], data = [none] } catch (final IOException e) { try { if (newsock != null) { newsock.close(); // depends on control dependency: [if], data = [none] } } catch (final IOException f) {/* ignore close errors */ } // depends on control dependency: [catch], data = [none] throw e; } // depends on control dependency: [catch], data = [none] } } }
public class class_name { @Deprecated public Map<String, Map<String, PatternInfo>> getRawPatterns() { LinkedHashMap<String, Map<String, PatternInfo>> result = new LinkedHashMap<String, Map<String, PatternInfo>>(); for (Entry<String, Map<String, PatternInfo>> entry : fIntervalPatterns.entrySet()) { result.put(entry.getKey(), new LinkedHashMap<String, PatternInfo>(entry.getValue())); } return result; } }
public class class_name { @Deprecated public Map<String, Map<String, PatternInfo>> getRawPatterns() { LinkedHashMap<String, Map<String, PatternInfo>> result = new LinkedHashMap<String, Map<String, PatternInfo>>(); for (Entry<String, Map<String, PatternInfo>> entry : fIntervalPatterns.entrySet()) { result.put(entry.getKey(), new LinkedHashMap<String, PatternInfo>(entry.getValue())); // depends on control dependency: [for], data = [entry] } return result; } }
public class class_name { public static ImageUtils getImageUtils() { if (imageUtils == null) { try { Class clazz = Class.forName(IMAGE_UTILS); imageUtils = (ImageUtils) clazz.newInstance(); } catch (ClassNotFoundException | InstantiationException | IllegalAccessException e) { LOGGER.warn("Cannot instanciate util: {}", e.getMessage()); throw new IllegalStateException(e); } } return imageUtils; } }
public class class_name { public static ImageUtils getImageUtils() { if (imageUtils == null) { try { Class clazz = Class.forName(IMAGE_UTILS); imageUtils = (ImageUtils) clazz.newInstance(); } catch (ClassNotFoundException | InstantiationException | IllegalAccessException e) { LOGGER.warn("Cannot instanciate util: {}", e.getMessage()); throw new IllegalStateException(e); } // depends on control dependency: [catch], data = [none] } return imageUtils; } }
public class class_name { public static Mono<Void> shutdownLater() { return Mono.defer(() -> { UdpResources resources = udpResources.getAndSet(null); if (resources != null) { return resources._disposeLater(); } return Mono.empty(); }); } }
public class class_name { public static Mono<Void> shutdownLater() { return Mono.defer(() -> { UdpResources resources = udpResources.getAndSet(null); if (resources != null) { return resources._disposeLater(); // depends on control dependency: [if], data = [none] } return Mono.empty(); }); } }
public class class_name { public EventFilter withAvailabilityZones(String... availabilityZones) { if (this.availabilityZones == null) { setAvailabilityZones(new java.util.ArrayList<String>(availabilityZones.length)); } for (String ele : availabilityZones) { this.availabilityZones.add(ele); } return this; } }
public class class_name { public EventFilter withAvailabilityZones(String... availabilityZones) { if (this.availabilityZones == null) { setAvailabilityZones(new java.util.ArrayList<String>(availabilityZones.length)); // depends on control dependency: [if], data = [none] } for (String ele : availabilityZones) { this.availabilityZones.add(ele); // depends on control dependency: [for], data = [ele] } return this; } }
public class class_name { @Pure @SuppressWarnings("checkstyle:magicnumber") public static String parseHTML(String html) { if (html == null) { return null; } final Map<String, Integer> transTbl = getHtmlToJavaTranslationTable(); assert transTbl != null; if (transTbl.isEmpty()) { return html; } final Pattern pattern = Pattern.compile("[&](([a-zA-Z]+)|(#x?[0-9]+))[;]"); //$NON-NLS-1$ final Matcher matcher = pattern.matcher(html); final StringBuilder result = new StringBuilder(); String entity; Integer isoCode; int lastIndex = 0; while (matcher.find()) { final int idx = matcher.start(); result.append(html.substring(lastIndex, idx)); lastIndex = matcher.end(); entity = matcher.group(1); if (entity.startsWith("#x")) { //$NON-NLS-1$ try { isoCode = Integer.valueOf(entity.substring(2), 16); } catch (Throwable exception) { isoCode = null; } } else if (entity.startsWith("#")) { //$NON-NLS-1$ try { isoCode = Integer.valueOf(entity.substring(1)); } catch (Throwable exception) { isoCode = null; } } else { isoCode = transTbl.get(entity); } if (isoCode == null) { result.append(matcher.group()); } else { result.append((char) isoCode.intValue()); } } if (lastIndex < html.length()) { result.append(html.substring(lastIndex)); } return result.toString(); } }
public class class_name { @Pure @SuppressWarnings("checkstyle:magicnumber") public static String parseHTML(String html) { if (html == null) { return null; // depends on control dependency: [if], data = [none] } final Map<String, Integer> transTbl = getHtmlToJavaTranslationTable(); assert transTbl != null; if (transTbl.isEmpty()) { return html; // depends on control dependency: [if], data = [none] } final Pattern pattern = Pattern.compile("[&](([a-zA-Z]+)|(#x?[0-9]+))[;]"); //$NON-NLS-1$ final Matcher matcher = pattern.matcher(html); final StringBuilder result = new StringBuilder(); String entity; Integer isoCode; int lastIndex = 0; while (matcher.find()) { final int idx = matcher.start(); result.append(html.substring(lastIndex, idx)); lastIndex = matcher.end(); entity = matcher.group(1); if (entity.startsWith("#x")) { //$NON-NLS-1$ try { isoCode = Integer.valueOf(entity.substring(2), 16); } catch (Throwable exception) { isoCode = null; } } else if (entity.startsWith("#")) { //$NON-NLS-1$ try { isoCode = Integer.valueOf(entity.substring(1)); } catch (Throwable exception) { isoCode = null; } } else { isoCode = transTbl.get(entity); } if (isoCode == null) { result.append(matcher.group()); } else { result.append((char) isoCode.intValue()); } } if (lastIndex < html.length()) { result.append(html.substring(lastIndex)); } return result.toString(); } }
public class class_name { protected Set<String> getAccessTokenDependencies(HttpServletRequest request, HttpServletResponse response, FilterChain filterChain) { Set<String> deps = new TreeSet<String>(); if (getObjectDefinitionSource() != null) { FilterInvocation invocation = new FilterInvocation(request, response, filterChain); Collection<ConfigAttribute> attributes = getObjectDefinitionSource().getAttributes(invocation); if (attributes != null) { for (ConfigAttribute attribute : attributes) { deps.add(attribute.getAttribute()); } } } return deps; } }
public class class_name { protected Set<String> getAccessTokenDependencies(HttpServletRequest request, HttpServletResponse response, FilterChain filterChain) { Set<String> deps = new TreeSet<String>(); if (getObjectDefinitionSource() != null) { FilterInvocation invocation = new FilterInvocation(request, response, filterChain); Collection<ConfigAttribute> attributes = getObjectDefinitionSource().getAttributes(invocation); if (attributes != null) { for (ConfigAttribute attribute : attributes) { deps.add(attribute.getAttribute()); // depends on control dependency: [for], data = [attribute] } } } return deps; } }
public class class_name { private void computeEvents( KeyspaceMetadata oldKeyspace, KeyspaceMetadata newKeyspace, ImmutableList.Builder<Object> events) { if (oldKeyspace == null) { events.add(KeyspaceChangeEvent.created(newKeyspace)); } else { if (!shallowEquals(oldKeyspace, newKeyspace)) { events.add(KeyspaceChangeEvent.updated(oldKeyspace, newKeyspace)); } computeChildEvents(oldKeyspace, newKeyspace, events); } } }
public class class_name { private void computeEvents( KeyspaceMetadata oldKeyspace, KeyspaceMetadata newKeyspace, ImmutableList.Builder<Object> events) { if (oldKeyspace == null) { events.add(KeyspaceChangeEvent.created(newKeyspace)); // depends on control dependency: [if], data = [none] } else { if (!shallowEquals(oldKeyspace, newKeyspace)) { events.add(KeyspaceChangeEvent.updated(oldKeyspace, newKeyspace)); // depends on control dependency: [if], data = [none] } computeChildEvents(oldKeyspace, newKeyspace, events); // depends on control dependency: [if], data = [(oldKeyspace] } } }
public class class_name { public void marshall(UpdateVpcLinkRequest updateVpcLinkRequest, ProtocolMarshaller protocolMarshaller) { if (updateVpcLinkRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(updateVpcLinkRequest.getVpcLinkId(), VPCLINKID_BINDING); protocolMarshaller.marshall(updateVpcLinkRequest.getPatchOperations(), PATCHOPERATIONS_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(UpdateVpcLinkRequest updateVpcLinkRequest, ProtocolMarshaller protocolMarshaller) { if (updateVpcLinkRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(updateVpcLinkRequest.getVpcLinkId(), VPCLINKID_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(updateVpcLinkRequest.getPatchOperations(), PATCHOPERATIONS_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public boolean isFileServingEnabled() { // PK54499 START disallowAllFileServingProp = WCCustomProperties.DISALLOW_ALL_FILE_SERVING; if (disallowAllFileServingProp != null && !this.getApplicationName().equalsIgnoreCase("isclite")) { try { if (Boolean.valueOf(disallowAllFileServingProp).booleanValue()) { this.fileServingEnabled = Boolean.FALSE; } if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled() && logger.isLoggable(Level.FINE)) { logger.logp(Level.FINE, CLASS_NAME, "isFileServing", "PK54499: disallowAllFileServingProp set to " + disallowAllFileServingProp + " for application: " + this.getApplicationName()); } } catch (Exception x) { logger.logp(Level.SEVERE, CLASS_NAME, "isFileServing", "Illegal value set for property com.ibm.ws.webcontainer.disallowallfileserving"); } } // PK54499 END if (this.fileServingEnabled != null) return this.fileServingEnabled.booleanValue(); return WCCustomProperties.FILE_SERVING_ENABLED; } }
public class class_name { public boolean isFileServingEnabled() { // PK54499 START disallowAllFileServingProp = WCCustomProperties.DISALLOW_ALL_FILE_SERVING; if (disallowAllFileServingProp != null && !this.getApplicationName().equalsIgnoreCase("isclite")) { try { if (Boolean.valueOf(disallowAllFileServingProp).booleanValue()) { this.fileServingEnabled = Boolean.FALSE; // depends on control dependency: [if], data = [none] } if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled() && logger.isLoggable(Level.FINE)) { logger.logp(Level.FINE, CLASS_NAME, "isFileServing", "PK54499: disallowAllFileServingProp set to " + disallowAllFileServingProp + " for application: " + this.getApplicationName()); // depends on control dependency: [if], data = [none] } } catch (Exception x) { logger.logp(Level.SEVERE, CLASS_NAME, "isFileServing", "Illegal value set for property com.ibm.ws.webcontainer.disallowallfileserving"); } } // PK54499 END if (this.fileServingEnabled != null) return this.fileServingEnabled.booleanValue(); return WCCustomProperties.FILE_SERVING_ENABLED; } } // depends on control dependency: [catch], data = [none]
public class class_name { public MessageDescriptor<? extends M> getSubtype( @Nullable final Enum<?> discriminatorValue) { if (subtypeMap == null) { getSubtypes(); } return subtypeMap.get(discriminatorValue); } }
public class class_name { public MessageDescriptor<? extends M> getSubtype( @Nullable final Enum<?> discriminatorValue) { if (subtypeMap == null) { getSubtypes(); // depends on control dependency: [if], data = [none] } return subtypeMap.get(discriminatorValue); } }
public class class_name { private void eraseValue(HeaderElement elem) { // wipe out the removed value if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { Tr.debug(tc, "Erasing existing header: " + elem.getName()); } int next_index = this.lastCRLFBufferIndex; int next_pos = this.lastCRLFPosition; if (null != elem.nextSequence && !elem.nextSequence.wasAdded()) { next_index = elem.nextSequence.getLastCRLFBufferIndex(); next_pos = elem.nextSequence.getLastCRLFPosition(); } int start = elem.getLastCRLFPosition(); // if it's only in one buffer, this for loop does nothing for (int x = elem.getLastCRLFBufferIndex(); x < next_index; x++) { // wiping out this buffer from start to limit this.parseBuffers[x].position(start); this.parseBuffers[x].limit(start); start = 0; } // last buffer, scribble from start until next_pos scribbleWhiteSpace(this.parseBuffers[next_index], start, next_pos); } }
public class class_name { private void eraseValue(HeaderElement elem) { // wipe out the removed value if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { Tr.debug(tc, "Erasing existing header: " + elem.getName()); // depends on control dependency: [if], data = [none] } int next_index = this.lastCRLFBufferIndex; int next_pos = this.lastCRLFPosition; if (null != elem.nextSequence && !elem.nextSequence.wasAdded()) { next_index = elem.nextSequence.getLastCRLFBufferIndex(); // depends on control dependency: [if], data = [none] next_pos = elem.nextSequence.getLastCRLFPosition(); // depends on control dependency: [if], data = [none] } int start = elem.getLastCRLFPosition(); // if it's only in one buffer, this for loop does nothing for (int x = elem.getLastCRLFBufferIndex(); x < next_index; x++) { // wiping out this buffer from start to limit this.parseBuffers[x].position(start); // depends on control dependency: [for], data = [x] this.parseBuffers[x].limit(start); // depends on control dependency: [for], data = [x] start = 0; // depends on control dependency: [for], data = [none] } // last buffer, scribble from start until next_pos scribbleWhiteSpace(this.parseBuffers[next_index], start, next_pos); } }
public class class_name { public static final Phrase getInstance(int leading, String string, Font font) { Phrase p = new Phrase(true); p.setLeading(leading); p.font = font; if (font.getFamily() != Font.SYMBOL && font.getFamily() != Font.ZAPFDINGBATS && font.getBaseFont() == null) { int index; while((index = SpecialSymbol.index(string)) > -1) { if (index > 0) { String firstPart = string.substring(0, index); ((ArrayList)p).add(new Chunk(firstPart, font)); string = string.substring(index); } Font symbol = new Font(Font.SYMBOL, font.getSize(), font.getStyle(), font.getColor()); StringBuffer buf = new StringBuffer(); buf.append(SpecialSymbol.getCorrespondingSymbol(string.charAt(0))); string = string.substring(1); while (SpecialSymbol.index(string) == 0) { buf.append(SpecialSymbol.getCorrespondingSymbol(string.charAt(0))); string = string.substring(1); } ((ArrayList)p).add(new Chunk(buf.toString(), symbol)); } } if (string != null && string.length() != 0) { ((ArrayList)p).add(new Chunk(string, font)); } return p; } }
public class class_name { public static final Phrase getInstance(int leading, String string, Font font) { Phrase p = new Phrase(true); p.setLeading(leading); p.font = font; if (font.getFamily() != Font.SYMBOL && font.getFamily() != Font.ZAPFDINGBATS && font.getBaseFont() == null) { int index; while((index = SpecialSymbol.index(string)) > -1) { if (index > 0) { String firstPart = string.substring(0, index); ((ArrayList)p).add(new Chunk(firstPart, font)); // depends on control dependency: [if], data = [none] string = string.substring(index); // depends on control dependency: [if], data = [(index] } Font symbol = new Font(Font.SYMBOL, font.getSize(), font.getStyle(), font.getColor()); StringBuffer buf = new StringBuffer(); buf.append(SpecialSymbol.getCorrespondingSymbol(string.charAt(0))); // depends on control dependency: [while], data = [none] string = string.substring(1); // depends on control dependency: [while], data = [none] while (SpecialSymbol.index(string) == 0) { buf.append(SpecialSymbol.getCorrespondingSymbol(string.charAt(0))); // depends on control dependency: [while], data = [0)] string = string.substring(1); // depends on control dependency: [while], data = [none] } ((ArrayList)p).add(new Chunk(buf.toString(), symbol)); // depends on control dependency: [while], data = [none] } } if (string != null && string.length() != 0) { ((ArrayList)p).add(new Chunk(string, font)); // depends on control dependency: [if], data = [(string] } return p; } }
public class class_name { static NodeInfo selectClosestSide( NodeInfo a , NodeInfo b ) { double ratio = 1.7321; NodeInfo best = null; double bestDistance = Double.MAX_VALUE; Edge bestEdgeA = null; Edge bestEdgeB = null; for (int i = 0; i < a.edges.size; i++) { NodeInfo aa = a.edges.get(i).target; if( aa.marked ) continue; for (int j = 0; j < b.edges.size; j++) { NodeInfo bb = b.edges.get(j).target; if( bb.marked ) continue; if( aa == bb ) { double da = EllipsesIntoClusters.axisAdjustedDistanceSq(a.ellipse,aa.ellipse); double db = EllipsesIntoClusters.axisAdjustedDistanceSq(b.ellipse,aa.ellipse); da = Math.sqrt(da); db = Math.sqrt(db); double max,min; if( da>db) { max = da;min = db; } else { max = db;min = da; } // see how much it deviates from the ideal length with no distortion double diffRatio = Math.abs(max-min*ratio)/max; if( diffRatio > 0.25 ) continue; // TODO reject if too far double d = da+db; if( d < bestDistance ) { bestDistance = d; best = aa; bestEdgeA = a.edges.get(i); bestEdgeB = b.edges.get(j); } break; } } } // check the angles if( best != null ) { double angleA = UtilAngle.distanceCW(bestEdgeA.angle,bestEdgeB.angle); if( angleA < Math.PI*0.25 ) // expected with zero distortion is 30 degrees return best; else return null; } return null; } }
public class class_name { static NodeInfo selectClosestSide( NodeInfo a , NodeInfo b ) { double ratio = 1.7321; NodeInfo best = null; double bestDistance = Double.MAX_VALUE; Edge bestEdgeA = null; Edge bestEdgeB = null; for (int i = 0; i < a.edges.size; i++) { NodeInfo aa = a.edges.get(i).target; if( aa.marked ) continue; for (int j = 0; j < b.edges.size; j++) { NodeInfo bb = b.edges.get(j).target; if( bb.marked ) continue; if( aa == bb ) { double da = EllipsesIntoClusters.axisAdjustedDistanceSq(a.ellipse,aa.ellipse); double db = EllipsesIntoClusters.axisAdjustedDistanceSq(b.ellipse,aa.ellipse); da = Math.sqrt(da); // depends on control dependency: [if], data = [none] db = Math.sqrt(db); // depends on control dependency: [if], data = [none] double max,min; if( da>db) { max = da;min = db; // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] } else { max = db;min = da; // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] } // see how much it deviates from the ideal length with no distortion double diffRatio = Math.abs(max-min*ratio)/max; if( diffRatio > 0.25 ) continue; // TODO reject if too far double d = da+db; if( d < bestDistance ) { bestDistance = d; // depends on control dependency: [if], data = [none] best = aa; // depends on control dependency: [if], data = [none] bestEdgeA = a.edges.get(i); // depends on control dependency: [if], data = [none] bestEdgeB = b.edges.get(j); // depends on control dependency: [if], data = [none] } break; } } } // check the angles if( best != null ) { double angleA = UtilAngle.distanceCW(bestEdgeA.angle,bestEdgeB.angle); if( angleA < Math.PI*0.25 ) // expected with zero distortion is 30 degrees return best; else return null; } return null; } }
public class class_name { private static File _getClassSource (final String className) { if (className != null) { final String classResource = className.replace ('.', '/') + ".class"; return _getResourceSource (classResource, Thread.currentThread ().getContextClassLoader ()); } return null; } }
public class class_name { private static File _getClassSource (final String className) { if (className != null) { final String classResource = className.replace ('.', '/') + ".class"; return _getResourceSource (classResource, Thread.currentThread ().getContextClassLoader ()); // depends on control dependency: [if], data = [none] } return null; } }
public class class_name { public void marshall(GetDomainsRequest getDomainsRequest, ProtocolMarshaller protocolMarshaller) { if (getDomainsRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(getDomainsRequest.getPageToken(), PAGETOKEN_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(GetDomainsRequest getDomainsRequest, ProtocolMarshaller protocolMarshaller) { if (getDomainsRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(getDomainsRequest.getPageToken(), PAGETOKEN_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { @Override protected void visitForNode(ForNode node) { boolean hasIfempty = (node.numChildren() == 2); // NOTE: below we call id(varName) on a number of variables instead of using // VariableDeclaration.ref(), this is because the refs() might be referenced on the other side // of a call to visitChildrenReturningCodeChunk. // That will break the normal behavior of FormattingContext being able to tell whether or not an // initial statement has already been generated because it eagerly coerces the value to a // string. This will lead to redundant variable declarations. // When visitChildrenReturningCodeChunk is gone, this can be cleaned up, but for now we have to // manually decide where to declare the variables. List<Statement> statements = new ArrayList<>(); // Build some local variable names. ForNonemptyNode nonEmptyNode = (ForNonemptyNode) node.getChild(0); String varPrefix = nonEmptyNode.getVarName() + node.getId(); // TODO(b/32224284): A more consistent pattern for local variable management. String limitName = varPrefix + "ListLen"; Expression limitInitializer; Optional<RangeArgs> args = RangeArgs.createFromNode(node); Function<Expression, Expression> getDataItemFunction; if (args.isPresent()) { RangeArgs range = args.get(); // if any of the expressions are too expensive, allocate local variables for them final Expression start = maybeStashInLocal( range.start().isPresent() ? translateExpr(range.start().get()) : Expression.number(0), varPrefix + "_RangeStart", statements); final Expression end = maybeStashInLocal(translateExpr(range.limit()), varPrefix + "_RangeEnd", statements); final Expression step = maybeStashInLocal( range.increment().isPresent() ? translateExpr(range.increment().get()) : Expression.number(1), varPrefix + "_RangeStep", statements); // the logic we want is // step * (end-start) < 0 ? 0 : ( (end-start)/step + ((end-start) % step == 0 ? 0 : 1)); // but given that all javascript numbers are doubles we can simplify this somewhat. // Math.max(0, Match.ceil((end - start)/step)) // should yield identical results. limitInitializer = dottedIdNoRequire("Math.max") .call( number(0), dottedIdNoRequire("Math.ceil").call(end.minus(start).divideBy(step))); // optimize for foreach over a range getDataItemFunction = index -> start.plus(index.times(step)); } else { // Define list var and list-len var. Expression dataRef = translateExpr(node.getExpr()); final String listVarName = varPrefix + "List"; Expression listVar = VariableDeclaration.builder(listVarName).setRhs(dataRef).build().ref(); // does it make sense to store this in a variable? limitInitializer = listVar.dotAccess("length"); getDataItemFunction = index -> id(listVarName).bracketAccess(index); } // Generate the foreach body as a CodeChunk. Expression limit = id(limitName); statements.add(VariableDeclaration.builder(limitName).setRhs(limitInitializer).build()); Statement foreachBody = handleForeachLoop(nonEmptyNode, limit, getDataItemFunction); if (hasIfempty) { // If there is an ifempty node, wrap the foreach body in an if statement and append the // ifempty body as the else clause. Statement ifemptyBody = visitChildrenReturningCodeChunk(node.getChild(1)); Expression limitCheck = limit.op(Operator.GREATER_THAN, number(0)); foreachBody = ifStatement(limitCheck, foreachBody).setElse(ifemptyBody).build(); } statements.add(foreachBody); jsCodeBuilder.append(Statement.of(statements)); } }
public class class_name { @Override protected void visitForNode(ForNode node) { boolean hasIfempty = (node.numChildren() == 2); // NOTE: below we call id(varName) on a number of variables instead of using // VariableDeclaration.ref(), this is because the refs() might be referenced on the other side // of a call to visitChildrenReturningCodeChunk. // That will break the normal behavior of FormattingContext being able to tell whether or not an // initial statement has already been generated because it eagerly coerces the value to a // string. This will lead to redundant variable declarations. // When visitChildrenReturningCodeChunk is gone, this can be cleaned up, but for now we have to // manually decide where to declare the variables. List<Statement> statements = new ArrayList<>(); // Build some local variable names. ForNonemptyNode nonEmptyNode = (ForNonemptyNode) node.getChild(0); String varPrefix = nonEmptyNode.getVarName() + node.getId(); // TODO(b/32224284): A more consistent pattern for local variable management. String limitName = varPrefix + "ListLen"; Expression limitInitializer; Optional<RangeArgs> args = RangeArgs.createFromNode(node); Function<Expression, Expression> getDataItemFunction; if (args.isPresent()) { RangeArgs range = args.get(); // if any of the expressions are too expensive, allocate local variables for them final Expression start = maybeStashInLocal( range.start().isPresent() ? translateExpr(range.start().get()) : Expression.number(0), varPrefix + "_RangeStart", statements); final Expression end = maybeStashInLocal(translateExpr(range.limit()), varPrefix + "_RangeEnd", statements); final Expression step = maybeStashInLocal( range.increment().isPresent() ? translateExpr(range.increment().get()) : Expression.number(1), varPrefix + "_RangeStep", statements); // the logic we want is // step * (end-start) < 0 ? 0 : ( (end-start)/step + ((end-start) % step == 0 ? 0 : 1)); // but given that all javascript numbers are doubles we can simplify this somewhat. // Math.max(0, Match.ceil((end - start)/step)) // should yield identical results. limitInitializer = dottedIdNoRequire("Math.max") .call( number(0), dottedIdNoRequire("Math.ceil").call(end.minus(start).divideBy(step))); // depends on control dependency: [if], data = [none] // optimize for foreach over a range getDataItemFunction = index -> start.plus(index.times(step)); // depends on control dependency: [if], data = [none] } else { // Define list var and list-len var. Expression dataRef = translateExpr(node.getExpr()); final String listVarName = varPrefix + "List"; Expression listVar = VariableDeclaration.builder(listVarName).setRhs(dataRef).build().ref(); // does it make sense to store this in a variable? limitInitializer = listVar.dotAccess("length"); // depends on control dependency: [if], data = [none] getDataItemFunction = index -> id(listVarName).bracketAccess(index); // depends on control dependency: [if], data = [none] } // Generate the foreach body as a CodeChunk. Expression limit = id(limitName); statements.add(VariableDeclaration.builder(limitName).setRhs(limitInitializer).build()); Statement foreachBody = handleForeachLoop(nonEmptyNode, limit, getDataItemFunction); if (hasIfempty) { // If there is an ifempty node, wrap the foreach body in an if statement and append the // ifempty body as the else clause. Statement ifemptyBody = visitChildrenReturningCodeChunk(node.getChild(1)); Expression limitCheck = limit.op(Operator.GREATER_THAN, number(0)); foreachBody = ifStatement(limitCheck, foreachBody).setElse(ifemptyBody).build(); // depends on control dependency: [if], data = [none] } statements.add(foreachBody); jsCodeBuilder.append(Statement.of(statements)); } }
public class class_name { public void setRedactEnabled(final boolean redactEnabled) { final SimpleFilterProvider simpleFilterProvider = new SimpleFilterProvider(); if (redactEnabled) { simpleFilterProvider.addFilter(RedactionFilter.REDACTION_FILTER_ID, new RedactionFilter(!_redactNull)); } else { simpleFilterProvider.addFilter(RedactionFilter.REDACTION_FILTER_ID, SimpleBeanPropertyFilter.serializeAllExcept(Collections.<String>emptySet())); } _objectMapper.setFilterProvider(simpleFilterProvider); _redactEnabled = redactEnabled; } }
public class class_name { public void setRedactEnabled(final boolean redactEnabled) { final SimpleFilterProvider simpleFilterProvider = new SimpleFilterProvider(); if (redactEnabled) { simpleFilterProvider.addFilter(RedactionFilter.REDACTION_FILTER_ID, new RedactionFilter(!_redactNull)); // depends on control dependency: [if], data = [none] } else { simpleFilterProvider.addFilter(RedactionFilter.REDACTION_FILTER_ID, SimpleBeanPropertyFilter.serializeAllExcept(Collections.<String>emptySet())); // depends on control dependency: [if], data = [none] } _objectMapper.setFilterProvider(simpleFilterProvider); _redactEnabled = redactEnabled; } }
public class class_name { @Override public R scan(Tree tree, P p) { if (tree == null) return null; TreePath prev = path; path = new TreePath(path, tree); try { return tree.accept(this, p); } finally { path = prev; } } }
public class class_name { @Override public R scan(Tree tree, P p) { if (tree == null) return null; TreePath prev = path; path = new TreePath(path, tree); try { return tree.accept(this, p); // depends on control dependency: [try], data = [none] } finally { path = prev; } } }
public class class_name { @SuppressWarnings("unchecked") public static <T> T get(Object key) { try { return (T) CACHE.get(key); } catch (ClassCastException e) { System.out.print("E/Cache: Can't use cached object: wrong type"); } return null; } }
public class class_name { @SuppressWarnings("unchecked") public static <T> T get(Object key) { try { return (T) CACHE.get(key); // depends on control dependency: [try], data = [none] } catch (ClassCastException e) { System.out.print("E/Cache: Can't use cached object: wrong type"); } // depends on control dependency: [catch], data = [none] return null; } }
public class class_name { private File subDirForId(String id) { File subDir = new File(objs, id.substring(0, SUBDIR_POLICY)); if (!subDir.exists()) { subDir.mkdirs(); } return subDir; } }
public class class_name { private File subDirForId(String id) { File subDir = new File(objs, id.substring(0, SUBDIR_POLICY)); if (!subDir.exists()) { subDir.mkdirs(); // depends on control dependency: [if], data = [none] } return subDir; } }
public class class_name { public Collection<Object> getAllFacts(Solution_ solution) { Collection<Object> facts = new ArrayList<>(); // Adds both entities and facts Arrays.asList(entityMemberAccessorMap, problemFactMemberAccessorMap) .forEach(map -> map.forEach((key, memberAccessor) -> { Object object = extractMemberObject(memberAccessor, solution); if (object != null) { facts.add(object); } })); entityCollectionMemberAccessorMap.forEach( (key, memberAccessor) -> facts.addAll(extractMemberCollectionOrArray(memberAccessor, solution, false))); problemFactCollectionMemberAccessorMap.forEach( (key, memberAccessor) -> facts.addAll(extractMemberCollectionOrArray(memberAccessor, solution, true))); return facts; } }
public class class_name { public Collection<Object> getAllFacts(Solution_ solution) { Collection<Object> facts = new ArrayList<>(); // Adds both entities and facts Arrays.asList(entityMemberAccessorMap, problemFactMemberAccessorMap) .forEach(map -> map.forEach((key, memberAccessor) -> { Object object = extractMemberObject(memberAccessor, solution); if (object != null) { facts.add(object); // depends on control dependency: [if], data = [(object] } })); entityCollectionMemberAccessorMap.forEach( (key, memberAccessor) -> facts.addAll(extractMemberCollectionOrArray(memberAccessor, solution, false))); problemFactCollectionMemberAccessorMap.forEach( (key, memberAccessor) -> facts.addAll(extractMemberCollectionOrArray(memberAccessor, solution, true))); return facts; } }
public class class_name { public static boolean areEqual(SparseArray<?> sparseArray, Object o) { if (sparseArray == o) { return true; } if (o == null) { return false; } SparseArray<?> other = (SparseArray<?>) o; if (sparseArray.size() == other.size()) { return sparseArray.keyStream().allMatch(key -> Objects.equals(sparseArray.get(key), other.get(key))); } return false; } }
public class class_name { public static boolean areEqual(SparseArray<?> sparseArray, Object o) { if (sparseArray == o) { return true; // depends on control dependency: [if], data = [none] } if (o == null) { return false; // depends on control dependency: [if], data = [none] } SparseArray<?> other = (SparseArray<?>) o; if (sparseArray.size() == other.size()) { return sparseArray.keyStream().allMatch(key -> Objects.equals(sparseArray.get(key), other.get(key))); // depends on control dependency: [if], data = [none] } return false; } }
public class class_name { public void setBugzillaKeywords(final String bugzillaKeywords) { if (bugzillaKeywords == null && this.bugzillaKeywords == null) { return; } else if (bugzillaKeywords == null) { removeChild(this.bugzillaKeywords); this.bugzillaKeywords = null; } else if (this.bugzillaKeywords == null) { this.bugzillaKeywords = new KeyValueNode<String>(CommonConstants.CS_BUGZILLA_KEYWORDS_TITLE, bugzillaKeywords); appendChild(this.bugzillaKeywords, false); } else { this.bugzillaKeywords.setValue(bugzillaKeywords); } } }
public class class_name { public void setBugzillaKeywords(final String bugzillaKeywords) { if (bugzillaKeywords == null && this.bugzillaKeywords == null) { return; // depends on control dependency: [if], data = [none] } else if (bugzillaKeywords == null) { removeChild(this.bugzillaKeywords); // depends on control dependency: [if], data = [none] this.bugzillaKeywords = null; // depends on control dependency: [if], data = [none] } else if (this.bugzillaKeywords == null) { this.bugzillaKeywords = new KeyValueNode<String>(CommonConstants.CS_BUGZILLA_KEYWORDS_TITLE, bugzillaKeywords); // depends on control dependency: [if], data = [none] appendChild(this.bugzillaKeywords, false); // depends on control dependency: [if], data = [(this.bugzillaKeywords] } else { this.bugzillaKeywords.setValue(bugzillaKeywords); // depends on control dependency: [if], data = [none] } } }
public class class_name { @Override public void delete(Object entity, Object key) { // All Modifying Neo4J operations must be executed within a transaction checkActiveTransaction(); GraphDatabaseService graphDb = getConnection(); // Find Node for this particular entity EntityMetadata m = KunderaMetadataManager.getEntityMetadata(kunderaMetadata, entity.getClass()); Node node = mapper.searchNode(key, m, graphDb, true); if (node != null) { // Remove this particular node, if not already deleted in current // transaction if (!((Neo4JTransaction) resource).containsNodeId(node.getId())) { node.delete(); // Manually remove node index if applicable indexer.deleteNodeIndex(m, graphDb, node); // Remove all relationship edges attached to this node // (otherwise an // exception is thrown) for (Relationship relationship : node.getRelationships()) { relationship.delete(); // Manually remove relationship index if applicable indexer.deleteRelationshipIndex(m, graphDb, relationship); } ((Neo4JTransaction) resource).addNodeId(node.getId()); } } else { if (log.isDebugEnabled()) log.debug("Entity to be deleted doesn't exist in graph. Doing nothing"); } } }
public class class_name { @Override public void delete(Object entity, Object key) { // All Modifying Neo4J operations must be executed within a transaction checkActiveTransaction(); GraphDatabaseService graphDb = getConnection(); // Find Node for this particular entity EntityMetadata m = KunderaMetadataManager.getEntityMetadata(kunderaMetadata, entity.getClass()); Node node = mapper.searchNode(key, m, graphDb, true); if (node != null) { // Remove this particular node, if not already deleted in current // transaction if (!((Neo4JTransaction) resource).containsNodeId(node.getId())) { node.delete(); // depends on control dependency: [if], data = [none] // Manually remove node index if applicable indexer.deleteNodeIndex(m, graphDb, node); // depends on control dependency: [if], data = [none] // Remove all relationship edges attached to this node // (otherwise an // exception is thrown) for (Relationship relationship : node.getRelationships()) { relationship.delete(); // depends on control dependency: [for], data = [relationship] // Manually remove relationship index if applicable indexer.deleteRelationshipIndex(m, graphDb, relationship); // depends on control dependency: [for], data = [relationship] } ((Neo4JTransaction) resource).addNodeId(node.getId()); // depends on control dependency: [if], data = [none] } } else { if (log.isDebugEnabled()) log.debug("Entity to be deleted doesn't exist in graph. Doing nothing"); } } }
public class class_name { protected boolean combine(Object[] srcs, TupleWritable dst) { assert srcs.length == dst.size(); for (int i = 0; i < srcs.length; ++i) { if (!dst.has(i)) { return false; } } return true; } }
public class class_name { protected boolean combine(Object[] srcs, TupleWritable dst) { assert srcs.length == dst.size(); for (int i = 0; i < srcs.length; ++i) { if (!dst.has(i)) { return false; // depends on control dependency: [if], data = [none] } } return true; } }
public class class_name { public Observable<ServiceResponse<Page<ExpressRouteCircuitInner>>> listWithServiceResponseAsync() { return listSinglePageAsync() .concatMap(new Func1<ServiceResponse<Page<ExpressRouteCircuitInner>>, Observable<ServiceResponse<Page<ExpressRouteCircuitInner>>>>() { @Override public Observable<ServiceResponse<Page<ExpressRouteCircuitInner>>> call(ServiceResponse<Page<ExpressRouteCircuitInner>> page) { String nextPageLink = page.body().nextPageLink(); if (nextPageLink == null) { return Observable.just(page); } return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink)); } }); } }
public class class_name { public Observable<ServiceResponse<Page<ExpressRouteCircuitInner>>> listWithServiceResponseAsync() { return listSinglePageAsync() .concatMap(new Func1<ServiceResponse<Page<ExpressRouteCircuitInner>>, Observable<ServiceResponse<Page<ExpressRouteCircuitInner>>>>() { @Override public Observable<ServiceResponse<Page<ExpressRouteCircuitInner>>> call(ServiceResponse<Page<ExpressRouteCircuitInner>> page) { String nextPageLink = page.body().nextPageLink(); if (nextPageLink == null) { return Observable.just(page); // depends on control dependency: [if], data = [none] } return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink)); } }); } }
public class class_name { public static String getAlias(Function function) { String functionAlias = getStringProperty(function,Function.PROP_NAME); if(!functionAlias.isEmpty()) { return functionAlias; } return function.getClass().getSimpleName(); } }
public class class_name { public static String getAlias(Function function) { String functionAlias = getStringProperty(function,Function.PROP_NAME); if(!functionAlias.isEmpty()) { return functionAlias; // depends on control dependency: [if], data = [none] } return function.getClass().getSimpleName(); } }
public class class_name { public void plug(final Map<String, Object> dataModel, final RequestContext context) { String content = (String) dataModel.get(Plugin.PLUGINS); if (null == content) { dataModel.put(Plugin.PLUGINS, ""); } handleLangs(dataModel); fillDefault(dataModel); postPlug(dataModel, context); content = (String) dataModel.get(Plugin.PLUGINS); final StringBuilder contentBuilder = new StringBuilder(content); contentBuilder.append(getViewContent(dataModel)); final String pluginsContent = contentBuilder.toString(); dataModel.put(Plugin.PLUGINS, pluginsContent); LOGGER.log(Level.DEBUG, "Plugin[name={0}] has been plugged", getName()); } }
public class class_name { public void plug(final Map<String, Object> dataModel, final RequestContext context) { String content = (String) dataModel.get(Plugin.PLUGINS); if (null == content) { dataModel.put(Plugin.PLUGINS, ""); // depends on control dependency: [if], data = [none] } handleLangs(dataModel); fillDefault(dataModel); postPlug(dataModel, context); content = (String) dataModel.get(Plugin.PLUGINS); final StringBuilder contentBuilder = new StringBuilder(content); contentBuilder.append(getViewContent(dataModel)); final String pluginsContent = contentBuilder.toString(); dataModel.put(Plugin.PLUGINS, pluginsContent); LOGGER.log(Level.DEBUG, "Plugin[name={0}] has been plugged", getName()); } }
public class class_name { public final EObject entryRuleRichStringLiteralStart() throws RecognitionException { EObject current = null; EObject iv_ruleRichStringLiteralStart = null; try { // InternalSARL.g:10858:63: (iv_ruleRichStringLiteralStart= ruleRichStringLiteralStart EOF ) // InternalSARL.g:10859:2: iv_ruleRichStringLiteralStart= ruleRichStringLiteralStart EOF { if ( state.backtracking==0 ) { newCompositeNode(grammarAccess.getRichStringLiteralStartRule()); } pushFollow(FOLLOW_1); iv_ruleRichStringLiteralStart=ruleRichStringLiteralStart(); state._fsp--; if (state.failed) return current; if ( state.backtracking==0 ) { current =iv_ruleRichStringLiteralStart; } match(input,EOF,FOLLOW_2); if (state.failed) return current; } } catch (RecognitionException re) { recover(input,re); appendSkippedTokens(); } finally { } return current; } }
public class class_name { public final EObject entryRuleRichStringLiteralStart() throws RecognitionException { EObject current = null; EObject iv_ruleRichStringLiteralStart = null; try { // InternalSARL.g:10858:63: (iv_ruleRichStringLiteralStart= ruleRichStringLiteralStart EOF ) // InternalSARL.g:10859:2: iv_ruleRichStringLiteralStart= ruleRichStringLiteralStart EOF { if ( state.backtracking==0 ) { newCompositeNode(grammarAccess.getRichStringLiteralStartRule()); // depends on control dependency: [if], data = [none] } pushFollow(FOLLOW_1); iv_ruleRichStringLiteralStart=ruleRichStringLiteralStart(); state._fsp--; if (state.failed) return current; if ( state.backtracking==0 ) { current =iv_ruleRichStringLiteralStart; // depends on control dependency: [if], data = [none] } match(input,EOF,FOLLOW_2); if (state.failed) return current; } } catch (RecognitionException re) { recover(input,re); appendSkippedTokens(); } finally { } return current; } }
public class class_name { static public Logger.LEVEL getLogLevel() { final Future<Logger.LEVEL> task = ThreadPoolWorkQueue.submit(new Callable<Logger.LEVEL>() { @Override public Logger.LEVEL call() { return getLevelSync(); } }); try { return task.get(); } catch (Exception e) { return getLevelSync(); } } }
public class class_name { static public Logger.LEVEL getLogLevel() { final Future<Logger.LEVEL> task = ThreadPoolWorkQueue.submit(new Callable<Logger.LEVEL>() { @Override public Logger.LEVEL call() { return getLevelSync(); } }); try { return task.get(); // depends on control dependency: [try], data = [none] } catch (Exception e) { return getLevelSync(); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public static void addValueIfNotNull( @Nonnull Map<String, String> parameters, @Nonnull String key, Object value ) { if( value == null ) { return; } parameters.put(key, value.toString()); } }
public class class_name { public static void addValueIfNotNull( @Nonnull Map<String, String> parameters, @Nonnull String key, Object value ) { if( value == null ) { return; // depends on control dependency: [if], data = [none] } parameters.put(key, value.toString()); } }
public class class_name { public void setZoomManager(ZoomManager newManager) { if ( zoomManager != null ) { zoomManager.removeZoomListener( this ); } zoomManager = newManager; if ( zoomManager != null ) { zoomManager.addZoomListener( this ); } } }
public class class_name { public void setZoomManager(ZoomManager newManager) { if ( zoomManager != null ) { zoomManager.removeZoomListener( this ); // depends on control dependency: [if], data = [none] } zoomManager = newManager; if ( zoomManager != null ) { zoomManager.addZoomListener( this ); // depends on control dependency: [if], data = [none] } } }
public class class_name { TopicData getTopicData(Topic topic, String topicName) { TopicData topicData = null; if (topic != null) { topicData = topic.getTopicData(); } if (topicData == null) { topicData = topicDataCache.get(topicName); if (topic != null && topicData != null) { topic.setTopicDataReference(topicData.getReference()); } } if (topicData == null) { synchronized (this) { topicData = buildTopicData(topicName); if (topic != null) { topic.setTopicDataReference(topicData.getReference()); } } } return topicData; } }
public class class_name { TopicData getTopicData(Topic topic, String topicName) { TopicData topicData = null; if (topic != null) { topicData = topic.getTopicData(); // depends on control dependency: [if], data = [none] } if (topicData == null) { topicData = topicDataCache.get(topicName); // depends on control dependency: [if], data = [none] if (topic != null && topicData != null) { topic.setTopicDataReference(topicData.getReference()); // depends on control dependency: [if], data = [(topic] } } if (topicData == null) { synchronized (this) { // depends on control dependency: [if], data = [none] topicData = buildTopicData(topicName); if (topic != null) { topic.setTopicDataReference(topicData.getReference()); // depends on control dependency: [if], data = [(topic] } } } return topicData; } }
public class class_name { public static <T> T getFirst(Collection<T> collection) { if (isEmpty(collection)) { return null; } if (collection instanceof List) { return ((List<T>) collection).get(0); } return collection.iterator().next(); } }
public class class_name { public static <T> T getFirst(Collection<T> collection) { if (isEmpty(collection)) { return null; // depends on control dependency: [if], data = [none] } if (collection instanceof List) { return ((List<T>) collection).get(0); // depends on control dependency: [if], data = [none] } return collection.iterator().next(); } }
public class class_name { public static void initDelegatingConfigs(ModuleConfig moduleConfig, ServletContext servletContext) { ActionConfig[] actionConfigs = moduleConfig.findActionConfigs(); // Initialize action configs. for (int i = 0; i < actionConfigs.length; i++) { ActionConfig actionConfig = actionConfigs[i]; if (actionConfig instanceof DelegatingActionMapping) { ((DelegatingActionMapping) actionConfig).init(servletContext); } else { // Initialize action-level exception configs. ExceptionConfig[] exceptionConfigs = actionConfig.findExceptionConfigs(); for (int j = 0; j < exceptionConfigs.length; j++) { ExceptionConfig exceptionConfig = exceptionConfigs[j]; if (exceptionConfig instanceof DelegatingExceptionConfig) { ((DelegatingExceptionConfig) exceptionConfig).init(servletContext); } } } } // Initialize module-level exception configs. ExceptionConfig[] exceptionConfigs = moduleConfig.findExceptionConfigs(); for (int i = 0; i < exceptionConfigs.length; i++) { ExceptionConfig exceptionConfig = exceptionConfigs[i]; if (exceptionConfig instanceof DelegatingExceptionConfig) { ((DelegatingExceptionConfig) exceptionConfig).init(servletContext); } } } }
public class class_name { public static void initDelegatingConfigs(ModuleConfig moduleConfig, ServletContext servletContext) { ActionConfig[] actionConfigs = moduleConfig.findActionConfigs(); // Initialize action configs. for (int i = 0; i < actionConfigs.length; i++) { ActionConfig actionConfig = actionConfigs[i]; if (actionConfig instanceof DelegatingActionMapping) { ((DelegatingActionMapping) actionConfig).init(servletContext); // depends on control dependency: [if], data = [none] } else { // Initialize action-level exception configs. ExceptionConfig[] exceptionConfigs = actionConfig.findExceptionConfigs(); for (int j = 0; j < exceptionConfigs.length; j++) { ExceptionConfig exceptionConfig = exceptionConfigs[j]; if (exceptionConfig instanceof DelegatingExceptionConfig) { ((DelegatingExceptionConfig) exceptionConfig).init(servletContext); // depends on control dependency: [if], data = [none] } } } } // Initialize module-level exception configs. ExceptionConfig[] exceptionConfigs = moduleConfig.findExceptionConfigs(); for (int i = 0; i < exceptionConfigs.length; i++) { ExceptionConfig exceptionConfig = exceptionConfigs[i]; if (exceptionConfig instanceof DelegatingExceptionConfig) { ((DelegatingExceptionConfig) exceptionConfig).init(servletContext); // depends on control dependency: [if], data = [none] } } } }
public class class_name { public void defer(ICommandData[] commands) { deferredCommands.addAll(Arrays.asList(commands)); for (ICommandData command : commands) { deferInternal(command); } } }
public class class_name { public void defer(ICommandData[] commands) { deferredCommands.addAll(Arrays.asList(commands)); for (ICommandData command : commands) { deferInternal(command); // depends on control dependency: [for], data = [command] } } }
public class class_name { public void cleanup() throws InconsistentLocalTranException, IllegalStateException, RolledbackException { if (tc.isEntryEnabled()) Tr.entry(tc, "cleanup", this); try { if (_state != Running) { final IllegalStateException ise = new IllegalStateException("Cannot cleanup LocalTransactionContainment. LocalTransactionCoordinator is completing or completed."); FFDCFilter.processException(ise, "com.ibm.tx.ltc.LocalTranCoordImpl.cleanup", "958", this); Tr.error(tc, "ERR_LTC_COMPLETE"); if (tc.isEntryEnabled()) Tr.exit(tc, "cleanup", ise); throw ise; } // On WebSphere for z/OS we need to make sure that the unit // of work we are about to complete is associated with the // current thread of execution. If we're not executing on // the current thread, the work done out of the registered // synchronizations will not be done on the right runtime // context. This method should never fail unless we're // down an activity session path that we haven't coded for. ensureActive(); // Are there any dangling RMLTs? boolean danglers = zosCheckInterests(); if ((_cleanupResources != null) && (!_cleanupResources.isEmpty())) { danglers = true; } // Set the outcome direction in which to resolve any danglers. // If there are no danglers, then this is influenced only by whether we are // set to rollbackOnly. @116975 if (danglers) { _outcomeRollback = (_rollbackOnly || !_unresActionIsCommit); } else { _outcomeRollback = _rollbackOnly; } // Only generate completion performance metrics if the following conditions hold: // 1. The performance metrics recorder is available // 2. At least one resource has been registered with the LTC (otherwise we would // not have made the original started() call and the metrics would be damaged) boolean generateCommitMetrics = false; if (startTime != 0) { generateCommitMetrics=true; perfCommitRequested(); } // // Need to fire synchronizations // if we are commiting the work. // if (!_outcomeRollback) { informSynchronizations(true); } // Defect 122329 // // Change state once synchronizations have been called // to allow enlistment during beforeCompletion. // _state = Completing; // // Need to re-calculate if we have any danglers // as some may have been enlisted during beforeCompletion // danglers = zosCheckInterests(); if ((_cleanupResources != null) && (!_cleanupResources.isEmpty())) { danglers = true; } // APAR PK08578 // // Need to check again for rollback_only since it may be set during // beforeCompletion processing. if (danglers) /* @PK08578A*/ { /*2@PK08578A*/ _outcomeRollback = (_rollbackOnly || !_unresActionIsCommit); } /* @PK08578A*/ else /* @PK08578A*/ { /* @PK08578A*/ _outcomeRollback = _rollbackOnly; /* @PK08578A*/ } /* @PK08578A*/ List<String> failures = null; if ((danglers) && (_cleanupResources != null)) // @D220424C { for (int i = 0; i < _cleanupResources.size(); i++) { OnePhaseXAResource resource = _cleanupResources.get(i); try { if (_outcomeRollback) { if (tc.isDebugEnabled()) Tr.debug(tc, "Calling rollback on resource " + resource); resource.rollback(null); // 130828 add messages for resources rolledback in cleanup Tr.warning(tc, "WRN_RESOURCE_UNRESOLVED_LTC_ROLLEDBACK", resource.getResourceName()); } else { if (tc.isDebugEnabled()) Tr.debug(tc, "Calling commit on resource " + resource); resource.commit(null, true); } } catch (XAException xe) { // // Log any failures. We don't need to throw // as user has already detached. // FFDCFilter.processException(xe, "com.ibm.tx.ltc.LocalTranCoordImpl.cleanup", "755", this); if (failures == null) { failures = new ArrayList<String>(); } failures.add(resource.getResourceName()); Tr.error(tc, "ERR_XA_RESOURCE_COMPLETE", new Object[] {resource.getResourceName(), xe}); } } } final String zosFailure = zosComplete(); if (zosFailure != null) { if (failures == null) { failures = new ArrayList<String>(); } failures.add(zosFailure); } postCleanup(); _state = Completed; // // Need to fire synchronizations even if // there is no work to be done. // informSynchronizations(true); if (generateCommitMetrics) { perfCompleted(0, startTime, !_outcomeRollback); } // // Cleanup all our outstanding objects for // garbage collection. // _cleanupResources = null; _enlistedResources = null; _syncs = null; // Defect 130218.2 // // Clear the ltc from the Thread context // if (_current.getLocalTranCoord() == this) { if (tc.isDebugEnabled()) Tr.debug(tc, "Completed LTC is on thread so set current LTC to null"); _current.setCoordinator(null); } // // Defect 115015 // // Need to throw any exceptions after all Synchronizations // have been informed to ensure any dependant cleanup takes // place. // if (failures != null) { final String[] out = new String[failures.size()]; final InconsistentLocalTranException ilte = new InconsistentLocalTranException("Resource(s) failed to complete.", failures.toArray(out)); if (tc.isEntryEnabled()) Tr.exit(tc, "cleanup", ilte); throw ilte; } // // Defect 116861 // // Throw exception if we rolledback any dangers // if (_outcomeRollback && !_rollbackOnlyFromApplicationCode) { RolledbackException rbe = null; // 130828 vary trace on cause of rollback // Only output a message and throw a RBE if rollback only was set by // the WAS runtime. If the rollback was requested by application code // then the rollback is part of application logic and shouldn't be // treated as an exception. if (_rollbackOnly) { Tr.error(tc, "ERR_XA_RESOURCE_ROLLEDBACK"); rbe = new RolledbackException("Resources rolled back due to setRollbackOnly() being called."); } else { Tr.warning(tc, "WRN_LTC_UNRESOLVED_ROLLEDBACK"); rbe = new RolledbackException("Resources rolled back due to unresolved action of rollback."); } if (tc.isEntryEnabled()) Tr.exit(tc, "cleanup", rbe); throw rbe; } } finally { if(_current != null) { if (tc.isDebugEnabled()) Tr.debug(tc, "Drive invokeEventListener processing"); _current.invokeEventListener(this, UOWEventListener.POST_END, null); } else { if (tc.isDebugEnabled()) Tr.debug(tc, "current is null"); } } if (tc.isEntryEnabled()) Tr.exit(tc, "cleanup"); } }
public class class_name { public void cleanup() throws InconsistentLocalTranException, IllegalStateException, RolledbackException { if (tc.isEntryEnabled()) Tr.entry(tc, "cleanup", this); try { if (_state != Running) { final IllegalStateException ise = new IllegalStateException("Cannot cleanup LocalTransactionContainment. LocalTransactionCoordinator is completing or completed."); FFDCFilter.processException(ise, "com.ibm.tx.ltc.LocalTranCoordImpl.cleanup", "958", this); // depends on control dependency: [if], data = [none] Tr.error(tc, "ERR_LTC_COMPLETE"); // depends on control dependency: [if], data = [none] if (tc.isEntryEnabled()) Tr.exit(tc, "cleanup", ise); throw ise; } // On WebSphere for z/OS we need to make sure that the unit // of work we are about to complete is associated with the // current thread of execution. If we're not executing on // the current thread, the work done out of the registered // synchronizations will not be done on the right runtime // context. This method should never fail unless we're // down an activity session path that we haven't coded for. ensureActive(); // Are there any dangling RMLTs? boolean danglers = zosCheckInterests(); if ((_cleanupResources != null) && (!_cleanupResources.isEmpty())) { danglers = true; // depends on control dependency: [if], data = [none] } // Set the outcome direction in which to resolve any danglers. // If there are no danglers, then this is influenced only by whether we are // set to rollbackOnly. @116975 if (danglers) { _outcomeRollback = (_rollbackOnly || !_unresActionIsCommit); // depends on control dependency: [if], data = [none] } else { _outcomeRollback = _rollbackOnly; // depends on control dependency: [if], data = [none] } // Only generate completion performance metrics if the following conditions hold: // 1. The performance metrics recorder is available // 2. At least one resource has been registered with the LTC (otherwise we would // not have made the original started() call and the metrics would be damaged) boolean generateCommitMetrics = false; if (startTime != 0) { generateCommitMetrics=true; // depends on control dependency: [if], data = [none] perfCommitRequested(); // depends on control dependency: [if], data = [none] } // // Need to fire synchronizations // if we are commiting the work. // if (!_outcomeRollback) { informSynchronizations(true); // depends on control dependency: [if], data = [none] } // Defect 122329 // // Change state once synchronizations have been called // to allow enlistment during beforeCompletion. // _state = Completing; // // Need to re-calculate if we have any danglers // as some may have been enlisted during beforeCompletion // danglers = zosCheckInterests(); if ((_cleanupResources != null) && (!_cleanupResources.isEmpty())) { danglers = true; // depends on control dependency: [if], data = [none] } // APAR PK08578 // // Need to check again for rollback_only since it may be set during // beforeCompletion processing. if (danglers) /* @PK08578A*/ { /*2@PK08578A*/ _outcomeRollback = (_rollbackOnly || !_unresActionIsCommit); // depends on control dependency: [if], data = [none] } /* @PK08578A*/ else /* @PK08578A*/ { /* @PK08578A*/ _outcomeRollback = _rollbackOnly; /* @PK08578A*/ // depends on control dependency: [if], data = [none] } /* @PK08578A*/ List<String> failures = null; if ((danglers) && (_cleanupResources != null)) // @D220424C { for (int i = 0; i < _cleanupResources.size(); i++) { OnePhaseXAResource resource = _cleanupResources.get(i); try { if (_outcomeRollback) { if (tc.isDebugEnabled()) Tr.debug(tc, "Calling rollback on resource " + resource); resource.rollback(null); // depends on control dependency: [if], data = [none] // 130828 add messages for resources rolledback in cleanup Tr.warning(tc, "WRN_RESOURCE_UNRESOLVED_LTC_ROLLEDBACK", resource.getResourceName()); // depends on control dependency: [if], data = [none] } else { if (tc.isDebugEnabled()) Tr.debug(tc, "Calling commit on resource " + resource); resource.commit(null, true); // depends on control dependency: [if], data = [none] } } catch (XAException xe) { // // Log any failures. We don't need to throw // as user has already detached. // FFDCFilter.processException(xe, "com.ibm.tx.ltc.LocalTranCoordImpl.cleanup", "755", this); if (failures == null) { failures = new ArrayList<String>(); // depends on control dependency: [if], data = [none] } failures.add(resource.getResourceName()); Tr.error(tc, "ERR_XA_RESOURCE_COMPLETE", new Object[] {resource.getResourceName(), xe}); } // depends on control dependency: [catch], data = [none] } } final String zosFailure = zosComplete(); if (zosFailure != null) { if (failures == null) { failures = new ArrayList<String>(); // depends on control dependency: [if], data = [none] } failures.add(zosFailure); // depends on control dependency: [if], data = [(zosFailure] } postCleanup(); _state = Completed; // // Need to fire synchronizations even if // there is no work to be done. // informSynchronizations(true); if (generateCommitMetrics) { perfCompleted(0, startTime, !_outcomeRollback); // depends on control dependency: [if], data = [none] } // // Cleanup all our outstanding objects for // garbage collection. // _cleanupResources = null; _enlistedResources = null; _syncs = null; // Defect 130218.2 // // Clear the ltc from the Thread context // if (_current.getLocalTranCoord() == this) { if (tc.isDebugEnabled()) Tr.debug(tc, "Completed LTC is on thread so set current LTC to null"); _current.setCoordinator(null); // depends on control dependency: [if], data = [none] } // // Defect 115015 // // Need to throw any exceptions after all Synchronizations // have been informed to ensure any dependant cleanup takes // place. // if (failures != null) { final String[] out = new String[failures.size()]; final InconsistentLocalTranException ilte = new InconsistentLocalTranException("Resource(s) failed to complete.", failures.toArray(out)); if (tc.isEntryEnabled()) Tr.exit(tc, "cleanup", ilte); throw ilte; } // // Defect 116861 // // Throw exception if we rolledback any dangers // if (_outcomeRollback && !_rollbackOnlyFromApplicationCode) { RolledbackException rbe = null; // 130828 vary trace on cause of rollback // Only output a message and throw a RBE if rollback only was set by // the WAS runtime. If the rollback was requested by application code // then the rollback is part of application logic and shouldn't be // treated as an exception. if (_rollbackOnly) { Tr.error(tc, "ERR_XA_RESOURCE_ROLLEDBACK"); // depends on control dependency: [if], data = [none] rbe = new RolledbackException("Resources rolled back due to setRollbackOnly() being called."); // depends on control dependency: [if], data = [none] } else { Tr.warning(tc, "WRN_LTC_UNRESOLVED_ROLLEDBACK"); // depends on control dependency: [if], data = [none] rbe = new RolledbackException("Resources rolled back due to unresolved action of rollback."); // depends on control dependency: [if], data = [none] } if (tc.isEntryEnabled()) Tr.exit(tc, "cleanup", rbe); throw rbe; } } finally { if(_current != null) { if (tc.isDebugEnabled()) Tr.debug(tc, "Drive invokeEventListener processing"); _current.invokeEventListener(this, UOWEventListener.POST_END, null); // depends on control dependency: [if], data = [null)] } else { if (tc.isDebugEnabled()) Tr.debug(tc, "current is null"); } } if (tc.isEntryEnabled()) Tr.exit(tc, "cleanup"); } }
public class class_name { public void remove( int index ) { if( (index < 0) || (index >= size) ) { throw new IndexOutOfBoundsException(); } --size; for( int i = index; i < size; ++i ) { array[i] = array[i + 1]; } array[size] = null; } }
public class class_name { public void remove( int index ) { if( (index < 0) || (index >= size) ) { throw new IndexOutOfBoundsException(); } --size; for( int i = index; i < size; ++i ) { array[i] = array[i + 1]; // depends on control dependency: [for], data = [i] } array[size] = null; } }
public class class_name { private <T> Option<T> getOption(String parameter) { try { Field field = UndertowOptions.class.getDeclaredField(parameter); if (Option.class.getName().equals(field.getType().getTypeName())) { Object value = field.get(null); return (Option<T>) value; } } catch (Exception e) { log.debug("getting Option type for parameter {} failed with {}", parameter, e); } return null; } }
public class class_name { private <T> Option<T> getOption(String parameter) { try { Field field = UndertowOptions.class.getDeclaredField(parameter); if (Option.class.getName().equals(field.getType().getTypeName())) { Object value = field.get(null); return (Option<T>) value; // depends on control dependency: [if], data = [none] } } catch (Exception e) { log.debug("getting Option type for parameter {} failed with {}", parameter, e); } // depends on control dependency: [catch], data = [none] return null; } }
public class class_name { private void updateLookAndFeel(String lookAndFeelClassName) { setLookAndFeel(lookAndFeelClassName); MultimediaContainerManager.updateLookAndFeel(); SwingUtilities.updateComponentTreeUI(this); pack(); for (Window window : windows) { SwingUtilities.updateComponentTreeUI(window); window.pack(); } } }
public class class_name { private void updateLookAndFeel(String lookAndFeelClassName) { setLookAndFeel(lookAndFeelClassName); MultimediaContainerManager.updateLookAndFeel(); SwingUtilities.updateComponentTreeUI(this); pack(); for (Window window : windows) { SwingUtilities.updateComponentTreeUI(window); window.pack(); // depends on control dependency: [for], data = [window] // depends on control dependency: [for], data = [window] } } }
public class class_name { private OAuthHmacCredential new10aCredential(String userId) { ClientParametersAuthentication clientAuthentication = (ClientParametersAuthentication) getClientAuthentication(); OAuthHmacCredential.Builder builder = new OAuthHmacCredential.Builder(getMethod(), clientAuthentication.getClientId(), clientAuthentication.getClientSecret()) .setTransport(getTransport()) .setJsonFactory(getJsonFactory()) .setTokenServerEncodedUrl(getTokenServerEncodedUrl()) .setClientAuthentication(getClientAuthentication()) .setRequestInitializer(getRequestInitializer()) .setClock(getClock()); if (getCredentialStore() != null) { builder.addRefreshListener( new CredentialStoreRefreshListener(userId, getCredentialStore())); } builder.getRefreshListeners().addAll(getRefreshListeners()); return builder.build(); } }
public class class_name { private OAuthHmacCredential new10aCredential(String userId) { ClientParametersAuthentication clientAuthentication = (ClientParametersAuthentication) getClientAuthentication(); OAuthHmacCredential.Builder builder = new OAuthHmacCredential.Builder(getMethod(), clientAuthentication.getClientId(), clientAuthentication.getClientSecret()) .setTransport(getTransport()) .setJsonFactory(getJsonFactory()) .setTokenServerEncodedUrl(getTokenServerEncodedUrl()) .setClientAuthentication(getClientAuthentication()) .setRequestInitializer(getRequestInitializer()) .setClock(getClock()); if (getCredentialStore() != null) { builder.addRefreshListener( new CredentialStoreRefreshListener(userId, getCredentialStore())); // depends on control dependency: [if], data = [none] } builder.getRefreshListeners().addAll(getRefreshListeners()); return builder.build(); } }
public class class_name { private boolean replaceValues(Locale locale) { try { SortedProperties localization = getLocalization(locale); if (hasDescriptor()) { for (Object itemId : m_container.getItemIds()) { Item item = m_container.getItem(itemId); String key = item.getItemProperty(TableProperty.KEY).getValue().toString(); Object value = localization.get(key); item.getItemProperty(TableProperty.TRANSLATION).setValue(null == value ? "" : value); } } else { m_container.removeAllItems(); Set<Object> keyset = m_keyset.getKeySet(); for (Object key : keyset) { Object itemId = m_container.addItem(); Item item = m_container.getItem(itemId); item.getItemProperty(TableProperty.KEY).setValue(key); Object value = localization.get(key); item.getItemProperty(TableProperty.TRANSLATION).setValue(null == value ? "" : value); } if (m_container.getItemIds().isEmpty()) { m_container.addItem(); } } return true; } catch (IOException | CmsException e) { // The problem should typically be a problem with locking or reading the file containing the translation. // This should be reported in the editor, if false is returned here. return false; } } }
public class class_name { private boolean replaceValues(Locale locale) { try { SortedProperties localization = getLocalization(locale); if (hasDescriptor()) { for (Object itemId : m_container.getItemIds()) { Item item = m_container.getItem(itemId); String key = item.getItemProperty(TableProperty.KEY).getValue().toString(); Object value = localization.get(key); item.getItemProperty(TableProperty.TRANSLATION).setValue(null == value ? "" : value); // depends on control dependency: [for], data = [none] } } else { m_container.removeAllItems(); // depends on control dependency: [if], data = [none] Set<Object> keyset = m_keyset.getKeySet(); for (Object key : keyset) { Object itemId = m_container.addItem(); Item item = m_container.getItem(itemId); item.getItemProperty(TableProperty.KEY).setValue(key); // depends on control dependency: [for], data = [key] Object value = localization.get(key); item.getItemProperty(TableProperty.TRANSLATION).setValue(null == value ? "" : value); // depends on control dependency: [for], data = [none] } if (m_container.getItemIds().isEmpty()) { m_container.addItem(); // depends on control dependency: [if], data = [none] } } return true; // depends on control dependency: [try], data = [none] } catch (IOException | CmsException e) { // The problem should typically be a problem with locking or reading the file containing the translation. // This should be reported in the editor, if false is returned here. return false; } // depends on control dependency: [catch], data = [none] } }
public class class_name { private static String generateTab(int depth) { StringBuilder builder = new StringBuilder(); if (depth != 0) { builder.append(" "); } for (int i = 1; i < depth; i++) { builder.append("| "); } return builder.append("+--").toString(); } }
public class class_name { private static String generateTab(int depth) { StringBuilder builder = new StringBuilder(); if (depth != 0) { builder.append(" "); // depends on control dependency: [if], data = [none] } for (int i = 1; i < depth; i++) { builder.append("| "); // depends on control dependency: [for], data = [none] } return builder.append("+--").toString(); } }
public class class_name { public ManagementGroupEdge getForwardEdge(final int index) { if (index < this.forwardEdges.size()) { return this.forwardEdges.get(index); } return null; } }
public class class_name { public ManagementGroupEdge getForwardEdge(final int index) { if (index < this.forwardEdges.size()) { return this.forwardEdges.get(index); // depends on control dependency: [if], data = [(index] } return null; } }
public class class_name { public static ShortStream of(final Supplier<ShortList> supplier) { final ShortIterator iter = new ShortIteratorEx() { private ShortIterator iterator = null; @Override public boolean hasNext() { if (iterator == null) { init(); } return iterator.hasNext(); } @Override public short nextShort() { if (iterator == null) { init(); } return iterator.nextShort(); } private void init() { final ShortList c = supplier.get(); if (N.isNullOrEmpty(c)) { iterator = ShortIterator.empty(); } else { iterator = c.iterator(); } } }; return of(iter); } }
public class class_name { public static ShortStream of(final Supplier<ShortList> supplier) { final ShortIterator iter = new ShortIteratorEx() { private ShortIterator iterator = null; @Override public boolean hasNext() { if (iterator == null) { init(); // depends on control dependency: [if], data = [none] } return iterator.hasNext(); } @Override public short nextShort() { if (iterator == null) { init(); // depends on control dependency: [if], data = [none] } return iterator.nextShort(); } private void init() { final ShortList c = supplier.get(); if (N.isNullOrEmpty(c)) { iterator = ShortIterator.empty(); // depends on control dependency: [if], data = [none] } else { iterator = c.iterator(); // depends on control dependency: [if], data = [none] } } }; return of(iter); } }
public class class_name { @XmlTransient public void addSqlInput(SQLColumn sqlInputToAddParam){ if(this.sqlInputs == null){ this.sqlInputs = new ArrayList<>(); } if(sqlInputToAddParam == null){ return; } this.sqlInputs.add(sqlInputToAddParam); } }
public class class_name { @XmlTransient public void addSqlInput(SQLColumn sqlInputToAddParam){ if(this.sqlInputs == null){ this.sqlInputs = new ArrayList<>(); // depends on control dependency: [if], data = [none] } if(sqlInputToAddParam == null){ return; // depends on control dependency: [if], data = [none] } this.sqlInputs.add(sqlInputToAddParam); } }