code
stringlengths
130
281k
code_dependency
stringlengths
182
306k
public class class_name { @Override public C.Sequence<T> prepend(C.Sequence<? extends T> seq) { if (seq instanceof C.List) { return prependList((C.List<T>) seq); } if (isLazy()) { return new CompositeSeq<T>(seq, this); } return unLazyPrepend(seq); } }
public class class_name { @Override public C.Sequence<T> prepend(C.Sequence<? extends T> seq) { if (seq instanceof C.List) { return prependList((C.List<T>) seq); // depends on control dependency: [if], data = [none] } if (isLazy()) { return new CompositeSeq<T>(seq, this); // depends on control dependency: [if], data = [none] } return unLazyPrepend(seq); } }
public class class_name { public boolean register(final Object pProvider) { Iterator<Class<?>> categories = compatibleCategories(pProvider); boolean registered = false; while (categories.hasNext()) { Class<?> category = categories.next(); if (registerImpl(pProvider, category) && !registered) { registered = true; } } return registered; } }
public class class_name { public boolean register(final Object pProvider) { Iterator<Class<?>> categories = compatibleCategories(pProvider); boolean registered = false; while (categories.hasNext()) { Class<?> category = categories.next(); if (registerImpl(pProvider, category) && !registered) { registered = true; // depends on control dependency: [if], data = [none] } } return registered; } }
public class class_name { private void fillOsubMatrix( Coordinate[] controlPoints, GeneralMatrix L ) { int controlPointsNum = controlPoints.length; for( int i = controlPointsNum; i < (controlPointsNum + 3); i++ ) { for( int j = controlPointsNum; j < (controlPointsNum + 3); j++ ) { L.setElement(i, j, 0); } } } }
public class class_name { private void fillOsubMatrix( Coordinate[] controlPoints, GeneralMatrix L ) { int controlPointsNum = controlPoints.length; for( int i = controlPointsNum; i < (controlPointsNum + 3); i++ ) { for( int j = controlPointsNum; j < (controlPointsNum + 3); j++ ) { L.setElement(i, j, 0); // depends on control dependency: [for], data = [j] } } } }
public class class_name { public void marshall(ListStreamProcessorsRequest listStreamProcessorsRequest, ProtocolMarshaller protocolMarshaller) { if (listStreamProcessorsRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(listStreamProcessorsRequest.getNextToken(), NEXTTOKEN_BINDING); protocolMarshaller.marshall(listStreamProcessorsRequest.getMaxResults(), MAXRESULTS_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(ListStreamProcessorsRequest listStreamProcessorsRequest, ProtocolMarshaller protocolMarshaller) { if (listStreamProcessorsRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(listStreamProcessorsRequest.getNextToken(), NEXTTOKEN_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(listStreamProcessorsRequest.getMaxResults(), MAXRESULTS_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public final <V, C> Flux<T> distinct( Function<? super T, ? extends V> keySelector, Supplier<C> distinctStoreSupplier, BiPredicate<C, V> distinctPredicate, Consumer<C> cleanup) { if (this instanceof Fuseable) { return onAssembly(new FluxDistinctFuseable<>(this, keySelector, distinctStoreSupplier, distinctPredicate, cleanup)); } return onAssembly(new FluxDistinct<>(this, keySelector, distinctStoreSupplier, distinctPredicate, cleanup)); } }
public class class_name { public final <V, C> Flux<T> distinct( Function<? super T, ? extends V> keySelector, Supplier<C> distinctStoreSupplier, BiPredicate<C, V> distinctPredicate, Consumer<C> cleanup) { if (this instanceof Fuseable) { return onAssembly(new FluxDistinctFuseable<>(this, keySelector, distinctStoreSupplier, distinctPredicate, cleanup)); // depends on control dependency: [if], data = [none] } return onAssembly(new FluxDistinct<>(this, keySelector, distinctStoreSupplier, distinctPredicate, cleanup)); } }
public class class_name { public Set<Permission> authorize(AuthenticatedUser user, IResource resource) { if (user.isSuper()) return Permission.ALL; UntypedResultSet result; try { ResultMessage.Rows rows = authorizeStatement.execute(QueryState.forInternalCalls(), QueryOptions.forInternalCalls(ConsistencyLevel.LOCAL_ONE, Lists.newArrayList(ByteBufferUtil.bytes(user.getName()), ByteBufferUtil.bytes(resource.getName())))); result = UntypedResultSet.create(rows.result); } catch (RequestValidationException e) { throw new AssertionError(e); // not supposed to happen } catch (RequestExecutionException e) { logger.warn("CassandraAuthorizer failed to authorize {} for {}", user, resource); return Permission.NONE; } if (result.isEmpty() || !result.one().has(PERMISSIONS)) return Permission.NONE; Set<Permission> permissions = EnumSet.noneOf(Permission.class); for (String perm : result.one().getSet(PERMISSIONS, UTF8Type.instance)) permissions.add(Permission.valueOf(perm)); return permissions; } }
public class class_name { public Set<Permission> authorize(AuthenticatedUser user, IResource resource) { if (user.isSuper()) return Permission.ALL; UntypedResultSet result; try { ResultMessage.Rows rows = authorizeStatement.execute(QueryState.forInternalCalls(), QueryOptions.forInternalCalls(ConsistencyLevel.LOCAL_ONE, Lists.newArrayList(ByteBufferUtil.bytes(user.getName()), ByteBufferUtil.bytes(resource.getName())))); result = UntypedResultSet.create(rows.result); // depends on control dependency: [try], data = [none] } catch (RequestValidationException e) { throw new AssertionError(e); // not supposed to happen } // depends on control dependency: [catch], data = [none] catch (RequestExecutionException e) { logger.warn("CassandraAuthorizer failed to authorize {} for {}", user, resource); return Permission.NONE; } // depends on control dependency: [catch], data = [none] if (result.isEmpty() || !result.one().has(PERMISSIONS)) return Permission.NONE; Set<Permission> permissions = EnumSet.noneOf(Permission.class); for (String perm : result.one().getSet(PERMISSIONS, UTF8Type.instance)) permissions.add(Permission.valueOf(perm)); return permissions; } }
public class class_name { synchronized static TraceSpecification setTraceSpec(String spec) { // If logger is used & configured by logger properties, // we're done as far as trace string processing is concerned if (WsLogManager.isConfiguredByLoggingProperties()) { return null; } // If the specified string is null, or it is equal to a string, // or the sensitive flag has not been toggled, // we've already parsed, skip it. if ((spec == null || spec.equals(traceString)) && Tr.activeTraceSpec.isSensitiveTraceSuppressed() == suppressSensitiveTrace) { return null; } traceString = spec; // Parse the trace specification string, this will gather // exceptions that occur for different elements of the string TraceSpecification newTs = new TraceSpecification(spec, safeLevelsIndex, suppressSensitiveTrace); TraceSpecificationException tex = newTs.getExceptions(); if (tex != null) { do { tex.warning(loggingConfig.get() != null); tex = tex.getPreviousException(); } while (tex != null); } Tr.setTraceSpec(newTs); // Return the new/updated TraceSpecification to the caller. The caller can // then determine whether or not all elements of the TraceSpecification // were known to the system or not. return newTs; } }
public class class_name { synchronized static TraceSpecification setTraceSpec(String spec) { // If logger is used & configured by logger properties, // we're done as far as trace string processing is concerned if (WsLogManager.isConfiguredByLoggingProperties()) { return null; // depends on control dependency: [if], data = [none] } // If the specified string is null, or it is equal to a string, // or the sensitive flag has not been toggled, // we've already parsed, skip it. if ((spec == null || spec.equals(traceString)) && Tr.activeTraceSpec.isSensitiveTraceSuppressed() == suppressSensitiveTrace) { return null; // depends on control dependency: [if], data = [none] } traceString = spec; // Parse the trace specification string, this will gather // exceptions that occur for different elements of the string TraceSpecification newTs = new TraceSpecification(spec, safeLevelsIndex, suppressSensitiveTrace); TraceSpecificationException tex = newTs.getExceptions(); if (tex != null) { do { tex.warning(loggingConfig.get() != null); tex = tex.getPreviousException(); } while (tex != null); } Tr.setTraceSpec(newTs); // Return the new/updated TraceSpecification to the caller. The caller can // then determine whether or not all elements of the TraceSpecification // were known to the system or not. return newTs; } }
public class class_name { public Observable<ServiceResponse<Page<SharedAccessSignatureAuthorizationRuleInner>>> listKeysNextWithServiceResponseAsync(final String nextPageLink) { return listKeysNextSinglePageAsync(nextPageLink) .concatMap(new Func1<ServiceResponse<Page<SharedAccessSignatureAuthorizationRuleInner>>, Observable<ServiceResponse<Page<SharedAccessSignatureAuthorizationRuleInner>>>>() { @Override public Observable<ServiceResponse<Page<SharedAccessSignatureAuthorizationRuleInner>>> call(ServiceResponse<Page<SharedAccessSignatureAuthorizationRuleInner>> page) { String nextPageLink = page.body().nextPageLink(); if (nextPageLink == null) { return Observable.just(page); } return Observable.just(page).concatWith(listKeysNextWithServiceResponseAsync(nextPageLink)); } }); } }
public class class_name { public Observable<ServiceResponse<Page<SharedAccessSignatureAuthorizationRuleInner>>> listKeysNextWithServiceResponseAsync(final String nextPageLink) { return listKeysNextSinglePageAsync(nextPageLink) .concatMap(new Func1<ServiceResponse<Page<SharedAccessSignatureAuthorizationRuleInner>>, Observable<ServiceResponse<Page<SharedAccessSignatureAuthorizationRuleInner>>>>() { @Override public Observable<ServiceResponse<Page<SharedAccessSignatureAuthorizationRuleInner>>> call(ServiceResponse<Page<SharedAccessSignatureAuthorizationRuleInner>> page) { String nextPageLink = page.body().nextPageLink(); if (nextPageLink == null) { return Observable.just(page); // depends on control dependency: [if], data = [none] } return Observable.just(page).concatWith(listKeysNextWithServiceResponseAsync(nextPageLink)); } }); } }
public class class_name { public Optional<ImmutableSubstitution<ImmutableTerm>> computeUnidirectionalSubstitution(ImmutableTerm sourceTerm, ImmutableTerm targetTerm) { /* * Variable */ if (sourceTerm instanceof Variable) { Variable sourceVariable = (Variable) sourceTerm; // Constraint if ((!sourceVariable.equals(targetTerm)) && (targetTerm instanceof ImmutableFunctionalTerm) && ((ImmutableFunctionalTerm)targetTerm).getVariables().contains(sourceVariable)) { return Optional.empty(); } ImmutableSubstitution<ImmutableTerm> substitution = substitutionFactory.getSubstitution( ImmutableMap.of(sourceVariable, targetTerm)); return Optional.of(substitution); } /* * Functional term */ else if (sourceTerm instanceof ImmutableFunctionalTerm) { if (targetTerm instanceof ImmutableFunctionalTerm) { return computeUnidirectionalSubstitutionOfFunctionalTerms((ImmutableFunctionalTerm) sourceTerm, (ImmutableFunctionalTerm) targetTerm); } else { return Optional.empty(); } } /* * Constant */ else if(sourceTerm.equals(targetTerm)) { return Optional.of(substitutionFactory.getSubstitution()); } else { return Optional.empty(); } } }
public class class_name { public Optional<ImmutableSubstitution<ImmutableTerm>> computeUnidirectionalSubstitution(ImmutableTerm sourceTerm, ImmutableTerm targetTerm) { /* * Variable */ if (sourceTerm instanceof Variable) { Variable sourceVariable = (Variable) sourceTerm; // Constraint if ((!sourceVariable.equals(targetTerm)) && (targetTerm instanceof ImmutableFunctionalTerm) && ((ImmutableFunctionalTerm)targetTerm).getVariables().contains(sourceVariable)) { return Optional.empty(); // depends on control dependency: [if], data = [none] } ImmutableSubstitution<ImmutableTerm> substitution = substitutionFactory.getSubstitution( ImmutableMap.of(sourceVariable, targetTerm)); return Optional.of(substitution); // depends on control dependency: [if], data = [none] } /* * Functional term */ else if (sourceTerm instanceof ImmutableFunctionalTerm) { if (targetTerm instanceof ImmutableFunctionalTerm) { return computeUnidirectionalSubstitutionOfFunctionalTerms((ImmutableFunctionalTerm) sourceTerm, (ImmutableFunctionalTerm) targetTerm); // depends on control dependency: [if], data = [none] } else { return Optional.empty(); // depends on control dependency: [if], data = [none] } } /* * Constant */ else if(sourceTerm.equals(targetTerm)) { return Optional.of(substitutionFactory.getSubstitution()); // depends on control dependency: [if], data = [none] } else { return Optional.empty(); // depends on control dependency: [if], data = [none] } } }
public class class_name { private Connector findSipConnector(String ipAddress, int port, String transport) { Connector connectorToRemove = null; for (Connector connector : connectors) { final ProtocolHandler protocolHandler = connector.getProtocolHandler(); if(protocolHandler instanceof SipProtocolHandler) { final SipProtocolHandler sipProtocolHandler = (SipProtocolHandler) protocolHandler; if(sipProtocolHandler.getIpAddress().equals(ipAddress) && sipProtocolHandler.getPort() == port && sipProtocolHandler.getSignalingTransport().equalsIgnoreCase(transport)) { // connector.destroy(); connectorToRemove = connector; break; } } } return connectorToRemove; } }
public class class_name { private Connector findSipConnector(String ipAddress, int port, String transport) { Connector connectorToRemove = null; for (Connector connector : connectors) { final ProtocolHandler protocolHandler = connector.getProtocolHandler(); if(protocolHandler instanceof SipProtocolHandler) { final SipProtocolHandler sipProtocolHandler = (SipProtocolHandler) protocolHandler; if(sipProtocolHandler.getIpAddress().equals(ipAddress) && sipProtocolHandler.getPort() == port && sipProtocolHandler.getSignalingTransport().equalsIgnoreCase(transport)) { // connector.destroy(); connectorToRemove = connector; // depends on control dependency: [if], data = [none] break; } } } return connectorToRemove; } }
public class class_name { boolean doScoring(Frame fTrain, Frame fValid, Key<Job> jobKey, int iteration, boolean finalScoring) { final long now = System.currentTimeMillis(); final double time_since_last_iter = now - _timeLastIterationEnter; updateTiming(jobKey); _timeLastIterationEnter = now; epoch_counter = (double)model_info().get_processed_total()/training_rows; boolean keep_running; // Auto-tuning // if multi-node and auto-tuning and at least 10 ms for communication and per-iteration overhead (to avoid doing thins on multi-JVM on same node), // then adjust the auto-tuning parameter 'actual_train_samples_per_iteration' such that the targeted ratio of comm to comp is achieved if (get_params()._train_samples_per_iteration == -2 && iteration > 1) { Log.debug("Auto-tuning train_samples_per_iteration."); if (time_for_iteration_overhead_ms > 10) { Log.debug(" Time taken for per-iteration comm overhead: " + PrettyPrint.msecs(time_for_iteration_overhead_ms, true)); Log.debug(" Time taken for Map/Reduce iteration: " + PrettyPrint.msecs((long) time_since_last_iter, true)); final double comm_to_work_ratio = time_for_iteration_overhead_ms / time_since_last_iter; Log.debug(" Ratio of per-iteration comm overhead to computation: " + String.format("%.5f", comm_to_work_ratio)); Log.debug(" target_comm_to_work: " + get_params()._target_ratio_comm_to_comp); Log.debug("Old value of train_samples_per_iteration: " + actual_train_samples_per_iteration); double correction = get_params()._target_ratio_comm_to_comp / comm_to_work_ratio; correction = Math.max(0.5,Math.min(2, correction)); //it's ok to train up to 2x more training rows per iteration, but not fewer than half. if (Math.abs(correction) < 0.8 || Math.abs(correction) > 1.2) { //don't correct unless it's significant (avoid slow drift) actual_train_samples_per_iteration /= correction; actual_train_samples_per_iteration = Math.max(1, actual_train_samples_per_iteration); Log.debug("New value of train_samples_per_iteration: " + actual_train_samples_per_iteration); } else { Log.debug("Keeping value of train_samples_per_iteration the same (would deviate too little from previous value): " + actual_train_samples_per_iteration); } } else { Log.debug("Iteration overhead is faster than 10 ms. Not modifying train_samples_per_iteration: " + actual_train_samples_per_iteration); } } keep_running = (epoch_counter < get_params()._epochs) && !stopped_early; final long sinceLastScore = now -_timeLastScoreStart; // this is potentially slow - only do every so often if( !keep_running || get_params()._score_each_iteration || (sinceLastScore > get_params()._score_interval *1000 //don't score too often &&(double)(_timeLastScoreEnd-_timeLastScoreStart)/sinceLastScore < get_params()._score_duty_cycle) ) { //duty cycle Log.info(logNvidiaStats()); jobKey.get().update(0,"Scoring on " + fTrain.numRows() + " training samples" +(fValid != null ? (", " + fValid.numRows() + " validation samples") : "")); final boolean printme = !get_params()._quiet_mode; _timeLastScoreStart = System.currentTimeMillis(); DeepWaterScoringInfo scoringInfo = new DeepWaterScoringInfo(); scoringInfo.time_stamp_ms = _timeLastScoreStart; updateTiming(jobKey); scoringInfo.total_training_time_ms = total_training_time_ms; scoringInfo.total_scoring_time_ms = total_scoring_time_ms; scoringInfo.total_setup_time_ms = total_setup_time_ms; scoringInfo.epoch_counter = epoch_counter; scoringInfo.iterations = iterations; scoringInfo.training_samples = (double)model_info().get_processed_total(); scoringInfo.validation = fValid != null; scoringInfo.score_training_samples = fTrain.numRows(); scoringInfo.score_validation_samples = get_params()._score_validation_samples; scoringInfo.is_classification = _output.isClassifier(); scoringInfo.is_autoencoder = _output.isAutoencoder(); if (printme) Log.info("Scoring the model."); // compute errors final String m = model_info().toString(); if (m.length() > 0) Log.info(m); // For GainsLift and Huber, we need the full predictions to compute the model metrics boolean needPreds = _output.nclasses() == 2 /* gains/lift table requires predictions */ || get_params()._distribution==DistributionFamily.huber; // Scoring on training data ModelMetrics mtrain; Frame preds = null; if (needPreds) { // allocate predictions since they are needed preds = score(fTrain); mtrain = ModelMetrics.getFromDKV(this, fTrain); } else { // no need to allocate predictions ModelMetrics.MetricBuilder mb = scoreMetrics(fTrain); mtrain = mb.makeModelMetrics(this,fTrain,fTrain,null); } if (preds!=null) preds.remove(); _output._training_metrics = mtrain; scoringInfo.scored_train = new ScoreKeeper(mtrain); ModelMetricsSupervised mm1 = (ModelMetricsSupervised)mtrain; if (mm1 instanceof ModelMetricsBinomial) { ModelMetricsBinomial mm = (ModelMetricsBinomial)(mm1); scoringInfo.training_AUC = mm._auc; } if (fTrain.numRows() != training_rows) { _output._training_metrics._description = "Metrics reported on temporary training frame with " + fTrain.numRows() + " samples"; } else if (fTrain._key != null && fTrain._key.toString().contains("chunks")){ _output._training_metrics._description = "Metrics reported on temporary (load-balanced) training frame"; } else { _output._training_metrics._description = "Metrics reported on full training frame"; } // Scoring on validation data ModelMetrics mvalid; if (fValid != null) { preds = null; if (needPreds) { // allocate predictions since they are needed preds = score(fValid); mvalid = ModelMetrics.getFromDKV(this, fValid); } else { // no need to allocate predictions ModelMetrics.MetricBuilder mb = scoreMetrics(fValid); mvalid = mb.makeModelMetrics(this, fValid, fValid,null); } if (preds!=null) preds.remove(); _output._validation_metrics = mvalid; scoringInfo.scored_valid = new ScoreKeeper(mvalid); if (mvalid != null) { if (mvalid instanceof ModelMetricsBinomial) { ModelMetricsBinomial mm = (ModelMetricsBinomial) mvalid; scoringInfo.validation_AUC = mm._auc; } if (fValid.numRows() != validation_rows) { _output._validation_metrics._description = "Metrics reported on temporary validation frame with " + fValid.numRows() + " samples"; } else if (fValid._key != null && fValid._key.toString().contains("chunks")){ _output._validation_metrics._description = "Metrics reported on temporary (load-balanced) validation frame"; } else { _output._validation_metrics._description = "Metrics reported on full validation frame"; } } } // if (get_params()._variable_importances) { // if (!get_params()._quiet_mode) Log.info("Computing variable importances."); // throw H2O.unimpl(); // final float[] vi = model_info().computeVariableImportances(); // scoringInfo.variable_importances = new VarImp(vi, Arrays.copyOfRange(model_info().data_info().coefNames(), 0, vi.length)); // } _timeLastScoreEnd = System.currentTimeMillis(); long scoringTime = _timeLastScoreEnd - _timeLastScoreStart; total_scoring_time_ms += scoringTime; updateTiming(jobKey); // update the scoringInfo object to report proper speed scoringInfo.total_training_time_ms = total_training_time_ms; scoringInfo.total_scoring_time_ms = total_scoring_time_ms; scoringInfo.this_scoring_time_ms = scoringTime; // enlarge the error array by one, push latest score back if (this.scoringInfo == null) { this.scoringInfo = new DeepWaterScoringInfo[]{scoringInfo}; } else { DeepWaterScoringInfo[] err2 = new DeepWaterScoringInfo[this.scoringInfo.length + 1]; System.arraycopy(this.scoringInfo, 0, err2, 0, this.scoringInfo.length); err2[err2.length - 1] = scoringInfo; this.scoringInfo = err2; } _output.errors = last_scored(); _output._scoring_history = DeepWaterScoringInfo.createScoringHistoryTable(this.scoringInfo, (null != get_params()._valid), false, _output.getModelCategory(), _output.isAutoencoder()); _output._variable_importances = calcVarImp(last_scored().variable_importances); _output._model_summary = model_info.createSummaryTable(); // always keep a copy of the best model so far (based on the following criterion) if (!finalScoring) { if (actual_best_model_key != null && get_params()._overwrite_with_best_model && ( // if we have a best_model in DKV, then compare against its error() (unless it's a different model as judged by the network size) (DKV.get(actual_best_model_key) != null && !(loss() >= DKV.get(actual_best_model_key).<DeepWaterModel>get().loss() ) ) || // otherwise, compare against our own _bestError (DKV.get(actual_best_model_key) == null && loss() < _bestLoss) ) ) { _bestLoss = loss(); model_info.nativeToJava(); putMeAsBestModel(actual_best_model_key); } // print the freshly scored model to ASCII if (keep_running && printme) Log.info(toString()); if (ScoreKeeper.stopEarly(ScoringInfo.scoreKeepers(scoring_history()), get_params()._stopping_rounds, _output.isClassifier(), get_params()._stopping_metric, get_params()._stopping_tolerance, "model's last", true )) { Log.info("Convergence detected based on simple moving average of the loss function for the past " + get_params()._stopping_rounds + " scoring events. Model building completed."); stopped_early = true; } if (printme) Log.info("Time taken for scoring and diagnostics: " + PrettyPrint.msecs(scoringInfo.this_scoring_time_ms, true)); } } if (stopped_early) { // pretend as if we finished all epochs to get the progress bar pretty (especially for N-fold and grid-search) ((Job) DKV.getGet(jobKey)).update((long) (get_params()._epochs * training_rows)); update(jobKey); return false; } progressUpdate(jobKey, keep_running); //update(jobKey); return keep_running; } }
public class class_name { boolean doScoring(Frame fTrain, Frame fValid, Key<Job> jobKey, int iteration, boolean finalScoring) { final long now = System.currentTimeMillis(); final double time_since_last_iter = now - _timeLastIterationEnter; updateTiming(jobKey); _timeLastIterationEnter = now; epoch_counter = (double)model_info().get_processed_total()/training_rows; boolean keep_running; // Auto-tuning // if multi-node and auto-tuning and at least 10 ms for communication and per-iteration overhead (to avoid doing thins on multi-JVM on same node), // then adjust the auto-tuning parameter 'actual_train_samples_per_iteration' such that the targeted ratio of comm to comp is achieved if (get_params()._train_samples_per_iteration == -2 && iteration > 1) { Log.debug("Auto-tuning train_samples_per_iteration."); // depends on control dependency: [if], data = [none] if (time_for_iteration_overhead_ms > 10) { Log.debug(" Time taken for per-iteration comm overhead: " + PrettyPrint.msecs(time_for_iteration_overhead_ms, true)); // depends on control dependency: [if], data = [(time_for_iteration_overhead_ms] Log.debug(" Time taken for Map/Reduce iteration: " + PrettyPrint.msecs((long) time_since_last_iter, true)); // depends on control dependency: [if], data = [none] final double comm_to_work_ratio = time_for_iteration_overhead_ms / time_since_last_iter; Log.debug(" Ratio of per-iteration comm overhead to computation: " + String.format("%.5f", comm_to_work_ratio)); // depends on control dependency: [if], data = [none] Log.debug(" target_comm_to_work: " + get_params()._target_ratio_comm_to_comp); // depends on control dependency: [if], data = [none] Log.debug("Old value of train_samples_per_iteration: " + actual_train_samples_per_iteration); // depends on control dependency: [if], data = [none] double correction = get_params()._target_ratio_comm_to_comp / comm_to_work_ratio; correction = Math.max(0.5,Math.min(2, correction)); //it's ok to train up to 2x more training rows per iteration, but not fewer than half. // depends on control dependency: [if], data = [none] if (Math.abs(correction) < 0.8 || Math.abs(correction) > 1.2) { //don't correct unless it's significant (avoid slow drift) actual_train_samples_per_iteration /= correction; // depends on control dependency: [if], data = [none] actual_train_samples_per_iteration = Math.max(1, actual_train_samples_per_iteration); // depends on control dependency: [if], data = [none] Log.debug("New value of train_samples_per_iteration: " + actual_train_samples_per_iteration); // depends on control dependency: [if], data = [none] } else { Log.debug("Keeping value of train_samples_per_iteration the same (would deviate too little from previous value): " + actual_train_samples_per_iteration); // depends on control dependency: [if], data = [none] } } else { Log.debug("Iteration overhead is faster than 10 ms. Not modifying train_samples_per_iteration: " + actual_train_samples_per_iteration); // depends on control dependency: [if], data = [none] } } keep_running = (epoch_counter < get_params()._epochs) && !stopped_early; final long sinceLastScore = now -_timeLastScoreStart; // this is potentially slow - only do every so often if( !keep_running || get_params()._score_each_iteration || (sinceLastScore > get_params()._score_interval *1000 //don't score too often &&(double)(_timeLastScoreEnd-_timeLastScoreStart)/sinceLastScore < get_params()._score_duty_cycle) ) { //duty cycle Log.info(logNvidiaStats()); // depends on control dependency: [if], data = [none] jobKey.get().update(0,"Scoring on " + fTrain.numRows() + " training samples" +(fValid != null ? (", " + fValid.numRows() + " validation samples") : "")); // depends on control dependency: [if], data = [none] final boolean printme = !get_params()._quiet_mode; _timeLastScoreStart = System.currentTimeMillis(); // depends on control dependency: [if], data = [none] DeepWaterScoringInfo scoringInfo = new DeepWaterScoringInfo(); scoringInfo.time_stamp_ms = _timeLastScoreStart; // depends on control dependency: [if], data = [none] updateTiming(jobKey); // depends on control dependency: [if], data = [none] scoringInfo.total_training_time_ms = total_training_time_ms; // depends on control dependency: [if], data = [none] scoringInfo.total_scoring_time_ms = total_scoring_time_ms; // depends on control dependency: [if], data = [none] scoringInfo.total_setup_time_ms = total_setup_time_ms; // depends on control dependency: [if], data = [none] scoringInfo.epoch_counter = epoch_counter; // depends on control dependency: [if], data = [none] scoringInfo.iterations = iterations; // depends on control dependency: [if], data = [none] scoringInfo.training_samples = (double)model_info().get_processed_total(); // depends on control dependency: [if], data = [none] scoringInfo.validation = fValid != null; // depends on control dependency: [if], data = [none] scoringInfo.score_training_samples = fTrain.numRows(); // depends on control dependency: [if], data = [none] scoringInfo.score_validation_samples = get_params()._score_validation_samples; // depends on control dependency: [if], data = [none] scoringInfo.is_classification = _output.isClassifier(); // depends on control dependency: [if], data = [none] scoringInfo.is_autoencoder = _output.isAutoencoder(); // depends on control dependency: [if], data = [none] if (printme) Log.info("Scoring the model."); // compute errors final String m = model_info().toString(); if (m.length() > 0) Log.info(m); // For GainsLift and Huber, we need the full predictions to compute the model metrics boolean needPreds = _output.nclasses() == 2 /* gains/lift table requires predictions */ || get_params()._distribution==DistributionFamily.huber; // Scoring on training data ModelMetrics mtrain; Frame preds = null; if (needPreds) { // allocate predictions since they are needed preds = score(fTrain); // depends on control dependency: [if], data = [none] mtrain = ModelMetrics.getFromDKV(this, fTrain); // depends on control dependency: [if], data = [none] } else { // no need to allocate predictions ModelMetrics.MetricBuilder mb = scoreMetrics(fTrain); mtrain = mb.makeModelMetrics(this,fTrain,fTrain,null); // depends on control dependency: [if], data = [none] } if (preds!=null) preds.remove(); _output._training_metrics = mtrain; // depends on control dependency: [if], data = [none] scoringInfo.scored_train = new ScoreKeeper(mtrain); // depends on control dependency: [if], data = [none] ModelMetricsSupervised mm1 = (ModelMetricsSupervised)mtrain; if (mm1 instanceof ModelMetricsBinomial) { ModelMetricsBinomial mm = (ModelMetricsBinomial)(mm1); scoringInfo.training_AUC = mm._auc; // depends on control dependency: [if], data = [none] } if (fTrain.numRows() != training_rows) { _output._training_metrics._description = "Metrics reported on temporary training frame with " + fTrain.numRows() + " samples"; // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] } else if (fTrain._key != null && fTrain._key.toString().contains("chunks")){ _output._training_metrics._description = "Metrics reported on temporary (load-balanced) training frame"; // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] } else { _output._training_metrics._description = "Metrics reported on full training frame"; // depends on control dependency: [if], data = [none] } // Scoring on validation data ModelMetrics mvalid; if (fValid != null) { preds = null; // depends on control dependency: [if], data = [none] if (needPreds) { // allocate predictions since they are needed preds = score(fValid); // depends on control dependency: [if], data = [none] mvalid = ModelMetrics.getFromDKV(this, fValid); // depends on control dependency: [if], data = [none] } else { // no need to allocate predictions ModelMetrics.MetricBuilder mb = scoreMetrics(fValid); mvalid = mb.makeModelMetrics(this, fValid, fValid,null); // depends on control dependency: [if], data = [none] } if (preds!=null) preds.remove(); _output._validation_metrics = mvalid; // depends on control dependency: [if], data = [none] scoringInfo.scored_valid = new ScoreKeeper(mvalid); // depends on control dependency: [if], data = [none] if (mvalid != null) { if (mvalid instanceof ModelMetricsBinomial) { ModelMetricsBinomial mm = (ModelMetricsBinomial) mvalid; scoringInfo.validation_AUC = mm._auc; // depends on control dependency: [if], data = [none] } if (fValid.numRows() != validation_rows) { _output._validation_metrics._description = "Metrics reported on temporary validation frame with " + fValid.numRows() + " samples"; // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] } else if (fValid._key != null && fValid._key.toString().contains("chunks")){ _output._validation_metrics._description = "Metrics reported on temporary (load-balanced) validation frame"; // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] } else { _output._validation_metrics._description = "Metrics reported on full validation frame"; // depends on control dependency: [if], data = [none] } } } // if (get_params()._variable_importances) { // if (!get_params()._quiet_mode) Log.info("Computing variable importances."); // throw H2O.unimpl(); // final float[] vi = model_info().computeVariableImportances(); // scoringInfo.variable_importances = new VarImp(vi, Arrays.copyOfRange(model_info().data_info().coefNames(), 0, vi.length)); // } _timeLastScoreEnd = System.currentTimeMillis(); // depends on control dependency: [if], data = [none] long scoringTime = _timeLastScoreEnd - _timeLastScoreStart; total_scoring_time_ms += scoringTime; // depends on control dependency: [if], data = [none] updateTiming(jobKey); // depends on control dependency: [if], data = [none] // update the scoringInfo object to report proper speed scoringInfo.total_training_time_ms = total_training_time_ms; // depends on control dependency: [if], data = [none] scoringInfo.total_scoring_time_ms = total_scoring_time_ms; // depends on control dependency: [if], data = [none] scoringInfo.this_scoring_time_ms = scoringTime; // depends on control dependency: [if], data = [none] // enlarge the error array by one, push latest score back if (this.scoringInfo == null) { this.scoringInfo = new DeepWaterScoringInfo[]{scoringInfo}; // depends on control dependency: [if], data = [none] } else { DeepWaterScoringInfo[] err2 = new DeepWaterScoringInfo[this.scoringInfo.length + 1]; System.arraycopy(this.scoringInfo, 0, err2, 0, this.scoringInfo.length); // depends on control dependency: [if], data = [(this.scoringInfo] err2[err2.length - 1] = scoringInfo; // depends on control dependency: [if], data = [none] this.scoringInfo = err2; // depends on control dependency: [if], data = [none] } _output.errors = last_scored(); // depends on control dependency: [if], data = [none] _output._scoring_history = DeepWaterScoringInfo.createScoringHistoryTable(this.scoringInfo, (null != get_params()._valid), false, _output.getModelCategory(), _output.isAutoencoder()); // depends on control dependency: [if], data = [none] _output._variable_importances = calcVarImp(last_scored().variable_importances); // depends on control dependency: [if], data = [none] _output._model_summary = model_info.createSummaryTable(); // depends on control dependency: [if], data = [none] // always keep a copy of the best model so far (based on the following criterion) if (!finalScoring) { if (actual_best_model_key != null && get_params()._overwrite_with_best_model && ( // if we have a best_model in DKV, then compare against its error() (unless it's a different model as judged by the network size) (DKV.get(actual_best_model_key) != null && !(loss() >= DKV.get(actual_best_model_key).<DeepWaterModel>get().loss() ) ) || // otherwise, compare against our own _bestError (DKV.get(actual_best_model_key) == null && loss() < _bestLoss) ) ) { _bestLoss = loss(); // depends on control dependency: [if], data = [] model_info.nativeToJava(); // depends on control dependency: [if], data = [] putMeAsBestModel(actual_best_model_key); // depends on control dependency: [if], data = [] } // print the freshly scored model to ASCII if (keep_running && printme) Log.info(toString()); if (ScoreKeeper.stopEarly(ScoringInfo.scoreKeepers(scoring_history()), get_params()._stopping_rounds, _output.isClassifier(), get_params()._stopping_metric, get_params()._stopping_tolerance, "model's last", true )) { Log.info("Convergence detected based on simple moving average of the loss function for the past " + get_params()._stopping_rounds + " scoring events. Model building completed."); // depends on control dependency: [if], data = [] stopped_early = true; // depends on control dependency: [if], data = [] } if (printme) Log.info("Time taken for scoring and diagnostics: " + PrettyPrint.msecs(scoringInfo.this_scoring_time_ms, true)); } } if (stopped_early) { // pretend as if we finished all epochs to get the progress bar pretty (especially for N-fold and grid-search) ((Job) DKV.getGet(jobKey)).update((long) (get_params()._epochs * training_rows)); // depends on control dependency: [if], data = [none] update(jobKey); // depends on control dependency: [if], data = [none] return false; // depends on control dependency: [if], data = [none] } progressUpdate(jobKey, keep_running); //update(jobKey); return keep_running; } }
public class class_name { static SpatialOperation parseSpatialOperation(String operation) { if (operation == null) { throw new IndexException("Operation is required"); } else if (operation.equalsIgnoreCase("is_within")) { return SpatialOperation.IsWithin; } else if (operation.equalsIgnoreCase("contains")) { return SpatialOperation.Contains; } else if (operation.equalsIgnoreCase("intersects")) { return SpatialOperation.Intersects; } else { throw new IndexException("Operation is invalid: {}", operation); } } }
public class class_name { static SpatialOperation parseSpatialOperation(String operation) { if (operation == null) { throw new IndexException("Operation is required"); } else if (operation.equalsIgnoreCase("is_within")) { return SpatialOperation.IsWithin; // depends on control dependency: [if], data = [none] } else if (operation.equalsIgnoreCase("contains")) { return SpatialOperation.Contains; // depends on control dependency: [if], data = [none] } else if (operation.equalsIgnoreCase("intersects")) { return SpatialOperation.Intersects; // depends on control dependency: [if], data = [none] } else { throw new IndexException("Operation is invalid: {}", operation); } } }
public class class_name { @LinearTime public static <K> BinaryArrayBulkInsertWeakHeap<K> heapify(K[] array, Comparator<? super K> comparator) { if (array == null) { throw new IllegalArgumentException("Array cannot be null"); } if (array.length == 0) { return new BinaryArrayBulkInsertWeakHeap<K>(comparator); } BinaryArrayBulkInsertWeakHeap<K> h = new BinaryArrayBulkInsertWeakHeap<K>(comparator, array.length); System.arraycopy(array, 0, h.array, 0, array.length); h.size = array.length; for (int j = h.size - 1; j > 0; j--) { h.joinWithComparator(h.dancestor(j), j); } return h; } }
public class class_name { @LinearTime public static <K> BinaryArrayBulkInsertWeakHeap<K> heapify(K[] array, Comparator<? super K> comparator) { if (array == null) { throw new IllegalArgumentException("Array cannot be null"); } if (array.length == 0) { return new BinaryArrayBulkInsertWeakHeap<K>(comparator); // depends on control dependency: [if], data = [none] } BinaryArrayBulkInsertWeakHeap<K> h = new BinaryArrayBulkInsertWeakHeap<K>(comparator, array.length); System.arraycopy(array, 0, h.array, 0, array.length); h.size = array.length; for (int j = h.size - 1; j > 0; j--) { h.joinWithComparator(h.dancestor(j), j); // depends on control dependency: [for], data = [j] } return h; } }
public class class_name { public static LocalDate parse(String date) { if (StringUtils.isEmpty(date)) return null; try { return parse_(date); } catch (DateTimeParseException x) { return InvalidValues.createInvalidLocalDate(date); } } }
public class class_name { public static LocalDate parse(String date) { if (StringUtils.isEmpty(date)) return null; try { return parse_(date); // depends on control dependency: [try], data = [none] } catch (DateTimeParseException x) { return InvalidValues.createInvalidLocalDate(date); } // depends on control dependency: [catch], data = [none] } }
public class class_name { @SuppressWarnings("unchecked") public V get(final int key) { V value = null; @DoNotSub final int setNumber = Hashing.hash(key, mask); @DoNotSub final int setBeginIndex = setNumber << setSizeShift; for (@DoNotSub int i = setBeginIndex, setEndIndex = setBeginIndex + setSize; i < setEndIndex; i++) { if (null == values[i]) { break; } if (key == keys[i]) { value = (V)values[i]; break; } } if (null == value) { cacheMisses++; } else { cacheHits++; } return value; } }
public class class_name { @SuppressWarnings("unchecked") public V get(final int key) { V value = null; @DoNotSub final int setNumber = Hashing.hash(key, mask); @DoNotSub final int setBeginIndex = setNumber << setSizeShift; for (@DoNotSub int i = setBeginIndex, setEndIndex = setBeginIndex + setSize; i < setEndIndex; i++) { if (null == values[i]) { break; } if (key == keys[i]) { value = (V)values[i]; // depends on control dependency: [if], data = [none] break; } } if (null == value) { cacheMisses++; // depends on control dependency: [if], data = [none] } else { cacheHits++; // depends on control dependency: [if], data = [none] } return value; } }
public class class_name { public static String fixName(String name) { if (name == null) { throw MESSAGES.nullArgument("name"); } int length = name.length(); if (length <= 1) { return name; } if (name.charAt(length - 1) == '/') { return name.substring(0, length - 1); } return name; } }
public class class_name { public static String fixName(String name) { if (name == null) { throw MESSAGES.nullArgument("name"); } int length = name.length(); if (length <= 1) { return name; } // depends on control dependency: [if], data = [none] if (name.charAt(length - 1) == '/') { return name.substring(0, length - 1); } // depends on control dependency: [if], data = [none] return name; } }
public class class_name { private void indexSupertypes(TypeElement rootElement, TypeElement element) throws IOException { for (TypeMirror mirror : types.directSupertypes(element.asType())) { if (mirror.getKind() != TypeKind.DECLARED) { continue; } DeclaredType superType = (DeclaredType) mirror; TypeElement superTypeElement = (TypeElement) superType.asElement(); storeSubclass(superTypeElement, rootElement); for (AnnotationMirror annotationMirror : superTypeElement.getAnnotationMirrors()) { TypeElement annotationElement = (TypeElement) annotationMirror.getAnnotationType() .asElement(); if (hasAnnotation(annotationElement, Inherited.class)) { storeAnnotation(annotationElement, rootElement); } } indexSupertypes(rootElement, superTypeElement); } } }
public class class_name { private void indexSupertypes(TypeElement rootElement, TypeElement element) throws IOException { for (TypeMirror mirror : types.directSupertypes(element.asType())) { if (mirror.getKind() != TypeKind.DECLARED) { continue; } DeclaredType superType = (DeclaredType) mirror; TypeElement superTypeElement = (TypeElement) superType.asElement(); storeSubclass(superTypeElement, rootElement); for (AnnotationMirror annotationMirror : superTypeElement.getAnnotationMirrors()) { TypeElement annotationElement = (TypeElement) annotationMirror.getAnnotationType() .asElement(); if (hasAnnotation(annotationElement, Inherited.class)) { storeAnnotation(annotationElement, rootElement); // depends on control dependency: [if], data = [none] } } indexSupertypes(rootElement, superTypeElement); } } }
public class class_name { @Override protected int handleGetYearLength(int extendedYear) { int length =0; if (cType == CalculationType.ISLAMIC_CIVIL || cType == CalculationType.ISLAMIC_TBLA || (cType == CalculationType.ISLAMIC_UMALQURA && (extendedYear < UMALQURA_YEAR_START || extendedYear > UMALQURA_YEAR_END) )) { length = 354 + (civilLeapYear(extendedYear) ? 1 : 0); } else if (cType == CalculationType.ISLAMIC) { int month = 12*(extendedYear-1); length = (int)(trueMonthStart(month + 12) - trueMonthStart(month)); } else if (cType == CalculationType.ISLAMIC_UMALQURA) { for(int i=0; i<12; i++) length += handleGetMonthLength(extendedYear, i); } return length; } }
public class class_name { @Override protected int handleGetYearLength(int extendedYear) { int length =0; if (cType == CalculationType.ISLAMIC_CIVIL || cType == CalculationType.ISLAMIC_TBLA || (cType == CalculationType.ISLAMIC_UMALQURA && (extendedYear < UMALQURA_YEAR_START || extendedYear > UMALQURA_YEAR_END) )) { length = 354 + (civilLeapYear(extendedYear) ? 1 : 0); // depends on control dependency: [if], data = [none] } else if (cType == CalculationType.ISLAMIC) { int month = 12*(extendedYear-1); length = (int)(trueMonthStart(month + 12) - trueMonthStart(month)); // depends on control dependency: [if], data = [none] } else if (cType == CalculationType.ISLAMIC_UMALQURA) { for(int i=0; i<12; i++) length += handleGetMonthLength(extendedYear, i); } return length; } }
public class class_name { @Override public Class<?> classForName(String name) { try { if (classes.containsKey(name)) { return classes.get(name); } final Class<?> clazz = module.getClassLoader().loadClass(name); classes.put(name, clazz); return clazz; } catch (ClassNotFoundException | LinkageError e) { throw new ResourceLoadingException(e); } } }
public class class_name { @Override public Class<?> classForName(String name) { try { if (classes.containsKey(name)) { return classes.get(name); // depends on control dependency: [if], data = [none] } final Class<?> clazz = module.getClassLoader().loadClass(name); classes.put(name, clazz); return clazz; } catch (ClassNotFoundException | LinkageError e) { throw new ResourceLoadingException(e); } } }
public class class_name { public void marshall(Compliance compliance, ProtocolMarshaller protocolMarshaller) { if (compliance == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(compliance.getStatus(), STATUS_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(Compliance compliance, ProtocolMarshaller protocolMarshaller) { if (compliance == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(compliance.getStatus(), STATUS_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public FlowStepRule createFlowStepExitRule(FlowStepRule flowStepRuleParam) { if(flowStepRuleParam != null && this.serviceTicket != null) { flowStepRuleParam.setServiceTicket(this.serviceTicket); } return new FlowStepRule(this.putJson( flowStepRuleParam, WS.Path.FlowStepRule.Version1.flowStepRuleExitCreate())); } }
public class class_name { public FlowStepRule createFlowStepExitRule(FlowStepRule flowStepRuleParam) { if(flowStepRuleParam != null && this.serviceTicket != null) { flowStepRuleParam.setServiceTicket(this.serviceTicket); // depends on control dependency: [if], data = [none] } return new FlowStepRule(this.putJson( flowStepRuleParam, WS.Path.FlowStepRule.Version1.flowStepRuleExitCreate())); } }
public class class_name { public static String getSiteSelectOptionsStatic(CmsObject cms, Locale locale) { List<CmsSite> sites = OpenCms.getSiteManager().getAvailableSites( cms, true, false, cms.getRequestContext().getOuFqn()); StringBuffer resultBuffer = new StringBuffer(); Iterator<CmsSite> i = sites.iterator(); int counter = 0; while (i.hasNext()) { CmsSite site = i.next(); String siteRoot = site.getSiteRoot(); if (!siteRoot.endsWith("/")) { siteRoot += "/"; } if (counter != 0) { resultBuffer.append("|"); } resultBuffer.append(siteRoot).append(":").append( CmsWorkplace.substituteSiteTitleStatic(site.getTitle(), locale)); counter++; } if (sites.size() < 1) { // no site found, assure that at least the current site is shown in the selector String siteRoot = cms.getRequestContext().getSiteRoot(); CmsSite site = OpenCms.getSiteManager().getSiteForSiteRoot(siteRoot); if (!siteRoot.endsWith("/")) { siteRoot += "/"; } String title = ""; if (site != null) { title = site.getTitle(); } resultBuffer.append(siteRoot).append(":").append(title); } return resultBuffer.toString(); } }
public class class_name { public static String getSiteSelectOptionsStatic(CmsObject cms, Locale locale) { List<CmsSite> sites = OpenCms.getSiteManager().getAvailableSites( cms, true, false, cms.getRequestContext().getOuFqn()); StringBuffer resultBuffer = new StringBuffer(); Iterator<CmsSite> i = sites.iterator(); int counter = 0; while (i.hasNext()) { CmsSite site = i.next(); String siteRoot = site.getSiteRoot(); if (!siteRoot.endsWith("/")) { siteRoot += "/"; // depends on control dependency: [if], data = [none] } if (counter != 0) { resultBuffer.append("|"); // depends on control dependency: [if], data = [none] } resultBuffer.append(siteRoot).append(":").append( CmsWorkplace.substituteSiteTitleStatic(site.getTitle(), locale)); // depends on control dependency: [while], data = [none] counter++; // depends on control dependency: [while], data = [none] } if (sites.size() < 1) { // no site found, assure that at least the current site is shown in the selector String siteRoot = cms.getRequestContext().getSiteRoot(); CmsSite site = OpenCms.getSiteManager().getSiteForSiteRoot(siteRoot); if (!siteRoot.endsWith("/")) { siteRoot += "/"; // depends on control dependency: [if], data = [none] } String title = ""; if (site != null) { title = site.getTitle(); // depends on control dependency: [if], data = [none] } resultBuffer.append(siteRoot).append(":").append(title); // depends on control dependency: [if], data = [none] } return resultBuffer.toString(); } }
public class class_name { public static void addCookie(HttpServletRequest request, HttpServletResponse response, String name, String value, String path, int age) { LOG.debug("add cookie[name:{},value={},path={}]", new String[] { name, value, path }); Cookie cookie = null; try { cookie = new Cookie(name, URLEncoder.encode(value, "utf-8")); } catch (UnsupportedEncodingException e) { throw new RuntimeException(e); } cookie.setSecure(false); cookie.setPath(path); cookie.setMaxAge(age); cookie.setHttpOnly(true); response.addCookie(cookie); } }
public class class_name { public static void addCookie(HttpServletRequest request, HttpServletResponse response, String name, String value, String path, int age) { LOG.debug("add cookie[name:{},value={},path={}]", new String[] { name, value, path }); Cookie cookie = null; try { cookie = new Cookie(name, URLEncoder.encode(value, "utf-8")); // depends on control dependency: [try], data = [none] } catch (UnsupportedEncodingException e) { throw new RuntimeException(e); } // depends on control dependency: [catch], data = [none] cookie.setSecure(false); cookie.setPath(path); cookie.setMaxAge(age); cookie.setHttpOnly(true); response.addCookie(cookie); } }
public class class_name { public void addConstantMembers(ClassDoc cd, List<FieldDoc> fields, Content classConstantTree) { currentClassDoc = cd; Content tbody = new HtmlTree(HtmlTag.TBODY); for (int i = 0; i < fields.size(); ++i) { HtmlTree tr = new HtmlTree(HtmlTag.TR); if (i%2 == 0) tr.addStyle(HtmlStyle.altColor); else tr.addStyle(HtmlStyle.rowColor); addConstantMember(fields.get(i), tr); tbody.addContent(tr); } Content table = getConstantMembersHeader(cd); table.addContent(tbody); Content li = HtmlTree.LI(HtmlStyle.blockList, table); classConstantTree.addContent(li); } }
public class class_name { public void addConstantMembers(ClassDoc cd, List<FieldDoc> fields, Content classConstantTree) { currentClassDoc = cd; Content tbody = new HtmlTree(HtmlTag.TBODY); for (int i = 0; i < fields.size(); ++i) { HtmlTree tr = new HtmlTree(HtmlTag.TR); if (i%2 == 0) tr.addStyle(HtmlStyle.altColor); else tr.addStyle(HtmlStyle.rowColor); addConstantMember(fields.get(i), tr); // depends on control dependency: [for], data = [i] tbody.addContent(tr); // depends on control dependency: [for], data = [none] } Content table = getConstantMembersHeader(cd); table.addContent(tbody); Content li = HtmlTree.LI(HtmlStyle.blockList, table); classConstantTree.addContent(li); } }
public class class_name { private static @CheckForNull String codeSource(@Nonnull Class<?> c) { CodeSource cs = c.getProtectionDomain().getCodeSource(); if (cs == null) { return null; } URL loc = cs.getLocation(); if (loc == null) { return null; } String r = loc.toString(); if (r.endsWith(".class")) { // JENKINS-49147: Tomcat bug. Now do the more expensive check… String suffix = c.getName().replace('.', '/') + ".class"; if (r.endsWith(suffix)) { r = r.substring(0, r.length() - suffix.length()); } } if (r.startsWith("jar:file:/") && r.endsWith(".jar!/")) { // JENKINS-49543: also an old behavior of Tomcat. Legal enough, but unexpected by isLocationWhitelisted. r = r.substring(4, r.length() - 2); } return r; } }
public class class_name { private static @CheckForNull String codeSource(@Nonnull Class<?> c) { CodeSource cs = c.getProtectionDomain().getCodeSource(); if (cs == null) { return null; // depends on control dependency: [if], data = [none] } URL loc = cs.getLocation(); if (loc == null) { return null; // depends on control dependency: [if], data = [none] } String r = loc.toString(); if (r.endsWith(".class")) { // JENKINS-49147: Tomcat bug. Now do the more expensive check… String suffix = c.getName().replace('.', '/') + ".class"; if (r.endsWith(suffix)) { r = r.substring(0, r.length() - suffix.length()); // depends on control dependency: [if], data = [none] } } if (r.startsWith("jar:file:/") && r.endsWith(".jar!/")) { // JENKINS-49543: also an old behavior of Tomcat. Legal enough, but unexpected by isLocationWhitelisted. r = r.substring(4, r.length() - 2); // depends on control dependency: [if], data = [none] } return r; } }
public class class_name { public static void setInt(int n, byte[] b, int off, boolean littleEndian) { if (littleEndian) { b[off] = (byte) n; b[off + 1] = (byte) (n >>> 8); b[off + 2] = (byte) (n >>> 16); b[off + 3] = (byte) (n >>> 24); } else { b[off] = (byte) (n >>> 24); b[off + 1] = (byte) (n >>> 16); b[off + 2] = (byte) (n >>> 8); b[off + 3] = (byte) n; } } }
public class class_name { public static void setInt(int n, byte[] b, int off, boolean littleEndian) { if (littleEndian) { b[off] = (byte) n; // depends on control dependency: [if], data = [none] b[off + 1] = (byte) (n >>> 8); // depends on control dependency: [if], data = [none] b[off + 2] = (byte) (n >>> 16); // depends on control dependency: [if], data = [none] b[off + 3] = (byte) (n >>> 24); // depends on control dependency: [if], data = [none] } else { b[off] = (byte) (n >>> 24); // depends on control dependency: [if], data = [none] b[off + 1] = (byte) (n >>> 16); // depends on control dependency: [if], data = [none] b[off + 2] = (byte) (n >>> 8); // depends on control dependency: [if], data = [none] b[off + 3] = (byte) n; // depends on control dependency: [if], data = [none] } } }
public class class_name { public synchronized void finish() { if (!isFinished) { isFinished = true; try { writer.close(); // Re-open as a random access file so we can overwrite the 3 int // header that specifies the number of dimensions and values. // Note that the location of the matrix data is dependent on // whether the matrix is to be transposed. RandomAccessFile matrixRaf = new RandomAccessFile(matrixFile, "rw"); // Write the header in the first 100 characters. The header is // the number rows, the number of columns, and the number of non // zeros on a single line with spaces between them. StringBuilder sb = new StringBuilder(); sb.append(curRow).append(" "); sb.append(numCols).append(" "); sb.append(nonZeroValues).append(" "); matrixRaf.write(sb.toString().getBytes()); } catch (IOException ioe) { throw new IOError(ioe); } } } }
public class class_name { public synchronized void finish() { if (!isFinished) { isFinished = true; // depends on control dependency: [if], data = [none] try { writer.close(); // depends on control dependency: [try], data = [none] // Re-open as a random access file so we can overwrite the 3 int // header that specifies the number of dimensions and values. // Note that the location of the matrix data is dependent on // whether the matrix is to be transposed. RandomAccessFile matrixRaf = new RandomAccessFile(matrixFile, "rw"); // Write the header in the first 100 characters. The header is // the number rows, the number of columns, and the number of non // zeros on a single line with spaces between them. StringBuilder sb = new StringBuilder(); sb.append(curRow).append(" "); // depends on control dependency: [try], data = [none] sb.append(numCols).append(" "); // depends on control dependency: [try], data = [none] sb.append(nonZeroValues).append(" "); // depends on control dependency: [try], data = [none] matrixRaf.write(sb.toString().getBytes()); // depends on control dependency: [try], data = [none] } catch (IOException ioe) { throw new IOError(ioe); } // depends on control dependency: [catch], data = [none] } } }
public class class_name { private ContentType getAcceptType() { // If the format header is present, it overrides the ACCEPT header. String format = m_variableMap.get("format"); if (format != null) { return new ContentType(format); } String acceptParts = m_request.getHeader(HttpDefs.ACCEPT); if (!Utils.isEmpty(acceptParts)) { for (String acceptPart : acceptParts.split(",")) { ContentType acceptType = new ContentType(acceptPart); if (acceptType.isJSON() || acceptType.isXML() || acceptType.isPlainText()) { return acceptType; } } } return getContentType(); } }
public class class_name { private ContentType getAcceptType() { // If the format header is present, it overrides the ACCEPT header. String format = m_variableMap.get("format"); if (format != null) { return new ContentType(format); // depends on control dependency: [if], data = [(format] } String acceptParts = m_request.getHeader(HttpDefs.ACCEPT); if (!Utils.isEmpty(acceptParts)) { for (String acceptPart : acceptParts.split(",")) { ContentType acceptType = new ContentType(acceptPart); if (acceptType.isJSON() || acceptType.isXML() || acceptType.isPlainText()) { return acceptType; // depends on control dependency: [if], data = [none] } } } return getContentType(); } }
public class class_name { public void addWorldGroup(String name) { if (!worldGroupExist(name)) { list.put(name, new WorldGroup(name)); } } }
public class class_name { public void addWorldGroup(String name) { if (!worldGroupExist(name)) { list.put(name, new WorldGroup(name)); // depends on control dependency: [if], data = [none] } } }
public class class_name { @Override public EClass getIfcCsgPrimitive3D() { if (ifcCsgPrimitive3DEClass == null) { ifcCsgPrimitive3DEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI) .getEClassifiers().get(156); } return ifcCsgPrimitive3DEClass; } }
public class class_name { @Override public EClass getIfcCsgPrimitive3D() { if (ifcCsgPrimitive3DEClass == null) { ifcCsgPrimitive3DEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI) .getEClassifiers().get(156); // depends on control dependency: [if], data = [none] } return ifcCsgPrimitive3DEClass; } }
public class class_name { private static boolean isOptionCodeMatch(final Object option, final Object data) { // If the option is an instance of Option, check if the data value is the "CODE" value on the option if (option instanceof Option) { String optionCode = ((Option) option).getCode(); String matchAsString = String.valueOf(data); boolean equal = Util.equals(optionCode, matchAsString); return equal; } return false; } }
public class class_name { private static boolean isOptionCodeMatch(final Object option, final Object data) { // If the option is an instance of Option, check if the data value is the "CODE" value on the option if (option instanceof Option) { String optionCode = ((Option) option).getCode(); String matchAsString = String.valueOf(data); boolean equal = Util.equals(optionCode, matchAsString); return equal; // depends on control dependency: [if], data = [none] } return false; } }
public class class_name { public static Collection<Subquery> subqueries( Visitable visitable, final boolean includeNestedSubqueries ) { final Collection<Subquery> subqueries = new LinkedList<Subquery>(); Visitors.visitAll(visitable, new Visitors.AbstractVisitor() { @Override public void visit( Subquery subquery ) { subqueries.add(subquery); if (includeNestedSubqueries) { // Now look for any subqueries in the subquery ... subquery.getQuery().accept(this); } } }); return subqueries; } }
public class class_name { public static Collection<Subquery> subqueries( Visitable visitable, final boolean includeNestedSubqueries ) { final Collection<Subquery> subqueries = new LinkedList<Subquery>(); Visitors.visitAll(visitable, new Visitors.AbstractVisitor() { @Override public void visit( Subquery subquery ) { subqueries.add(subquery); if (includeNestedSubqueries) { // Now look for any subqueries in the subquery ... subquery.getQuery().accept(this); // depends on control dependency: [if], data = [none] } } }); return subqueries; } }
public class class_name { public void send(final MailInfo mailInfo) { try { final MultiPartEmail email = new MultiPartEmail(); email.setCharset("utf-8"); mailInfo.fillEmail(email); email.send(); } catch (Exception e) { JK.throww(e); } } }
public class class_name { public void send(final MailInfo mailInfo) { try { final MultiPartEmail email = new MultiPartEmail(); email.setCharset("utf-8"); // depends on control dependency: [try], data = [none] mailInfo.fillEmail(email); // depends on control dependency: [try], data = [none] email.send(); // depends on control dependency: [try], data = [none] } catch (Exception e) { JK.throww(e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { String copyFile( final ExecutionContext executionContext, final File file, final InputStream input, final String content, final INodeEntry node, String destination ) throws FileCopierException { final String pluginname = getProvider().getName(); final Map<String, Map<String, String>> localDataContext = createScriptDataContext( executionContext.getFramework(), executionContext.getFrameworkProject(), executionContext.getDataContext()); //add node context data localDataContext.put("node", DataContextUtils.nodeData(node)); Description pluginDesc = getDescription(); //load config.* property values in from project or framework scope final Map<String, Map<String, String>> finalDataContext; try { finalDataContext = loadConfigData(executionContext, loadInstanceDataFromNodeAttributes(node, pluginDesc), localDataContext, pluginDesc, ServiceNameConstants.FileCopier ); } catch (ConfigurationException e) { throw new FileCopierException("[" + pluginname + "]: "+e.getMessage(), StepFailureReason.ConfigurationFailure, e); } final File srcFile = null != file ? file : BaseFileCopier.writeTempFile(executionContext, null, input, content); String destFilePath = destination; if (null == destFilePath) { String identity = null!=executionContext.getDataContext() && null!=executionContext.getDataContext().get("job")? executionContext.getDataContext().get("job").get("execid"):null; destFilePath = BaseFileCopier.generateRemoteFilepathForNode( node, executionContext.getFramework().getFrameworkProjectMgr().getFrameworkProject(executionContext.getFrameworkProject()), executionContext.getFramework(), (null != file ? file.getName() : "dispatch-script"), null, identity ); } //put file in a directory if (null != destFilePath && destFilePath.endsWith("/")) { destFilePath += srcFile.getName(); } //add some more data context values to allow templatized script-copy attribute final HashMap<String, String> scptexec = new HashMap<>(); //set up the data context to include the local temp file scptexec.put("file", srcFile.getAbsolutePath()); scptexec.put("destination", null != destFilePath ? destFilePath : ""); finalDataContext.put("file-copy", scptexec); Map<String, Map<String, String>> fileCopyContext = DataContextUtils.addContext( "file-copy", scptexec, null ); final ExecArgList execArgList = createScriptArgsList(fileCopyContext); final String localNodeOsFamily=getFramework().createFrameworkNode().getOsFamily(); executionContext.getExecutionListener().log(3, "[" + getProvider().getName() + "] executing: " + execArgList.asFlatStringList() ); final ByteArrayOutputStream captureSysOut = new ByteArrayOutputStream(); try { final int result = getScriptExecHelper().runLocalCommand( localNodeOsFamily, execArgList, finalDataContext, null, captureSysOut, System.err ); executionContext.getExecutionListener().log(3, "[" + pluginname + "]: result code: " + result); if(result!=0){ throw new FileCopierException("[" + pluginname + "]: external script failed with exit code: " + result, NodeStepFailureReason.NonZeroResultCode); } } catch (IOException e) { throw new FileCopierException("[" + pluginname + "] " + e.getMessage(), StepFailureReason.IOFailure); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new FileCopierException("[" + pluginname + "] " + e.getMessage(), StepFailureReason.Interrupted); }finally { if(null == file) { if (!ScriptfileUtils.releaseTempFile(srcFile)) { executionContext.getExecutionListener().log( Constants.WARN_LEVEL, "Unable to remove local temp file: " + srcFile.getAbsolutePath() ); } } } //load string of output from outputstream final String output = captureSysOut.toString(); if (null == output || output.length() < 1) { if (null != destFilePath) { return destFilePath; } throw new FileCopierException("[" + pluginname + "]: No output from external script", ScriptPluginFailureReason.ScriptPluginFileCopierOutputMissing ); } //TODO: require any specific format for the data? //look for first line of output final String[] split1 = output.split("(\\r?\\n)"); if (split1.length < 1) { if (null != destFilePath) { return destFilePath; } throw new FileCopierException("[" + pluginname + "]: No output from external script", ScriptPluginFailureReason.ScriptPluginFileCopierOutputMissing); } final String remotefilepath = split1[0]; executionContext.getExecutionListener().log(3, "[" + pluginname + "]: result filepath: " + remotefilepath); return remotefilepath; } }
public class class_name { String copyFile( final ExecutionContext executionContext, final File file, final InputStream input, final String content, final INodeEntry node, String destination ) throws FileCopierException { final String pluginname = getProvider().getName(); final Map<String, Map<String, String>> localDataContext = createScriptDataContext( executionContext.getFramework(), executionContext.getFrameworkProject(), executionContext.getDataContext()); //add node context data localDataContext.put("node", DataContextUtils.nodeData(node)); Description pluginDesc = getDescription(); //load config.* property values in from project or framework scope final Map<String, Map<String, String>> finalDataContext; try { finalDataContext = loadConfigData(executionContext, loadInstanceDataFromNodeAttributes(node, pluginDesc), localDataContext, pluginDesc, ServiceNameConstants.FileCopier ); } catch (ConfigurationException e) { throw new FileCopierException("[" + pluginname + "]: "+e.getMessage(), StepFailureReason.ConfigurationFailure, e); } final File srcFile = null != file ? file : BaseFileCopier.writeTempFile(executionContext, null, input, content); String destFilePath = destination; if (null == destFilePath) { String identity = null!=executionContext.getDataContext() && null!=executionContext.getDataContext().get("job")? executionContext.getDataContext().get("job").get("execid"):null; destFilePath = BaseFileCopier.generateRemoteFilepathForNode( node, executionContext.getFramework().getFrameworkProjectMgr().getFrameworkProject(executionContext.getFrameworkProject()), executionContext.getFramework(), (null != file ? file.getName() : "dispatch-script"), null, identity ); } //put file in a directory if (null != destFilePath && destFilePath.endsWith("/")) { destFilePath += srcFile.getName(); } //add some more data context values to allow templatized script-copy attribute final HashMap<String, String> scptexec = new HashMap<>(); //set up the data context to include the local temp file scptexec.put("file", srcFile.getAbsolutePath()); scptexec.put("destination", null != destFilePath ? destFilePath : ""); finalDataContext.put("file-copy", scptexec); Map<String, Map<String, String>> fileCopyContext = DataContextUtils.addContext( "file-copy", scptexec, null ); final ExecArgList execArgList = createScriptArgsList(fileCopyContext); final String localNodeOsFamily=getFramework().createFrameworkNode().getOsFamily(); executionContext.getExecutionListener().log(3, "[" + getProvider().getName() + "] executing: " + execArgList.asFlatStringList() ); final ByteArrayOutputStream captureSysOut = new ByteArrayOutputStream(); try { final int result = getScriptExecHelper().runLocalCommand( localNodeOsFamily, execArgList, finalDataContext, null, captureSysOut, System.err ); executionContext.getExecutionListener().log(3, "[" + pluginname + "]: result code: " + result); if(result!=0){ throw new FileCopierException("[" + pluginname + "]: external script failed with exit code: " + result, NodeStepFailureReason.NonZeroResultCode); } } catch (IOException e) { throw new FileCopierException("[" + pluginname + "] " + e.getMessage(), StepFailureReason.IOFailure); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new FileCopierException("[" + pluginname + "] " + e.getMessage(), StepFailureReason.Interrupted); }finally { if(null == file) { if (!ScriptfileUtils.releaseTempFile(srcFile)) { executionContext.getExecutionListener().log( Constants.WARN_LEVEL, "Unable to remove local temp file: " + srcFile.getAbsolutePath() ); // depends on control dependency: [if], data = [none] } } } //load string of output from outputstream final String output = captureSysOut.toString(); if (null == output || output.length() < 1) { if (null != destFilePath) { return destFilePath; // depends on control dependency: [if], data = [none] } throw new FileCopierException("[" + pluginname + "]: No output from external script", ScriptPluginFailureReason.ScriptPluginFileCopierOutputMissing ); } //TODO: require any specific format for the data? //look for first line of output final String[] split1 = output.split("(\\r?\\n)"); if (split1.length < 1) { if (null != destFilePath) { return destFilePath; } throw new FileCopierException("[" + pluginname + "]: No output from external script", ScriptPluginFailureReason.ScriptPluginFileCopierOutputMissing); } final String remotefilepath = split1[0]; executionContext.getExecutionListener().log(3, "[" + pluginname + "]: result filepath: " + remotefilepath); return remotefilepath; } }
public class class_name { @Override public <T> TypedQuery<T> createQuery(CriteriaQuery<T> arg0) { try { return ivEm.createQuery(arg0); } finally { if (!inJTATransaction()) { ivEm.clear(); } } } }
public class class_name { @Override public <T> TypedQuery<T> createQuery(CriteriaQuery<T> arg0) { try { return ivEm.createQuery(arg0); // depends on control dependency: [try], data = [none] } finally { if (!inJTATransaction()) { ivEm.clear(); // depends on control dependency: [if], data = [none] } } } }
public class class_name { private byte[] inputStreamToByte(InputStream inputStream) { try { return IOUtils.toByteArray(inputStream); } catch (IOException e) { LOGGER.error("image inputStream to byte error", e); throw new FdfsUploadImageException("upload ThumbImage error", e.getCause()); } } }
public class class_name { private byte[] inputStreamToByte(InputStream inputStream) { try { return IOUtils.toByteArray(inputStream); // depends on control dependency: [try], data = [none] } catch (IOException e) { LOGGER.error("image inputStream to byte error", e); throw new FdfsUploadImageException("upload ThumbImage error", e.getCause()); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public void setAttribute(String name, final String value) { initAttributes(); String rawAttributeName = name; if (!ownerDocument.config.isCaseSensitive()) { name = name.toLowerCase(); } // search if attribute with the same name exist for (int i = 0, attributesSize = attributes.size(); i < attributesSize; i++) { Attribute attr = attributes.get(i); if (attr.getName().equals(name)) { attr.setValue(value); return; } } attributes.add(new Attribute(rawAttributeName, name, value)); } }
public class class_name { public void setAttribute(String name, final String value) { initAttributes(); String rawAttributeName = name; if (!ownerDocument.config.isCaseSensitive()) { name = name.toLowerCase(); // depends on control dependency: [if], data = [none] } // search if attribute with the same name exist for (int i = 0, attributesSize = attributes.size(); i < attributesSize; i++) { Attribute attr = attributes.get(i); if (attr.getName().equals(name)) { attr.setValue(value); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } } attributes.add(new Attribute(rawAttributeName, name, value)); } }
public class class_name { protected boolean removePropertyListener(final BoundPropertyListener listener) { // get the bound property name String name = makePropertyName(listener.getPropertyName()); // get the bound listener list for the property List list = (List) boundListeners.get(name); boolean removed = false; if (list != null) { removed = list.remove(listener); // notify listener that is was unbound if (removed) listener.propertyUnbound(this); } return removed; } }
public class class_name { protected boolean removePropertyListener(final BoundPropertyListener listener) { // get the bound property name String name = makePropertyName(listener.getPropertyName()); // get the bound listener list for the property List list = (List) boundListeners.get(name); boolean removed = false; if (list != null) { removed = list.remove(listener); // depends on control dependency: [if], data = [(list] // notify listener that is was unbound if (removed) listener.propertyUnbound(this); } return removed; } }
public class class_name { public FieldSource<JavaClassSource> addFieldTo(final JavaClassSource targetClass, final String fieldType, final String fieldName, Visibility visibility, boolean withGetter, boolean withSetter, boolean updateToString, String... annotations) { if (targetClass.hasField(fieldName)) { throw new IllegalStateException("Entity already has a field named [" + fieldName + "]"); } PropertySource<JavaClassSource> property = targetClass.addProperty(fieldType, fieldName); FieldSource<JavaClassSource> field = property.getField(); field.setVisibility(visibility); for (String annotation : annotations) { field.addAnnotation(annotation); } String fieldTypeForImport = Types.stripArray(fieldType); if (!fieldTypeForImport.startsWith("java.lang.") && fieldTypeForImport.contains(".") && !fieldTypeForImport.equals(targetClass.getCanonicalName())) { targetClass.addImport(fieldTypeForImport); } if (!withGetter) { targetClass.removeMethod(property.getAccessor()); } if (!withSetter) { targetClass.removeMethod(property.getMutator()); } if (updateToString) { updateToString(targetClass); } return field; } }
public class class_name { public FieldSource<JavaClassSource> addFieldTo(final JavaClassSource targetClass, final String fieldType, final String fieldName, Visibility visibility, boolean withGetter, boolean withSetter, boolean updateToString, String... annotations) { if (targetClass.hasField(fieldName)) { throw new IllegalStateException("Entity already has a field named [" + fieldName + "]"); } PropertySource<JavaClassSource> property = targetClass.addProperty(fieldType, fieldName); FieldSource<JavaClassSource> field = property.getField(); field.setVisibility(visibility); for (String annotation : annotations) { field.addAnnotation(annotation); // depends on control dependency: [for], data = [annotation] } String fieldTypeForImport = Types.stripArray(fieldType); if (!fieldTypeForImport.startsWith("java.lang.") && fieldTypeForImport.contains(".") && !fieldTypeForImport.equals(targetClass.getCanonicalName())) { targetClass.addImport(fieldTypeForImport); // depends on control dependency: [if], data = [none] } if (!withGetter) { targetClass.removeMethod(property.getAccessor()); // depends on control dependency: [if], data = [none] } if (!withSetter) { targetClass.removeMethod(property.getMutator()); // depends on control dependency: [if], data = [none] } if (updateToString) { updateToString(targetClass); // depends on control dependency: [if], data = [none] } return field; } }
public class class_name { public C instantiateClass(Parameterization config) { if(getValue() == null /* && !optionalParameter */) { config.reportError(new UnspecifiedParameterException(this)); return null; } try { config = config.descend(this); return ClassGenericsUtil.tryInstantiate(restrictionClass, getValue(), config); } catch(ClassInstantiationException e) { config.reportError(new WrongParameterValueException(this, getValue().getCanonicalName(), "Error instantiating class.", e)); return null; } } }
public class class_name { public C instantiateClass(Parameterization config) { if(getValue() == null /* && !optionalParameter */) { config.reportError(new UnspecifiedParameterException(this)); // depends on control dependency: [if], data = [none] return null; // depends on control dependency: [if], data = [none] } try { config = config.descend(this); // depends on control dependency: [try], data = [none] return ClassGenericsUtil.tryInstantiate(restrictionClass, getValue(), config); // depends on control dependency: [try], data = [none] } catch(ClassInstantiationException e) { config.reportError(new WrongParameterValueException(this, getValue().getCanonicalName(), "Error instantiating class.", e)); return null; } // depends on control dependency: [catch], data = [none] } }
public class class_name { public Pipeline pipelineWithPasswordAwareBuildCauseByBuildId(final long buildId) { Pipeline pipeline = pipelineDao.pipelineWithMaterialsAndModsByBuildId(buildId); MaterialRevisions scheduledRevs = pipeline.getBuildCause().getMaterialRevisions(); MaterialConfigs knownMaterials = knownMaterials(pipeline, scheduledRevs); for (MaterialRevision materialRevision : scheduledRevs) { MaterialConfig materialConfig = materialFrom(knownMaterials, materialRevision); Material usedMaterial = materialRevision.getMaterial(); if (materialConfig == null) { final JobInstance jobInstance = jobInstanceService.buildByIdWithTransitions(buildId); scheduleService.failJob(jobInstance); final String message = "Cannot load job '" + jobInstance.buildLocator() + "' because material " + usedMaterial.config() + " was not found in config."; final String description = "Job for pipeline '" + jobInstance.buildLocator() + "' has been failed as one or more material configurations were either changed or removed."; transactionSynchronizationManager.registerSynchronization(new TransactionSynchronizationAdapter() { @Override public void afterCommit() { final ServerHealthState error = ServerHealthState.error(message, description, HealthStateType.general(HealthStateScope.forJob(jobInstance.getPipelineName(), jobInstance.getStageName(), jobInstance.getName()))); error.setTimeout(Timeout.FIVE_MINUTES); serverHealthService.update(error); appendToConsoleLog(jobInstance, message); appendToConsoleLog(jobInstance, description); } }); throw new StaleMaterialsOnBuildCause(message); } usedMaterial.updateFromConfig(materialConfig); } return pipeline; } }
public class class_name { public Pipeline pipelineWithPasswordAwareBuildCauseByBuildId(final long buildId) { Pipeline pipeline = pipelineDao.pipelineWithMaterialsAndModsByBuildId(buildId); MaterialRevisions scheduledRevs = pipeline.getBuildCause().getMaterialRevisions(); MaterialConfigs knownMaterials = knownMaterials(pipeline, scheduledRevs); for (MaterialRevision materialRevision : scheduledRevs) { MaterialConfig materialConfig = materialFrom(knownMaterials, materialRevision); Material usedMaterial = materialRevision.getMaterial(); if (materialConfig == null) { final JobInstance jobInstance = jobInstanceService.buildByIdWithTransitions(buildId); scheduleService.failJob(jobInstance); // depends on control dependency: [if], data = [none] final String message = "Cannot load job '" + jobInstance.buildLocator() + "' because material " + usedMaterial.config() + " was not found in config."; // depends on control dependency: [if], data = [none] final String description = "Job for pipeline '" + jobInstance.buildLocator() + "' has been failed as one or more material configurations were either changed or removed."; transactionSynchronizationManager.registerSynchronization(new TransactionSynchronizationAdapter() { @Override public void afterCommit() { final ServerHealthState error = ServerHealthState.error(message, description, HealthStateType.general(HealthStateScope.forJob(jobInstance.getPipelineName(), jobInstance.getStageName(), jobInstance.getName()))); error.setTimeout(Timeout.FIVE_MINUTES); serverHealthService.update(error); appendToConsoleLog(jobInstance, message); appendToConsoleLog(jobInstance, description); } }); // depends on control dependency: [if], data = [none] throw new StaleMaterialsOnBuildCause(message); } usedMaterial.updateFromConfig(materialConfig); // depends on control dependency: [for], data = [none] } return pipeline; } }
public class class_name { public ColumnMapper getMapper(String field) { String[] components = field.split("\\."); for (int i = components.length - 1; i >= 0; i--) { StringBuilder sb = new StringBuilder(); for (int j = 0; j <= i; j++) { sb.append(components[j]); if (j < i) sb.append('.'); } ColumnMapper columnMapper = columnMappers.get(sb.toString()); if (columnMapper != null) return columnMapper; } return null; } }
public class class_name { public ColumnMapper getMapper(String field) { String[] components = field.split("\\."); for (int i = components.length - 1; i >= 0; i--) { StringBuilder sb = new StringBuilder(); for (int j = 0; j <= i; j++) { sb.append(components[j]); // depends on control dependency: [for], data = [j] if (j < i) sb.append('.'); } ColumnMapper columnMapper = columnMappers.get(sb.toString()); if (columnMapper != null) return columnMapper; } return null; } }
public class class_name { public void setLookupAttributes(java.util.Collection<LookupAttribute> lookupAttributes) { if (lookupAttributes == null) { this.lookupAttributes = null; return; } this.lookupAttributes = new com.amazonaws.internal.SdkInternalList<LookupAttribute>(lookupAttributes); } }
public class class_name { public void setLookupAttributes(java.util.Collection<LookupAttribute> lookupAttributes) { if (lookupAttributes == null) { this.lookupAttributes = null; // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } this.lookupAttributes = new com.amazonaws.internal.SdkInternalList<LookupAttribute>(lookupAttributes); } }
public class class_name { @Nonnull public static SAXReaderSettings createCloneOnDemand (@Nullable final ISAXReaderSettings aOther) { if (aOther == null) { // Create plain object return new SAXReaderSettings (); } // Create a clone return new SAXReaderSettings (aOther); } }
public class class_name { @Nonnull public static SAXReaderSettings createCloneOnDemand (@Nullable final ISAXReaderSettings aOther) { if (aOther == null) { // Create plain object return new SAXReaderSettings (); // depends on control dependency: [if], data = [none] } // Create a clone return new SAXReaderSettings (aOther); } }
public class class_name { public Observable<ServiceResponse<Page<KeyItem>>> getKeyVersionsSinglePageAsync(final String vaultBaseUrl, final String keyName, final Integer maxresults) { if (vaultBaseUrl == null) { throw new IllegalArgumentException("Parameter vaultBaseUrl is required and cannot be null."); } if (keyName == null) { throw new IllegalArgumentException("Parameter keyName is required and cannot be null."); } if (this.apiVersion() == null) { throw new IllegalArgumentException("Parameter this.apiVersion() is required and cannot be null."); } String parameterizedHost = Joiner.on(", ").join("{vaultBaseUrl}", vaultBaseUrl); return service.getKeyVersions(keyName, maxresults, this.apiVersion(), this.acceptLanguage(), parameterizedHost, this.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<KeyItem>>>>() { @Override public Observable<ServiceResponse<Page<KeyItem>>> call(Response<ResponseBody> response) { try { ServiceResponse<PageImpl<KeyItem>> result = getKeyVersionsDelegate(response); return Observable.just(new ServiceResponse<Page<KeyItem>>(result.body(), result.response())); } catch (Throwable t) { return Observable.error(t); } } }); } }
public class class_name { public Observable<ServiceResponse<Page<KeyItem>>> getKeyVersionsSinglePageAsync(final String vaultBaseUrl, final String keyName, final Integer maxresults) { if (vaultBaseUrl == null) { throw new IllegalArgumentException("Parameter vaultBaseUrl is required and cannot be null."); } if (keyName == null) { throw new IllegalArgumentException("Parameter keyName is required and cannot be null."); } if (this.apiVersion() == null) { throw new IllegalArgumentException("Parameter this.apiVersion() is required and cannot be null."); } String parameterizedHost = Joiner.on(", ").join("{vaultBaseUrl}", vaultBaseUrl); return service.getKeyVersions(keyName, maxresults, this.apiVersion(), this.acceptLanguage(), parameterizedHost, this.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<KeyItem>>>>() { @Override public Observable<ServiceResponse<Page<KeyItem>>> call(Response<ResponseBody> response) { try { ServiceResponse<PageImpl<KeyItem>> result = getKeyVersionsDelegate(response); return Observable.just(new ServiceResponse<Page<KeyItem>>(result.body(), result.response())); // depends on control dependency: [try], data = [none] } catch (Throwable t) { return Observable.error(t); } // depends on control dependency: [catch], data = [none] } }); } }
public class class_name { public String getDiscriminatingValue(ILoggingEvent event) { // http://jira.qos.ch/browse/LBCLASSIC-213 Map<String, String> mdcMap = event.getMDCPropertyMap(); if (mdcMap == null) { return defaultValue; } String mdcValue = mdcMap.get(key); if (mdcValue == null) { return defaultValue; } else { return mdcValue; } } }
public class class_name { public String getDiscriminatingValue(ILoggingEvent event) { // http://jira.qos.ch/browse/LBCLASSIC-213 Map<String, String> mdcMap = event.getMDCPropertyMap(); if (mdcMap == null) { return defaultValue; // depends on control dependency: [if], data = [none] } String mdcValue = mdcMap.get(key); if (mdcValue == null) { return defaultValue; // depends on control dependency: [if], data = [none] } else { return mdcValue; // depends on control dependency: [if], data = [none] } } }
public class class_name { public void marshall(InputTemplate inputTemplate, ProtocolMarshaller protocolMarshaller) { if (inputTemplate == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(inputTemplate.getAudioSelectorGroups(), AUDIOSELECTORGROUPS_BINDING); protocolMarshaller.marshall(inputTemplate.getAudioSelectors(), AUDIOSELECTORS_BINDING); protocolMarshaller.marshall(inputTemplate.getCaptionSelectors(), CAPTIONSELECTORS_BINDING); protocolMarshaller.marshall(inputTemplate.getDeblockFilter(), DEBLOCKFILTER_BINDING); protocolMarshaller.marshall(inputTemplate.getDenoiseFilter(), DENOISEFILTER_BINDING); protocolMarshaller.marshall(inputTemplate.getFilterEnable(), FILTERENABLE_BINDING); protocolMarshaller.marshall(inputTemplate.getFilterStrength(), FILTERSTRENGTH_BINDING); protocolMarshaller.marshall(inputTemplate.getImageInserter(), IMAGEINSERTER_BINDING); protocolMarshaller.marshall(inputTemplate.getInputClippings(), INPUTCLIPPINGS_BINDING); protocolMarshaller.marshall(inputTemplate.getProgramNumber(), PROGRAMNUMBER_BINDING); protocolMarshaller.marshall(inputTemplate.getPsiControl(), PSICONTROL_BINDING); protocolMarshaller.marshall(inputTemplate.getTimecodeSource(), TIMECODESOURCE_BINDING); protocolMarshaller.marshall(inputTemplate.getVideoSelector(), VIDEOSELECTOR_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(InputTemplate inputTemplate, ProtocolMarshaller protocolMarshaller) { if (inputTemplate == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(inputTemplate.getAudioSelectorGroups(), AUDIOSELECTORGROUPS_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(inputTemplate.getAudioSelectors(), AUDIOSELECTORS_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(inputTemplate.getCaptionSelectors(), CAPTIONSELECTORS_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(inputTemplate.getDeblockFilter(), DEBLOCKFILTER_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(inputTemplate.getDenoiseFilter(), DENOISEFILTER_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(inputTemplate.getFilterEnable(), FILTERENABLE_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(inputTemplate.getFilterStrength(), FILTERSTRENGTH_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(inputTemplate.getImageInserter(), IMAGEINSERTER_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(inputTemplate.getInputClippings(), INPUTCLIPPINGS_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(inputTemplate.getProgramNumber(), PROGRAMNUMBER_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(inputTemplate.getPsiControl(), PSICONTROL_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(inputTemplate.getTimecodeSource(), TIMECODESOURCE_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(inputTemplate.getVideoSelector(), VIDEOSELECTOR_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { void removeVertexLabel(VertexLabel vertexLabel, boolean preserveData) { getTopology().lock(); String fn = this.name + "." + VERTEX_PREFIX + vertexLabel.getName(); if (!uncommittedRemovedVertexLabels.contains(fn)) { uncommittedRemovedVertexLabels.add(fn); TopologyManager.removeVertexLabel(this.sqlgGraph, vertexLabel); for (EdgeRole er : vertexLabel.getOutEdgeRoles().values()) { er.remove(preserveData); } for (EdgeRole er : vertexLabel.getInEdgeRoles().values()) { er.remove(preserveData); } if (!preserveData) { vertexLabel.delete(); } getTopology().fire(vertexLabel, "", TopologyChangeAction.DELETE); } } }
public class class_name { void removeVertexLabel(VertexLabel vertexLabel, boolean preserveData) { getTopology().lock(); String fn = this.name + "." + VERTEX_PREFIX + vertexLabel.getName(); if (!uncommittedRemovedVertexLabels.contains(fn)) { uncommittedRemovedVertexLabels.add(fn); // depends on control dependency: [if], data = [none] TopologyManager.removeVertexLabel(this.sqlgGraph, vertexLabel); // depends on control dependency: [if], data = [none] for (EdgeRole er : vertexLabel.getOutEdgeRoles().values()) { er.remove(preserveData); // depends on control dependency: [for], data = [er] } for (EdgeRole er : vertexLabel.getInEdgeRoles().values()) { er.remove(preserveData); // depends on control dependency: [for], data = [er] } if (!preserveData) { vertexLabel.delete(); // depends on control dependency: [if], data = [none] } getTopology().fire(vertexLabel, "", TopologyChangeAction.DELETE); // depends on control dependency: [if], data = [none] } } }
public class class_name { @Override public boolean isChannelDeepMemberOf(String fname, String groupKey) { final IEntityGroup distinguishedGroup = GroupService.findGroup(groupKey); if (distinguishedGroup == null) { if (this.logger.isDebugEnabled()) { this.logger.debug("No group found for key '" + groupKey + "'"); } return false; } final IPortletDefinition portletDefinition; try { portletDefinition = this.portletDefinitionRegistry.getPortletDefinitionByFname(fname); } catch (Exception e) { this.logger.warn( "Caught exception while retrieving portlet definition for fname '" + fname + "'", e); return false; } if (portletDefinition == null) { if (this.logger.isDebugEnabled()) { this.logger.debug("No portlet found for key '" + fname + "'"); } return false; } final String portletId = portletDefinition.getPortletDefinitionId().getStringId(); final IEntity entity = GroupService.getEntity(portletId, IPortletDefinition.class); if (entity == null) { if (this.logger.isDebugEnabled()) { this.logger.debug("No portlet found for id '" + portletId + "'"); } return false; } return distinguishedGroup.deepContains(entity); } }
public class class_name { @Override public boolean isChannelDeepMemberOf(String fname, String groupKey) { final IEntityGroup distinguishedGroup = GroupService.findGroup(groupKey); if (distinguishedGroup == null) { if (this.logger.isDebugEnabled()) { this.logger.debug("No group found for key '" + groupKey + "'"); // depends on control dependency: [if], data = [none] } return false; // depends on control dependency: [if], data = [none] } final IPortletDefinition portletDefinition; try { portletDefinition = this.portletDefinitionRegistry.getPortletDefinitionByFname(fname); // depends on control dependency: [try], data = [none] } catch (Exception e) { this.logger.warn( "Caught exception while retrieving portlet definition for fname '" + fname + "'", e); return false; } // depends on control dependency: [catch], data = [none] if (portletDefinition == null) { if (this.logger.isDebugEnabled()) { this.logger.debug("No portlet found for key '" + fname + "'"); // depends on control dependency: [if], data = [none] } return false; // depends on control dependency: [if], data = [none] } final String portletId = portletDefinition.getPortletDefinitionId().getStringId(); final IEntity entity = GroupService.getEntity(portletId, IPortletDefinition.class); if (entity == null) { if (this.logger.isDebugEnabled()) { this.logger.debug("No portlet found for id '" + portletId + "'"); // depends on control dependency: [if], data = [none] } return false; // depends on control dependency: [if], data = [none] } return distinguishedGroup.deepContains(entity); } }
public class class_name { public int deleteAll() { int count = 0; for (String geoPackage : propertiesMap.keySet()) { if (deleteAll(geoPackage)) { count++; } } return count; } }
public class class_name { public int deleteAll() { int count = 0; for (String geoPackage : propertiesMap.keySet()) { if (deleteAll(geoPackage)) { count++; // depends on control dependency: [if], data = [none] } } return count; } }
public class class_name { private void discoverMessageType(ProtobufValueWrapper valueWrapper) { try { ProtobufParser.INSTANCE.parse(new WrappedMessageTagHandler(valueWrapper, serializationContext), wrapperDescriptor, valueWrapper.getBinary()); } catch (IOException e) { throw new CacheException(e); } } }
public class class_name { private void discoverMessageType(ProtobufValueWrapper valueWrapper) { try { ProtobufParser.INSTANCE.parse(new WrappedMessageTagHandler(valueWrapper, serializationContext), wrapperDescriptor, valueWrapper.getBinary()); // depends on control dependency: [try], data = [none] } catch (IOException e) { throw new CacheException(e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { @Override public int nextOrdinal() { if(offset >>> 3 == reader.length()) return NO_MORE_ORDINALS; skipToNextPopulatedByte(); while(moreBytesToRead()) { if(testCurrentBit()) { return offset++; } offset++; } return NO_MORE_ORDINALS; } }
public class class_name { @Override public int nextOrdinal() { if(offset >>> 3 == reader.length()) return NO_MORE_ORDINALS; skipToNextPopulatedByte(); while(moreBytesToRead()) { if(testCurrentBit()) { return offset++; // depends on control dependency: [if], data = [none] } offset++; // depends on control dependency: [while], data = [none] } return NO_MORE_ORDINALS; } }
public class class_name { public Tuple get(RowKey key) { AssociationOperation result = currentState.get( key ); if ( result == null ) { return cleared ? null : snapshot.get( key ); } else if ( result.getType() == REMOVE ) { return null; } return result.getValue(); } }
public class class_name { public Tuple get(RowKey key) { AssociationOperation result = currentState.get( key ); if ( result == null ) { return cleared ? null : snapshot.get( key ); // depends on control dependency: [if], data = [none] } else if ( result.getType() == REMOVE ) { return null; // depends on control dependency: [if], data = [none] } return result.getValue(); } }
public class class_name { public void marshall(RecoveryPointByResource recoveryPointByResource, ProtocolMarshaller protocolMarshaller) { if (recoveryPointByResource == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(recoveryPointByResource.getRecoveryPointArn(), RECOVERYPOINTARN_BINDING); protocolMarshaller.marshall(recoveryPointByResource.getCreationDate(), CREATIONDATE_BINDING); protocolMarshaller.marshall(recoveryPointByResource.getStatus(), STATUS_BINDING); protocolMarshaller.marshall(recoveryPointByResource.getEncryptionKeyArn(), ENCRYPTIONKEYARN_BINDING); protocolMarshaller.marshall(recoveryPointByResource.getBackupSizeBytes(), BACKUPSIZEBYTES_BINDING); protocolMarshaller.marshall(recoveryPointByResource.getBackupVaultName(), BACKUPVAULTNAME_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(RecoveryPointByResource recoveryPointByResource, ProtocolMarshaller protocolMarshaller) { if (recoveryPointByResource == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(recoveryPointByResource.getRecoveryPointArn(), RECOVERYPOINTARN_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(recoveryPointByResource.getCreationDate(), CREATIONDATE_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(recoveryPointByResource.getStatus(), STATUS_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(recoveryPointByResource.getEncryptionKeyArn(), ENCRYPTIONKEYARN_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(recoveryPointByResource.getBackupSizeBytes(), BACKUPSIZEBYTES_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(recoveryPointByResource.getBackupVaultName(), BACKUPVAULTNAME_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public final void mEscapeSequence() throws RecognitionException { try { // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1353:5: ( '\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\"' | '\\'' | '\\\\' ) | UnicodeEscape | OctalEscape ) int alt24=3; int LA24_0 = input.LA(1); if ( (LA24_0=='\\') ) { switch ( input.LA(2) ) { case '\"': case '\'': case '\\': case 'b': case 'f': case 'n': case 'r': case 't': { alt24=1; } break; case 'u': { alt24=2; } break; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': { alt24=3; } break; default: int nvaeMark = input.mark(); try { input.consume(); NoViableAltException nvae = new NoViableAltException("", 24, 1, input); throw nvae; } finally { input.rewind(nvaeMark); } } } else { NoViableAltException nvae = new NoViableAltException("", 24, 0, input); throw nvae; } switch (alt24) { case 1 : // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1353:9: '\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\"' | '\\'' | '\\\\' ) { match('\\'); if ( input.LA(1)=='\"'||input.LA(1)=='\''||input.LA(1)=='\\'||input.LA(1)=='b'||input.LA(1)=='f'||input.LA(1)=='n'||input.LA(1)=='r'||input.LA(1)=='t' ) { input.consume(); } else { MismatchedSetException mse = new MismatchedSetException(null,input); recover(mse); throw mse; } } break; case 2 : // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1354:9: UnicodeEscape { mUnicodeEscape(); } break; case 3 : // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1355:9: OctalEscape { mOctalEscape(); } break; } } finally { // do for sure before leaving } } }
public class class_name { public final void mEscapeSequence() throws RecognitionException { try { // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1353:5: ( '\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\"' | '\\'' | '\\\\' ) | UnicodeEscape | OctalEscape ) int alt24=3; int LA24_0 = input.LA(1); if ( (LA24_0=='\\') ) { switch ( input.LA(2) ) { case '\"': case '\'': case '\\': case 'b': case 'f': case 'n': case 'r': case 't': { alt24=1; } break; case 'u': { alt24=2; } break; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': { alt24=3; } break; default: int nvaeMark = input.mark(); try { input.consume(); // depends on control dependency: [try], data = [none] NoViableAltException nvae = new NoViableAltException("", 24, 1, input); throw nvae; } finally { input.rewind(nvaeMark); } } } else { NoViableAltException nvae = new NoViableAltException("", 24, 0, input); throw nvae; } switch (alt24) { case 1 : // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1353:9: '\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\"' | '\\'' | '\\\\' ) { match('\\'); if ( input.LA(1)=='\"'||input.LA(1)=='\''||input.LA(1)=='\\'||input.LA(1)=='b'||input.LA(1)=='f'||input.LA(1)=='n'||input.LA(1)=='r'||input.LA(1)=='t' ) { input.consume(); // depends on control dependency: [if], data = [none] } else { MismatchedSetException mse = new MismatchedSetException(null,input); recover(mse); // depends on control dependency: [if], data = [none] throw mse; } } break; case 2 : // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1354:9: UnicodeEscape { mUnicodeEscape(); } break; case 3 : // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1355:9: OctalEscape { mOctalEscape(); } break; } } finally { // do for sure before leaving } } }
public class class_name { private InputStream openCustomSortOrderFile() throws FileNotFoundException { InputStream inputStream; try { inputStream = new FileInputStream(customSortOrderFile); } catch (FileNotFoundException ex) { // try classpath try { URL resource = this.getClass().getClassLoader().getResource(customSortOrderFile); if (resource == null) { throw new IOException("Cannot find resource"); } inputStream = resource.openStream(); } catch (IOException e1) { throw new FileNotFoundException(String.format("Could not find %s or %s in classpath", new File( customSortOrderFile).getAbsolutePath(), customSortOrderFile)); } } return inputStream; } }
public class class_name { private InputStream openCustomSortOrderFile() throws FileNotFoundException { InputStream inputStream; try { inputStream = new FileInputStream(customSortOrderFile); } catch (FileNotFoundException ex) { // try classpath try { URL resource = this.getClass().getClassLoader().getResource(customSortOrderFile); if (resource == null) { throw new IOException("Cannot find resource"); } inputStream = resource.openStream(); // depends on control dependency: [try], data = [none] } catch (IOException e1) { throw new FileNotFoundException(String.format("Could not find %s or %s in classpath", new File( customSortOrderFile).getAbsolutePath(), customSortOrderFile)); } // depends on control dependency: [catch], data = [none] } return inputStream; } }
public class class_name { protected String[] getArgNames() { if (args == null) { return new String[]{}; } return args.keySet().toArray(new String[args.keySet().size()]); } }
public class class_name { protected String[] getArgNames() { if (args == null) { return new String[]{}; // depends on control dependency: [if], data = [none] } return args.keySet().toArray(new String[args.keySet().size()]); } }
public class class_name { public org.grails.datastore.mapping.query.api.Criteria geProperty(String propertyName, String otherPropertyName) { if (!validateSimpleExpression()) { throwRuntimeException(new IllegalArgumentException("Call to [geProperty] with propertyName [" + propertyName + "] and other property name [" + otherPropertyName + "] not allowed here.")); } propertyName = calculatePropertyName(propertyName); otherPropertyName = calculatePropertyName(otherPropertyName); addToCriteria(Restrictions.geProperty(propertyName, otherPropertyName)); return this; } }
public class class_name { public org.grails.datastore.mapping.query.api.Criteria geProperty(String propertyName, String otherPropertyName) { if (!validateSimpleExpression()) { throwRuntimeException(new IllegalArgumentException("Call to [geProperty] with propertyName [" + propertyName + "] and other property name [" + otherPropertyName + "] not allowed here.")); // depends on control dependency: [if], data = [none] } propertyName = calculatePropertyName(propertyName); otherPropertyName = calculatePropertyName(otherPropertyName); addToCriteria(Restrictions.geProperty(propertyName, otherPropertyName)); return this; } }
public class class_name { private void fireMultipleLinePropertyParsedEvent(String name, String[] value) { MultipleLinePropertyParsedEvent _event = new MultipleLinePropertyParsedEvent(name, value); for (PropertiesParsingListener _listener : getListeners()) { _listener.onMultipleLinePropertyParsed(_event); } } }
public class class_name { private void fireMultipleLinePropertyParsedEvent(String name, String[] value) { MultipleLinePropertyParsedEvent _event = new MultipleLinePropertyParsedEvent(name, value); for (PropertiesParsingListener _listener : getListeners()) { _listener.onMultipleLinePropertyParsed(_event); // depends on control dependency: [for], data = [_listener] } } }
public class class_name { public PollingResult poll( TaskListener listener ) { SCM scm = getScm(); if (scm==null) { listener.getLogger().println(Messages.AbstractProject_NoSCM()); return NO_CHANGES; } if (!isBuildable()) { listener.getLogger().println(Messages.AbstractProject_Disabled()); return NO_CHANGES; } SCMDecisionHandler veto = SCMDecisionHandler.firstShouldPollVeto(this); if (veto != null) { listener.getLogger().println(Messages.AbstractProject_PollingVetoed(veto)); return NO_CHANGES; } R lb = getLastBuild(); if (lb==null) { listener.getLogger().println(Messages.AbstractProject_NoBuilds()); return isInQueue() ? NO_CHANGES : BUILD_NOW; } if (pollingBaseline==null) { R success = getLastSuccessfulBuild(); // if we have a persisted baseline, we'll find it by this for (R r=lb; r!=null; r=r.getPreviousBuild()) { SCMRevisionState s = r.getAction(SCMRevisionState.class); if (s!=null) { pollingBaseline = s; break; } if (r==success) break; // searched far enough } // NOTE-NO-BASELINE: // if we don't have baseline yet, it means the data is built by old Hudson that doesn't set the baseline // as action, so we need to compute it. This happens later. } try { SCMPollListener.fireBeforePolling(this, listener); PollingResult r = _poll(listener, scm); SCMPollListener.firePollingSuccess(this,listener, r); return r; } catch (AbortException e) { listener.getLogger().println(e.getMessage()); listener.fatalError(Messages.AbstractProject_Aborted()); LOGGER.log(Level.FINE, "Polling "+this+" aborted",e); SCMPollListener.firePollingFailed(this, listener,e); return NO_CHANGES; } catch (IOException e) { Functions.printStackTrace(e, listener.fatalError(e.getMessage())); SCMPollListener.firePollingFailed(this, listener,e); return NO_CHANGES; } catch (InterruptedException e) { Functions.printStackTrace(e, listener.fatalError(Messages.AbstractProject_PollingABorted())); SCMPollListener.firePollingFailed(this, listener,e); return NO_CHANGES; } catch (RuntimeException e) { SCMPollListener.firePollingFailed(this, listener,e); throw e; } catch (Error e) { SCMPollListener.firePollingFailed(this, listener,e); throw e; } } }
public class class_name { public PollingResult poll( TaskListener listener ) { SCM scm = getScm(); if (scm==null) { listener.getLogger().println(Messages.AbstractProject_NoSCM()); // depends on control dependency: [if], data = [none] return NO_CHANGES; // depends on control dependency: [if], data = [none] } if (!isBuildable()) { listener.getLogger().println(Messages.AbstractProject_Disabled()); // depends on control dependency: [if], data = [none] return NO_CHANGES; // depends on control dependency: [if], data = [none] } SCMDecisionHandler veto = SCMDecisionHandler.firstShouldPollVeto(this); if (veto != null) { listener.getLogger().println(Messages.AbstractProject_PollingVetoed(veto)); // depends on control dependency: [if], data = [(veto] return NO_CHANGES; // depends on control dependency: [if], data = [none] } R lb = getLastBuild(); if (lb==null) { listener.getLogger().println(Messages.AbstractProject_NoBuilds()); // depends on control dependency: [if], data = [none] return isInQueue() ? NO_CHANGES : BUILD_NOW; // depends on control dependency: [if], data = [none] } if (pollingBaseline==null) { R success = getLastSuccessfulBuild(); // if we have a persisted baseline, we'll find it by this for (R r=lb; r!=null; r=r.getPreviousBuild()) { SCMRevisionState s = r.getAction(SCMRevisionState.class); if (s!=null) { pollingBaseline = s; // depends on control dependency: [if], data = [none] break; } if (r==success) break; // searched far enough } // NOTE-NO-BASELINE: // if we don't have baseline yet, it means the data is built by old Hudson that doesn't set the baseline // as action, so we need to compute it. This happens later. } try { SCMPollListener.fireBeforePolling(this, listener); // depends on control dependency: [try], data = [none] PollingResult r = _poll(listener, scm); SCMPollListener.firePollingSuccess(this,listener, r); // depends on control dependency: [try], data = [none] return r; // depends on control dependency: [try], data = [none] } catch (AbortException e) { listener.getLogger().println(e.getMessage()); listener.fatalError(Messages.AbstractProject_Aborted()); LOGGER.log(Level.FINE, "Polling "+this+" aborted",e); SCMPollListener.firePollingFailed(this, listener,e); return NO_CHANGES; } catch (IOException e) { // depends on control dependency: [catch], data = [none] Functions.printStackTrace(e, listener.fatalError(e.getMessage())); SCMPollListener.firePollingFailed(this, listener,e); return NO_CHANGES; } catch (InterruptedException e) { // depends on control dependency: [catch], data = [none] Functions.printStackTrace(e, listener.fatalError(Messages.AbstractProject_PollingABorted())); SCMPollListener.firePollingFailed(this, listener,e); return NO_CHANGES; } catch (RuntimeException e) { // depends on control dependency: [catch], data = [none] SCMPollListener.firePollingFailed(this, listener,e); throw e; } catch (Error e) { // depends on control dependency: [catch], data = [none] SCMPollListener.firePollingFailed(this, listener,e); throw e; } // depends on control dependency: [catch], data = [none] } }
public class class_name { @Override public int countByCWI_CPIU(long commerceWarehouseId, String CPInstanceUuid) { FinderPath finderPath = FINDER_PATH_COUNT_BY_CWI_CPIU; Object[] finderArgs = new Object[] { commerceWarehouseId, CPInstanceUuid }; Long count = (Long)finderCache.getResult(finderPath, finderArgs, this); if (count == null) { StringBundler query = new StringBundler(3); query.append(_SQL_COUNT_COMMERCEWAREHOUSEITEM_WHERE); query.append(_FINDER_COLUMN_CWI_CPIU_COMMERCEWAREHOUSEID_2); boolean bindCPInstanceUuid = false; if (CPInstanceUuid == null) { query.append(_FINDER_COLUMN_CWI_CPIU_CPINSTANCEUUID_1); } else if (CPInstanceUuid.equals("")) { query.append(_FINDER_COLUMN_CWI_CPIU_CPINSTANCEUUID_3); } else { bindCPInstanceUuid = true; query.append(_FINDER_COLUMN_CWI_CPIU_CPINSTANCEUUID_2); } String sql = query.toString(); Session session = null; try { session = openSession(); Query q = session.createQuery(sql); QueryPos qPos = QueryPos.getInstance(q); qPos.add(commerceWarehouseId); if (bindCPInstanceUuid) { qPos.add(CPInstanceUuid); } count = (Long)q.uniqueResult(); finderCache.putResult(finderPath, finderArgs, count); } catch (Exception e) { finderCache.removeResult(finderPath, finderArgs); throw processException(e); } finally { closeSession(session); } } return count.intValue(); } }
public class class_name { @Override public int countByCWI_CPIU(long commerceWarehouseId, String CPInstanceUuid) { FinderPath finderPath = FINDER_PATH_COUNT_BY_CWI_CPIU; Object[] finderArgs = new Object[] { commerceWarehouseId, CPInstanceUuid }; Long count = (Long)finderCache.getResult(finderPath, finderArgs, this); if (count == null) { StringBundler query = new StringBundler(3); query.append(_SQL_COUNT_COMMERCEWAREHOUSEITEM_WHERE); // depends on control dependency: [if], data = [none] query.append(_FINDER_COLUMN_CWI_CPIU_COMMERCEWAREHOUSEID_2); // depends on control dependency: [if], data = [none] boolean bindCPInstanceUuid = false; if (CPInstanceUuid == null) { query.append(_FINDER_COLUMN_CWI_CPIU_CPINSTANCEUUID_1); // depends on control dependency: [if], data = [none] } else if (CPInstanceUuid.equals("")) { query.append(_FINDER_COLUMN_CWI_CPIU_CPINSTANCEUUID_3); // depends on control dependency: [if], data = [none] } else { bindCPInstanceUuid = true; // depends on control dependency: [if], data = [none] query.append(_FINDER_COLUMN_CWI_CPIU_CPINSTANCEUUID_2); // depends on control dependency: [if], data = [none] } String sql = query.toString(); Session session = null; try { session = openSession(); // depends on control dependency: [try], data = [none] Query q = session.createQuery(sql); QueryPos qPos = QueryPos.getInstance(q); qPos.add(commerceWarehouseId); // depends on control dependency: [try], data = [none] if (bindCPInstanceUuid) { qPos.add(CPInstanceUuid); // depends on control dependency: [if], data = [none] } count = (Long)q.uniqueResult(); // depends on control dependency: [try], data = [none] finderCache.putResult(finderPath, finderArgs, count); // depends on control dependency: [try], data = [none] } catch (Exception e) { finderCache.removeResult(finderPath, finderArgs); throw processException(e); } // depends on control dependency: [catch], data = [none] finally { closeSession(session); } } return count.intValue(); } }
public class class_name { public String unCapitalize(final Object target) { if (target == null) { return null; } return StringUtils.unCapitalize(target); } }
public class class_name { public String unCapitalize(final Object target) { if (target == null) { return null; // depends on control dependency: [if], data = [none] } return StringUtils.unCapitalize(target); } }
public class class_name { private void shortcutChanged(String shortcut, boolean unbind) { Set<String> bindings = new HashSet<>(); bindings.add(shortcut); for (BaseUIComponent component : componentBindings) { CommandUtil.updateShortcuts(component, bindings, unbind); } } }
public class class_name { private void shortcutChanged(String shortcut, boolean unbind) { Set<String> bindings = new HashSet<>(); bindings.add(shortcut); for (BaseUIComponent component : componentBindings) { CommandUtil.updateShortcuts(component, bindings, unbind); // depends on control dependency: [for], data = [component] } } }
public class class_name { protected float getPropertyBoost(InternalQName propertyName) { if (indexingConfig == null) { return DEFAULT_BOOST; } else { return indexingConfig.getPropertyBoost(node, propertyName); } } }
public class class_name { protected float getPropertyBoost(InternalQName propertyName) { if (indexingConfig == null) { return DEFAULT_BOOST; // depends on control dependency: [if], data = [none] } else { return indexingConfig.getPropertyBoost(node, propertyName); // depends on control dependency: [if], data = [none] } } }
public class class_name { public static String toPrettyPrintedString(final JSONObject jsonObject) { ArgumentChecker.notNull(jsonObject, "jsonObject"); try { return jsonObject.toString(JSON_INDENT) + LINE_SEPARATOR; } catch (JSONException ex) { s_logger.error("Problem converting JSONObject to String", ex); throw new QuandlRuntimeException("Problem converting JSONObject to String", ex); } } }
public class class_name { public static String toPrettyPrintedString(final JSONObject jsonObject) { ArgumentChecker.notNull(jsonObject, "jsonObject"); try { return jsonObject.toString(JSON_INDENT) + LINE_SEPARATOR; // depends on control dependency: [try], data = [none] } catch (JSONException ex) { s_logger.error("Problem converting JSONObject to String", ex); throw new QuandlRuntimeException("Problem converting JSONObject to String", ex); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public final double mmToDocUom(final double pValue, final EUnitOfMeasure pUom) { if (pUom.equals(EUnitOfMeasure.INCH)) { return pValue / 25.4; } return pValue; } }
public class class_name { public final double mmToDocUom(final double pValue, final EUnitOfMeasure pUom) { if (pUom.equals(EUnitOfMeasure.INCH)) { return pValue / 25.4; // depends on control dependency: [if], data = [none] } return pValue; } }
public class class_name { public String getMappedFormatterConfiguration(String id) { CmsUUID resultId = m_formatterConfigMap.get(new CmsUUID(id)); if (resultId == null) { return null; } return resultId.toString(); } }
public class class_name { public String getMappedFormatterConfiguration(String id) { CmsUUID resultId = m_formatterConfigMap.get(new CmsUUID(id)); if (resultId == null) { return null; // depends on control dependency: [if], data = [none] } return resultId.toString(); } }
public class class_name { public com.google.protobuf.ByteString getUfsUriBytes() { java.lang.Object ref = ufsUri_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); ufsUri_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } }
public class class_name { public com.google.protobuf.ByteString getUfsUriBytes() { java.lang.Object ref = ufsUri_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); ufsUri_ = b; // depends on control dependency: [if], data = [none] return b; // depends on control dependency: [if], data = [none] } else { return (com.google.protobuf.ByteString) ref; // depends on control dependency: [if], data = [none] } } }
public class class_name { public static String extractKeyHead(String key, String delimeter) { int index = key.indexOf(delimeter); if (index == -1) { return key; } String result = key.substring(0, index); return result; } }
public class class_name { public static String extractKeyHead(String key, String delimeter) { int index = key.indexOf(delimeter); if (index == -1) { return key; // depends on control dependency: [if], data = [none] } String result = key.substring(0, index); return result; } }
public class class_name { public ISynchronizationPoint<IOException> start(String rootNamespaceURI, String rootLocalName, Map<String, String> namespaces) { if (includeXMLDeclaration) { writer.write(XML_DECLARATION_START); writer.write(output.getEncoding().name()); writer.write(XML_DECLARATION_END); } writer.write('<'); if (rootNamespaceURI != null) { String ns = namespaces.get(rootNamespaceURI); if (ns != null && ns.length() > 0) { writer.write(ns); writer.write(':'); } } ISynchronizationPoint<IOException> result = writer.write(rootLocalName); if (namespaces != null && !namespaces.isEmpty()) { for (Map.Entry<String, String> ns : namespaces.entrySet()) { writer.write(XMLNS); if (ns.getValue().length() > 0) { writer.write(':'); writer.write(ns.getValue()); } writer.write(ATTRIBUTE_EQUALS); writer.write(escape(ns.getKey())); result = writer.write('"'); } } Context ctx = new Context(); ctx.namespaces = new HashMap<>(); if (namespaces != null) ctx.namespaces.putAll(namespaces); ctx.namespaceURI = rootNamespaceURI; ctx.localName = rootLocalName; ctx.open = true; context.addFirst(ctx); return result; } }
public class class_name { public ISynchronizationPoint<IOException> start(String rootNamespaceURI, String rootLocalName, Map<String, String> namespaces) { if (includeXMLDeclaration) { writer.write(XML_DECLARATION_START); // depends on control dependency: [if], data = [none] writer.write(output.getEncoding().name()); // depends on control dependency: [if], data = [none] writer.write(XML_DECLARATION_END); // depends on control dependency: [if], data = [none] } writer.write('<'); if (rootNamespaceURI != null) { String ns = namespaces.get(rootNamespaceURI); if (ns != null && ns.length() > 0) { writer.write(ns); // depends on control dependency: [if], data = [(ns] writer.write(':'); // depends on control dependency: [if], data = [none] } } ISynchronizationPoint<IOException> result = writer.write(rootLocalName); if (namespaces != null && !namespaces.isEmpty()) { for (Map.Entry<String, String> ns : namespaces.entrySet()) { writer.write(XMLNS); // depends on control dependency: [for], data = [none] if (ns.getValue().length() > 0) { writer.write(':'); // depends on control dependency: [if], data = [none] writer.write(ns.getValue()); // depends on control dependency: [if], data = [none] } writer.write(ATTRIBUTE_EQUALS); // depends on control dependency: [for], data = [none] writer.write(escape(ns.getKey())); // depends on control dependency: [for], data = [ns] result = writer.write('"'); // depends on control dependency: [for], data = [none] } } Context ctx = new Context(); ctx.namespaces = new HashMap<>(); if (namespaces != null) ctx.namespaces.putAll(namespaces); ctx.namespaceURI = rootNamespaceURI; ctx.localName = rootLocalName; ctx.open = true; context.addFirst(ctx); return result; } }
public class class_name { public void marshall(DeleteLicenseConfigurationRequest deleteLicenseConfigurationRequest, ProtocolMarshaller protocolMarshaller) { if (deleteLicenseConfigurationRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(deleteLicenseConfigurationRequest.getLicenseConfigurationArn(), LICENSECONFIGURATIONARN_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(DeleteLicenseConfigurationRequest deleteLicenseConfigurationRequest, ProtocolMarshaller protocolMarshaller) { if (deleteLicenseConfigurationRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(deleteLicenseConfigurationRequest.getLicenseConfigurationArn(), LICENSECONFIGURATIONARN_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { private void resetXAResourceTimeout() { // Do a reset of the underlying XAResource timeout if (!(xaResource instanceof LocalXAResource) && xaResourceTimeout > 0) { try { xaResource.setTransactionTimeout(xaResourceTimeout); } catch (XAException e) { log.debugf(e, "Exception during resetXAResourceTimeout for %s", this); } } } }
public class class_name { private void resetXAResourceTimeout() { // Do a reset of the underlying XAResource timeout if (!(xaResource instanceof LocalXAResource) && xaResourceTimeout > 0) { try { xaResource.setTransactionTimeout(xaResourceTimeout); // depends on control dependency: [try], data = [none] } catch (XAException e) { log.debugf(e, "Exception during resetXAResourceTimeout for %s", this); } // depends on control dependency: [catch], data = [none] } } }
public class class_name { public static int select(long w, int j) { int part1 = (int) (w & 0xFFFFFFFF); int wfirsthalf = Integer.bitCount(part1); if (wfirsthalf > j) { return select(part1, j); } else { return select((int) (w >>> 32), j - wfirsthalf) + 32; } } }
public class class_name { public static int select(long w, int j) { int part1 = (int) (w & 0xFFFFFFFF); int wfirsthalf = Integer.bitCount(part1); if (wfirsthalf > j) { return select(part1, j); // depends on control dependency: [if], data = [j)] } else { return select((int) (w >>> 32), j - wfirsthalf) + 32; // depends on control dependency: [if], data = [none] } } }
public class class_name { private void checkIndex() throws IOException { boolean isUpToDate = true; if (!isUpToDate()) { closeFile(); isUpToDate = false; } if (zipRandomFile != null || isUpToDate) { lastReferenceTimeStamp = System.currentTimeMillis(); return; } hasPopulatedData = true; if (readIndex()) { lastReferenceTimeStamp = System.currentTimeMillis(); return; } directories = Collections.<RelativeDirectory, DirectoryEntry>emptyMap(); allDirs = Collections.<RelativeDirectory>emptySet(); try { openFile(); long totalLength = zipRandomFile.length(); ZipDirectory directory = new ZipDirectory(zipRandomFile, 0L, totalLength, this); directory.buildIndex(); } finally { if (zipRandomFile != null) { closeFile(); } } lastReferenceTimeStamp = System.currentTimeMillis(); } }
public class class_name { private void checkIndex() throws IOException { boolean isUpToDate = true; if (!isUpToDate()) { closeFile(); isUpToDate = false; } if (zipRandomFile != null || isUpToDate) { lastReferenceTimeStamp = System.currentTimeMillis(); return; } hasPopulatedData = true; if (readIndex()) { lastReferenceTimeStamp = System.currentTimeMillis(); return; } directories = Collections.<RelativeDirectory, DirectoryEntry>emptyMap(); allDirs = Collections.<RelativeDirectory>emptySet(); try { openFile(); long totalLength = zipRandomFile.length(); ZipDirectory directory = new ZipDirectory(zipRandomFile, 0L, totalLength, this); directory.buildIndex(); } finally { if (zipRandomFile != null) { closeFile(); // depends on control dependency: [if], data = [none] } } lastReferenceTimeStamp = System.currentTimeMillis(); } }
public class class_name { private String generateKey(String key, HashMap<String, String[]>[] queryVariables) { StringBuilder newKey = new StringBuilder(key); newKey.append(" -"); for (int q = 0; q < queryVariables.length; q++) { if (queryVariables[q] != null && queryVariables[q].size() > 0) { for (String name : queryVariables[q].keySet()) { newKey.append(" q" + q + ":$" + name + "="); if (queryVariables[q].get(name) != null && queryVariables[q].get(name).length == 1) { newKey.append("'" + queryVariables[q].get(name)[0] .replace("\\", "\\\\").replace(",", "\\,") + "'"); } else { newKey.append("-"); } } } } return newKey.toString(); } }
public class class_name { private String generateKey(String key, HashMap<String, String[]>[] queryVariables) { StringBuilder newKey = new StringBuilder(key); newKey.append(" -"); for (int q = 0; q < queryVariables.length; q++) { if (queryVariables[q] != null && queryVariables[q].size() > 0) { for (String name : queryVariables[q].keySet()) { newKey.append(" q" + q + ":$" + name + "="); // depends on control dependency: [for], data = [name] if (queryVariables[q].get(name) != null && queryVariables[q].get(name).length == 1) { newKey.append("'" + queryVariables[q].get(name)[0] .replace("\\", "\\\\").replace(",", "\\,") + "'"); // depends on control dependency: [if], data = [none] } else { newKey.append("-"); // depends on control dependency: [if], data = [none] } } } } return newKey.toString(); } }
public class class_name { public boolean removeBusHalt(String name) { Iterator<BusItineraryHalt> iterator = this.validHalts.iterator(); BusItineraryHalt bushalt; int i = 0; while (iterator.hasNext()) { bushalt = iterator.next(); if (name.equals(bushalt.getName())) { return removeBusHalt(i); } ++i; } iterator = this.invalidHalts.iterator(); i = 0; while (iterator.hasNext()) { bushalt = iterator.next(); if (name.equals(bushalt.getName())) { return removeBusHalt(i); } ++i; } return false; } }
public class class_name { public boolean removeBusHalt(String name) { Iterator<BusItineraryHalt> iterator = this.validHalts.iterator(); BusItineraryHalt bushalt; int i = 0; while (iterator.hasNext()) { bushalt = iterator.next(); // depends on control dependency: [while], data = [none] if (name.equals(bushalt.getName())) { return removeBusHalt(i); // depends on control dependency: [if], data = [none] } ++i; // depends on control dependency: [while], data = [none] } iterator = this.invalidHalts.iterator(); i = 0; while (iterator.hasNext()) { bushalt = iterator.next(); // depends on control dependency: [while], data = [none] if (name.equals(bushalt.getName())) { return removeBusHalt(i); // depends on control dependency: [if], data = [none] } ++i; // depends on control dependency: [while], data = [none] } return false; } }
public class class_name { private void recover(ReadableLogRecord logRecord) throws LogCorruptedException, InternalLogException { if (tc.isEntryEnabled()) Tr.entry(tc, "recover", new Object[] { this, logRecord }); // If the parent recovery log instance has experienced a serious internal error then prevent // this operation from executing. if (_recLog.failed()) { if (tc.isEntryEnabled()) Tr.exit(tc, "recover", this); throw new InternalLogException(null); } try { // Read the record type field. short recordType = logRecord.getShort(); if (recordType == RecoverableUnitImpl.RECORDTYPEDELETED) { // This record is a marker to indicate that the recoverable unit was deleted at this point // in its lifecycle (ie its lifecycle in relation to the data contained in the recovery // log before and after the deletion record). In order to support re-use of a recoverable // unit identity before a keypoint operaiton occurs and old data is deleted from the recovery // log, we must delete the existing recoverable unit. This ensures that it does not get // confused with later instance that uses the same identity value. if (tc.isDebugEnabled()) Tr.debug(tc, "This is a DELETION record. Deleting RecoverableUnit from map"); _recLog.removeRecoverableUnitMapEntries(_identity); // PI68664 - if the RU being deleted contains any RU Sections then the DataItem recovery will have // caused payload to be added which increments the _totalDataSize of the log. // We must set this back down here. this.removeDuringLogRead(); } else { // This record is not a deletion record. It contains new data to be recovered for the recoverable // unit. Decode the record accordingly. if (tc.isDebugEnabled()) Tr.debug(tc, "This is a NORMAL record. Decoding contents"); // Determine the identity of the next section. Ideally, we would decode the entire // recoverable unit section from within the RecoverableUnitSectionImpl class, rather // than decoding part of it here. Unfortunatly, we must determine if this class // already knows about this recoverable unit section and if not create it and place // it into the _recoverableUnitSections map. This means that we must decode both its // identity and 'singleData' flag. int recoverableUnitSectionIdentity = logRecord.getInt(); while (recoverableUnitSectionIdentity != END_OF_SECTIONS) { // This is a real recoverable unit section record and not just the marker // to indicate that there are no further recoverable unit sections stored // within the record. if (tc.isDebugEnabled()) Tr.debug(tc, "Recovering RecoverableUnitSection '" + recoverableUnitSectionIdentity + "'"); // Read the 'record type' field. Currently this is not used but is provided for future compatibility. // It would be used to distinguish between a 'normal' write and a 'delete' write. The latter would be used // to remove the recoverable unit section from the recovery log. Ignore this field for now. logRecord.getShort(); // Determine if this section can hold multiple data items. final boolean singleData = logRecord.getBoolean(); if (tc.isDebugEnabled()) { if (singleData) { Tr.debug(tc, "RecoverableUnitSection can hold only a single data item"); } else { Tr.debug(tc, "RecoverableUnitSection can hold multiple data items"); } } // Determine if the identity has been encountered before and either lookup or create // the corrisponding recoverable unit section. RecoverableUnitSectionImpl recoverableUnitSection = (RecoverableUnitSectionImpl) _recoverableUnitSections.get(new Integer(recoverableUnitSectionIdentity)); if (recoverableUnitSection == null) { if (tc.isDebugEnabled()) Tr.debug(tc, "RecoverableUnitSection " + recoverableUnitSectionIdentity + " has not been encountered before. Creating."); try { recoverableUnitSection = (RecoverableUnitSectionImpl) createSection(recoverableUnitSectionIdentity, singleData); } catch (RecoverableUnitSectionExistsException exc) { // This exception should not be generated in practice as we are in the single threaded // recovery process and have already checked that the RecoverableUnitSection does not // exist. If this exception was actually generated then ignore it - it simply indicates // that the creation has failed as the section has already been created. Given that // creation is the goal, this does not seem to be a problem. FFDCFilter.processException(exc, "com.ibm.ws.recoverylog.spi.RecoverableUnitImpl.recover", "713", this); } catch (InternalLogException exc) { FFDCFilter.processException(exc, "com.ibm.ws.recoverylog.spi.RecoverableUnitImpl.recover", "717", this); if (tc.isDebugEnabled()) Tr.debug(tc, "An unexpected exception occured when attempting to create a new RecoverableUnitSection"); throw exc; // Caught in this method further down. } } else { if (tc.isDebugEnabled()) Tr.debug(tc, "RecoverableUnitSection " + recoverableUnitSectionIdentity + " has been encountered before."); } // Direct the recoverable unit section to recover further information from the log record. recoverableUnitSection.recover(logRecord); // Since this information has been recovered from disk it has been "written to the log". Record this fact // so that any subsequent deletion of the recoverable unit will cause a deletion record to be written // to disk. _storedOnDisk = true; // Retrieve the identity of the next recoverable unit section. This may be the 'END_OF_SECTIONS' // marker and hence indicate that there are no further recoverable unit sections to process // in this record. recoverableUnitSectionIdentity = logRecord.getInt(); } } } catch (LogCorruptedException exc) { FFDCFilter.processException(exc, "com.ibm.ws.recoverylog.spi.RecoverableUnitImpl.recover", "740", this); if (tc.isDebugEnabled()) Tr.debug(tc, "A LogCorruptedException exception occured reconstructng a RecoverableUnitImpl"); _recLog.markFailed(exc); /* @MD19484C */ if (tc.isEntryEnabled()) Tr.exit(tc, "recover", exc); throw exc; } catch (InternalLogException exc) { FFDCFilter.processException(exc, "com.ibm.ws.recoverylog.spi.RecoverableUnitImpl.recover", "747", this); if (tc.isDebugEnabled()) Tr.debug(tc, "An InternalLogException exception occured reconstructng a RecoverableUnitImpl"); _recLog.markFailed(exc); /* @MD19484C */ if (tc.isEntryEnabled()) Tr.exit(tc, "recover", exc); throw exc; } catch (Throwable exc) { FFDCFilter.processException(exc, "com.ibm.ws.recoverylog.spi.RecoverableUnitImpl.recover", "753", this); if (tc.isDebugEnabled()) Tr.debug(tc, "An exception occured reconstructng a RecoverableUnitImpl"); _recLog.markFailed(exc); /* @MD19484C */ if (tc.isEntryEnabled()) Tr.exit(tc, "recover", "InternalLogException"); throw new InternalLogException(exc); } if (tc.isEntryEnabled()) Tr.exit(tc, "recover"); } }
public class class_name { private void recover(ReadableLogRecord logRecord) throws LogCorruptedException, InternalLogException { if (tc.isEntryEnabled()) Tr.entry(tc, "recover", new Object[] { this, logRecord }); // If the parent recovery log instance has experienced a serious internal error then prevent // this operation from executing. if (_recLog.failed()) { if (tc.isEntryEnabled()) Tr.exit(tc, "recover", this); throw new InternalLogException(null); } try { // Read the record type field. short recordType = logRecord.getShort(); if (recordType == RecoverableUnitImpl.RECORDTYPEDELETED) { // This record is a marker to indicate that the recoverable unit was deleted at this point // in its lifecycle (ie its lifecycle in relation to the data contained in the recovery // log before and after the deletion record). In order to support re-use of a recoverable // unit identity before a keypoint operaiton occurs and old data is deleted from the recovery // log, we must delete the existing recoverable unit. This ensures that it does not get // confused with later instance that uses the same identity value. if (tc.isDebugEnabled()) Tr.debug(tc, "This is a DELETION record. Deleting RecoverableUnit from map"); _recLog.removeRecoverableUnitMapEntries(_identity); // depends on control dependency: [if], data = [none] // PI68664 - if the RU being deleted contains any RU Sections then the DataItem recovery will have // caused payload to be added which increments the _totalDataSize of the log. // We must set this back down here. this.removeDuringLogRead(); // depends on control dependency: [if], data = [none] } else { // This record is not a deletion record. It contains new data to be recovered for the recoverable // unit. Decode the record accordingly. if (tc.isDebugEnabled()) Tr.debug(tc, "This is a NORMAL record. Decoding contents"); // Determine the identity of the next section. Ideally, we would decode the entire // recoverable unit section from within the RecoverableUnitSectionImpl class, rather // than decoding part of it here. Unfortunatly, we must determine if this class // already knows about this recoverable unit section and if not create it and place // it into the _recoverableUnitSections map. This means that we must decode both its // identity and 'singleData' flag. int recoverableUnitSectionIdentity = logRecord.getInt(); while (recoverableUnitSectionIdentity != END_OF_SECTIONS) { // This is a real recoverable unit section record and not just the marker // to indicate that there are no further recoverable unit sections stored // within the record. if (tc.isDebugEnabled()) Tr.debug(tc, "Recovering RecoverableUnitSection '" + recoverableUnitSectionIdentity + "'"); // Read the 'record type' field. Currently this is not used but is provided for future compatibility. // It would be used to distinguish between a 'normal' write and a 'delete' write. The latter would be used // to remove the recoverable unit section from the recovery log. Ignore this field for now. logRecord.getShort(); // depends on control dependency: [while], data = [none] // Determine if this section can hold multiple data items. final boolean singleData = logRecord.getBoolean(); if (tc.isDebugEnabled()) { if (singleData) { Tr.debug(tc, "RecoverableUnitSection can hold only a single data item"); // depends on control dependency: [if], data = [none] } else { Tr.debug(tc, "RecoverableUnitSection can hold multiple data items"); // depends on control dependency: [if], data = [none] } } // Determine if the identity has been encountered before and either lookup or create // the corrisponding recoverable unit section. RecoverableUnitSectionImpl recoverableUnitSection = (RecoverableUnitSectionImpl) _recoverableUnitSections.get(new Integer(recoverableUnitSectionIdentity)); if (recoverableUnitSection == null) { if (tc.isDebugEnabled()) Tr.debug(tc, "RecoverableUnitSection " + recoverableUnitSectionIdentity + " has not been encountered before. Creating."); try { recoverableUnitSection = (RecoverableUnitSectionImpl) createSection(recoverableUnitSectionIdentity, singleData); } catch (RecoverableUnitSectionExistsException exc) // depends on control dependency: [while], data = [none] { // This exception should not be generated in practice as we are in the single threaded // recovery process and have already checked that the RecoverableUnitSection does not // exist. If this exception was actually generated then ignore it - it simply indicates // that the creation has failed as the section has already been created. Given that // creation is the goal, this does not seem to be a problem. FFDCFilter.processException(exc, "com.ibm.ws.recoverylog.spi.RecoverableUnitImpl.recover", "713", this); } catch (InternalLogException exc) // depends on control dependency: [while], data = [none] { FFDCFilter.processException(exc, "com.ibm.ws.recoverylog.spi.RecoverableUnitImpl.recover", "717", this); if (tc.isDebugEnabled()) Tr.debug(tc, "An unexpected exception occured when attempting to create a new RecoverableUnitSection"); throw exc; // Caught in this method further down. } } else // depends on control dependency: [if], data = [none] { if (tc.isDebugEnabled()) Tr.debug(tc, "RecoverableUnitSection " + recoverableUnitSectionIdentity + " has been encountered before."); } // Direct the recoverable unit section to recover further information from the log record. recoverableUnitSection.recover(logRecord); // depends on control dependency: [if], data = [none] // Since this information has been recovered from disk it has been "written to the log". Record this fact // so that any subsequent deletion of the recoverable unit will cause a deletion record to be written // to disk. _storedOnDisk = true; // depends on control dependency: [if], data = [none] // Retrieve the identity of the next recoverable unit section. This may be the 'END_OF_SECTIONS' // marker and hence indicate that there are no further recoverable unit sections to process // in this record. recoverableUnitSectionIdentity = logRecord.getInt(); // depends on control dependency: [if], data = [none] } } } catch (LogCorruptedException exc) { FFDCFilter.processException(exc, "com.ibm.ws.recoverylog.spi.RecoverableUnitImpl.recover", "740", this); if (tc.isDebugEnabled()) Tr.debug(tc, "A LogCorruptedException exception occured reconstructng a RecoverableUnitImpl"); _recLog.markFailed(exc); /* @MD19484C */ if (tc.isEntryEnabled()) Tr.exit(tc, "recover", exc); throw exc; } catch (InternalLogException exc) // depends on control dependency: [catch], data = [none] { FFDCFilter.processException(exc, "com.ibm.ws.recoverylog.spi.RecoverableUnitImpl.recover", "747", this); if (tc.isDebugEnabled()) Tr.debug(tc, "An InternalLogException exception occured reconstructng a RecoverableUnitImpl"); _recLog.markFailed(exc); /* @MD19484C */ if (tc.isEntryEnabled()) Tr.exit(tc, "recover", exc); throw exc; } catch (Throwable exc) // depends on control dependency: [catch], data = [none] { FFDCFilter.processException(exc, "com.ibm.ws.recoverylog.spi.RecoverableUnitImpl.recover", "753", this); if (tc.isDebugEnabled()) Tr.debug(tc, "An exception occured reconstructng a RecoverableUnitImpl"); _recLog.markFailed(exc); /* @MD19484C */ if (tc.isEntryEnabled()) Tr.exit(tc, "recover", "InternalLogException"); throw new InternalLogException(exc); } // depends on control dependency: [catch], data = [none] if (tc.isEntryEnabled()) Tr.exit(tc, "recover"); } }
public class class_name { public static boolean hasSideEffect(ExpressionTree expression) { if (expression == null) { return false; } SideEffectAnalysis analyzer = new SideEffectAnalysis(); expression.accept(analyzer, null); return analyzer.hasSideEffect; } }
public class class_name { public static boolean hasSideEffect(ExpressionTree expression) { if (expression == null) { return false; // depends on control dependency: [if], data = [none] } SideEffectAnalysis analyzer = new SideEffectAnalysis(); expression.accept(analyzer, null); return analyzer.hasSideEffect; } }
public class class_name { public com.google.appengine.v1.AutomaticScalingOrBuilder getAutomaticScalingOrBuilder() { if (scalingCase_ == 3) { return (com.google.appengine.v1.AutomaticScaling) scaling_; } return com.google.appengine.v1.AutomaticScaling.getDefaultInstance(); } }
public class class_name { public com.google.appengine.v1.AutomaticScalingOrBuilder getAutomaticScalingOrBuilder() { if (scalingCase_ == 3) { return (com.google.appengine.v1.AutomaticScaling) scaling_; // depends on control dependency: [if], data = [none] } return com.google.appengine.v1.AutomaticScaling.getDefaultInstance(); } }
public class class_name { public boolean readThisClass( String strClassName) { try { Record recClassInfo = this.getMainRecord(); recClassInfo.getField(ClassInfo.CLASS_NAME).setString(strClassName); recClassInfo.setKeyArea(ClassInfo.CLASS_NAME_KEY); return recClassInfo.seek("="); // Get this class record back } catch (DBException ex) { ex.printStackTrace(); return false; } } }
public class class_name { public boolean readThisClass( String strClassName) { try { Record recClassInfo = this.getMainRecord(); recClassInfo.getField(ClassInfo.CLASS_NAME).setString(strClassName); // depends on control dependency: [try], data = [none] recClassInfo.setKeyArea(ClassInfo.CLASS_NAME_KEY); // depends on control dependency: [try], data = [none] return recClassInfo.seek("="); // Get this class record back // depends on control dependency: [try], data = [none] } catch (DBException ex) { ex.printStackTrace(); return false; } // depends on control dependency: [catch], data = [none] } }
public class class_name { @Override public void createTableIfDoesNotExist(final String tableName, final String... colFamily) throws MasterNotRunningException, IOException { if (!admin.tableExists(Bytes.toBytes(tableName))) { HTableDescriptor htDescriptor = new HTableDescriptor(TableName.valueOf(tableName)); for (String columnFamily : colFamily) { HColumnDescriptor familyMetadata = new HColumnDescriptor(columnFamily); htDescriptor.addFamily(familyMetadata); } admin.createTable(htDescriptor); } } }
public class class_name { @Override public void createTableIfDoesNotExist(final String tableName, final String... colFamily) throws MasterNotRunningException, IOException { if (!admin.tableExists(Bytes.toBytes(tableName))) { HTableDescriptor htDescriptor = new HTableDescriptor(TableName.valueOf(tableName)); for (String columnFamily : colFamily) { HColumnDescriptor familyMetadata = new HColumnDescriptor(columnFamily); htDescriptor.addFamily(familyMetadata); // depends on control dependency: [for], data = [none] } admin.createTable(htDescriptor); } } }
public class class_name { private static void generateSelectiveCommColumnTo(XmlElement element, IntrospectedColumn introspectedColumn, String prefix, int type) { switch (type) { case 3: List<Element> incrementEles = PluginTools.getHook(IIncrementsPluginHook.class).incrementSetElementGenerated(introspectedColumn, prefix, true); if (!incrementEles.isEmpty()) { // 增量插件支持 for (Element ele : incrementEles) { element.addElement(ele); } } else { element.addElement(new TextElement(MyBatis3FormattingUtilities.getEscapedColumnName(introspectedColumn) + " = " + MyBatis3FormattingUtilities.getParameterClause(introspectedColumn, prefix) + ",")); } break; case 2: element.addElement(new TextElement(MyBatis3FormattingUtilities.getParameterClause(introspectedColumn, prefix) + ",")); break; case 1: element.addElement(new TextElement(MyBatis3FormattingUtilities.getEscapedColumnName(introspectedColumn) + ",")); break; } } }
public class class_name { private static void generateSelectiveCommColumnTo(XmlElement element, IntrospectedColumn introspectedColumn, String prefix, int type) { switch (type) { case 3: List<Element> incrementEles = PluginTools.getHook(IIncrementsPluginHook.class).incrementSetElementGenerated(introspectedColumn, prefix, true); if (!incrementEles.isEmpty()) { // 增量插件支持 for (Element ele : incrementEles) { element.addElement(ele); // depends on control dependency: [for], data = [ele] } } else { element.addElement(new TextElement(MyBatis3FormattingUtilities.getEscapedColumnName(introspectedColumn) + " = " + MyBatis3FormattingUtilities.getParameterClause(introspectedColumn, prefix) + ",")); // depends on control dependency: [if], data = [none] } break; case 2: element.addElement(new TextElement(MyBatis3FormattingUtilities.getParameterClause(introspectedColumn, prefix) + ",")); break; case 1: element.addElement(new TextElement(MyBatis3FormattingUtilities.getEscapedColumnName(introspectedColumn) + ",")); break; } } }
public class class_name { private static String getVersion() { final String defaultVersion = "dev-" + System.currentTimeMillis(); final String fileName = "bigtable-version.properties"; final String versionProperty = "bigtable.version"; try (InputStream stream = BigtableVersionInfo.class.getResourceAsStream(fileName)) { if (stream == null) { LOG.error("Could not load properties file bigtable-version.properties"); return defaultVersion; } Properties properties = new Properties(); properties.load(stream); String value = properties.getProperty(versionProperty); if (value == null) { LOG.error("%s not found in %s.", versionProperty, fileName); } else if (value.startsWith("$")){ LOG.info("%s property is not replaced.", versionProperty); } else { return value; } } catch (IOException e) { LOG.error("Error while trying to get user agent name from %s", e, fileName); } return defaultVersion; } }
public class class_name { private static String getVersion() { final String defaultVersion = "dev-" + System.currentTimeMillis(); final String fileName = "bigtable-version.properties"; final String versionProperty = "bigtable.version"; try (InputStream stream = BigtableVersionInfo.class.getResourceAsStream(fileName)) { if (stream == null) { LOG.error("Could not load properties file bigtable-version.properties"); // depends on control dependency: [if], data = [none] return defaultVersion; // depends on control dependency: [if], data = [none] } Properties properties = new Properties(); properties.load(stream); String value = properties.getProperty(versionProperty); if (value == null) { LOG.error("%s not found in %s.", versionProperty, fileName); // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] } else if (value.startsWith("$")){ LOG.info("%s property is not replaced.", versionProperty); // depends on control dependency: [if], data = [none] } else { return value; // depends on control dependency: [if], data = [none] } } catch (IOException e) { LOG.error("Error while trying to get user agent name from %s", e, fileName); } return defaultVersion; } }
public class class_name { private boolean containsValue(StatementGroup statementGroup, Value value) { for (Statement s : statementGroup) { if (value.equals(s.getValue())) { return true; } } return false; } }
public class class_name { private boolean containsValue(StatementGroup statementGroup, Value value) { for (Statement s : statementGroup) { if (value.equals(s.getValue())) { return true; // depends on control dependency: [if], data = [none] } } return false; } }
public class class_name { public double[] update( double w, double c1, double rand1, double c2, double rand2, double[] globalBest ) { for( int i = 0; i < locations.length; i++ ) { particleVelocities[i] = w * particleVelocities[i] + // c1 * rand1 * (particleLocalBests[i] - locations[i]) + // c2 * rand2 * (globalBest[i] - locations[i]); double tmpLocation = locations[i] + particleVelocities[i]; /* * if the location falls outside the ranges, it should * not be moved. */ tmpLocations[i] = tmpLocation; } if (!PSEngine.parametersInRange(tmpLocations, ranges)) { // System.out.println("PRE-TMPLOCATIONS: " + Arrays.toString(tmpLocations)); // System.out.println("LOCATIONS: " + Arrays.toString(locations)); /* * mirror the value back */ for( int i = 0; i < tmpLocations.length; i++ ) { double min = ranges[i][0]; double max = ranges[i][1]; if (tmpLocations[i] > max) { double tmp = max - (tmpLocations[i] - max); if (tmp < min) { tmp = max; } locations[i] = tmp; } else if (tmpLocations[i] < min) { double tmp = min + (min - tmpLocations[i]); if (tmp > max) { tmp = min; } locations[i] = tmp; } else { locations[i] = tmpLocations[i]; } } // System.out.println("POST-LOCATIONS: " + Arrays.toString(locations)); // System.out.println("VELOCITIES: " + Arrays.toString(particleVelocities)); return null; } else { for( int i = 0; i < locations.length; i++ ) { locations[i] = tmpLocations[i]; } return locations; } } }
public class class_name { public double[] update( double w, double c1, double rand1, double c2, double rand2, double[] globalBest ) { for( int i = 0; i < locations.length; i++ ) { particleVelocities[i] = w * particleVelocities[i] + // c1 * rand1 * (particleLocalBests[i] - locations[i]) + // c2 * rand2 * (globalBest[i] - locations[i]); // depends on control dependency: [for], data = [i] double tmpLocation = locations[i] + particleVelocities[i]; /* * if the location falls outside the ranges, it should * not be moved. */ tmpLocations[i] = tmpLocation; // depends on control dependency: [for], data = [i] } if (!PSEngine.parametersInRange(tmpLocations, ranges)) { // System.out.println("PRE-TMPLOCATIONS: " + Arrays.toString(tmpLocations)); // System.out.println("LOCATIONS: " + Arrays.toString(locations)); /* * mirror the value back */ for( int i = 0; i < tmpLocations.length; i++ ) { double min = ranges[i][0]; double max = ranges[i][1]; if (tmpLocations[i] > max) { double tmp = max - (tmpLocations[i] - max); if (tmp < min) { tmp = max; // depends on control dependency: [if], data = [none] } locations[i] = tmp; // depends on control dependency: [if], data = [none] } else if (tmpLocations[i] < min) { double tmp = min + (min - tmpLocations[i]); if (tmp > max) { tmp = min; // depends on control dependency: [if], data = [none] } locations[i] = tmp; // depends on control dependency: [if], data = [none] } else { locations[i] = tmpLocations[i]; // depends on control dependency: [if], data = [none] } } // System.out.println("POST-LOCATIONS: " + Arrays.toString(locations)); // System.out.println("VELOCITIES: " + Arrays.toString(particleVelocities)); return null; // depends on control dependency: [if], data = [none] } else { for( int i = 0; i < locations.length; i++ ) { locations[i] = tmpLocations[i]; // depends on control dependency: [for], data = [i] } return locations; // depends on control dependency: [if], data = [none] } } }
public class class_name { public static String ucFirst(String str) { if (str == null) return null; else if (str.length() <= 1) return str.toUpperCase(); else { return str.substring(0, 1).toUpperCase() + str.substring(1); } } }
public class class_name { public static String ucFirst(String str) { if (str == null) return null; else if (str.length() <= 1) return str.toUpperCase(); else { return str.substring(0, 1).toUpperCase() + str.substring(1); // depends on control dependency: [if], data = [none] } } }
public class class_name { private void pldltpt() throws Exception{ //matrix S will be changed by the factorization, ad we do not want to change the matrix passed in by the client DoubleMatrix2D S = (rescaler==null)? this.Q.copy() : this.Q; int n = S.rows(); DoubleMatrix2D A = S.copy(); this.P = DoubleFactory2D.sparse.identity(n); this.D = DoubleFactory2D.sparse.make(n, n); this.L = DoubleFactory2D.sparse.make(n, n); DoubleMatrix2D LT = ALG.transpose(L);//remove and work only with L int s = 0; for(int j=0; j<n; j++){ //log.debug("j: " + j); DoubleMatrix2D LPart = null; DoubleMatrix2D LTPart = null; DoubleMatrix2D APart = null; DoubleMatrix2D DPart = null; double ajj = A.getQuick(j, j); if(Math.abs(ajj) > 1.e-16){ //1 x pivot D.setQuick(j, j, ajj); s = 1; }else{ //ajj = 0, so the 2x2 matrix with ajj in its upper left position //is non singular if a(j+1, j)=a(j, j+1) !=0 int k=-1; for (int r = j + 1; r < n; r++) { if (Math.abs(A.getQuick(r, j)) > 1.e-16) { k = r; break; } } if(k<0){ throw new Exception("singular matrix"); } //Symmetrically permute row/column k to position j + 1. A = ColtUtils.symmPermutation(A, k, j+1); P.setQuick(k, k, 0); P.setQuick(j + 1, j + 1, 0); P.setQuick(k, j + 1, 1); P.setQuick(j + 1, k, 1); //Choose a 2 � 2 pivot, //D(j:j+1)(j:j+1) = A(j:j+1)(j:j+1) D.setQuick(j, j, A.getQuick(j, j)); D.setQuick(j, j+1, A.getQuick(j, j+1)); D.setQuick(j+1, j, A.getQuick(j+1, j)); D.setQuick(j+1, j+1, A.getQuick(j+1, j+1)); s=2; } //log.debug("s: " + s); // L(j:n)(j:j+s-1) = A(j:n)(j:j+s-1).DInv(j:j+s-1)(j:j+s-1) APart = A.viewPart(j, j, n - j, 1 + s - 1); DPart = ALG.inverse(D.viewPart(j, j, 1 + s - 1, 1 + s - 1)); LPart = L.viewPart(j, j, n - j, 1 + s - 1); DoubleMatrix2D AD = ALG.mult(APart, DPart); for (int r = 0; r < LPart.rows(); r++) { for (int c = 0; c < LPart.columns(); c++) { LPart.setQuick(r, c, AD.getQuick(r, c)); } } // A(j+s-1:n)(j+s-1:n) = A(j+s-1:n)(j+s-1:n) - L(j+s-1:n)(j:j+s-1).D(j:j+s-1)(j:j+s-1).LT(j:j+s-1)(j+s-1:n) LPart = L.viewPart(j + s - 1, j, n - (j + s - 1), 1 + s - 1); DPart = D.viewPart(j, j, 1 + s - 1, 1 + s - 1); LTPart = LT.viewPart(j, j + s - 1, s, n - (j + s - 1)); APart = A.viewPart(j + s - 1, j + s - 1, n - (j + s - 1), n - (j + s - 1)); DoubleMatrix2D LDLT = ALG.mult(LPart, ALG.mult(DPart, LTPart)); for (int r = 0; r < APart.rows(); r++) { for (int c = 0; c < APart.columns(); c++) { APart.setQuick(r, c, APart.getQuick(r, c) - LDLT.getQuick(r, c)); } } //logger.debug("L: " + ArrayUtils.toString(L.toArray())); //logger.debug("A: " + ArrayUtils.toString(A.toArray())); j = j + s - 1; } } }
public class class_name { private void pldltpt() throws Exception{ //matrix S will be changed by the factorization, ad we do not want to change the matrix passed in by the client DoubleMatrix2D S = (rescaler==null)? this.Q.copy() : this.Q; int n = S.rows(); DoubleMatrix2D A = S.copy(); this.P = DoubleFactory2D.sparse.identity(n); this.D = DoubleFactory2D.sparse.make(n, n); this.L = DoubleFactory2D.sparse.make(n, n); DoubleMatrix2D LT = ALG.transpose(L);//remove and work only with L int s = 0; for(int j=0; j<n; j++){ //log.debug("j: " + j); DoubleMatrix2D LPart = null; DoubleMatrix2D LTPart = null; DoubleMatrix2D APart = null; DoubleMatrix2D DPart = null; double ajj = A.getQuick(j, j); if(Math.abs(ajj) > 1.e-16){ //1 x pivot D.setQuick(j, j, ajj); s = 1; }else{ //ajj = 0, so the 2x2 matrix with ajj in its upper left position //is non singular if a(j+1, j)=a(j, j+1) !=0 int k=-1; for (int r = j + 1; r < n; r++) { if (Math.abs(A.getQuick(r, j)) > 1.e-16) { k = r; // depends on control dependency: [if], data = [none] break; } } if(k<0){ throw new Exception("singular matrix"); } //Symmetrically permute row/column k to position j + 1. A = ColtUtils.symmPermutation(A, k, j+1); P.setQuick(k, k, 0); P.setQuick(j + 1, j + 1, 0); P.setQuick(k, j + 1, 1); P.setQuick(j + 1, k, 1); //Choose a 2 � 2 pivot, //D(j:j+1)(j:j+1) = A(j:j+1)(j:j+1) D.setQuick(j, j, A.getQuick(j, j)); D.setQuick(j, j+1, A.getQuick(j, j+1)); D.setQuick(j+1, j, A.getQuick(j+1, j)); D.setQuick(j+1, j+1, A.getQuick(j+1, j+1)); s=2; } //log.debug("s: " + s); // L(j:n)(j:j+s-1) = A(j:n)(j:j+s-1).DInv(j:j+s-1)(j:j+s-1) APart = A.viewPart(j, j, n - j, 1 + s - 1); DPart = ALG.inverse(D.viewPart(j, j, 1 + s - 1, 1 + s - 1)); LPart = L.viewPart(j, j, n - j, 1 + s - 1); DoubleMatrix2D AD = ALG.mult(APart, DPart); for (int r = 0; r < LPart.rows(); r++) { for (int c = 0; c < LPart.columns(); c++) { LPart.setQuick(r, c, AD.getQuick(r, c)); } } // A(j+s-1:n)(j+s-1:n) = A(j+s-1:n)(j+s-1:n) - L(j+s-1:n)(j:j+s-1).D(j:j+s-1)(j:j+s-1).LT(j:j+s-1)(j+s-1:n) LPart = L.viewPart(j + s - 1, j, n - (j + s - 1), 1 + s - 1); DPart = D.viewPart(j, j, 1 + s - 1, 1 + s - 1); LTPart = LT.viewPart(j, j + s - 1, s, n - (j + s - 1)); APart = A.viewPart(j + s - 1, j + s - 1, n - (j + s - 1), n - (j + s - 1)); DoubleMatrix2D LDLT = ALG.mult(LPart, ALG.mult(DPart, LTPart)); for (int r = 0; r < APart.rows(); r++) { for (int c = 0; c < APart.columns(); c++) { APart.setQuick(r, c, APart.getQuick(r, c) - LDLT.getQuick(r, c)); } } //logger.debug("L: " + ArrayUtils.toString(L.toArray())); //logger.debug("A: " + ArrayUtils.toString(A.toArray())); j = j + s - 1; } } }
public class class_name { @Override public <S extends ResourceSnapshot> S resolve( final Class<? extends S> snapshotClass, final Individual<?,?> individual) { checkNotNull(snapshotClass,"Resource snapshot class cannot be null"); checkNotNull(individual,"Individual cannot be null"); checkState(this.status.equals(Status.ACTIVE),WRITE_SESSION_NOT_ACTIVE,this.status); S result=null; ResourceId id = getIdentifier(individual); if(id!=null) { result=find(snapshotClass,id); } return result; } }
public class class_name { @Override public <S extends ResourceSnapshot> S resolve( final Class<? extends S> snapshotClass, final Individual<?,?> individual) { checkNotNull(snapshotClass,"Resource snapshot class cannot be null"); checkNotNull(individual,"Individual cannot be null"); checkState(this.status.equals(Status.ACTIVE),WRITE_SESSION_NOT_ACTIVE,this.status); S result=null; ResourceId id = getIdentifier(individual); if(id!=null) { result=find(snapshotClass,id); // depends on control dependency: [if], data = [none] } return result; } }
public class class_name { public static Interceptor[] build(Intercept annotation) { if (annotation == null) { return null; } Class<? extends Interceptor>[] interceptorClasses = annotation.value(); int count = interceptorClasses.length; if (count == 0) { return null; } final List<Interceptor> interceptorList = new ArrayList<>(interceptorClasses.length); for (int i = 0; i < count; i++) { Interceptor interceptor = pool.get(interceptorClasses[i]); if (interceptor == null) { try { interceptor = interceptorClasses[i].newInstance(); log.debug("New Interceptor: [{}]", interceptor.getClass().getName()); pool.put(interceptorClasses[i], interceptor); } catch (Exception e) { log.error(e, "Init interceptor error!"); continue; } } if(null != interceptor){ interceptorList.add(interceptor); } } return interceptorList.toArray(new Interceptor[interceptorList.size()]); } }
public class class_name { public static Interceptor[] build(Intercept annotation) { if (annotation == null) { return null; // depends on control dependency: [if], data = [none] } Class<? extends Interceptor>[] interceptorClasses = annotation.value(); int count = interceptorClasses.length; if (count == 0) { return null; // depends on control dependency: [if], data = [none] } final List<Interceptor> interceptorList = new ArrayList<>(interceptorClasses.length); for (int i = 0; i < count; i++) { Interceptor interceptor = pool.get(interceptorClasses[i]); if (interceptor == null) { try { interceptor = interceptorClasses[i].newInstance(); // depends on control dependency: [try], data = [none] log.debug("New Interceptor: [{}]", interceptor.getClass().getName()); // depends on control dependency: [try], data = [none] pool.put(interceptorClasses[i], interceptor); // depends on control dependency: [try], data = [none] } catch (Exception e) { log.error(e, "Init interceptor error!"); continue; } // depends on control dependency: [catch], data = [none] } if(null != interceptor){ interceptorList.add(interceptor); // depends on control dependency: [if], data = [interceptor)] } } return interceptorList.toArray(new Interceptor[interceptorList.size()]); } }
public class class_name { @Override public void encode(final BsonWriter writer, final Instant value, final EncoderContext encoderContext) { try { writer.writeDateTime(value.toEpochMilli()); } catch (ArithmeticException e) { throw new CodecConfigurationException(format("Unsupported Instant value '%s' could not be converted to milliseconds: %s", value, e.getMessage()), e); } } }
public class class_name { @Override public void encode(final BsonWriter writer, final Instant value, final EncoderContext encoderContext) { try { writer.writeDateTime(value.toEpochMilli()); // depends on control dependency: [try], data = [none] } catch (ArithmeticException e) { throw new CodecConfigurationException(format("Unsupported Instant value '%s' could not be converted to milliseconds: %s", value, e.getMessage()), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public static boolean isInAggregate(String aggr, String str) { if (aggr != null && str != null) { str += "1"; for (int i = 0; i < str.length(); i++) { String s = str.substring(i, i + 1); if (aggr.indexOf(s) == -1) return false; } return true; } return false; } }
public class class_name { public static boolean isInAggregate(String aggr, String str) { if (aggr != null && str != null) { str += "1"; // depends on control dependency: [if], data = [none] for (int i = 0; i < str.length(); i++) { String s = str.substring(i, i + 1); if (aggr.indexOf(s) == -1) return false; } return true; // depends on control dependency: [if], data = [none] } return false; } }
public class class_name { @Override public String indexValue(String name, Object value) { // Check not null if (value == null) { return null; } // Parse big decimal String svalue = value.toString(); BigDecimal bd; try { bd = new BigDecimal(value.toString()); } catch (NumberFormatException e) { String message = String.format("Field %s requires a base 10 decimal, but found \"%s\"", name, svalue); throw new IllegalArgumentException(message); } // Split integer and decimal part bd = bd.stripTrailingZeros(); String[] parts = bd.toPlainString().split("\\."); String integerPart = parts[0]; String decimalPart = parts.length == 1 ? "0" : parts[1]; if (integerPart.replaceFirst("-", "").length() > integerDigits) { throw new IllegalArgumentException("Too much digits in integer part"); } if (decimalPart.length() > decimalDigits) { throw new IllegalArgumentException("Too much digits in decimal part"); } BigDecimal complemented = bd.add(complement); String bds[] = complemented.toString().split("\\."); integerPart = bds[0]; decimalPart = bds.length == 2 ? bds[1] : "0"; integerPart = StringUtils.leftPad(integerPart, integerDigits + 1, '0'); return integerPart + "." + decimalPart; } }
public class class_name { @Override public String indexValue(String name, Object value) { // Check not null if (value == null) { return null; // depends on control dependency: [if], data = [none] } // Parse big decimal String svalue = value.toString(); BigDecimal bd; try { bd = new BigDecimal(value.toString()); // depends on control dependency: [try], data = [none] } catch (NumberFormatException e) { String message = String.format("Field %s requires a base 10 decimal, but found \"%s\"", name, svalue); throw new IllegalArgumentException(message); } // depends on control dependency: [catch], data = [none] // Split integer and decimal part bd = bd.stripTrailingZeros(); String[] parts = bd.toPlainString().split("\\."); String integerPart = parts[0]; String decimalPart = parts.length == 1 ? "0" : parts[1]; if (integerPart.replaceFirst("-", "").length() > integerDigits) { throw new IllegalArgumentException("Too much digits in integer part"); } if (decimalPart.length() > decimalDigits) { throw new IllegalArgumentException("Too much digits in decimal part"); } BigDecimal complemented = bd.add(complement); String bds[] = complemented.toString().split("\\."); integerPart = bds[0]; decimalPart = bds.length == 2 ? bds[1] : "0"; integerPart = StringUtils.leftPad(integerPart, integerDigits + 1, '0'); return integerPart + "." + decimalPart; } }
public class class_name { @SuppressWarnings("MethodLength") public String build() { sb.setLength(0); if (null != prefix && !"".equals(prefix)) { sb.append(prefix).append(':'); } sb.append(ChannelUri.AERON_SCHEME).append(':').append(media).append('?'); if (null != tags) { sb.append(TAGS_PARAM_NAME).append('=').append(tags).append('|'); } if (null != endpoint) { sb.append(ENDPOINT_PARAM_NAME).append('=').append(endpoint).append('|'); } if (null != networkInterface) { sb.append(INTERFACE_PARAM_NAME).append('=').append(networkInterface).append('|'); } if (null != controlEndpoint) { sb.append(MDC_CONTROL_PARAM_NAME).append('=').append(controlEndpoint).append('|'); } if (null != controlMode) { sb.append(MDC_CONTROL_MODE_PARAM_NAME).append('=').append(controlMode).append('|'); } if (null != mtu) { sb.append(MTU_LENGTH_PARAM_NAME).append('=').append(mtu.intValue()).append('|'); } if (null != termLength) { sb.append(TERM_LENGTH_PARAM_NAME).append('=').append(termLength.intValue()).append('|'); } if (null != initialTermId) { sb.append(INITIAL_TERM_ID_PARAM_NAME).append('=').append(initialTermId.intValue()).append('|'); } if (null != termId) { sb.append(TERM_ID_PARAM_NAME).append('=').append(termId.intValue()).append('|'); } if (null != termOffset) { sb.append(TERM_OFFSET_PARAM_NAME).append('=').append(termOffset.intValue()).append('|'); } if (null != sessionId) { sb.append(SESSION_ID_PARAM_NAME).append('=').append(prefixTag(isSessionIdTagged, sessionId)).append('|'); } if (null != ttl) { sb.append(TTL_PARAM_NAME).append('=').append(ttl.intValue()).append('|'); } if (null != reliable) { sb.append(RELIABLE_STREAM_PARAM_NAME).append('=').append(reliable).append('|'); } if (null != linger) { sb.append(LINGER_PARAM_NAME).append('=').append(linger.intValue()).append('|'); } if (null != alias) { sb.append(ALIAS_PARAM_NAME).append('=').append(alias).append('|'); } if (null != sparse) { sb.append(SPARSE_PARAM_NAME).append('=').append(sparse).append('|'); } if (null != eos) { sb.append(EOS_PARAM_NAME).append('=').append(eos).append('|'); } if (null != tether) { sb.append(TETHER_PARAM_NAME).append('=').append(tether).append('|'); } final char lastChar = sb.charAt(sb.length() - 1); if (lastChar == '|' || lastChar == '?') { sb.setLength(sb.length() - 1); } return sb.toString(); } }
public class class_name { @SuppressWarnings("MethodLength") public String build() { sb.setLength(0); if (null != prefix && !"".equals(prefix)) { sb.append(prefix).append(':'); // depends on control dependency: [if], data = [none] } sb.append(ChannelUri.AERON_SCHEME).append(':').append(media).append('?'); if (null != tags) { sb.append(TAGS_PARAM_NAME).append('=').append(tags).append('|'); // depends on control dependency: [if], data = [tags)] } if (null != endpoint) { sb.append(ENDPOINT_PARAM_NAME).append('=').append(endpoint).append('|'); // depends on control dependency: [if], data = [endpoint)] } if (null != networkInterface) { sb.append(INTERFACE_PARAM_NAME).append('=').append(networkInterface).append('|'); // depends on control dependency: [if], data = [networkInterface)] } if (null != controlEndpoint) { sb.append(MDC_CONTROL_PARAM_NAME).append('=').append(controlEndpoint).append('|'); // depends on control dependency: [if], data = [controlEndpoint)] } if (null != controlMode) { sb.append(MDC_CONTROL_MODE_PARAM_NAME).append('=').append(controlMode).append('|'); // depends on control dependency: [if], data = [controlMode)] } if (null != mtu) { sb.append(MTU_LENGTH_PARAM_NAME).append('=').append(mtu.intValue()).append('|'); // depends on control dependency: [if], data = [none] } if (null != termLength) { sb.append(TERM_LENGTH_PARAM_NAME).append('=').append(termLength.intValue()).append('|'); // depends on control dependency: [if], data = [none] } if (null != initialTermId) { sb.append(INITIAL_TERM_ID_PARAM_NAME).append('=').append(initialTermId.intValue()).append('|'); // depends on control dependency: [if], data = [none] } if (null != termId) { sb.append(TERM_ID_PARAM_NAME).append('=').append(termId.intValue()).append('|'); // depends on control dependency: [if], data = [none] } if (null != termOffset) { sb.append(TERM_OFFSET_PARAM_NAME).append('=').append(termOffset.intValue()).append('|'); // depends on control dependency: [if], data = [none] } if (null != sessionId) { sb.append(SESSION_ID_PARAM_NAME).append('=').append(prefixTag(isSessionIdTagged, sessionId)).append('|'); // depends on control dependency: [if], data = [sessionId)] } if (null != ttl) { sb.append(TTL_PARAM_NAME).append('=').append(ttl.intValue()).append('|'); // depends on control dependency: [if], data = [none] } if (null != reliable) { sb.append(RELIABLE_STREAM_PARAM_NAME).append('=').append(reliable).append('|'); // depends on control dependency: [if], data = [reliable)] } if (null != linger) { sb.append(LINGER_PARAM_NAME).append('=').append(linger.intValue()).append('|'); // depends on control dependency: [if], data = [none] } if (null != alias) { sb.append(ALIAS_PARAM_NAME).append('=').append(alias).append('|'); // depends on control dependency: [if], data = [alias)] } if (null != sparse) { sb.append(SPARSE_PARAM_NAME).append('=').append(sparse).append('|'); // depends on control dependency: [if], data = [sparse)] } if (null != eos) { sb.append(EOS_PARAM_NAME).append('=').append(eos).append('|'); // depends on control dependency: [if], data = [eos)] } if (null != tether) { sb.append(TETHER_PARAM_NAME).append('=').append(tether).append('|'); // depends on control dependency: [if], data = [tether)] } final char lastChar = sb.charAt(sb.length() - 1); if (lastChar == '|' || lastChar == '?') { sb.setLength(sb.length() - 1); // depends on control dependency: [if], data = [none] } return sb.toString(); } }
public class class_name { public void marshall(GetBotChannelAssociationsRequest getBotChannelAssociationsRequest, ProtocolMarshaller protocolMarshaller) { if (getBotChannelAssociationsRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(getBotChannelAssociationsRequest.getBotName(), BOTNAME_BINDING); protocolMarshaller.marshall(getBotChannelAssociationsRequest.getBotAlias(), BOTALIAS_BINDING); protocolMarshaller.marshall(getBotChannelAssociationsRequest.getNextToken(), NEXTTOKEN_BINDING); protocolMarshaller.marshall(getBotChannelAssociationsRequest.getMaxResults(), MAXRESULTS_BINDING); protocolMarshaller.marshall(getBotChannelAssociationsRequest.getNameContains(), NAMECONTAINS_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(GetBotChannelAssociationsRequest getBotChannelAssociationsRequest, ProtocolMarshaller protocolMarshaller) { if (getBotChannelAssociationsRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(getBotChannelAssociationsRequest.getBotName(), BOTNAME_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(getBotChannelAssociationsRequest.getBotAlias(), BOTALIAS_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(getBotChannelAssociationsRequest.getNextToken(), NEXTTOKEN_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(getBotChannelAssociationsRequest.getMaxResults(), MAXRESULTS_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(getBotChannelAssociationsRequest.getNameContains(), NAMECONTAINS_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public static int listFindIgnoreEmpty(String list, String value, char delimiter) { if (list == null) return -1; int len = list.length(); if (len == 0) return -1; int last = 0; int count = 0; for (int i = 0; i < len; i++) { if (list.charAt(i) == delimiter) { if (last < i) { if (list.substring(last, i).equals(value)) return count; count++; } last = i + 1; } } if (last < len) { if (list.substring(last).equals(value)) return count; } return -1; } }
public class class_name { public static int listFindIgnoreEmpty(String list, String value, char delimiter) { if (list == null) return -1; int len = list.length(); if (len == 0) return -1; int last = 0; int count = 0; for (int i = 0; i < len; i++) { if (list.charAt(i) == delimiter) { if (last < i) { if (list.substring(last, i).equals(value)) return count; count++; // depends on control dependency: [if], data = [none] } last = i + 1; // depends on control dependency: [if], data = [none] } } if (last < len) { if (list.substring(last).equals(value)) return count; } return -1; } }
public class class_name { static BsonDocument sanitizeDocument(final BsonDocument document) { if (document == null) { return null; } if (document.containsKey(DOCUMENT_VERSION_FIELD)) { final BsonDocument clonedDoc = document.clone(); clonedDoc.remove(DOCUMENT_VERSION_FIELD); return clonedDoc; } return document; } }
public class class_name { static BsonDocument sanitizeDocument(final BsonDocument document) { if (document == null) { return null; // depends on control dependency: [if], data = [none] } if (document.containsKey(DOCUMENT_VERSION_FIELD)) { final BsonDocument clonedDoc = document.clone(); clonedDoc.remove(DOCUMENT_VERSION_FIELD); // depends on control dependency: [if], data = [none] return clonedDoc; // depends on control dependency: [if], data = [none] } return document; } }
public class class_name { public static ModuleIdentifier forClosure(String name) { String normalizedName = name; if (normalizedName.startsWith("goog:")) { normalizedName = normalizedName.substring("goog:".length()); } String namespace = normalizedName; String moduleName = normalizedName; int splitPoint = normalizedName.indexOf(':'); if (splitPoint != -1) { moduleName = normalizedName.substring(0, splitPoint); namespace = normalizedName.substring(Math.min(splitPoint + 1, normalizedName.length() - 1)); } return new AutoValue_ModuleIdentifier(normalizedName, namespace, moduleName); } }
public class class_name { public static ModuleIdentifier forClosure(String name) { String normalizedName = name; if (normalizedName.startsWith("goog:")) { normalizedName = normalizedName.substring("goog:".length()); // depends on control dependency: [if], data = [none] } String namespace = normalizedName; String moduleName = normalizedName; int splitPoint = normalizedName.indexOf(':'); if (splitPoint != -1) { moduleName = normalizedName.substring(0, splitPoint); // depends on control dependency: [if], data = [none] namespace = normalizedName.substring(Math.min(splitPoint + 1, normalizedName.length() - 1)); // depends on control dependency: [if], data = [(splitPoint] } return new AutoValue_ModuleIdentifier(normalizedName, namespace, moduleName); } }
public class class_name { public V remove(K key) { final OClosableEntry<K, V> removed = data.remove(key); if (removed != null) { long preStatus = removed.makeRetired(); if (OClosableEntry.isOpen(preStatus)) { countClosedFiles(); } logRemoved(removed); return removed.get(); } return null; } }
public class class_name { public V remove(K key) { final OClosableEntry<K, V> removed = data.remove(key); if (removed != null) { long preStatus = removed.makeRetired(); if (OClosableEntry.isOpen(preStatus)) { countClosedFiles(); // depends on control dependency: [if], data = [none] } logRemoved(removed); // depends on control dependency: [if], data = [(removed] return removed.get(); // depends on control dependency: [if], data = [none] } return null; } }
public class class_name { public void executeInterceptors(final List<IntuitMessage> intuitMessages) throws FMSException { for(IntuitMessage message : intuitMessages) { executeRequestInterceptors(message); } HTTPBatchClientConnectionInterceptor batchConnectionInterceptor = new HTTPBatchClientConnectionInterceptor(); batchConnectionInterceptor.execute(intuitMessages); for(IntuitMessage message : intuitMessages) { try { message.getResponseElements().getResponseContent().reset(); } catch (Exception ex) { LOG.error("IllegalStateException while get the content from HttpRespose.", ex); throw new FMSException(ex); } executeResponseInterceptors(message); } } }
public class class_name { public void executeInterceptors(final List<IntuitMessage> intuitMessages) throws FMSException { for(IntuitMessage message : intuitMessages) { executeRequestInterceptors(message); } HTTPBatchClientConnectionInterceptor batchConnectionInterceptor = new HTTPBatchClientConnectionInterceptor(); batchConnectionInterceptor.execute(intuitMessages); for(IntuitMessage message : intuitMessages) { try { message.getResponseElements().getResponseContent().reset(); // depends on control dependency: [try], data = [none] } catch (Exception ex) { LOG.error("IllegalStateException while get the content from HttpRespose.", ex); throw new FMSException(ex); } // depends on control dependency: [catch], data = [none] executeResponseInterceptors(message); } } }
public class class_name { public void computeValue(DoubleVector values) { if (this.isType()) { setValues(values); double sumDif = 0.0; this.value = this.values.sumOfValues() / (double) values.numValues(); for (int i = 0; i < this.values.numValues(); i++) { double dif = this.values.getValue(i) - this.value; sumDif += Math.pow(dif, 2); } sumDif = sumDif / this.values.numValues(); this.std = Math.sqrt(sumDif); } } }
public class class_name { public void computeValue(DoubleVector values) { if (this.isType()) { setValues(values); // depends on control dependency: [if], data = [none] double sumDif = 0.0; this.value = this.values.sumOfValues() / (double) values.numValues(); // depends on control dependency: [if], data = [none] for (int i = 0; i < this.values.numValues(); i++) { double dif = this.values.getValue(i) - this.value; sumDif += Math.pow(dif, 2); // depends on control dependency: [for], data = [none] } sumDif = sumDif / this.values.numValues(); // depends on control dependency: [if], data = [none] this.std = Math.sqrt(sumDif); // depends on control dependency: [if], data = [none] } } }