Dataset columns:
  code            — string, length 130 to 281k characters (original source snippet)
  code_dependency — string, length 182 to 306k characters (same snippet annotated with control/data dependencies)
public class class_name {
    /**
     * Atomically runs {@code action} against the record stored under {@code key}
     * and returns the action's result wrapped in a promise.
     * If the action yields a {@link ChangeMessage}, that message is forwarded to
     * {@code receive(..)}; otherwise, for an existing record, any updates the
     * action made through the patching wrapper are forwarded instead.
     *
     * @param key    record key to look up in the store
     * @param action function applied to the record (or to {@code null} when no
     *               record exists for the key)
     * @return promise resolved with the action's return value
     */
    public IPromise atomicQuery(String key, RLFunction<Record,Object> action) {
        Record rec = getStore().get(key);
        if ( rec == null ) {
            // no record for this key: the action still runs (receiving null) so it
            // can decide how to handle the missing-record case itself
            final Object apply = action.apply(rec);
            if ( apply instanceof ChangeMessage ) {
                receive( (ChangeMessage) apply ) ;
            }
            return new Promise(apply);
        } else {
            // wrap the record so mutations made by the action are recorded
            // (presumably as an UpdateMessage diff — see getUpdates below)
            PatchingRecord pr = new PatchingRecord(rec);
            final Object res = action.apply(pr);
            if ( res instanceof ChangeMessage ) {
                // explicit change message wins over any recorded patch
                receive( (ChangeMessage) res ) ;
            } else {
                // no explicit message: forward whatever updates were recorded, if any
                UpdateMessage updates = pr.getUpdates(0);
                if (updates != null) {
                    receive(updates);
                }
            }
            return new Promise(res);
        }
    }
}
public class class_name { public IPromise atomicQuery(String key, RLFunction<Record,Object> action) { Record rec = getStore().get(key); if ( rec == null ) { final Object apply = action.apply(rec); if ( apply instanceof ChangeMessage ) { receive( (ChangeMessage) apply ) ; // depends on control dependency: [if], data = [none] } return new Promise(apply); // depends on control dependency: [if], data = [none] } else { PatchingRecord pr = new PatchingRecord(rec); final Object res = action.apply(pr); if ( res instanceof ChangeMessage ) { receive( (ChangeMessage) res ) ; // depends on control dependency: [if], data = [none] } else { UpdateMessage updates = pr.getUpdates(0); if (updates != null) { receive(updates); // depends on control dependency: [if], data = [(updates] } } return new Promise(res); // depends on control dependency: [if], data = [none] } } }
public class class_name {
    /**
     * Serializes this class into the JVM class-file format and returns the raw
     * bytes. Field and attribute sections are intentionally emitted empty (their
     * counts are written as 0); the commented-out code marks where support would go.
     *
     * @return the class-file bytes for this class
     */
    public byte[] toByteArray() {
        // computes the real size of the bytecode of this class
        final int interfaceCount = interfaces.length;
        // 24 = fixed-size header fields; 2 bytes per interface index
        int size = 24 + 2 * interfaceCount;
        // if (fields != null) {
        //     size += fields.length;
        // }
        for (MethodWriter cb : this.methods) {
            size += cb.getSize();
        }
        // int attributeCount = 0;
        // if ((access & Constants.ACC_DEPRECATED) != 0) {
        //     ++attributeCount;
        //     size += 6;
        // }
        // if ((access & Constants.ACC_SYNTHETIC) != 0) {
        //     ++attributeCount;
        //     size += 6;
        // }
        size += pool.length;
        // allocates a byte vector of this size, in order to avoid unnecessary
        // arraycopy operations in the ByteBuffer.enlarge() method
        final ByteBuffer out = new ByteBuffer(size);
        out.putInt(0xCAFEBABE).putInt(version); // class-file magic, then version
        out.putShort(poolIndex).put(pool);      // constant pool count + entries
        out.putShort(access).putShort(name).putShort(superName);
        out.putShort(interfaceCount);
        for (int i = 0; i < interfaceCount; ++i) {
            out.putShort(interfaces[i]);
        }
        out.putShort(0); // field count: this writer emits no fields
        //out.putShort(fieldCount);
        // if (fields != null) {
        //     out.put(fields);
        // }
        out.putShort(this.methods.size());
        for (MethodWriter cb : this.methods) {
            cb.renderTo(out);
        }
        out.putShort(0 /* attributeCount */);
        // if ((access & Constants.ACC_DEPRECATED) != 0) {
        //     out.putShort(newUTF8("Deprecated")).putInt(0);
        // }
        // if ((access & Constants.ACC_SYNTHETIC) != 0) {
        //     out.putShort(newUTF8("Synthetic")).putInt(0);
        // }
        return out.data;
    }
}
public class class_name { public byte[] toByteArray() { // computes the real size of the bytecode of this class final int interfaceCount = interfaces.length; int size = 24 + 2 * interfaceCount; // if (fields != null) { // size += fields.length; // } for (MethodWriter cb : this.methods) { size += cb.getSize(); // depends on control dependency: [for], data = [cb] } // int attributeCount = 0; // if ((access & Constants.ACC_DEPRECATED) != 0) { // ++attributeCount; // size += 6; // } // if ((access & Constants.ACC_SYNTHETIC) != 0) { // ++attributeCount; // size += 6; // } size += pool.length; // allocates a byte vector of this size, in order to avoid unnecessary // arraycopy operations in the ByteBuffer.enlarge() method final ByteBuffer out = new ByteBuffer(size); out.putInt(0xCAFEBABE).putInt(version); out.putShort(poolIndex).put(pool); out.putShort(access).putShort(name).putShort(superName); out.putShort(interfaceCount); for (int i = 0; i < interfaceCount; ++i) { out.putShort(interfaces[i]); // depends on control dependency: [for], data = [i] } out.putShort(0); //out.putShort(fieldCount); // if (fields != null) { // out.put(fields); // } out.putShort(this.methods.size()); for (MethodWriter cb : this.methods) { cb.renderTo(out); // depends on control dependency: [for], data = [cb] } out.putShort(0 /* attributeCount */); // if ((access & Constants.ACC_DEPRECATED) != 0) { // out.putShort(newUTF8("Deprecated")).putInt(0); // } // if ((access & Constants.ACC_SYNTHETIC) != 0) { // out.putShort(newUTF8("Synthetic")).putInt(0); // } return out.data; } }
public class class_name {
    /**
     * Returns the server IP as a string, lazily decoding the underlying
     * protobuf {@code ByteString} on first access and caching the result.
     *
     * @return the UTF-8 decoded server IP
     */
    public java.lang.String getServerIp() {
        java.lang.Object ref = serverIp_;
        if (!(ref instanceof java.lang.String)) {
            // first access: field still holds the raw bytes — decode and cache
            com.google.protobuf.ByteString bytes = (com.google.protobuf.ByteString) ref;
            java.lang.String decoded = bytes.toStringUtf8();
            serverIp_ = decoded;
            return decoded;
        }
        return (java.lang.String) ref;
    }
}
public class class_name { public java.lang.String getServerIp() { java.lang.Object ref = serverIp_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; // depends on control dependency: [if], data = [none] } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); serverIp_ = s; // depends on control dependency: [if], data = [none] return s; // depends on control dependency: [if], data = [none] } } }
public class class_name {
    /**
     * Wires the acker bolt into the topology: registers the acker component,
     * adds ack/fail output streams to every bolt, and adds the init output
     * stream plus direct ack/fail input streams to every spout.
     *
     * @param stormConf topology configuration (read for the acker executor count)
     * @param ret       topology to mutate in place
     */
    public static void add_acker(Map stormConf, StormTopology ret) {
        String key = Config.TOPOLOGY_ACKER_EXECUTORS;
        Integer ackerNum = JStormUtils.parseInt(stormConf.get(key), 0); // defaults to 0 when unset
        // generate outputs
        HashMap<String, StreamInfo> outputs = new HashMap<>();
        ArrayList<String> fields = new ArrayList<>();
        fields.add("id");
        outputs.put(ACKER_ACK_STREAM_ID, Thrift.directOutputFields(fields));
        outputs.put(ACKER_FAIL_STREAM_ID, Thrift.directOutputFields(fields));
        IBolt ackerbolt = new Acker();
        // generate inputs
        Map<GlobalStreamId, Grouping> inputs = acker_inputs(ret);
        // generate acker which will be stored in topology
        Bolt acker_bolt = Thrift.mkBolt(inputs, ackerbolt, outputs, ackerNum);
        // add to every bolt two output streams:
        // ACKER_ACK_STREAM_ID / ACKER_FAIL_STREAM_ID
        for (Entry<String, Bolt> e : ret.get_bolts().entrySet()) {
            Bolt bolt = e.getValue();
            ComponentCommon common = bolt.get_common();
            List<String> ackList = JStormUtils.mk_list("id", "ack-val");
            common.put_to_streams(ACKER_ACK_STREAM_ID, Thrift.outputFields(ackList));
            List<String> failList = JStormUtils.mk_list("id");
            common.put_to_streams(ACKER_FAIL_STREAM_ID, Thrift.outputFields(failList));
            bolt.set_common(common);
        }
        // add to every spout the output stream ACKER_INIT_STREAM_ID
        // and two direct input sources:
        // ((ACKER_COMPONENT_ID, ACKER_ACK_STREAM_ID), directGrouping)
        // ((ACKER_COMPONENT_ID, ACKER_FAIL_STREAM_ID), directGrouping)
        for (Entry<String, SpoutSpec> kv : ret.get_spouts().entrySet()) {
            SpoutSpec bolt = kv.getValue();
            ComponentCommon common = bolt.get_common();
            List<String> initList = JStormUtils.mk_list("id", "init-val", "spout-task");
            common.put_to_streams(ACKER_INIT_STREAM_ID, Thrift.outputFields(initList));
            GlobalStreamId ack_ack = new GlobalStreamId(ACKER_COMPONENT_ID, ACKER_ACK_STREAM_ID);
            common.put_to_inputs(ack_ack, Thrift.mkDirectGrouping());
            GlobalStreamId ack_fail = new GlobalStreamId(ACKER_COMPONENT_ID, ACKER_FAIL_STREAM_ID);
            common.put_to_inputs(ack_fail, Thrift.mkDirectGrouping());
        }
        ret.put_to_bolts(ACKER_COMPONENT_ID, acker_bolt);
    }
}
public class class_name { public static void add_acker(Map stormConf, StormTopology ret) { String key = Config.TOPOLOGY_ACKER_EXECUTORS; Integer ackerNum = JStormUtils.parseInt(stormConf.get(key), 0); // generate outputs HashMap<String, StreamInfo> outputs = new HashMap<>(); ArrayList<String> fields = new ArrayList<>(); fields.add("id"); outputs.put(ACKER_ACK_STREAM_ID, Thrift.directOutputFields(fields)); outputs.put(ACKER_FAIL_STREAM_ID, Thrift.directOutputFields(fields)); IBolt ackerbolt = new Acker(); // generate inputs Map<GlobalStreamId, Grouping> inputs = acker_inputs(ret); // generate acker which will be stored in topology Bolt acker_bolt = Thrift.mkBolt(inputs, ackerbolt, outputs, ackerNum); // add every bolt two output stream // ACKER_ACK_STREAM_ID/ACKER_FAIL_STREAM_ID for (Entry<String, Bolt> e : ret.get_bolts().entrySet()) { Bolt bolt = e.getValue(); ComponentCommon common = bolt.get_common(); List<String> ackList = JStormUtils.mk_list("id", "ack-val"); common.put_to_streams(ACKER_ACK_STREAM_ID, Thrift.outputFields(ackList)); // depends on control dependency: [for], data = [e] List<String> failList = JStormUtils.mk_list("id"); common.put_to_streams(ACKER_FAIL_STREAM_ID, Thrift.outputFields(failList)); // depends on control dependency: [for], data = [e] bolt.set_common(common); // depends on control dependency: [for], data = [e] } // add every spout output stream ACKER_INIT_STREAM_ID // add every spout two intput source // ((ACKER_COMPONENT_ID, ACKER_ACK_STREAM_ID), directGrouping) // ((ACKER_COMPONENT_ID, ACKER_FAIL_STREAM_ID), directGrouping) for (Entry<String, SpoutSpec> kv : ret.get_spouts().entrySet()) { SpoutSpec bolt = kv.getValue(); ComponentCommon common = bolt.get_common(); List<String> initList = JStormUtils.mk_list("id", "init-val", "spout-task"); common.put_to_streams(ACKER_INIT_STREAM_ID, Thrift.outputFields(initList)); // depends on control dependency: [for], data = [none] GlobalStreamId ack_ack = new GlobalStreamId(ACKER_COMPONENT_ID, 
ACKER_ACK_STREAM_ID); common.put_to_inputs(ack_ack, Thrift.mkDirectGrouping()); // depends on control dependency: [for], data = [none] GlobalStreamId ack_fail = new GlobalStreamId(ACKER_COMPONENT_ID, ACKER_FAIL_STREAM_ID); common.put_to_inputs(ack_fail, Thrift.mkDirectGrouping()); // depends on control dependency: [for], data = [none] } ret.put_to_bolts(ACKER_COMPONENT_ID, acker_bolt); } }
public class class_name {
    /**
     * Returns the categorical-stats config when this oneof is set to case 2,
     * otherwise the default instance (never null).
     */
    public com.google.privacy.dlp.v2.PrivacyMetric.CategoricalStatsConfigOrBuilder getCategoricalStatsConfigOrBuilder() {
        if (typeCase_ != 2) {
            // oneof holds a different case — fall back to the default instance
            return com.google.privacy.dlp.v2.PrivacyMetric.CategoricalStatsConfig.getDefaultInstance();
        }
        return (com.google.privacy.dlp.v2.PrivacyMetric.CategoricalStatsConfig) type_;
    }
}
public class class_name { public com.google.privacy.dlp.v2.PrivacyMetric.CategoricalStatsConfigOrBuilder getCategoricalStatsConfigOrBuilder() { if (typeCase_ == 2) { return (com.google.privacy.dlp.v2.PrivacyMetric.CategoricalStatsConfig) type_; // depends on control dependency: [if], data = [none] } return com.google.privacy.dlp.v2.PrivacyMetric.CategoricalStatsConfig.getDefaultInstance(); } }
public class class_name {
    /**
     * Returns true if {@code userKey} does not fall inside the key range of any
     * file in the levels below this compaction's output level (level + 2 and
     * deeper). LevelDB-style check used to decide whether deletion markers can
     * be dropped during compaction.
     *
     * NOTE(review): levelPointers persists across calls, so the scan of each
     * level resumes where the previous call stopped — this appears to assume
     * keys are probed in ascending order; confirm with the caller.
     *
     * @param userKey user key to test
     * @return true when no deeper-level file could contain the key
     */
    public boolean isBaseLevelForKey(Slice userKey) {
        // Maybe use binary search to find right entry instead of linear search?
        UserComparator userComparator = inputVersion.getInternalKeyComparator().getUserComparator();
        for (int level = this.level + 2; level < NUM_LEVELS; level++) {
            List<FileMetaData> files = inputVersion.getFiles(level);
            while (levelPointers[level] < files.size()) {
                FileMetaData f = files.get(levelPointers[level]);
                if (userComparator.compare(userKey, f.getLargest().getUserKey()) <= 0) {
                    // We've advanced far enough
                    if (userComparator.compare(userKey, f.getSmallest().getUserKey()) >= 0) {
                        // Key falls in this file's range, so definitely not base level
                        return false;
                    }
                    break;
                }
                // key is past this file entirely: advance the persistent pointer
                levelPointers[level]++;
            }
        }
        return true;
    }
}
public class class_name { public boolean isBaseLevelForKey(Slice userKey) { // Maybe use binary search to find right entry instead of linear search? UserComparator userComparator = inputVersion.getInternalKeyComparator().getUserComparator(); for (int level = this.level + 2; level < NUM_LEVELS; level++) { List<FileMetaData> files = inputVersion.getFiles(level); while (levelPointers[level] < files.size()) { FileMetaData f = files.get(levelPointers[level]); if (userComparator.compare(userKey, f.getLargest().getUserKey()) <= 0) { // We've advanced far enough if (userComparator.compare(userKey, f.getSmallest().getUserKey()) >= 0) { // Key falls in this file's range, so definitely not base level return false; // depends on control dependency: [if], data = [none] } break; } levelPointers[level]++; // depends on control dependency: [while], data = [none] } } return true; } }
public class class_name {
    /**
     * Marshals the given request's fields into the protocol-specific wire format.
     *
     * @param getCoreDefinitionRequest request to marshall; must not be null
     * @param protocolMarshaller       marshaller that receives the bound fields
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(GetCoreDefinitionRequest getCoreDefinitionRequest, ProtocolMarshaller protocolMarshaller) {
        if (getCoreDefinitionRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(getCoreDefinitionRequest.getCoreDefinitionId(), COREDEFINITIONID_BINDING);
        } catch (Exception e) {
            // wrap any marshalling failure, preserving the original cause
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class class_name { public void marshall(GetCoreDefinitionRequest getCoreDefinitionRequest, ProtocolMarshaller protocolMarshaller) { if (getCoreDefinitionRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(getCoreDefinitionRequest.getCoreDefinitionId(), COREDEFINITIONID_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { @Requires("r != null") public void report(Report r) { if (r.getKind() == Kind.ERROR) { ++errorCount; } reports.add(r); } }
public class class_name { @Requires("r != null") public void report(Report r) { if (r.getKind() == Kind.ERROR) { ++errorCount; // depends on control dependency: [if], data = [none] } reports.add(r); } }
public class class_name {
    /**
     * Listener callback fired when a job execution moves between stages;
     * dispatches the transition to all registered callbacks.
     *
     * @param state         execution state at the time of the transition
     * @param previousStage stage the execution is leaving
     * @param newStage      stage the execution is entering
     */
    @Override
    public void onStageTransition(JobExecutionState state, String previousStage, String newStage) {
        try {
            _dispatcher.execCallbacks(new StageTransitionCallback(state, previousStage, newStage));
        } catch (InterruptedException e) {
            // FIX: restore the interrupt status so callers higher up the stack can
            // still observe the interruption (previously it was silently swallowed).
            Thread.currentThread().interrupt();
            _dispatcher.getLog().warn("onStageTransition interrupted.");
        }
    }
}
public class class_name { @Override public void onStageTransition(JobExecutionState state, String previousStage, String newStage) { try { _dispatcher.execCallbacks(new StageTransitionCallback(state, previousStage, newStage)); // depends on control dependency: [try], data = [none] } catch (InterruptedException e) { _dispatcher.getLog().warn("onStageTransition interrupted."); } // depends on control dependency: [catch], data = [none] } }
public class class_name {
    /**
     * Reacts to a configuration refresh for a LEP script. A null
     * {@code configContent} means the script was deleted; otherwise the script
     * resource is created or updated in place.
     *
     * @param updatedKey    config path identifying the script
     * @param configContent new script content, or null when deleted
     */
    @Override
    public void onRefresh(String updatedKey, String configContent) {
        boolean isDeleted = (configContent == null);
        if (isDeleted) {
            LOGGER.info("LEP script deleted by config path: {}", updatedKey);
            // delete
            scriptResources.remove(updatedKey);
        } else {
            // compute() decides create-vs-update per entry (atomic if
            // scriptResources is a ConcurrentMap — confirm the field type)
            scriptResources.compute(updatedKey, (key, currentValue) -> {
                if (currentValue != null) {
                    LOGGER.info("LEP script updated by config path: {}", updatedKey);
                    // update existing resource in place, stamping current time
                    currentValue.update(configContent, getCurrentMilli());
                    return currentValue;
                } else {
                    LOGGER.info("LEP script created by config path: {}", updatedKey);
                    // create a fresh resource for a previously unknown path
                    return new XmLepScriptResource(updatedKey, configContent, getCurrentMilli());
                }
            });
        }
    }
}
public class class_name { @Override public void onRefresh(String updatedKey, String configContent) { boolean isDeleted = (configContent == null); if (isDeleted) { LOGGER.info("LEP script deleted by config path: {}", updatedKey); // depends on control dependency: [if], data = [none] // delete scriptResources.remove(updatedKey); // depends on control dependency: [if], data = [none] } else { scriptResources.compute(updatedKey, (key, currentValue) -> { if (currentValue != null) { LOGGER.info("LEP script updated by config path: {}", updatedKey); // depends on control dependency: [if], data = [none] // update currentValue.update(configContent, getCurrentMilli()); // depends on control dependency: [if], data = [none] return currentValue; // depends on control dependency: [if], data = [none] } else { LOGGER.info("LEP script created by config path: {}", updatedKey); // depends on control dependency: [if], data = [none] // create return new XmLepScriptResource(updatedKey, configContent, getCurrentMilli()); // depends on control dependency: [if], data = [none] } }); } } }
public class class_name { public static PeriodDuration from(TemporalAmount amount) { if (amount instanceof PeriodDuration) { return (PeriodDuration) amount; } if (amount instanceof Period) { return PeriodDuration.of((Period) amount); } if (amount instanceof Duration) { return PeriodDuration.of((Duration) amount); } if (amount instanceof ChronoPeriod) { if (IsoChronology.INSTANCE.equals(((ChronoPeriod) amount).getChronology()) == false) { throw new DateTimeException("Period requires ISO chronology: " + amount); } } Objects.requireNonNull(amount, "amount"); int years = 0; int months = 0; int days = 0; Duration duration = Duration.ZERO; for (TemporalUnit unit : amount.getUnits()) { long value = amount.get(unit); if (value != 0) { // ignore unless non-zero if (unit.isDurationEstimated()) { if (unit == ChronoUnit.DAYS) { days = Math.addExact(days, Math.toIntExact(value)); } else if (unit == ChronoUnit.WEEKS) { days = Math.addExact(days, Math.toIntExact(Math.multiplyExact(value, 7))); } else if (unit == ChronoUnit.MONTHS) { months = Math.addExact(months, Math.toIntExact(value)); } else if (unit == IsoFields.QUARTER_YEARS) { months = Math.addExact(months, Math.toIntExact(Math.multiplyExact(value, 3))); } else if (unit == ChronoUnit.YEARS) { years = Math.addExact(years, Math.toIntExact(value)); } else if (unit == ChronoUnit.DECADES) { years = Math.addExact(years, Math.toIntExact(Math.multiplyExact(value, 10))); } else if (unit == ChronoUnit.CENTURIES) { years = Math.addExact(years, Math.toIntExact(Math.multiplyExact(value, 100))); } else if (unit == ChronoUnit.MILLENNIA) { years = Math.addExact(years, Math.toIntExact(Math.multiplyExact(value, 1000))); } else { throw new DateTimeException("Unknown unit: " + unit); } } else { // total of exact durations duration = duration.plus(amount.get(unit), unit); } } } return PeriodDuration.of(Period.of(years, months, days), duration); } }
public class class_name { public static PeriodDuration from(TemporalAmount amount) { if (amount instanceof PeriodDuration) { return (PeriodDuration) amount; // depends on control dependency: [if], data = [none] } if (amount instanceof Period) { return PeriodDuration.of((Period) amount); // depends on control dependency: [if], data = [none] } if (amount instanceof Duration) { return PeriodDuration.of((Duration) amount); // depends on control dependency: [if], data = [none] } if (amount instanceof ChronoPeriod) { if (IsoChronology.INSTANCE.equals(((ChronoPeriod) amount).getChronology()) == false) { throw new DateTimeException("Period requires ISO chronology: " + amount); } } Objects.requireNonNull(amount, "amount"); int years = 0; int months = 0; int days = 0; Duration duration = Duration.ZERO; for (TemporalUnit unit : amount.getUnits()) { long value = amount.get(unit); if (value != 0) { // ignore unless non-zero if (unit.isDurationEstimated()) { if (unit == ChronoUnit.DAYS) { days = Math.addExact(days, Math.toIntExact(value)); // depends on control dependency: [if], data = [none] } else if (unit == ChronoUnit.WEEKS) { days = Math.addExact(days, Math.toIntExact(Math.multiplyExact(value, 7))); // depends on control dependency: [if], data = [none] } else if (unit == ChronoUnit.MONTHS) { months = Math.addExact(months, Math.toIntExact(value)); // depends on control dependency: [if], data = [none] } else if (unit == IsoFields.QUARTER_YEARS) { months = Math.addExact(months, Math.toIntExact(Math.multiplyExact(value, 3))); // depends on control dependency: [if], data = [none] } else if (unit == ChronoUnit.YEARS) { years = Math.addExact(years, Math.toIntExact(value)); // depends on control dependency: [if], data = [none] } else if (unit == ChronoUnit.DECADES) { years = Math.addExact(years, Math.toIntExact(Math.multiplyExact(value, 10))); // depends on control dependency: [if], data = [none] } else if (unit == ChronoUnit.CENTURIES) { years = Math.addExact(years, 
Math.toIntExact(Math.multiplyExact(value, 100))); // depends on control dependency: [if], data = [none] } else if (unit == ChronoUnit.MILLENNIA) { years = Math.addExact(years, Math.toIntExact(Math.multiplyExact(value, 1000))); // depends on control dependency: [if], data = [none] } else { throw new DateTimeException("Unknown unit: " + unit); } } else { // total of exact durations duration = duration.plus(amount.get(unit), unit); // depends on control dependency: [if], data = [none] } } } return PeriodDuration.of(Period.of(years, months, days), duration); } }
public class class_name { @SuppressWarnings("unchecked") @Override public <T> List<T> findAll(String collectionName) { CollectionMetaData cmd = cmdMap.get(collectionName); Map<Object, T> collection = (Map<Object, T>) collectionsRef.get().get(collectionName); if((null == cmd) || (null == collection)) { throw new InvalidJsonDbApiUsageException("Collection by name '" + collectionName + "' not found. Create collection first."); } cmd.getCollectionLock().readLock().lock(); try { List<T> newCollection = new ArrayList<T>(); for (T document : collection.values()) { T obj = (T)Util.deepCopy(document); if(encrypted && cmd.hasSecret() && null!=obj){ CryptoUtil.decryptFields(obj, cmd, dbConfig.getCipher()); newCollection.add(obj); } else{ newCollection.add((T) obj); } } return newCollection; } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) { logger.error("Error when decrypting value for a @Secret annotated field for entity: " + collectionName, e); throw new JsonDBException("Error when decrypting value for a @Secret annotated field for entity: " + collectionName, e); } finally { cmd.getCollectionLock().readLock().unlock(); } } }
public class class_name { @SuppressWarnings("unchecked") @Override public <T> List<T> findAll(String collectionName) { CollectionMetaData cmd = cmdMap.get(collectionName); Map<Object, T> collection = (Map<Object, T>) collectionsRef.get().get(collectionName); if((null == cmd) || (null == collection)) { throw new InvalidJsonDbApiUsageException("Collection by name '" + collectionName + "' not found. Create collection first."); } cmd.getCollectionLock().readLock().lock(); try { List<T> newCollection = new ArrayList<T>(); for (T document : collection.values()) { T obj = (T)Util.deepCopy(document); if(encrypted && cmd.hasSecret() && null!=obj){ CryptoUtil.decryptFields(obj, cmd, dbConfig.getCipher()); // depends on control dependency: [if], data = [none] newCollection.add(obj); // depends on control dependency: [if], data = [obj)] } else{ newCollection.add((T) obj); // depends on control dependency: [if], data = [obj)] } } return newCollection; // depends on control dependency: [try], data = [none] } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) { logger.error("Error when decrypting value for a @Secret annotated field for entity: " + collectionName, e); throw new JsonDBException("Error when decrypting value for a @Secret annotated field for entity: " + collectionName, e); } finally { // depends on control dependency: [catch], data = [none] cmd.getCollectionLock().readLock().unlock(); } } }
public class class_name {
    /**
     * Adapts a checked {@code ToLongBiFunction} to the standard functional
     * interface, routing any thrown {@code Throwable} to the supplied handler.
     * The handler is expected to rethrow; if it returns normally an
     * {@code IllegalStateException} is raised so failures are never swallowed.
     *
     * @param function the checked function to adapt
     * @param handler  consumer invoked with any throwable; must rethrow
     * @return an unchecked {@code ToLongBiFunction} delegating to {@code function}
     */
    public static <T, U> ToLongBiFunction<T, U> toLongBiFunction(CheckedToLongBiFunction<T, U> function, Consumer<Throwable> handler) {
        return (t, u) -> {
            try {
                return function.applyAsLong(t, u);
            } catch (Throwable failure) {
                // give the handler the chance to translate and rethrow first
                handler.accept(failure);
                // reached only when the handler failed to throw
                throw new IllegalStateException("Exception handler must throw a RuntimeException", failure);
            }
        };
    }
}
public class class_name { public static <T, U> ToLongBiFunction<T, U> toLongBiFunction(CheckedToLongBiFunction<T, U> function, Consumer<Throwable> handler) { return (t, u) -> { try { return function.applyAsLong(t, u); // depends on control dependency: [try], data = [none] } catch (Throwable e) { handler.accept(e); throw new IllegalStateException("Exception handler must throw a RuntimeException", e); } // depends on control dependency: [catch], data = [none] }; } }
public class class_name {
    /**
     * Aggregates the status of all monitored services into a single status for
     * this service. Critical services propagate their worst (lowest-valued)
     * status; non-critical services that are down only degrade the result.
     * Reasons from every not-up service are collected as "name:reason".
     *
     * @return the aggregated service status with collected reasons
     */
    public ServiceStatus getServiceStatus() {
        List<String> reasons = new ArrayList<String>();
        // worst status seen among critical dependencies (lower value = worse)
        Status criticalStatus = Status.UP;
        for (MonitoredService monitored : criticals) {
            ServiceStatus childStatus = monitored.getServiceStatus();
            Status status = childStatus.getStatus();
            if (statusIsNotUp(status)) {
                for (String reason : childStatus.getReasons()) {
                    reasons.add(childStatus.getName() + ":" + reason);
                }
            }
            if (status.getValue() < criticalStatus.getValue()) {
                criticalStatus = status;
            }
        }
        // non-critical dependencies can at most degrade the result
        Status result = Status.UP;
        for (MonitoredService monitored : noncriticals) {
            ServiceStatus childStatus = monitored.getServiceStatus();
            Status status = childStatus.getStatus();
            if (statusIsNotUp(status)) {
                for (String reason : childStatus.getReasons()) {
                    reasons.add(childStatus.getName() + ":" + reason);
                }
                result = Status.DEGRADED;
            }
        }
        // a worse critical status overrides the non-critical degradation
        if (criticalStatus.getValue() < result.getValue()) {
            result = criticalStatus;
        }
        return new ServiceStatus(name, result, reasons);
    }
}
public class class_name { public ServiceStatus getServiceStatus() { List<String> reasons = new ArrayList<String>(); Status criticalStatus = Status.UP; for (MonitoredService m : criticals) { ServiceStatus serviceStatus = m.getServiceStatus(); Status status = serviceStatus.getStatus(); if (statusIsNotUp(status)) { for (String reason : serviceStatus.getReasons()) { reasons.add(serviceStatus.getName() + ":" + reason); // depends on control dependency: [for], data = [reason] } } if (status.getValue() < criticalStatus.getValue()) { criticalStatus = status; // depends on control dependency: [if], data = [none] } } Status result = Status.UP; for (MonitoredService m : noncriticals) { ServiceStatus serviceStatus = m.getServiceStatus(); Status status = serviceStatus.getStatus(); if (statusIsNotUp(status)) { for (String reason : serviceStatus.getReasons()) { reasons.add(serviceStatus.getName() + ":" + reason); // depends on control dependency: [for], data = [reason] } result = Status.DEGRADED; // depends on control dependency: [if], data = [none] } } if (criticalStatus.getValue() < result.getValue()) { result = criticalStatus; // depends on control dependency: [if], data = [none] } return new ServiceStatus(name, result, reasons); } }
public class class_name {
    /**
     * Adds LDP type Link headers to the response based on which public-resource
     * interfaces the given class implements. Container kinds are mutually
     * exclusive; the plain RDF-source type may accompany any of them.
     *
     * @param builder response builder to receive the Link headers
     * @param clazz   resource class whose interfaces select the LDP types
     */
    private static void addProtocolSpecificHeaders(ResponseBuilder builder, Class<? extends PublicResource> clazz) {
        List<Term> resourceTypes = new ArrayList<Term>();
        if (PublicRDFSource.class.isAssignableFrom(clazz)) {
            resourceTypes.add(LDP.RESOURCE);
        }
        if (PublicBasicContainer.class.isAssignableFrom(clazz)) {
            resourceTypes.add(LDP.BASIC_CONTAINER);
        } else if (PublicDirectContainer.class.isAssignableFrom(clazz)) {
            resourceTypes.add(LDP.DIRECT_CONTAINER);
        } else if (PublicIndirectContainer.class.isAssignableFrom(clazz)) {
            resourceTypes.add(LDP.INDIRECT_CONTAINER);
        }
        for (Term resourceType : resourceTypes) {
            builder.header(MoreHttp.LINK_HEADER, MoreHttp.createLink(resourceType, "type"));
        }
    }
}
public class class_name { private static void addProtocolSpecificHeaders(ResponseBuilder builder, Class<? extends PublicResource> clazz) { List<Term> types=new ArrayList<Term>(); if(PublicRDFSource.class.isAssignableFrom(clazz)) { types.add(LDP.RESOURCE); // depends on control dependency: [if], data = [none] } if(PublicBasicContainer.class.isAssignableFrom(clazz)) { types.add(LDP.BASIC_CONTAINER); // depends on control dependency: [if], data = [none] } else if(PublicDirectContainer.class.isAssignableFrom(clazz)) { types.add(LDP.DIRECT_CONTAINER); // depends on control dependency: [if], data = [none] } else if(PublicIndirectContainer.class.isAssignableFrom(clazz)) { types.add(LDP.INDIRECT_CONTAINER); // depends on control dependency: [if], data = [none] } for(Term type:types) { builder.header(MoreHttp.LINK_HEADER,MoreHttp.createLink(type, "type")); // depends on control dependency: [for], data = [type] } } }
public class class_name {
    /**
     * Inserts a value into this bounded cache, evicting the entry at the tail
     * of the recency list when the cache is full. With {@code maxCacheSize == 0}
     * caching is disabled and nothing is stored.
     *
     * @param key   cache key
     * @param value value to store
     * @return the previous value for the key (per the superclass map contract),
     *         or null when caching is disabled
     */
    public synchronized Object put(final String key, final Object value) {
        try {
            if (maxCacheSize == 0) {
                // caching disabled: skip storage entirely
                return null;
            }
            // if the key isn't in the cache and the cache is full...
            if (!super.containsKey(key) && !list.isEmpty() && list.size() + 1 > maxCacheSize) {
                // evict the tail of the recency list (presumably the least
                // recently used key — maintained by updateKey; confirm)
                final Object deadKey = list.removeLast();
                super.remove(deadKey);
            }
            updateKey(key);
        } catch (Exception e) {
            // eviction bookkeeping is best-effort: log, then still store below
            log.log(Level.SEVERE, "put", e);
        }
        return super.put(key, value);
    }
}
public class class_name { public synchronized Object put(final String key, final Object value) { try { if (maxCacheSize == 0) { return null; // depends on control dependency: [if], data = [none] } // if the key isn't in the cache and the cache is full... if (!super.containsKey(key) && !list.isEmpty() && list.size() + 1 > maxCacheSize) { final Object deadKey = list.removeLast(); super.remove(deadKey); // depends on control dependency: [if], data = [none] } updateKey(key); // depends on control dependency: [try], data = [none] } catch (Exception e) { log.log(Level.SEVERE, "put", e); } // depends on control dependency: [catch], data = [none] return super.put(key, value); } }
public class class_name {
    /**
     * Returns the domain memberships, lazily initializing the backing list on
     * first access so callers never see null.
     *
     * @return the (possibly empty) list of domain memberships
     */
    public java.util.List<DomainMembership> getDomainMemberships() {
        if (domainMemberships != null) {
            return domainMemberships;
        }
        domainMemberships = new com.amazonaws.internal.SdkInternalList<DomainMembership>();
        return domainMemberships;
    }
}
public class class_name { public java.util.List<DomainMembership> getDomainMemberships() { if (domainMemberships == null) { domainMemberships = new com.amazonaws.internal.SdkInternalList<DomainMembership>(); // depends on control dependency: [if], data = [none] } return domainMemberships; } }
public class class_name {
    /**
     * Closes every proxy queue in this group exactly once. The closed flag and
     * the snapshot of queues are taken under this object's monitor; the actual
     * close calls happen outside the monitor to avoid deadlock. All queues are
     * attempted even if some fail; the last failure is rethrown wrapped in an
     * SIConnectionLostException. Subsequent calls are no-ops.
     */
    public void closeNotification()
        throws SIResourceException, SIConnectionLostException, SIErrorException, SIConnectionDroppedException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(tc, "closeNotification");
        // Determine if we need to actually perform the close (we might already be closed).
        // This needs to be synchronized to avoid a race condition into close.
        LinkedList<ProxyQueue> closeList = null;
        synchronized(this) {
            if (!closed) {
                // Mark ourself as closed.
                closed = true;
                // Make a copy of the map's values to avoid a concurrent modification
                // exception later. Then clear the map.
                closeList = new LinkedList<ProxyQueue>();
                closeList.addAll(idToProxyQueueMap.values());
            }
        }
        // If we actually carried out a close operation then closeList will
        // be non-null.
        if (closeList != null) {
            // Close each proxy queue in turn. This is not synchronized with this
            // objects monitor to avoid the deadlock described above.
            SIException caughtException = null;
            Iterator iterator = closeList.iterator();
            while(iterator.hasNext()) {
                ProxyQueue queue = (ProxyQueue)iterator.next();
                try {
                    // consumer sessions get the (true) variant; other session types
                    // use the plain close
                    if (queue.getDestinationSessionProxy() instanceof ConsumerSessionProxy)
                        ((ConsumerSessionProxy)queue.getDestinationSessionProxy()).close(true);
                    else
                        queue.getDestinationSessionProxy().close();
                } catch(SIException e) {
                    // No FFDC code needed.
                    // Remember the failure but keep closing the remaining queues.
                    caughtException = e;
                }
            }
            // If we caught an exception when closing one of the proxy queues, report it
            // by throwing an exception of our own, linking one of the exceptions we caught.
            if (caughtException != null) {
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(tc, "exception caught when closing queues");
                SIConnectionLostException closeException = new SIConnectionLostException(
                    nls.getFormattedMessage("ERROR_CLOSING_PROXYQUEUE_GROUP_SICO1025", new Object[] {caughtException}, null) // d192293
                );
                closeException.initCause(caughtException);
                throw closeException;
            }
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "closeNotification");
    }
}
public class class_name { public void closeNotification() throws SIResourceException, SIConnectionLostException, SIErrorException, SIConnectionDroppedException { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(tc, "closeNotification"); // Determine if we need to actually perform the close (we might already be closed). // This needs to be synchronized to avoid a race condition into close. LinkedList<ProxyQueue> closeList = null; synchronized(this) { if (!closed) { // Mark ourself as closed. closed = true; // depends on control dependency: [if], data = [none] // Make a copy of the map's values to avoid a concurrent modification // exception later. Then clear the map. closeList = new LinkedList<ProxyQueue>(); // depends on control dependency: [if], data = [none] closeList.addAll(idToProxyQueueMap.values()); // depends on control dependency: [if], data = [none] } } // If we actually carried out a close operation then closeList will // be non-null. if (closeList != null) { // Close each proxy queue in turn. This is not synchronized with this // objects monitor to avoid the deadlock described above. SIException caughtException = null; Iterator iterator = closeList.iterator(); while(iterator.hasNext()) { ProxyQueue queue = (ProxyQueue)iterator.next(); try { if (queue.getDestinationSessionProxy() instanceof ConsumerSessionProxy) ((ConsumerSessionProxy)queue.getDestinationSessionProxy()).close(true); else queue.getDestinationSessionProxy().close(); } catch(SIException e) { // No FFDC code needed. caughtException = e; } } // If we caught an exception when closing one of the proxy queues, report it // by throwing an exception of our own, linking one of the exceptions we caught. 
if (caughtException != null) { if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(tc, "exception caught when closing queues"); SIConnectionLostException closeException = new SIConnectionLostException( nls.getFormattedMessage("ERROR_CLOSING_PROXYQUEUE_GROUP_SICO1025", new Object[] {caughtException}, null) // d192293 ); closeException.initCause(caughtException); throw closeException; } } if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "closeNotification"); } }
public class class_name { private Response getResponseFromProviderUsing(Authentication token) { Client client = getClient(); WebTarget webTarget = client .target(oAuth2ServiceProperties.getUserInfoUri()) .queryParam(oAuth2ServiceProperties.getAccessTokenName(), (String)token.getCredentials()); if (oAuth2ServiceProperties.getAdditionalInfoParams() != null) { for (Map.Entry<String, String> entry : oAuth2ServiceProperties.getAdditionalInfoParams().entrySet()) { webTarget = webTarget.queryParam(entry.getKey(), entry.getValue()); } } return webTarget.request(MediaType.APPLICATION_JSON_TYPE) .get(); } }
public class class_name { private Response getResponseFromProviderUsing(Authentication token) { Client client = getClient(); WebTarget webTarget = client .target(oAuth2ServiceProperties.getUserInfoUri()) .queryParam(oAuth2ServiceProperties.getAccessTokenName(), (String)token.getCredentials()); if (oAuth2ServiceProperties.getAdditionalInfoParams() != null) { for (Map.Entry<String, String> entry : oAuth2ServiceProperties.getAdditionalInfoParams().entrySet()) { webTarget = webTarget.queryParam(entry.getKey(), entry.getValue()); // depends on control dependency: [for], data = [entry] } } return webTarget.request(MediaType.APPLICATION_JSON_TYPE) .get(); } }
public class class_name { public INDArray asRowVector(BufferedImage image) { if (centerCropIfNeeded) { image = centerCropIfNeeded(image); } image = scalingIfNeed(image, true); if (channels == 3) { return toINDArrayBGR(image).ravel(); } int[][] ret = toIntArrayArray(image); return NDArrayUtil.toNDArray(ArrayUtil.flatten(ret)); } }
public class class_name { public INDArray asRowVector(BufferedImage image) { if (centerCropIfNeeded) { image = centerCropIfNeeded(image); // depends on control dependency: [if], data = [none] } image = scalingIfNeed(image, true); if (channels == 3) { return toINDArrayBGR(image).ravel(); // depends on control dependency: [if], data = [none] } int[][] ret = toIntArrayArray(image); return NDArrayUtil.toNDArray(ArrayUtil.flatten(ret)); } }
public class class_name { public Result<Void> createArtifact(AuthzTrans trans, List<ArtiDAO.Data> list) { Validator v = new Validator().artisRequired(list, 1); if(v.err()) { return Result.err(Result.ERR_BadData,v.errs()); } for(ArtiDAO.Data add : list) { try { // Policy 1: MechID must exist in Org Identity muser = trans.org().getIdentity(trans, add.mechid); if(muser == null) { return Result.err(Result.ERR_Denied,"%s is not valid for %s", add.mechid,trans.org().getName()); } // Policy 2: MechID must have valid Organization Owner Identity ouser = muser.owner(); if(ouser == null) { return Result.err(Result.ERR_Denied,"%s is not a valid Sponsor for %s at %s", trans.user(),add.mechid,trans.org().getName()); } // Policy 3: Calling ID must be MechID Owner if(!trans.user().equals(ouser.fullID())) { return Result.err(Result.ERR_Denied,"%s is not the Sponsor for %s at %s", trans.user(),add.mechid,trans.org().getName()); } // Policy 4: Renewal Days are between 10 and 60 (constants, may be parameterized) if(add.renewDays<MIN_RENEWAL) { add.renewDays = STD_RENEWAL; } else if(add.renewDays>MAX_RENEWAL) { add.renewDays = MAX_RENEWAL; } // Policy 5: If Notify is blank, set to Owner's Email if(add.notify==null || add.notify.length()==0) { add.notify = "mailto:"+ouser.email(); } // Set Sponsor from Golden Source add.sponsor = ouser.fullID(); } catch (OrganizationException e) { return Result.err(e); } // Add to DB Result<ArtiDAO.Data> rv = artiDAO.create(trans, add); // TODO come up with Partial Reporting Scheme, or allow only one at a time. if(rv.notOK()) { return Result.err(rv); } } return Result.ok(); } }
public class class_name { public Result<Void> createArtifact(AuthzTrans trans, List<ArtiDAO.Data> list) { Validator v = new Validator().artisRequired(list, 1); if(v.err()) { return Result.err(Result.ERR_BadData,v.errs()); // depends on control dependency: [if], data = [none] } for(ArtiDAO.Data add : list) { try { // Policy 1: MechID must exist in Org Identity muser = trans.org().getIdentity(trans, add.mechid); if(muser == null) { return Result.err(Result.ERR_Denied,"%s is not valid for %s", add.mechid,trans.org().getName()); // depends on control dependency: [if], data = [none] } // Policy 2: MechID must have valid Organization Owner Identity ouser = muser.owner(); if(ouser == null) { return Result.err(Result.ERR_Denied,"%s is not a valid Sponsor for %s at %s", trans.user(),add.mechid,trans.org().getName()); // depends on control dependency: [if], data = [none] } // Policy 3: Calling ID must be MechID Owner if(!trans.user().equals(ouser.fullID())) { return Result.err(Result.ERR_Denied,"%s is not the Sponsor for %s at %s", trans.user(),add.mechid,trans.org().getName()); // depends on control dependency: [if], data = [none] } // Policy 4: Renewal Days are between 10 and 60 (constants, may be parameterized) if(add.renewDays<MIN_RENEWAL) { add.renewDays = STD_RENEWAL; // depends on control dependency: [if], data = [none] } else if(add.renewDays>MAX_RENEWAL) { add.renewDays = MAX_RENEWAL; // depends on control dependency: [if], data = [none] } // Policy 5: If Notify is blank, set to Owner's Email if(add.notify==null || add.notify.length()==0) { add.notify = "mailto:"+ouser.email(); // depends on control dependency: [if], data = [none] } // Set Sponsor from Golden Source add.sponsor = ouser.fullID(); // depends on control dependency: [try], data = [none] } catch (OrganizationException e) { return Result.err(e); } // depends on control dependency: [catch], data = [none] // Add to DB Result<ArtiDAO.Data> rv = artiDAO.create(trans, add); // TODO come up with Partial 
Reporting Scheme, or allow only one at a time. if(rv.notOK()) { return Result.err(rv); // depends on control dependency: [if], data = [none] } } return Result.ok(); } }
public class class_name { private void create(Connection conn) throws SQLException { logger.debug("enter - create()"); try { PreparedStatement stmt = null; ResultSet rs = null; try { stmt = conn.prepareStatement(CREATE_SEQ); stmt.setString(INS_NAME, getName()); stmt.setLong(INS_NEXT_KEY, nextKey); stmt.setLong(INS_INTERVAL, interval); stmt.setLong(INS_UPDATE, System.currentTimeMillis()); if( stmt.executeUpdate() != 1 ) { logger.warn("Unable to create sequence " + getName() + "."); sequence = -1L; } } finally { if( rs != null ) { try { rs.close(); } catch( SQLException ignore ) { /* ignore */} } if( stmt != null ) { try { stmt.close(); } catch( SQLException ignore ) { /* ignore */ } } } } finally { logger.debug("exit - create()"); } } }
public class class_name { private void create(Connection conn) throws SQLException { logger.debug("enter - create()"); try { PreparedStatement stmt = null; ResultSet rs = null; try { stmt = conn.prepareStatement(CREATE_SEQ); // depends on control dependency: [try], data = [none] stmt.setString(INS_NAME, getName()); // depends on control dependency: [try], data = [none] stmt.setLong(INS_NEXT_KEY, nextKey); // depends on control dependency: [try], data = [none] stmt.setLong(INS_INTERVAL, interval); // depends on control dependency: [try], data = [none] stmt.setLong(INS_UPDATE, System.currentTimeMillis()); // depends on control dependency: [try], data = [none] if( stmt.executeUpdate() != 1 ) { logger.warn("Unable to create sequence " + getName() + "."); // depends on control dependency: [if], data = [none] sequence = -1L; // depends on control dependency: [if], data = [none] } } finally { if( rs != null ) { try { rs.close(); } // depends on control dependency: [try], data = [none] catch( SQLException ignore ) { /* ignore */} // depends on control dependency: [catch], data = [none] } if( stmt != null ) { try { stmt.close(); } // depends on control dependency: [try], data = [none] catch( SQLException ignore ) { /* ignore */ } // depends on control dependency: [catch], data = [none] } } } finally { logger.debug("exit - create()"); } } }
public class class_name { @SuppressWarnings("unchecked") private void prepareKarafRbacInvocations(String fullName, Map<String, Object> mBeanInfo, Map<String, List<String>> queryForMBeans, Map<String, List<String>> queryForMBeanOperations) { queryForMBeans.put(fullName, new ArrayList<>()); List<String> operations = operations((Map<String, Object>) mBeanInfo.get("op")); // prepare opByString for MBeainInfo Map<String, Map<String, Object>> opByString = new HashMap<>(); mBeanInfo.put("opByString", opByString); if (operations.isEmpty()) { return; } queryForMBeanOperations.put(fullName, operations); for (String op : operations) { // ! no need to copy relevant map for "op['opname']" - hawtio uses only 'canInvoke' property opByString.put(op, new HashMap<>()); } } }
public class class_name { @SuppressWarnings("unchecked") private void prepareKarafRbacInvocations(String fullName, Map<String, Object> mBeanInfo, Map<String, List<String>> queryForMBeans, Map<String, List<String>> queryForMBeanOperations) { queryForMBeans.put(fullName, new ArrayList<>()); List<String> operations = operations((Map<String, Object>) mBeanInfo.get("op")); // prepare opByString for MBeainInfo Map<String, Map<String, Object>> opByString = new HashMap<>(); mBeanInfo.put("opByString", opByString); if (operations.isEmpty()) { return; // depends on control dependency: [if], data = [none] } queryForMBeanOperations.put(fullName, operations); for (String op : operations) { // ! no need to copy relevant map for "op['opname']" - hawtio uses only 'canInvoke' property opByString.put(op, new HashMap<>()); // depends on control dependency: [for], data = [op] } } }
public class class_name { private void setType(UIComponent component, String type) { Method method; try { method = component.getClass().getMethod("getType"); if (null != method) { Object invoke = method.invoke(component); if (invoke != null) { // is it an PrimeFaces component? if (component.getClass().getName().equals("org.primefaces.component.inputtext.InputText")) { if (!"text".equals(invoke)) { // the programmer has explicitly assigned a type return; } } else return; } } method = component.getClass().getMethod("setType", String.class); if (null != method) { method.invoke(component, type); return; } } catch (ReflectiveOperationException e) { // catch block required by compiler, can't happen in reality } if (null == component.getAttributes().get("type") && null == component.getPassThroughAttributes().get("type")) { component.getPassThroughAttributes().put("type", type); } } }
public class class_name { private void setType(UIComponent component, String type) { Method method; try { method = component.getClass().getMethod("getType"); if (null != method) { Object invoke = method.invoke(component); if (invoke != null) { // is it an PrimeFaces component? if (component.getClass().getName().equals("org.primefaces.component.inputtext.InputText")) { if (!"text".equals(invoke)) { // the programmer has explicitly assigned a type return; // depends on control dependency: [if], data = [none] } } else return; } } method = component.getClass().getMethod("setType", String.class); if (null != method) { method.invoke(component, type); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } } catch (ReflectiveOperationException e) { // catch block required by compiler, can't happen in reality } if (null == component.getAttributes().get("type") && null == component.getPassThroughAttributes().get("type")) { component.getPassThroughAttributes().put("type", type); } } }
public class class_name { public boolean existsCore(String coreName) { try { CoreStatus status = CoreAdminRequest.getCoreStatus(coreName, solrClient); status.getInstanceDirectory(); } catch (Exception e) { return false; } return true; } }
public class class_name { public boolean existsCore(String coreName) { try { CoreStatus status = CoreAdminRequest.getCoreStatus(coreName, solrClient); status.getInstanceDirectory(); // depends on control dependency: [try], data = [none] } catch (Exception e) { return false; } // depends on control dependency: [catch], data = [none] return true; } }
public class class_name { public static ManagedChannel createNettyChannel(String host, BigtableOptions options, ClientInterceptor ... interceptors) throws SSLException { LOG.info("Creating new channel for %s", host); if (LOG.getLog().isDebugEnabled()) { LOG.debug(Throwables.getStackTraceAsString(new Throwable())); } // Ideally, this should be ManagedChannelBuilder.forAddress(...) rather than an explicit // call to NettyChannelBuilder. Unfortunately, that doesn't work for shaded artifacts. ManagedChannelBuilder<?> builder = ManagedChannelBuilder .forAddress(host, options.getPort()); if (options.usePlaintextNegotiation()) { // NOTE: usePlaintext(true) is deprecated in newer versions of grpc (1.11.0). // usePlantxext() is the preferred approach, but won't work with older versions. // This means that plaintext negotiation can't be used with Beam. builder.usePlaintext(); } return builder .idleTimeout(Long.MAX_VALUE, TimeUnit.SECONDS) .maxInboundMessageSize(MAX_MESSAGE_SIZE) .userAgent(BigtableVersionInfo.CORE_USER_AGENT + "," + options.getUserAgent()) .intercept(interceptors) .build(); } }
public class class_name { public static ManagedChannel createNettyChannel(String host, BigtableOptions options, ClientInterceptor ... interceptors) throws SSLException { LOG.info("Creating new channel for %s", host); if (LOG.getLog().isDebugEnabled()) { LOG.debug(Throwables.getStackTraceAsString(new Throwable())); // depends on control dependency: [if], data = [none] } // Ideally, this should be ManagedChannelBuilder.forAddress(...) rather than an explicit // call to NettyChannelBuilder. Unfortunately, that doesn't work for shaded artifacts. ManagedChannelBuilder<?> builder = ManagedChannelBuilder .forAddress(host, options.getPort()); if (options.usePlaintextNegotiation()) { // NOTE: usePlaintext(true) is deprecated in newer versions of grpc (1.11.0). // usePlantxext() is the preferred approach, but won't work with older versions. // This means that plaintext negotiation can't be used with Beam. builder.usePlaintext(); // depends on control dependency: [if], data = [none] } return builder .idleTimeout(Long.MAX_VALUE, TimeUnit.SECONDS) .maxInboundMessageSize(MAX_MESSAGE_SIZE) .userAgent(BigtableVersionInfo.CORE_USER_AGENT + "," + options.getUserAgent()) .intercept(interceptors) .build(); } }
public class class_name { private EntityDecl handleExternalEntityDecl(WstxInputSource inputSource, boolean isParam, String id, char c, Location evtLoc) throws XMLStreamException { boolean isPublic = checkPublicSystemKeyword(c); String pubId = null; // Ok, now we can parse the reference; first public id if needed: if (isPublic) { c = skipObligatoryDtdWs(); if (c != '"' && c != '\'') { throwDTDUnexpectedChar(c, "; expected a quote to start the public identifier"); } pubId = parsePublicId(c, getErrorMsg()); /* 30-Sep-2005, TSa: SGML has public ids that miss the system * id. Although not legal with XML DTDs, let's give bit more * meaningful error in those cases... */ c = getNextExpanded(); if (c <= CHAR_SPACE) { // good c = skipDtdWs(true); } else { // not good... // Let's just push it back and generate normal error then: if (c != '>') { // this is handled below though --mInputPtr; c = skipObligatoryDtdWs(); } } /* But here let's deal with one case that we are familiar with: * SGML does NOT require system id after public one... */ if (c == '>') { _reportWFCViolation("Unexpected end of ENTITY declaration (expected a system id after public id): trying to use an SGML DTD instead of XML one?"); } } else { // Just need some white space here c = skipObligatoryDtdWs(); } if (c != '"' && c != '\'') { throwDTDUnexpectedChar(c, "; expected a quote to start the system identifier"); } String sysId = parseSystemId(c, mNormalizeLFs, getErrorMsg()); // Ok; how about notation? String notationId = null; /* Ok; PEs are simpler, as they always are parsed (see production * #72 in xml 1.0 specs) */ if (isParam) { c = skipDtdWs(true); } else { /* GEs can be unparsed, too, so it's bit more complicated; * if we get '>', don't need space; otherwise need separating * space (or PE boundary). Thus, need bit more code. 
*/ int i = peekNext(); if (i == '>') { // good c = '>'; ++mInputPtr; } else if (i < 0) { // local EOF, ok c = skipDtdWs(true); } else if (i == '%') { c = getNextExpanded(); } else { ++mInputPtr; c = (char) i; if (!isSpaceChar(c)) { throwDTDUnexpectedChar(c, "; expected a separating space or closing '>'"); } c = skipDtdWs(true); } if (c != '>') { if (!isNameStartChar(c)) { throwDTDUnexpectedChar(c, "; expected either NDATA keyword, or closing '>'"); } String keyw = checkDTDKeyword("DATA"); if (keyw != null) { _reportWFCViolation("Unrecognized keyword '"+keyw+"'; expected NOTATION (or closing '>')"); } c = skipObligatoryDtdWs(); notationId = readNotationEntry(c, null, evtLoc); c = skipDtdWs(true); } } // Ok, better have '>' now: if (c != '>') { throwDTDUnexpectedChar(c, "; expected closing '>'"); } URL ctxt; try { ctxt = inputSource.getSource(); } catch (IOException e) { throw new WstxIOException(e); } if (notationId == null) { // parsed entity: return new ParsedExtEntity(evtLoc, id, ctxt, pubId, sysId); } return new UnparsedExtEntity(evtLoc, id, ctxt, pubId, sysId, notationId); } }
public class class_name { private EntityDecl handleExternalEntityDecl(WstxInputSource inputSource, boolean isParam, String id, char c, Location evtLoc) throws XMLStreamException { boolean isPublic = checkPublicSystemKeyword(c); String pubId = null; // Ok, now we can parse the reference; first public id if needed: if (isPublic) { c = skipObligatoryDtdWs(); if (c != '"' && c != '\'') { throwDTDUnexpectedChar(c, "; expected a quote to start the public identifier"); } pubId = parsePublicId(c, getErrorMsg()); /* 30-Sep-2005, TSa: SGML has public ids that miss the system * id. Although not legal with XML DTDs, let's give bit more * meaningful error in those cases... */ c = getNextExpanded(); if (c <= CHAR_SPACE) { // good c = skipDtdWs(true); // depends on control dependency: [if], data = [none] } else { // not good... // Let's just push it back and generate normal error then: if (c != '>') { // this is handled below though --mInputPtr; // depends on control dependency: [if], data = [none] c = skipObligatoryDtdWs(); // depends on control dependency: [if], data = [none] } } /* But here let's deal with one case that we are familiar with: * SGML does NOT require system id after public one... */ if (c == '>') { _reportWFCViolation("Unexpected end of ENTITY declaration (expected a system id after public id): trying to use an SGML DTD instead of XML one?"); // depends on control dependency: [if], data = [none] } } else { // Just need some white space here c = skipObligatoryDtdWs(); } if (c != '"' && c != '\'') { throwDTDUnexpectedChar(c, "; expected a quote to start the system identifier"); } String sysId = parseSystemId(c, mNormalizeLFs, getErrorMsg()); // Ok; how about notation? 
String notationId = null; /* Ok; PEs are simpler, as they always are parsed (see production * #72 in xml 1.0 specs) */ if (isParam) { c = skipDtdWs(true); } else { /* GEs can be unparsed, too, so it's bit more complicated; * if we get '>', don't need space; otherwise need separating * space (or PE boundary). Thus, need bit more code. */ int i = peekNext(); if (i == '>') { // good c = '>'; // depends on control dependency: [if], data = [none] ++mInputPtr; // depends on control dependency: [if], data = [none] } else if (i < 0) { // local EOF, ok c = skipDtdWs(true); // depends on control dependency: [if], data = [none] } else if (i == '%') { c = getNextExpanded(); // depends on control dependency: [if], data = [none] } else { ++mInputPtr; // depends on control dependency: [if], data = [none] c = (char) i; // depends on control dependency: [if], data = [none] if (!isSpaceChar(c)) { throwDTDUnexpectedChar(c, "; expected a separating space or closing '>'"); // depends on control dependency: [if], data = [none] } c = skipDtdWs(true); // depends on control dependency: [if], data = [none] } if (c != '>') { if (!isNameStartChar(c)) { throwDTDUnexpectedChar(c, "; expected either NDATA keyword, or closing '>'"); } String keyw = checkDTDKeyword("DATA"); if (keyw != null) { _reportWFCViolation("Unrecognized keyword '"+keyw+"'; expected NOTATION (or closing '>')"); } c = skipObligatoryDtdWs(); notationId = readNotationEntry(c, null, evtLoc); c = skipDtdWs(true); } } // Ok, better have '>' now: if (c != '>') { throwDTDUnexpectedChar(c, "; expected closing '>'"); } URL ctxt; try { ctxt = inputSource.getSource(); } catch (IOException e) { throw new WstxIOException(e); // depends on control dependency: [if], data = [none] } if (notationId == null) { // parsed entity: return new ParsedExtEntity(evtLoc, id, ctxt, pubId, sysId); // depends on control dependency: [if], data = [none] } return new UnparsedExtEntity(evtLoc, id, ctxt, pubId, sysId, notationId); // depends on control 
dependency: [if], data = [none] } }
public class class_name { public static double Digamma(double x) { double s = 0; double w = 0; double y = 0; double z = 0; double nz = 0; boolean negative = false; if (x <= 0.0) { negative = true; double q = x; double p = (int) Math.floor(q); if (p == q) { try { throw new ArithmeticException("Function computation resulted in arithmetic overflow."); } catch (Exception e) { e.printStackTrace(); } } nz = q - p; if (nz != 0.5) { if (nz > 0.5) { p = p + 1.0; nz = q - p; } nz = Math.PI / Math.tan(Math.PI * nz); } else { nz = 0.0; } x = 1.0 - x; } if (x <= 10.0 && x == Math.floor(x)) { y = 0.0; int n = (int) Math.floor(x); for (int i = 1; i <= n - 1; i++) { w = i; y = y + 1.0 / w; } y = y - 0.57721566490153286061; } else { s = x; w = 0.0; while (s < 10.0) { w = w + 1.0 / s; s = s + 1.0; } if (s < 1.0E17) { z = 1.0 / (s * s); double polv = 8.33333333333333333333E-2; polv = polv * z - 2.10927960927960927961E-2; polv = polv * z + 7.57575757575757575758E-3; polv = polv * z - 4.16666666666666666667E-3; polv = polv * z + 3.96825396825396825397E-3; polv = polv * z - 8.33333333333333333333E-3; polv = polv * z + 8.33333333333333333333E-2; y = z * polv; } else { y = 0.0; } y = Math.log(s) - 0.5 / s - y - w; } if (negative == true) { y = y - nz; } return y; } }
public class class_name { public static double Digamma(double x) { double s = 0; double w = 0; double y = 0; double z = 0; double nz = 0; boolean negative = false; if (x <= 0.0) { negative = true; // depends on control dependency: [if], data = [none] double q = x; double p = (int) Math.floor(q); if (p == q) { try { throw new ArithmeticException("Function computation resulted in arithmetic overflow."); } catch (Exception e) { e.printStackTrace(); } // depends on control dependency: [catch], data = [none] } nz = q - p; // depends on control dependency: [if], data = [none] if (nz != 0.5) { if (nz > 0.5) { p = p + 1.0; // depends on control dependency: [if], data = [none] nz = q - p; // depends on control dependency: [if], data = [none] } nz = Math.PI / Math.tan(Math.PI * nz); // depends on control dependency: [if], data = [none] } else { nz = 0.0; // depends on control dependency: [if], data = [none] } x = 1.0 - x; // depends on control dependency: [if], data = [none] } if (x <= 10.0 && x == Math.floor(x)) { y = 0.0; // depends on control dependency: [if], data = [none] int n = (int) Math.floor(x); for (int i = 1; i <= n - 1; i++) { w = i; // depends on control dependency: [for], data = [i] y = y + 1.0 / w; // depends on control dependency: [for], data = [none] } y = y - 0.57721566490153286061; // depends on control dependency: [if], data = [none] } else { s = x; // depends on control dependency: [if], data = [none] w = 0.0; // depends on control dependency: [if], data = [none] while (s < 10.0) { w = w + 1.0 / s; // depends on control dependency: [while], data = [none] s = s + 1.0; // depends on control dependency: [while], data = [none] } if (s < 1.0E17) { z = 1.0 / (s * s); // depends on control dependency: [if], data = [(s] double polv = 8.33333333333333333333E-2; polv = polv * z - 2.10927960927960927961E-2; // depends on control dependency: [if], data = [none] polv = polv * z + 7.57575757575757575758E-3; // depends on control dependency: [if], data = [none] polv = 
polv * z - 4.16666666666666666667E-3; // depends on control dependency: [if], data = [none] polv = polv * z + 3.96825396825396825397E-3; // depends on control dependency: [if], data = [none] polv = polv * z - 8.33333333333333333333E-3; // depends on control dependency: [if], data = [none] polv = polv * z + 8.33333333333333333333E-2; // depends on control dependency: [if], data = [none] y = z * polv; // depends on control dependency: [if], data = [none] } else { y = 0.0; // depends on control dependency: [if], data = [none] } y = Math.log(s) - 0.5 / s - y - w; // depends on control dependency: [if], data = [none] } if (negative == true) { y = y - nz; // depends on control dependency: [if], data = [none] } return y; } }
public class class_name { @Override public void setTo( Planar<T> orig) { if (orig.width != width || orig.height != height) reshape(orig.width,orig.height); if( orig.getBandType() != getBandType() ) throw new IllegalArgumentException("The band type must be the same"); int N = orig.getNumBands(); if( N != getNumBands() ) { setNumberOfBands(orig.getNumBands()); } for( int i = 0; i < N; i++ ) { bands[i].setTo(orig.getBand(i)); } } }
public class class_name { @Override public void setTo( Planar<T> orig) { if (orig.width != width || orig.height != height) reshape(orig.width,orig.height); if( orig.getBandType() != getBandType() ) throw new IllegalArgumentException("The band type must be the same"); int N = orig.getNumBands(); if( N != getNumBands() ) { setNumberOfBands(orig.getNumBands()); // depends on control dependency: [if], data = [none] } for( int i = 0; i < N; i++ ) { bands[i].setTo(orig.getBand(i)); // depends on control dependency: [for], data = [i] } } }
public class class_name { public String editDbmsConfiguration() { try { if (isCustomSetup()) { if (hibernateDialect != null && jndiUrl != null) { if (canConnectToDbms()) { getConfigurationActivator().initCustomInstallConfiguration(hibernateDialect, jndiUrl); } else { addActionError("greenpepper.install.dbms.test.failure"); } } } else { getConfigurationActivator().initQuickInstallConfiguration(); } } catch (GreenPepperServerException ex) { addActionError("greenpepper.install.dbms.init.failure"); } return SUCCESS; } }
public class class_name { public String editDbmsConfiguration() { try { if (isCustomSetup()) { if (hibernateDialect != null && jndiUrl != null) { if (canConnectToDbms()) { getConfigurationActivator().initCustomInstallConfiguration(hibernateDialect, jndiUrl); // depends on control dependency: [if], data = [none] } else { addActionError("greenpepper.install.dbms.test.failure"); // depends on control dependency: [if], data = [none] } } } else { getConfigurationActivator().initQuickInstallConfiguration(); // depends on control dependency: [if], data = [none] } } catch (GreenPepperServerException ex) { addActionError("greenpepper.install.dbms.init.failure"); } // depends on control dependency: [catch], data = [none] return SUCCESS; } }
public class class_name { public void marshall(OutputGroup outputGroup, ProtocolMarshaller protocolMarshaller) { if (outputGroup == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(outputGroup.getCustomName(), CUSTOMNAME_BINDING); protocolMarshaller.marshall(outputGroup.getName(), NAME_BINDING); protocolMarshaller.marshall(outputGroup.getOutputGroupSettings(), OUTPUTGROUPSETTINGS_BINDING); protocolMarshaller.marshall(outputGroup.getOutputs(), OUTPUTS_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(OutputGroup outputGroup, ProtocolMarshaller protocolMarshaller) { if (outputGroup == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(outputGroup.getCustomName(), CUSTOMNAME_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(outputGroup.getName(), NAME_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(outputGroup.getOutputGroupSettings(), OUTPUTGROUPSETTINGS_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(outputGroup.getOutputs(), OUTPUTS_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public void setSamplingTargetDocuments(java.util.Collection<SamplingTargetDocument> samplingTargetDocuments) { if (samplingTargetDocuments == null) { this.samplingTargetDocuments = null; return; } this.samplingTargetDocuments = new java.util.ArrayList<SamplingTargetDocument>(samplingTargetDocuments); } }
public class class_name { public void setSamplingTargetDocuments(java.util.Collection<SamplingTargetDocument> samplingTargetDocuments) { if (samplingTargetDocuments == null) { this.samplingTargetDocuments = null; // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } this.samplingTargetDocuments = new java.util.ArrayList<SamplingTargetDocument>(samplingTargetDocuments); } }
public class class_name { public void restoreKey(String key) { if (!key.equalsIgnoreCase("test_mode_opened")) { String originalKey = key.substring(keyTestMode.length()); Object value = get(key); put(originalKey, value); clear(key); } } }
public class class_name { public void restoreKey(String key) { if (!key.equalsIgnoreCase("test_mode_opened")) { String originalKey = key.substring(keyTestMode.length()); Object value = get(key); put(originalKey, value); // depends on control dependency: [if], data = [none] clear(key); // depends on control dependency: [if], data = [none] } } }
public class class_name {

    /**
     * Returns the EClass for "RevisionUpdated", lazily resolving it from the
     * registered Log package on first access.
     *
     * @return the RevisionUpdated EClass
     */
    public EClass getRevisionUpdated() {
        if (revisionUpdatedEClass != null) {
            return revisionUpdatedEClass;
        }
        // Resolve from the global package registry; classifier index 25 matches
        // the generated package metadata.
        EPackage logPackage = EPackage.Registry.INSTANCE.getEPackage(LogPackage.eNS_URI);
        revisionUpdatedEClass = (EClass) logPackage.getEClassifiers().get(25);
        return revisionUpdatedEClass;
    }
}
public class class_name { public EClass getRevisionUpdated() { if (revisionUpdatedEClass == null) { revisionUpdatedEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(LogPackage.eNS_URI) .getEClassifiers().get(25); // depends on control dependency: [if], data = [none] } return revisionUpdatedEClass; } }
public class class_name {

    /**
     * Runs the configured jobs sequentially, recording one result per started
     * job. The chain stops at the first job that completes unsuccessfully; that
     * job's failed result is still recorded.
     *
     * @return results for every job that was started, in execution order
     * @throws RuntimeException wrapping any exception thrown while running a job
     */
    public SequencalJobExecuteResults runAll() {
        SequencalJobExecuteResults results = new SequencalJobExecuteResults();
        for (Job job : jobs) {
            job.setJarByClass(SequencalJobChain.class);
            try {
                boolean isSuccessful = job.waitForCompletion(true);
                results.add(new SequencalJobExecuteResult(isSuccessful, job.getJobName()));
                if (!isSuccessful) {
                    break; // abort the chain on the first failed job
                }
            } catch (InterruptedException e) {
                // Fixed: restore the interrupt flag before wrapping, so callers
                // can still observe the interruption.
                Thread.currentThread().interrupt();
                throw new RuntimeException(e);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
        return results;
    }
}
public class class_name { public SequencalJobExecuteResults runAll() { SequencalJobExecuteResults results = new SequencalJobExecuteResults(); for (Job job : jobs) { job.setJarByClass(SequencalJobChain.class); // depends on control dependency: [for], data = [job] try { boolean isSuccessful = job.waitForCompletion(true); results.add(new SequencalJobExecuteResult(isSuccessful, job.getJobName())); // depends on control dependency: [try], data = [none] if (!isSuccessful) { break; } } catch (Exception e) { throw new RuntimeException(e); } // depends on control dependency: [catch], data = [none] } return results; } }
public class class_name {

    /**
     * Retrieves and removes the head of the queue, or returns null when the
     * queue is empty or its head has not expired yet (remaining delay > 0).
     *
     * @return the expired head element, or null
     */
    public E poll() {
        final ReentrantLock lock = this.lock;
        lock.lock();
        try {
            E head = q.peek();
            if (head == null || head.getDelay(NANOSECONDS) > 0) {
                return null;
            }
            return q.poll();
        } finally {
            lock.unlock();
        }
    }
}
public class class_name { public E poll() { final ReentrantLock lock = this.lock; lock.lock(); try { E first = q.peek(); return (first == null || first.getDelay(NANOSECONDS) > 0) ? null : q.poll(); // depends on control dependency: [try], data = [none] } finally { lock.unlock(); } } }
public class class_name {

    /**
     * Renames a field on the given object node by detaching it from its old
     * name and re-attaching it under the new name. A no-op when the node is
     * null, either name is blank, or the old field does not exist.
     *
     * @param obj          node to modify (may be null)
     * @param oldFieldName current field name
     * @param newFieldName replacement field name
     */
    public static void rename(ObjectNode obj, String oldFieldName, String newFieldName) {
        if (obj == null || !isNotBlank(oldFieldName) || !isNotBlank(newFieldName)) {
            return;
        }
        JsonNode detached = remove(obj, oldFieldName);
        if (detached != null) {
            obj.set(newFieldName, detached);
        }
    }
}
public class class_name { public static void rename(ObjectNode obj, String oldFieldName, String newFieldName) { if (obj != null && isNotBlank(oldFieldName) && isNotBlank(newFieldName)) { JsonNode node = remove(obj, oldFieldName); if (node != null) { obj.set(newFieldName, node); // depends on control dependency: [if], data = [none] } } } }
public class class_name {

    /**
     * Copies the value of the matching record field into the thin-side
     * FieldInfo. When the supplied field is null or its name does not match,
     * the field is looked up on the record instead. Matching data classes copy
     * the raw data; otherwise the string form is used.
     *
     * @param fieldInfo destination thin field
     * @param field     candidate source field (may be null or mismatched)
     * @param record    record to search when the candidate does not match (may be null)
     */
    public void moveFieldToThin(FieldInfo fieldInfo, BaseField field, Record record) {
        // Work on a local so the parameter is never reassigned.
        BaseField source = field;
        if (source == null || !source.getFieldName().equals(fieldInfo.getFieldName())) {
            source = (record == null) ? null : record.getField(fieldInfo.getFieldName());
        }
        if (source == null) {
            return;
        }
        if (source.getDataClass() == fieldInfo.getDataClass()) {
            fieldInfo.setData(source.getData());
        } else {
            fieldInfo.setString(source.toString());
        }
    }
}
public class class_name { public void moveFieldToThin(FieldInfo fieldInfo, BaseField field, Record record) { if ((field == null) || (!field.getFieldName().equals(fieldInfo.getFieldName()))) { field = null; // depends on control dependency: [if], data = [none] if (record != null) field = record.getField(fieldInfo.getFieldName()); } if (field != null) { if (field.getDataClass() == fieldInfo.getDataClass()) fieldInfo.setData(field.getData()); else fieldInfo.setString(field.toString()); } } }
public class class_name {

    /**
     * Copies the elements of an iterable into a new mutable ArrayList,
     * preserving iteration order.
     *
     * @param iterable source of elements; must not be null
     * @param <T>      element type of the returned list
     * @return a new mutable list containing the iterable's elements in order
     */
    public static <T> List<T> toList(Iterable<? extends T> iterable) {
        // Fast path: a Collection knows its size, so the ArrayList copy
        // constructor can presize and bulk-copy instead of growing per element.
        if (iterable instanceof Collection) {
            return new ArrayList<T>((Collection<? extends T>) iterable);
        }
        List<T> list = new ArrayList<T>();
        for (T element : iterable) {
            list.add(element);
        }
        return list;
    }
}
public class class_name {

    /**
     * Copies the elements of an iterable into a new mutable ArrayList,
     * preserving iteration order.
     *
     * @param iterable source of elements; must not be null
     * @param <T>      element type of the returned list
     * @return a new mutable list containing the iterable's elements in order
     */
    public static <T> List<T> toList(Iterable<? extends T> iterable) {
        // Fast path: a Collection knows its size, so the ArrayList copy
        // constructor can presize and bulk-copy instead of growing per element.
        if (iterable instanceof Collection) {
            return new ArrayList<T>((Collection<? extends T>) iterable);
        }
        List<T> list = new ArrayList<T>();
        for (T element : iterable) {
            list.add(element);
        }
        return list;
    }
}
public class class_name {

    /**
     * Looks up the value stored under an exactly matching key.
     *
     * @param key key to search for
     * @return the associated value, or null when the key is absent
     */
    public V get(String key) {
        final int index = exactMatchSearch(key);
        // A negative index means "not found".
        return index < 0 ? null : getValueAt(index);
    }
}
public class class_name { public V get(String key) { int index = exactMatchSearch(key); if (index >= 0) { return getValueAt(index); // depends on control dependency: [if], data = [(index] } return null; } }
public class class_name { public char getDataLength() { char length = 0; List<StunAttribute> attrs = getAttributes(); for (StunAttribute att : attrs) { int attLen = att.getDataLength() + StunAttribute.HEADER_LENGTH; // take attribute padding into account: attLen += (4 - (attLen % 4)) % 4; length += attLen; } return length; } }
public class class_name { public char getDataLength() { char length = 0; List<StunAttribute> attrs = getAttributes(); for (StunAttribute att : attrs) { int attLen = att.getDataLength() + StunAttribute.HEADER_LENGTH; // take attribute padding into account: attLen += (4 - (attLen % 4)) % 4; // depends on control dependency: [for], data = [att] length += attLen; // depends on control dependency: [for], data = [att] } return length; } }
public class class_name { private void traverseNode(String path) { DataNode node = getNode(path); String children[] = null; synchronized (node) { Set<String> childs = node.getChildren(); if (childs != null) { children = childs.toArray(new String[childs.size()]); } } if (children != null) { if (children.length == 0) { // this node does not have a child // is the leaf node // check if its the leaf node String endString = "/" + Quotas.limitNode; if (path.endsWith(endString)) { // ok this is the limit node // get the real node and update // the count and the bytes String realPath = path.substring(Quotas.quotaZookeeper .length(), path.indexOf(endString)); updateQuotaForPath(realPath); this.pTrie.addPath(realPath); } return; } for (String child : children) { traverseNode(path + "/" + child); } } } }
public class class_name { private void traverseNode(String path) { DataNode node = getNode(path); String children[] = null; synchronized (node) { Set<String> childs = node.getChildren(); if (childs != null) { children = childs.toArray(new String[childs.size()]); // depends on control dependency: [if], data = [none] } } if (children != null) { if (children.length == 0) { // this node does not have a child // is the leaf node // check if its the leaf node String endString = "/" + Quotas.limitNode; if (path.endsWith(endString)) { // ok this is the limit node // get the real node and update // the count and the bytes String realPath = path.substring(Quotas.quotaZookeeper .length(), path.indexOf(endString)); updateQuotaForPath(realPath); // depends on control dependency: [if], data = [none] this.pTrie.addPath(realPath); // depends on control dependency: [if], data = [none] } return; // depends on control dependency: [if], data = [none] } for (String child : children) { traverseNode(path + "/" + child); // depends on control dependency: [for], data = [child] } } } }
public class class_name {

    /**
     * Builds the output side of a task: either wires up the chained task
     * drivers (back to front) when chained stubs are configured, or creates a
     * plain output collector from the task configuration.
     *
     * @param nepheleTask       owning task
     * @param cl                class loader used to resolve user code
     * @param config            task configuration describing outputs and chained stubs
     * @param chainedTasksTarget receives the instantiated chained drivers (front first)
     * @param eventualOutputs   receives the writers backing the final collector
     * @return the collector the task should emit into
     * @throws Exception if a chained driver cannot be instantiated or wired
     */
    @SuppressWarnings("unchecked")
    public static <T> Collector<T> initOutputs(AbstractInvokable nepheleTask, ClassLoader cl, TaskConfig config,
            List<ChainedDriver<?, ?>> chainedTasksTarget, List<BufferWriter> eventualOutputs) throws Exception {
        final int numOutputs = config.getNumOutputs();
        // check whether we got any chained tasks
        final int numChained = config.getNumberOfChainedStubs();
        if (numChained > 0) {
            // Chained stubs require exactly one FORWARD connection out of this task.
            if (numOutputs != 1 || config.getOutputShipStrategy(0) != ShipStrategyType.FORWARD) {
                throw new RuntimeException("Plan Generation Bug: Found a chained stub that is not connected via an only forward connection.");
            }
            // Instantiate the chain back to front so each driver can be handed
            // the collector of its successor.
            @SuppressWarnings("rawtypes")
            Collector previous = null;
            for (int i = numChained - 1; i >= 0; --i) {
                // Instantiate the driver for position i.
                final ChainedDriver<?, ?> ct;
                try {
                    Class<? extends ChainedDriver<?, ?>> ctc = config.getChainedTask(i);
                    ct = ctc.newInstance();
                } catch (Exception ex) {
                    throw new RuntimeException("Could not instantiate chained task driver.", ex);
                }
                // Fetch this driver's own configuration and display name.
                final TaskConfig chainedStubConf = config.getChainedStubConfig(i);
                final String taskName = config.getChainedTaskName(i);
                if (i == numChained - 1) {
                    // Last in chain: it writes to the real output collector.
                    previous = getOutputCollector(nepheleTask, chainedStubConf, cl, eventualOutputs, chainedStubConf.getNumOutputs());
                }
                ct.setup(chainedStubConf, taskName, previous, nepheleTask, cl);
                chainedTasksTarget.add(0, ct);
                previous = ct;
            }
            // The collector of the first driver in the chain is what the
            // nephele task itself emits into.
            return (Collector<T>) previous;
        }
        // No chained stubs: instantiate the output collector the default way
        // from this configuration.
        return getOutputCollector(nepheleTask, config, cl, eventualOutputs, numOutputs);
    }
}
public class class_name {

    /**
     * Builds the output side of a task: either wires up the chained task
     * drivers (back to front) when chained stubs are configured, or creates a
     * plain output collector from the task configuration.
     *
     * @param nepheleTask       owning task
     * @param cl                class loader used to resolve user code
     * @param config            task configuration describing outputs and chained stubs
     * @param chainedTasksTarget receives the instantiated chained drivers (front first)
     * @param eventualOutputs   receives the writers backing the final collector
     * @return the collector the task should emit into
     * @throws Exception if a chained driver cannot be instantiated or wired
     */
    @SuppressWarnings("unchecked")
    public static <T> Collector<T> initOutputs(AbstractInvokable nepheleTask, ClassLoader cl, TaskConfig config,
            List<ChainedDriver<?, ?>> chainedTasksTarget, List<BufferWriter> eventualOutputs) throws Exception {
        final int numOutputs = config.getNumOutputs();
        // check whether we got any chained tasks
        final int numChained = config.getNumberOfChainedStubs();
        if (numChained > 0) {
            // Chained stubs require exactly one FORWARD connection out of this task.
            if (numOutputs != 1 || config.getOutputShipStrategy(0) != ShipStrategyType.FORWARD) {
                throw new RuntimeException("Plan Generation Bug: Found a chained stub that is not connected via an only forward connection.");
            }
            // Instantiate the chain back to front so each driver can be handed
            // the collector of its successor.
            @SuppressWarnings("rawtypes")
            Collector previous = null;
            for (int i = numChained - 1; i >= 0; --i) {
                // Instantiate the driver for position i.
                final ChainedDriver<?, ?> ct;
                try {
                    Class<? extends ChainedDriver<?, ?>> ctc = config.getChainedTask(i);
                    ct = ctc.newInstance();
                } catch (Exception ex) {
                    throw new RuntimeException("Could not instantiate chained task driver.", ex);
                }
                // Fetch this driver's own configuration and display name.
                final TaskConfig chainedStubConf = config.getChainedStubConfig(i);
                final String taskName = config.getChainedTaskName(i);
                if (i == numChained - 1) {
                    // Last in chain: it writes to the real output collector.
                    previous = getOutputCollector(nepheleTask, chainedStubConf, cl, eventualOutputs, chainedStubConf.getNumOutputs());
                }
                ct.setup(chainedStubConf, taskName, previous, nepheleTask, cl);
                chainedTasksTarget.add(0, ct);
                previous = ct;
            }
            // The collector of the first driver in the chain is what the
            // nephele task itself emits into.
            return (Collector<T>) previous;
        }
        // No chained stubs: instantiate the output collector the default way
        // from this configuration.
        return getOutputCollector(nepheleTask, config, cl, eventualOutputs, numOutputs);
    }
}
public class class_name {

    /**
     * Renders the two HTML table rows for one setup component: a checkbox row
     * (pre-checked when the component is selected, with an onClick dependency
     * check) and a description row.
     *
     * @param component the component to render
     * @return the generated HTML fragment
     */
    protected String htmlComponent(CmsSetupComponent component) {
        // StringBuilder instead of StringBuffer: the buffer never escapes this
        // method, so the synchronized StringBuffer buys nothing.
        StringBuilder html = new StringBuilder(256);
        html.append("\t<tr>\n");
        html.append("\t\t<td>\n");
        html.append("\t\t\t<input type='checkbox' name='availableComponents' value='");
        html.append(component.getId());
        html.append("'");
        if (component.isChecked()) {
            html.append(" checked='checked'");
        }
        html.append(" onClick=\"checkComponentDependencies('");
        html.append(component.getId());
        html.append("');\">\n");
        html.append("\t\t</td>\n");
        html.append("\t\t<td style='width: 100%; '>\n\t\t\t");
        html.append(component.getName());
        html.append("\n\t\t</td>\n");
        html.append("\t</tr>\n");
        html.append("\t<tr>\n");
        html.append("\t\t<td>&nbsp;</td>\n");
        html.append("\t\t<td style='vertical-align: top; width: 100%; padding-bottom: 8px; font-style: italic;'>\n\t\t\t");
        html.append(component.getDescription());
        html.append("\n\t\t</td>\n");
        html.append("\t</tr>\n");
        return html.toString();
    }
}
public class class_name { protected String htmlComponent(CmsSetupComponent component) { StringBuffer html = new StringBuffer(256); html.append("\t<tr>\n"); html.append("\t\t<td>\n"); html.append("\t\t\t<input type='checkbox' name='availableComponents' value='"); html.append(component.getId()); html.append("'"); if (component.isChecked()) { html.append(" checked='checked'"); // depends on control dependency: [if], data = [none] } html.append(" onClick=\"checkComponentDependencies('"); html.append(component.getId()); html.append("');\">\n"); html.append("\t\t</td>\n"); html.append("\t\t<td style='width: 100%; '>\n\t\t\t"); html.append(component.getName()); html.append("\n\t\t</td>\n"); html.append("\t</tr>\n"); html.append("\t<tr>\n"); html.append("\t\t<td>&nbsp;</td>\n"); html.append( "\t\t<td style='vertical-align: top; width: 100%; padding-bottom: 8px; font-style: italic;'>\n\t\t\t"); html.append(component.getDescription()); html.append("\n\t\t</td>\n"); html.append("\t</tr>\n"); return html.toString(); } }
public class class_name { public void storeAppSettings(CmsObject cms, Class<? extends I_CmsAppSettings> type, I_CmsAppSettings appSettings) { CmsUser user = cms.getRequestContext().getCurrentUser(); CmsUserSettings settings = new CmsUserSettings(user); String currentSetting = settings.getAdditionalPreference(type.getName(), true); String state = appSettings.getSettingsString(); if (((state == null) && (currentSetting == null)) || ((state != null) && state.equals(currentSetting))) { // nothing changed return; } settings.setAdditionalPreference(type.getName(), state); try { settings.save(cms); } catch (CmsException e) { LOG.error("Failed to store workplace app settings for type " + type.getName(), e); } } }
public class class_name { public void storeAppSettings(CmsObject cms, Class<? extends I_CmsAppSettings> type, I_CmsAppSettings appSettings) { CmsUser user = cms.getRequestContext().getCurrentUser(); CmsUserSettings settings = new CmsUserSettings(user); String currentSetting = settings.getAdditionalPreference(type.getName(), true); String state = appSettings.getSettingsString(); if (((state == null) && (currentSetting == null)) || ((state != null) && state.equals(currentSetting))) { // nothing changed return; // depends on control dependency: [if], data = [none] } settings.setAdditionalPreference(type.getName(), state); try { settings.save(cms); // depends on control dependency: [try], data = [none] } catch (CmsException e) { LOG.error("Failed to store workplace app settings for type " + type.getName(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { static FacebookSettings newInstance(Bundle bundle) { FacebookSettings settings = null; int destinationId = bundle.getInt(BUNDLE_KEY_DESTINATION_ID); String accountName = bundle.getString(BUNDLE_KEY_ACCOUNT_NAME); String albumName = bundle.getString(BUNDLE_KEY_ALBUM_NAME); String albumGraphPath = bundle.getString(BUNDLE_KEY_ALBUM_GRAPH_PATH); String pageAccessToken = bundle.getString(BUNDLE_KEY_PAGE_ACCESS_TOKEN); String photoPrivacy = bundle.getString(BUNDLE_KEY_PHOTO_PRIVACY); if (!TextUtils.isEmpty(accountName) && !TextUtils.isEmpty(albumName) && !TextUtils.isEmpty(albumGraphPath)) { if (destinationId == FacebookEndpoint.DestinationId.PROFILE) { settings = new FacebookSettings(destinationId, accountName, albumName, albumGraphPath, pageAccessToken, photoPrivacy); } else if ((destinationId == FacebookEndpoint.DestinationId.PAGE || destinationId == FacebookEndpoint.DestinationId.PAGE_ALBUM) && !TextUtils.isEmpty(pageAccessToken)) { settings = new FacebookSettings(destinationId, accountName, albumName, albumGraphPath, pageAccessToken, photoPrivacy); } } return settings; } }
public class class_name { static FacebookSettings newInstance(Bundle bundle) { FacebookSettings settings = null; int destinationId = bundle.getInt(BUNDLE_KEY_DESTINATION_ID); String accountName = bundle.getString(BUNDLE_KEY_ACCOUNT_NAME); String albumName = bundle.getString(BUNDLE_KEY_ALBUM_NAME); String albumGraphPath = bundle.getString(BUNDLE_KEY_ALBUM_GRAPH_PATH); String pageAccessToken = bundle.getString(BUNDLE_KEY_PAGE_ACCESS_TOKEN); String photoPrivacy = bundle.getString(BUNDLE_KEY_PHOTO_PRIVACY); if (!TextUtils.isEmpty(accountName) && !TextUtils.isEmpty(albumName) && !TextUtils.isEmpty(albumGraphPath)) { if (destinationId == FacebookEndpoint.DestinationId.PROFILE) { settings = new FacebookSettings(destinationId, accountName, albumName, albumGraphPath, pageAccessToken, photoPrivacy); // depends on control dependency: [if], data = [(destinationId] } else if ((destinationId == FacebookEndpoint.DestinationId.PAGE || destinationId == FacebookEndpoint.DestinationId.PAGE_ALBUM) && !TextUtils.isEmpty(pageAccessToken)) { settings = new FacebookSettings(destinationId, accountName, albumName, albumGraphPath, pageAccessToken, photoPrivacy); // depends on control dependency: [if], data = [none] } } return settings; } }
public class class_name { @VisibleForTesting Map<String, LookupExtractorFactoryMapContainer> getToBeLoadedOnNode( LookupsState<LookupExtractorFactoryMapContainer> currLookupsStateOnNode, Map<String, LookupExtractorFactoryMapContainer> nodeTierLookupsToBe ) { Map<String, LookupExtractorFactoryMapContainer> toLoad = new HashMap<>(); for (Map.Entry<String, LookupExtractorFactoryMapContainer> e : nodeTierLookupsToBe.entrySet()) { String name = e.getKey(); LookupExtractorFactoryMapContainer lookupToBe = e.getValue(); // get it from the current pending notices list on the node LookupExtractorFactoryMapContainer current = currLookupsStateOnNode.getToLoad().get(name); if (current == null) { //ok, not on pending list, get from currently loaded lookups on node current = currLookupsStateOnNode.getCurrent().get(name); } if (current == null || //lookup is neither pending nor already loaded on the node OR currLookupsStateOnNode.getToDrop().contains(name) || //it is being dropped on the node OR lookupToBe.replaces(current) //lookup is already know to node, but lookupToBe overrides that ) { toLoad.put(name, lookupToBe); } } return toLoad; } }
public class class_name { @VisibleForTesting Map<String, LookupExtractorFactoryMapContainer> getToBeLoadedOnNode( LookupsState<LookupExtractorFactoryMapContainer> currLookupsStateOnNode, Map<String, LookupExtractorFactoryMapContainer> nodeTierLookupsToBe ) { Map<String, LookupExtractorFactoryMapContainer> toLoad = new HashMap<>(); for (Map.Entry<String, LookupExtractorFactoryMapContainer> e : nodeTierLookupsToBe.entrySet()) { String name = e.getKey(); LookupExtractorFactoryMapContainer lookupToBe = e.getValue(); // get it from the current pending notices list on the node LookupExtractorFactoryMapContainer current = currLookupsStateOnNode.getToLoad().get(name); if (current == null) { //ok, not on pending list, get from currently loaded lookups on node current = currLookupsStateOnNode.getCurrent().get(name); // depends on control dependency: [if], data = [none] } if (current == null || //lookup is neither pending nor already loaded on the node OR currLookupsStateOnNode.getToDrop().contains(name) || //it is being dropped on the node OR lookupToBe.replaces(current) //lookup is already know to node, but lookupToBe overrides that ) { toLoad.put(name, lookupToBe); // depends on control dependency: [if], data = [none] } } return toLoad; } }
public class class_name {

    /**
     * Computes the element-wise combination v1 * s1 + v2.
     *
     * @param v1 first vector
     * @param s1 scalar applied to each component of v1
     * @param v2 second vector, same length as v1
     * @return new array holding v1[i] * s1 + v2[i] for every index i
     */
    public static double[] timesPlus(final double[] v1, final double s1, final double[] v2) {
        assert v1.length == v2.length : ERR_VEC_DIMENSIONS;
        final double[] out = new double[v1.length];
        for (int i = 0; i < out.length; i++) {
            out[i] = s1 * v1[i] + v2[i];
        }
        return out;
    }
}
public class class_name { public static double[] timesPlus(final double[] v1, final double s1, final double[] v2) { assert v1.length == v2.length : ERR_VEC_DIMENSIONS; final double[] result = new double[v1.length]; for(int i = 0; i < result.length; i++) { result[i] = v1[i] * s1 + v2[i]; // depends on control dependency: [for], data = [i] } return result; } }
public class class_name {

    /**
     * Removes and returns the named chain group from the registry.
     *
     * @param groupName name of the group to remove
     * @return the removed group's data
     * @throws ChainGroupException if the name is null or no such group exists
     */
    @Override
    public synchronized ChainGroupData removeChainGroup(String groupName) throws ChainGroupException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "Removing chain group, " + groupName);
        }
        if (null == groupName) {
            throw new ChainGroupException("Input group name is null");
        }
        ChainGroupData groupData = this.chainGroups.remove(groupName);
        if (null == groupData) {
            // Fixed: the old message said "Null group name", but this branch
            // means the name was non-null and simply not registered.
            throw new ChainGroupException("Chain group not found: " + groupName);
        }
        return groupData;
    }
}
public class class_name { @Override public synchronized ChainGroupData removeChainGroup(String groupName) throws ChainGroupException { if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { Tr.debug(tc, "Removing chain group, " + groupName); // depends on control dependency: [if], data = [none] } if (null == groupName) { throw new ChainGroupException("Input group name is null"); } ChainGroupData groupData = this.chainGroups.remove(groupName); if (null == groupData) { throw new ChainGroupException("Null group name"); } return groupData; } }
public class class_name {

    /**
     * Performs the full agent connection handshake over the given channel and
     * publishes it as this computer's channel: version/OS/charset probes,
     * remote FS resolution, listener registration, pre-online and online
     * ComputerListener callbacks, and atomic publication under channelLock.
     *
     * @param channel   freshly established channel to the agent
     * @param launchLog optional stream receiving human-readable progress
     * @param listener  optional extra channel listener
     * @throws IOException          on probe failures or if the node was deleted mid-setup
     * @throws InterruptedException if a remote call is interrupted
     * @throws IllegalStateException if a channel is already connected
     */
    public void setChannel(@Nonnull Channel channel, @CheckForNull OutputStream launchLog, @CheckForNull Channel.Listener listener) throws IOException, InterruptedException {
        if (this.channel != null)
            throw new IllegalStateException("Already connected");

        final TaskListener taskListener = launchLog != null ? new StreamTaskListener(launchLog) : TaskListener.NULL;
        PrintStream log = taskListener.getLogger();

        channel.setProperty(SlaveComputer.class, this);

        channel.addListener(new LoggingChannelListener(logger, Level.FINEST) {
            @Override
            public void onClosed(Channel c, IOException cause) {
                // Orderly shutdown will have null exception
                if (cause != null) {
                    offlineCause = new ChannelTermination(cause);
                    Functions.printStackTrace(cause, taskListener.error("Connection terminated"));
                } else {
                    taskListener.getLogger().println("Connection terminated");
                }
                closeChannel();
                try {
                    launcher.afterDisconnect(SlaveComputer.this, taskListener);
                } catch (Throwable t) {
                    // Never let a misbehaving launcher break channel teardown;
                    // log and continue.
                    LogRecord lr = new LogRecord(Level.SEVERE,
                            "Launcher {0}'s afterDisconnect method propagated an exception when {1}'s connection was closed: {2}");
                    lr.setThrown(t);
                    lr.setParameters(new Object[]{launcher, SlaveComputer.this.getName(), t.getMessage()});
                    logger.log(lr);
                }
            }
        });
        if (listener != null)
            channel.addListener(listener);

        // Probe the remote side: remoting version, OS family, default charset.
        String slaveVersion = channel.call(new SlaveVersion());
        log.println("Remoting version: " + slaveVersion);
        VersionNumber agentVersion = new VersionNumber(slaveVersion);
        if (agentVersion.isOlderThan(RemotingVersionInfo.getMinimumSupportedVersion())) {
            log.println(String.format("WARNING: Remoting version is older than a minimum required one (%s). "
                    + "Connection will not be rejected, but the compatibility is NOT guaranteed",
                    RemotingVersionInfo.getMinimumSupportedVersion()));
        }

        boolean _isUnix = channel.call(new DetectOS());
        log.println(_isUnix ? hudson.model.Messages.Slave_UnixSlave() : hudson.model.Messages.Slave_WindowsSlave());

        String defaultCharsetName = channel.call(new DetectDefaultCharset());

        Slave node = getNode();
        if (node == null) { // Node has been disabled/removed during the connection
            throw new IOException("Node " + nodeName + " has been deleted during the channel setup");
        }

        String remoteFS = node.getRemoteFS();
        if (Util.isRelativePath(remoteFS)) {
            remoteFS = channel.call(new AbsolutePath(remoteFS));
            log.println("NOTE: Relative remote path resolved to: " + remoteFS);
        }
        if (_isUnix && !remoteFS.contains("/") && remoteFS.contains("\\"))
            log.println("WARNING: " + remoteFS + " looks suspiciously like Windows path. Maybe you meant " + remoteFS.replace('\\', '/') + "?");
        FilePath root = new FilePath(channel, remoteFS);

        // reference counting problem is known to happen, such as JENKINS-9017, and so as a preventive measure
        // we pin the base classloader so that it'll never get GCed. When this classloader gets released,
        // it'll have a catastrophic impact on the communication.
        channel.pinClassLoader(getClass().getClassLoader());

        channel.call(new SlaveInitializer(DEFAULT_RING_BUFFER_SIZE));
        SecurityContext old = ACL.impersonate(ACL.SYSTEM);
        try {
            for (ComputerListener cl : ComputerListener.all()) {
                cl.preOnline(this, channel, root, taskListener);
            }
        } finally {
            SecurityContextHolder.setContext(old);
        }

        offlineCause = null;

        // update the data structure atomically to prevent others from seeing a channel that's not properly initialized yet
        synchronized (channelLock) {
            if (this.channel != null) {
                // check again. we used to have this entire method in a big synchronization block,
                // but Channel constructor blocks for an external process to do the connection
                // if CommandLauncher is used, and that cannot be interrupted because it blocks at InputStream.
                // so if the process hangs, it hangs the thread in a lock, and since Hudson will try to relaunch,
                // we'll end up queuing the lot of threads in a pseudo deadlock.
                // This implementation prevents that by avoiding a lock. HUDSON-1705 is likely a manifestation of this.
                channel.close();
                throw new IllegalStateException("Already connected");
            }
            isUnix = _isUnix;
            numRetryAttempt = 0;
            this.channel = channel;
            this.absoluteRemoteFs = remoteFS;
            defaultCharset = Charset.forName(defaultCharsetName);

            synchronized (statusChangeLock) {
                statusChangeLock.notifyAll();
            }
        }
        old = ACL.impersonate(ACL.SYSTEM);
        try {
            for (ComputerListener cl : ComputerListener.all()) {
                cl.onOnline(this, taskListener);
            }
        } finally {
            SecurityContextHolder.setContext(old);
        }
        log.println("Agent successfully connected and online");
        Jenkins.get().getQueue().scheduleMaintenance();
    }
}
public class class_name {

    /**
     * Performs the full agent connection handshake over the given channel and
     * publishes it as this computer's channel: version/OS/charset probes,
     * remote FS resolution, listener registration, pre-online and online
     * ComputerListener callbacks, and atomic publication under channelLock.
     *
     * @param channel   freshly established channel to the agent
     * @param launchLog optional stream receiving human-readable progress
     * @param listener  optional extra channel listener
     * @throws IOException          on probe failures or if the node was deleted mid-setup
     * @throws InterruptedException if a remote call is interrupted
     * @throws IllegalStateException if a channel is already connected
     */
    public void setChannel(@Nonnull Channel channel, @CheckForNull OutputStream launchLog, @CheckForNull Channel.Listener listener) throws IOException, InterruptedException {
        if (this.channel != null)
            throw new IllegalStateException("Already connected");

        final TaskListener taskListener = launchLog != null ? new StreamTaskListener(launchLog) : TaskListener.NULL;
        PrintStream log = taskListener.getLogger();

        channel.setProperty(SlaveComputer.class, this);

        channel.addListener(new LoggingChannelListener(logger, Level.FINEST) {
            @Override
            public void onClosed(Channel c, IOException cause) {
                // Orderly shutdown will have null exception
                if (cause != null) {
                    offlineCause = new ChannelTermination(cause);
                    Functions.printStackTrace(cause, taskListener.error("Connection terminated"));
                } else {
                    taskListener.getLogger().println("Connection terminated");
                }
                closeChannel();
                try {
                    launcher.afterDisconnect(SlaveComputer.this, taskListener);
                } catch (Throwable t) {
                    // Never let a misbehaving launcher break channel teardown;
                    // log and continue.
                    LogRecord lr = new LogRecord(Level.SEVERE,
                            "Launcher {0}'s afterDisconnect method propagated an exception when {1}'s connection was closed: {2}");
                    lr.setThrown(t);
                    lr.setParameters(new Object[]{launcher, SlaveComputer.this.getName(), t.getMessage()});
                    logger.log(lr);
                }
            }
        });
        if (listener != null)
            channel.addListener(listener);

        // Probe the remote side: remoting version, OS family, default charset.
        String slaveVersion = channel.call(new SlaveVersion());
        log.println("Remoting version: " + slaveVersion);
        VersionNumber agentVersion = new VersionNumber(slaveVersion);
        if (agentVersion.isOlderThan(RemotingVersionInfo.getMinimumSupportedVersion())) {
            log.println(String.format("WARNING: Remoting version is older than a minimum required one (%s). "
                    + "Connection will not be rejected, but the compatibility is NOT guaranteed",
                    RemotingVersionInfo.getMinimumSupportedVersion()));
        }

        boolean _isUnix = channel.call(new DetectOS());
        log.println(_isUnix ? hudson.model.Messages.Slave_UnixSlave() : hudson.model.Messages.Slave_WindowsSlave());

        String defaultCharsetName = channel.call(new DetectDefaultCharset());

        Slave node = getNode();
        if (node == null) { // Node has been disabled/removed during the connection
            throw new IOException("Node " + nodeName + " has been deleted during the channel setup");
        }

        String remoteFS = node.getRemoteFS();
        if (Util.isRelativePath(remoteFS)) {
            remoteFS = channel.call(new AbsolutePath(remoteFS));
            log.println("NOTE: Relative remote path resolved to: " + remoteFS);
        }
        if (_isUnix && !remoteFS.contains("/") && remoteFS.contains("\\"))
            log.println("WARNING: " + remoteFS + " looks suspiciously like Windows path. Maybe you meant " + remoteFS.replace('\\', '/') + "?");
        FilePath root = new FilePath(channel, remoteFS);

        // reference counting problem is known to happen, such as JENKINS-9017, and so as a preventive measure
        // we pin the base classloader so that it'll never get GCed. When this classloader gets released,
        // it'll have a catastrophic impact on the communication.
        channel.pinClassLoader(getClass().getClassLoader());

        channel.call(new SlaveInitializer(DEFAULT_RING_BUFFER_SIZE));
        SecurityContext old = ACL.impersonate(ACL.SYSTEM);
        try {
            for (ComputerListener cl : ComputerListener.all()) {
                cl.preOnline(this, channel, root, taskListener);
            }
        } finally {
            SecurityContextHolder.setContext(old);
        }

        offlineCause = null;

        // update the data structure atomically to prevent others from seeing a channel that's not properly initialized yet
        synchronized (channelLock) {
            if (this.channel != null) {
                // check again. we used to have this entire method in a big synchronization block,
                // but Channel constructor blocks for an external process to do the connection
                // if CommandLauncher is used, and that cannot be interrupted because it blocks at InputStream.
                // so if the process hangs, it hangs the thread in a lock, and since Hudson will try to relaunch,
                // we'll end up queuing the lot of threads in a pseudo deadlock.
                // This implementation prevents that by avoiding a lock. HUDSON-1705 is likely a manifestation of this.
                channel.close();
                throw new IllegalStateException("Already connected");
            }
            isUnix = _isUnix;
            numRetryAttempt = 0;
            this.channel = channel;
            this.absoluteRemoteFs = remoteFS;
            defaultCharset = Charset.forName(defaultCharsetName);

            synchronized (statusChangeLock) {
                statusChangeLock.notifyAll();
            }
        }
        old = ACL.impersonate(ACL.SYSTEM);
        try {
            for (ComputerListener cl : ComputerListener.all()) {
                cl.onOnline(this, taskListener);
            }
        } finally {
            SecurityContextHolder.setContext(old);
        }
        log.println("Agent successfully connected and online");
        Jenkins.get().getQueue().scheduleMaintenance();
    }
}
public class class_name {

    /**
     * Distributes the collected join/where conditions onto their range
     * variables. Outer-joined ranges keep join and where conditions separate;
     * inner joins fold the where conditions into the join conditions. IN-list
     * conditions are attached directly as join/where conditions — a VoltDB
     * deviation from stock HSQLDB, which rewrote them based on index support.
     */
    void assignToRangeVariables() {
        for (int i = 0; i < rangeVariables.length; i++) {
            boolean isOuter = rangeVariables[i].isLeftJoin || rangeVariables[i].isRightJoin;
            if (isOuter) {
                // Outer join: join and where conditions have different null
                // semantics, so they must stay separate.
                assignToRangeVariable(rangeVariables[i], i, joinExpressions[i], true);
                assignToRangeVariable(rangeVariables[i], i, whereExpressions[i], false);
            } else {
                // Inner join: where conditions can be merged into the join set.
                joinExpressions[i].addAll(whereExpressions[i]);
                assignToRangeVariable(rangeVariables[i], i, joinExpressions[i], true);
            }
            // A VoltDB extension to disable
            // Turn off some weird rewriting of in expressions based on index support for the query.
            // This makes it simpler to parse on the VoltDB side,
            // at the expense of HSQL performance.
            // Also fixed an apparent join/where confusion?
            if (inExpressions[i] != null) {
                if (!flags[i] && isOuter) {
                    rangeVariables[i].addJoinCondition(inExpressions[i]);
                } else {
                    rangeVariables[i].addWhereCondition(inExpressions[i]);
                }
                /* disable 7 lines ...
                if (rangeVariables[i].hasIndexCondition() && inExpressions[i] != null) {
                    if (!flags[i] && isOuter) {
                        rangeVariables[i].addWhereCondition(inExpressions[i]);
                    } else {
                        rangeVariables[i].addJoinCondition(inExpressions[i]);
                    }
                ... disabled 7 lines */
                // End of VoltDB extension
                inExpressions[i] = null;
                inExpressionCount--;
            }
        }
        if (inExpressionCount != 0) {
            // A VoltDB extension to disable
            // This will never be called because of the change made to the block above
            assert(false);
            // End of VoltDB extension
            setInConditionsAsTables();
        }
    }
}
public class class_name { void assignToRangeVariables() { for (int i = 0; i < rangeVariables.length; i++) { boolean isOuter = rangeVariables[i].isLeftJoin || rangeVariables[i].isRightJoin; if (isOuter) { assignToRangeVariable(rangeVariables[i], i, joinExpressions[i], true); // depends on control dependency: [if], data = [none] assignToRangeVariable(rangeVariables[i], i, whereExpressions[i], false); // depends on control dependency: [if], data = [none] } else { joinExpressions[i].addAll(whereExpressions[i]); // depends on control dependency: [if], data = [none] assignToRangeVariable(rangeVariables[i], i, joinExpressions[i], true); // depends on control dependency: [if], data = [none] } // A VoltDB extension to disable // Turn off some weird rewriting of in expressions based on index support for the query. // This makes it simpler to parse on the VoltDB side, // at the expense of HSQL performance. // Also fixed an apparent join/where confusion? if (inExpressions[i] != null) { if (!flags[i] && isOuter) { rangeVariables[i].addJoinCondition(inExpressions[i]); // depends on control dependency: [if], data = [none] } else { rangeVariables[i].addWhereCondition(inExpressions[i]); // depends on control dependency: [if], data = [none] } /* disable 7 lines ... if (rangeVariables[i].hasIndexCondition() && inExpressions[i] != null) { if (!flags[i] && isOuter) { rangeVariables[i].addWhereCondition(inExpressions[i]); } else { rangeVariables[i].addJoinCondition(inExpressions[i]); } ... 
disabled 7 lines */ // End of VoltDB extension inExpressions[i] = null; // depends on control dependency: [if], data = [none] inExpressionCount--; // depends on control dependency: [if], data = [none] } } if (inExpressionCount != 0) { // A VoltDB extension to disable // This will never be called because of the change made to the block above assert(false); // depends on control dependency: [if], data = [none] // End of VoltDB extension setInConditionsAsTables(); // depends on control dependency: [if], data = [none] } } }
public class class_name {
    /**
     * Starts transport resolution and blocks until the resolver reports it is
     * initialized, polling once per second.
     *
     * @throws InterruptedException if the calling thread is interrupted while
     *         waiting (previously swallowed by the broad catch — see below)
     * @throws XMPPException  declared for callers of {@code initialize()}
     * @throws SmackException declared for callers of {@code initialize()}
     */
    public void initializeAndWait() throws XMPPException, SmackException, InterruptedException {
        this.initialize();
        try {
            LOGGER.fine("Initializing transport resolver...");
            // NOTE(review): loops forever if the resolver never initializes —
            // confirm whether an upper time bound is wanted here.
            while (!this.isInitialized()) {
                LOGGER.fine("Resolver init still pending");
                Thread.sleep(1000);
            }
            LOGGER.fine("Transport resolved");
        } catch (InterruptedException e) {
            // Bug fix: the old catch (Exception) swallowed InterruptedException,
            // making this busy-wait uninterruptible even though the method
            // declares it. Restore the interrupt status and propagate.
            Thread.currentThread().interrupt();
            throw e;
        } catch (Exception e) {
            LOGGER.log(Level.WARNING, "exception", e);
        }
    }
}
public class class_name { public void initializeAndWait() throws XMPPException, SmackException, InterruptedException { this.initialize(); try { LOGGER.fine("Initializing transport resolver..."); while (!this.isInitialized()) { LOGGER.fine("Resolver init still pending"); // depends on control dependency: [while], data = [none] Thread.sleep(1000); // depends on control dependency: [while], data = [none] } LOGGER.fine("Transport resolved"); } catch (Exception e) { LOGGER.log(Level.WARNING, "exception", e); } } }
public class class_name {
    /**
     * Returns the request-message info for this remote message request, or
     * null when the request is not currently in the REQUEST state.
     *
     * @return info wrapper built from the AI stream tick, or null
     * @throws SIMPRuntimeOperationFailedException if reading the AI stream fails
     */
    public SIMPRequestMessageInfo getRequestMessageInfo()
        throws SIMPRuntimeOperationFailedException
    {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "getRequestMessageInfo");
        SIMPRequestMessageInfo requestMessageInfo = null;
        try
        {
            if (State.REQUEST.toString().equals(getState()))
            {
                // This RemoteMessageRequest is in state request so lets get the info
                TickRange tickRange = _aiStream.getTickRange(_tick);
                requestMessageInfo = new RequestMessageInfo((AIRequestedTick)tickRange.value);
            }
        }
        catch(SIMPException e)
        {
            // FFDC — record the first-failure data before rewrapping.
            FFDCFilter.processException(
                e,
                "com.ibm.ws.sib.processor.runtime.RemoteMessageRequest.getRequestMessageInfo",
                "1:407:1.34",
                this);
            // Wrap as a runtime-operation failure with an NLS message; the
            // original exception is preserved as the cause.
            SIMPRuntimeOperationFailedException e1 =
                new SIMPRuntimeOperationFailedException(
                    nls.getFormattedMessage(
                        "INTERNAL_MESSAGING_ERROR_CWSIP0003",
                        new Object[] {"RemoteMessageRequest.getRequestMessageInfo",
                                      "1:415:1.34",
                                      e,
                                      _aiStream.getStreamId()},
                        null),
                    e);
            SibTr.exception(tc, e1);
            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                SibTr.exit(tc, "getRequestMessageInfo", e1);
            throw e1;
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "getRequestMessageInfo", requestMessageInfo);
        return requestMessageInfo;
    }
}
public class class_name { public SIMPRequestMessageInfo getRequestMessageInfo() throws SIMPRuntimeOperationFailedException { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(tc, "getRequestMessageInfo"); SIMPRequestMessageInfo requestMessageInfo = null; try { if (State.REQUEST.toString().equals(getState())) { // This RemoteMessageRequest is in state request so lets get the info TickRange tickRange = _aiStream.getTickRange(_tick); requestMessageInfo = new RequestMessageInfo((AIRequestedTick)tickRange.value); // depends on control dependency: [if], data = [none] } } catch(SIMPException e) { // FFDC FFDCFilter.processException( e, "com.ibm.ws.sib.processor.runtime.RemoteMessageRequest.getRequestMessageInfo", "1:407:1.34", this); SIMPRuntimeOperationFailedException e1 = new SIMPRuntimeOperationFailedException( nls.getFormattedMessage( "INTERNAL_MESSAGING_ERROR_CWSIP0003", new Object[] {"RemoteMessageRequest.getRequestMessageInfo", "1:415:1.34", e, _aiStream.getStreamId()}, null), e); SibTr.exception(tc, e1); if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "getRequestMessageInfo", e1); throw e1; } if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "getRequestMessageInfo", requestMessageInfo); return requestMessageInfo; } }
public class class_name {
    /**
     * Marshalls the given ModelArtifacts instance onto the request protocol
     * via the supplied marshaller.
     *
     * @param modelArtifacts     source object; must not be null
     * @param protocolMarshaller target protocol marshaller
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(ModelArtifacts modelArtifacts, ProtocolMarshaller protocolMarshaller) {
        if (modelArtifacts == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(modelArtifacts.getS3ModelArtifacts(), S3MODELARTIFACTS_BINDING);
        } catch (Exception e) {
            // Wrap any failure, preserving the cause for diagnostics.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class class_name { public void marshall(ModelArtifacts modelArtifacts, ProtocolMarshaller protocolMarshaller) { if (modelArtifacts == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(modelArtifacts.getS3ModelArtifacts(), S3MODELARTIFACTS_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name {
    /**
     * Instantiates the given TheHashinator implementation reflectively via its
     * (byte[], boolean) constructor.
     *
     * @param hashinatorImplementation concrete hashinator class to construct
     * @param configBytes serialized hashinator configuration
     * @param cooked implementation-defined config-format flag — TODO confirm semantics
     * @return the constructed hashinator; never null
     * @throws RuntimeException wrapping any reflective failure
     */
    public static TheHashinator constructHashinator(
            Class<? extends TheHashinator> hashinatorImplementation,
            byte configBytes[], boolean cooked) {
        try {
            Constructor<? extends TheHashinator> constructor =
                    hashinatorImplementation.getConstructor(byte[].class, boolean.class);
            return constructor.newInstance(configBytes, cooked);
        } catch (Exception e) {
            // Bug fix: Throwables.propagate(e) always throws, so the old trailing
            // "return null" was dead code and misled callers into null-checking.
            // Rethrow directly, preserving the cause (same behavior as Guava's
            // propagate for Exceptions, without the deprecated dependency).
            if (e instanceof RuntimeException) {
                throw (RuntimeException) e;
            }
            throw new RuntimeException(e);
        }
    }
}
public class class_name { public static TheHashinator constructHashinator( Class<? extends TheHashinator> hashinatorImplementation, byte configBytes[], boolean cooked) { try { Constructor<? extends TheHashinator> constructor = hashinatorImplementation.getConstructor(byte[].class, boolean.class); // depends on control dependency: [try], data = [none] return constructor.newInstance(configBytes, cooked); // depends on control dependency: [try], data = [none] } catch (Exception e) { Throwables.propagate(e); } // depends on control dependency: [catch], data = [none] return null; } }
public class class_name { public int getDataSize() { // Data size is given by lvs int size = 0; for(int i=0; i<numLVs; i++) { size += IOUtils.toInt(lvs[i]); } return size; } }
public class class_name { public int getDataSize() { // Data size is given by lvs int size = 0; for(int i=0; i<numLVs; i++) { size += IOUtils.toInt(lvs[i]); // depends on control dependency: [for], data = [i] } return size; } }
public class class_name {
    /**
     * Sets the width of one column, or the sheet's default column width when
     * {@code columnIndex} is negative.
     *
     * @param columnIndex target column, or a negative value for the default width
     * @param width       width in character units
     * @return this writer, for chaining
     */
    public ExcelWriter setColumnWidth(int columnIndex, int width) {
        if (columnIndex >= 0) {
            // POI expresses column widths in 1/256ths of a character width.
            this.sheet.setColumnWidth(columnIndex, width * 256);
        } else {
            this.sheet.setDefaultColumnWidth(width);
        }
        return this;
    }
}
public class class_name { public ExcelWriter setColumnWidth(int columnIndex, int width) { if (columnIndex < 0) { this.sheet.setDefaultColumnWidth(width); // depends on control dependency: [if], data = [none] } else { this.sheet.setColumnWidth(columnIndex, width * 256); // depends on control dependency: [if], data = [(columnIndex] } return this; } }
public class class_name { public List<Comment> parse(String jsonText) throws RedditParseException { // Parse to a list of things List<Thing> things = this.parseGeneric(jsonText); // List of comment and submission mixed elements List<Comment> comments = new LinkedList<Comment>(); // Iterate over things for (Thing t : things) { if (t instanceof Comment) { comments.add((Comment) t); } else { LOGGER.warn("Encountered an unexpected reddit thing (" + t.getKind().value() + "), skipping it."); } } // Return resulting comments list return comments; } }
public class class_name { public List<Comment> parse(String jsonText) throws RedditParseException { // Parse to a list of things List<Thing> things = this.parseGeneric(jsonText); // List of comment and submission mixed elements List<Comment> comments = new LinkedList<Comment>(); // Iterate over things for (Thing t : things) { if (t instanceof Comment) { comments.add((Comment) t); // depends on control dependency: [if], data = [none] } else { LOGGER.warn("Encountered an unexpected reddit thing (" + t.getKind().value() + "), skipping it."); // depends on control dependency: [if], data = [none] } } // Return resulting comments list return comments; } }
public class class_name {
    /**
     * Returns a copy of the claim groups with the given statement added to the
     * group of its main-snak property. A statement whose non-empty statement id
     * matches an existing one replaces it in place; otherwise the statement is
     * appended, or a new singleton group is created.
     */
    protected static Map<String, List<Statement>> addStatementToGroups(Statement statement,
            Map<String, List<Statement>> claims) {
        Map<String, List<Statement>> updatedGroups = new HashMap<>(claims);
        String pid = statement.getMainSnak().getPropertyId().getId();
        if (!updatedGroups.containsKey(pid)) {
            // No group for this property yet: create a fresh singleton group.
            updatedGroups.put(pid, Collections.singletonList(statement));
            return updatedGroups;
        }
        List<Statement> previousGroup = updatedGroups.get(pid);
        List<Statement> rebuiltGroup = new ArrayList<>(previousGroup.size());
        boolean replaced = false;
        for (Statement existing : previousGroup) {
            boolean sameNonEmptyId = existing.getStatementId().equals(statement.getStatementId())
                    && !existing.getStatementId().isEmpty();
            if (sameNonEmptyId) {
                // Replace the statement with the matching id, keeping position.
                rebuiltGroup.add(statement);
                replaced = true;
            } else {
                rebuiltGroup.add(existing);
            }
        }
        if (!replaced) {
            rebuiltGroup.add(statement);
        }
        updatedGroups.put(pid, rebuiltGroup);
        return updatedGroups;
    }
}
public class class_name { protected static Map<String, List<Statement>> addStatementToGroups(Statement statement, Map<String, List<Statement>> claims) { Map<String, List<Statement>> newGroups = new HashMap<>(claims); String pid = statement.getMainSnak().getPropertyId().getId(); if(newGroups.containsKey(pid)) { List<Statement> newGroup = new ArrayList<>(newGroups.get(pid).size()); boolean statementReplaced = false; for(Statement existingStatement : newGroups.get(pid)) { if(existingStatement.getStatementId().equals(statement.getStatementId()) && !existingStatement.getStatementId().isEmpty()) { statementReplaced = true; // depends on control dependency: [if], data = [none] newGroup.add(statement); // depends on control dependency: [if], data = [none] } else { newGroup.add(existingStatement); // depends on control dependency: [if], data = [none] } } if(!statementReplaced) { newGroup.add(statement); // depends on control dependency: [if], data = [none] } newGroups.put(pid, newGroup); // depends on control dependency: [if], data = [none] } else { newGroups.put(pid, Collections.singletonList(statement)); // depends on control dependency: [if], data = [none] } return newGroups; } }
public class class_name { protected int calculateLineIndentationLevel(final ParserData parserData, final String line, int lineNumber) throws IndentationException { char[] lineCharArray = line.toCharArray(); int indentationCount = 0; // Count the amount of whitespace characters before any text to determine the level if (Character.isWhitespace(lineCharArray[0])) { for (char c : lineCharArray) { if (Character.isWhitespace(c)) { indentationCount++; } else { break; } } if (indentationCount % parserData.getIndentationSize() != 0) { throw new IndentationException(format(ProcessorConstants.ERROR_INCORRECT_INDENTATION_MSG, lineNumber, line.trim())); } } return indentationCount / parserData.getIndentationSize(); } }
public class class_name { protected int calculateLineIndentationLevel(final ParserData parserData, final String line, int lineNumber) throws IndentationException { char[] lineCharArray = line.toCharArray(); int indentationCount = 0; // Count the amount of whitespace characters before any text to determine the level if (Character.isWhitespace(lineCharArray[0])) { for (char c : lineCharArray) { if (Character.isWhitespace(c)) { indentationCount++; // depends on control dependency: [if], data = [none] } else { break; } } if (indentationCount % parserData.getIndentationSize() != 0) { throw new IndentationException(format(ProcessorConstants.ERROR_INCORRECT_INDENTATION_MSG, lineNumber, line.trim())); } } return indentationCount / parserData.getIndentationSize(); } }
public class class_name { protected Map<String, String> parseHeaderParameters(HttpServletRequest request) { String header = null; Enumeration<String> headers = request.getHeaders("Authorization"); while (headers.hasMoreElements()) { String value = headers.nextElement(); if ((value.toLowerCase().startsWith("oauth "))) { header = value; break; } } Map<String, String> parameters = null; if (header != null) { parameters = new HashMap<String, String>(); String authHeaderValue = header.substring(6); //create a map of the authorization header values per OAuth Core 1.0, section 5.4.1 String[] headerEntries = StringSplitUtils.splitIgnoringQuotes(authHeaderValue, ','); for (Object o : StringSplitUtils.splitEachArrayElementAndCreateMap(headerEntries, "=", "\"").entrySet()) { Map.Entry entry = (Map.Entry) o; try { String key = oauthDecode((String) entry.getKey()); String value = oauthDecode((String) entry.getValue()); parameters.put(key, value); } catch (DecoderException e) { throw new IllegalStateException(e); } } } return parameters; } }
public class class_name { protected Map<String, String> parseHeaderParameters(HttpServletRequest request) { String header = null; Enumeration<String> headers = request.getHeaders("Authorization"); while (headers.hasMoreElements()) { String value = headers.nextElement(); if ((value.toLowerCase().startsWith("oauth "))) { header = value; // depends on control dependency: [if], data = [none] break; } } Map<String, String> parameters = null; if (header != null) { parameters = new HashMap<String, String>(); // depends on control dependency: [if], data = [none] String authHeaderValue = header.substring(6); //create a map of the authorization header values per OAuth Core 1.0, section 5.4.1 String[] headerEntries = StringSplitUtils.splitIgnoringQuotes(authHeaderValue, ','); for (Object o : StringSplitUtils.splitEachArrayElementAndCreateMap(headerEntries, "=", "\"").entrySet()) { Map.Entry entry = (Map.Entry) o; try { String key = oauthDecode((String) entry.getKey()); String value = oauthDecode((String) entry.getValue()); parameters.put(key, value); // depends on control dependency: [try], data = [none] } catch (DecoderException e) { throw new IllegalStateException(e); } // depends on control dependency: [catch], data = [none] } } return parameters; } }
public class class_name { private List<String> getEchoMessagesFromReturnValue() { List<String> returnListing = new ArrayList(); if(this.returnValue == null) { return returnListing; } Iterator<T> iterForReturnVal = this.returnValue.iterator(); //Only add where the ECHO message is set... while(iterForReturnVal.hasNext()) { T returnVal = iterForReturnVal.next(); if(returnVal.getEcho() == null) { continue; } returnListing.add(returnVal.getEcho()); } return returnListing; } }
public class class_name { private List<String> getEchoMessagesFromReturnValue() { List<String> returnListing = new ArrayList(); if(this.returnValue == null) { return returnListing; // depends on control dependency: [if], data = [none] } Iterator<T> iterForReturnVal = this.returnValue.iterator(); //Only add where the ECHO message is set... while(iterForReturnVal.hasNext()) { T returnVal = iterForReturnVal.next(); if(returnVal.getEcho() == null) { continue; } returnListing.add(returnVal.getEcho()); // depends on control dependency: [while], data = [none] } return returnListing; } }
public class class_name {
    /**
     * Shuts down the namesystem: stops replication monitoring, interrupts the
     * background daemon threads, and finally (always) stops the lease monitor
     * and closes the FSDirectory (fsimage/edits).
     */
    public void close() {
        fsRunning = false;
        try {
            if (pendingReplications != null) {
                pendingReplications.stop();
            }
            // Interrupt each background daemon; every field is null-checked
            // because close() may run before startup fully completed.
            if (hbthread != null) {
                hbthread.interrupt();
            }
            if (underreplthread != null) {
                underreplthread.interrupt();
            }
            if (overreplthread != null) {
                overreplthread.interrupt();
            }
            if (raidEncodingTaskThread != null) {
                raidEncodingTaskThread.interrupt();
            }
            if (dnthread != null) {
                dnthread.interrupt();
            }
            if (automaticEditsRollingThread != null) {
                automaticEditsRoller.stop();
                // We cannot interrupt roller thread. For manual failover, we want
                // the edits file operations to finish.
                automaticEditsRollingThread.join();
            }
            if (safeMode != null) {
                safeMode.shutdown();
            }
        } catch (Exception e) {
            LOG.warn("Exception shutting down FSNamesystem", e);
        } finally {
            // using finally to ensure we also wait for lease daemon
            try {
                LOG.info("Stopping LeaseManager");
                stopLeaseMonitor();
                if (InjectionHandler
                    .trueCondition(InjectionEvent.FSNAMESYSTEM_CLOSE_DIRECTORY)) {
                    if (dir != null) {
                        LOG.info("Stopping directory (fsimage, fsedits)");
                        dir.close();
                    }
                }
            } catch (InterruptedException ie) {
                // NOTE(review): interrupt is deliberately ignored here and the
                // thread's interrupt status is not restored — confirm intent.
            } catch (IOException ie) {
                LOG.error("Error closing FSDirectory", ie);
                IOUtils.cleanup(LOG, dir);
            }
        }
    }
}
public class class_name { private void determineDefaultAccessTypeAndMetaCompleteness() { for ( EntityMappings mappings : entityMappings ) { PersistenceUnitMetadata meta = mappings.getPersistenceUnitMetadata(); if ( meta != null ) { if ( meta.getXmlMappingMetadataComplete() != null ) { context.mappingDocumentFullyXmlConfigured( true ); } else { context.mappingDocumentFullyXmlConfigured( false ); } PersistenceUnitDefaults persistenceUnitDefaults = meta.getPersistenceUnitDefaults(); if ( persistenceUnitDefaults != null ) { org.hibernate.jpamodelgen.xml.jaxb.AccessType xmlAccessType = persistenceUnitDefaults.getAccess(); if ( xmlAccessType != null ) { context.setPersistenceUnitDefaultAccessType( mapXmlAccessTypeToJpaAccessType( xmlAccessType ) ); } } } else { context.mappingDocumentFullyXmlConfigured( false ); } } } }
public class class_name { private void determineDefaultAccessTypeAndMetaCompleteness() { for ( EntityMappings mappings : entityMappings ) { PersistenceUnitMetadata meta = mappings.getPersistenceUnitMetadata(); if ( meta != null ) { if ( meta.getXmlMappingMetadataComplete() != null ) { context.mappingDocumentFullyXmlConfigured( true ); // depends on control dependency: [if], data = [none] } else { context.mappingDocumentFullyXmlConfigured( false ); // depends on control dependency: [if], data = [none] } PersistenceUnitDefaults persistenceUnitDefaults = meta.getPersistenceUnitDefaults(); if ( persistenceUnitDefaults != null ) { org.hibernate.jpamodelgen.xml.jaxb.AccessType xmlAccessType = persistenceUnitDefaults.getAccess(); if ( xmlAccessType != null ) { context.setPersistenceUnitDefaultAccessType( mapXmlAccessTypeToJpaAccessType( xmlAccessType ) ); // depends on control dependency: [if], data = [( xmlAccessType] } } } else { context.mappingDocumentFullyXmlConfigured( false ); // depends on control dependency: [if], data = [none] } } } }
public class class_name {
    /**
     * Converts a JavaScript array of generic toolbar item names into the
     * TinyMCE toolbar configuration string.
     *
     * @param jso a JavaScriptObject expected to be an array of item names
     * @return the TinyMCE toolbar string
     */
    protected static String createToolbar(JavaScriptObject jso) {
        JsArray<?> itemArray = jso.<JsArray<?>> cast();
        List<String> itemNames = new ArrayList<String>();
        int count = itemArray.length();
        for (int index = 0; index < count; index++) {
            itemNames.add(itemArray.get(index).toString());
        }
        return CmsTinyMceToolbarHelper.createTinyMceToolbarStringFromGenericToolbarItems(itemNames);
    }
}
public class class_name { protected static String createToolbar(JavaScriptObject jso) { JsArray<?> jsItemArray = jso.<JsArray<?>> cast(); List<String> jsItemList = new ArrayList<String>(); for (int i = 0; i < jsItemArray.length(); i++) { jsItemList.add(jsItemArray.get(i).toString()); // depends on control dependency: [for], data = [i] } return CmsTinyMceToolbarHelper.createTinyMceToolbarStringFromGenericToolbarItems(jsItemList); } }
public class class_name { public FormValidation doCheckViewName(@QueryParameter String value) { checkPermission(View.CREATE); String name = fixEmpty(value); if (name == null) return FormValidation.ok(); // already exists? if (getView(name) != null) return FormValidation.error(Messages.Hudson_ViewAlreadyExists(name)); // good view name? try { checkGoodName(name); } catch (Failure e) { return FormValidation.error(e.getMessage()); } return FormValidation.ok(); } }
public class class_name { public FormValidation doCheckViewName(@QueryParameter String value) { checkPermission(View.CREATE); String name = fixEmpty(value); if (name == null) return FormValidation.ok(); // already exists? if (getView(name) != null) return FormValidation.error(Messages.Hudson_ViewAlreadyExists(name)); // good view name? try { checkGoodName(name); // depends on control dependency: [try], data = [none] } catch (Failure e) { return FormValidation.error(e.getMessage()); } // depends on control dependency: [catch], data = [none] return FormValidation.ok(); } }
public class class_name {
    /**
     * Creates a new instance of the given class via its no-argument constructor.
     *
     * @param clazz class to instantiate; must expose a no-arg constructor
     * @return a fresh instance
     * @throws ResourceException if instantiation fails
     */
    public static <T> T newInstance(Class<T> clazz) {
        try {
            // Fix: Class.newInstance() is deprecated since Java 9 and rethrows
            // the constructor's checked exceptions unchecked; the Constructor
            // route wraps them in InvocationTargetException instead.
            return clazz.getDeclaredConstructor().newInstance();
        } catch (ReflectiveOperationException e) {
            // NOTE(review): the cause e is dropped here — if ResourceException
            // has a (String, Throwable) constructor, chain it.
            throw new ResourceException(String.format("couldn't create a new instance of %s", clazz));
        }
    }
}
public class class_name { public static <T> T newInstance(Class<T> clazz) { try { return clazz.newInstance(); // depends on control dependency: [try], data = [none] } catch (InstantiationException | IllegalAccessException e) { throw new ResourceException(String.format("couldn't create a new instance of %s", clazz)); } // depends on control dependency: [catch], data = [none] } }
public class class_name {
    /**
     * Renders the replacement map as a numbered, newline-separated listing of
     * the form {@code 1. key='k', value='v'}.
     *
     * @return the formatted listing; empty string when the map is empty
     */
    public String getList() {
        // Fix: string += in a loop is O(n^2); build with StringBuilder instead.
        StringBuilder sb = new StringBuilder();
        int i = 1;
        for (String s : mReps.keySet()) {
            sb.append(i++).append(". key='").append(s)
              .append("', value='").append(mReps.get(s)).append("'\n");
        }
        return sb.toString();
    }
}
public class class_name { public String getList() { String result = ""; int i = 1; for (String s : mReps.keySet()) { result += (i++) + ". key='" + s + "', value='" + mReps.get(s) + "'\n"; // depends on control dependency: [for], data = [s] } return result; } }
public class class_name {
    /**
     * Validates and queues a SendData request for transmission to a Z-Wave
     * node. Messages for non-listening (battery) nodes are diverted to the
     * wake-up queue unless they are low priority; dead nodes are skipped.
     *
     * @param serialMessage must be a SendData request message
     */
    public void sendData(SerialMessage serialMessage) {
        // Guard: only SendData-class messages are accepted.
        if (serialMessage.getMessageClass() != SerialMessage.SerialMessageClass.SendData) {
            logger.error(String.format("Invalid message class %s (0x%02X) for sendData",
                serialMessage.getMessageClass().getLabel(), serialMessage.getMessageClass().getKey()));
            return;
        }
        // Guard: only request-type messages can be sent.
        if (serialMessage.getMessageType() != SerialMessage.SerialMessageType.Request) {
            logger.error("Only request messages can be sent");
            return;
        }
        // NOTE(review): getNode(...) may return null for an unknown node id —
        // confirm; node.getNodeStage() below would then NPE.
        ZWaveNode node = this.getNode(serialMessage.getMessageNode());
        if (node.getNodeStage() == NodeStage.NODEBUILDINFO_DEAD) {
            logger.debug("Node {} is dead, not sending message.", node.getNodeId());
            return;
        }
        // Non-listening node with a non-low-priority message: defer until the
        // device wakes up, if it supports the WAKE_UP command class.
        if (!node.isListening() && serialMessage.getPriority() != SerialMessage.SerialMessagePriority.Low) {
            ZWaveWakeUpCommandClass wakeUpCommandClass =
                (ZWaveWakeUpCommandClass)node.getCommandClass(ZWaveCommandClass.CommandClass.WAKE_UP);
            if (wakeUpCommandClass != null && !wakeUpCommandClass.isAwake()) {
                wakeUpCommandClass.putInWakeUpQueue(serialMessage); //it's a battery operated device, place in wake-up queue.
                return;
            }
        }
        serialMessage.setTransmitOptions(TRANSMIT_OPTION_ACK | TRANSMIT_OPTION_AUTO_ROUTE | TRANSMIT_OPTION_EXPLORE);
        // Callback ids cycle through 1..0xFF; 0 is never assigned.
        if (++sentDataPointer > 0xFF)
            sentDataPointer = 1;
        serialMessage.setCallbackId(sentDataPointer);
        logger.debug("Callback ID = {}", sentDataPointer);
        this.enqueue(serialMessage);
    }
}
public class class_name { public void sendData(SerialMessage serialMessage) { if (serialMessage.getMessageClass() != SerialMessage.SerialMessageClass.SendData) { logger.error(String.format("Invalid message class %s (0x%02X) for sendData", serialMessage.getMessageClass().getLabel(), serialMessage.getMessageClass().getKey())); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } if (serialMessage.getMessageType() != SerialMessage.SerialMessageType.Request) { logger.error("Only request messages can be sent"); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } ZWaveNode node = this.getNode(serialMessage.getMessageNode()); if (node.getNodeStage() == NodeStage.NODEBUILDINFO_DEAD) { logger.debug("Node {} is dead, not sending message.", node.getNodeId()); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } if (!node.isListening() && serialMessage.getPriority() != SerialMessage.SerialMessagePriority.Low) { ZWaveWakeUpCommandClass wakeUpCommandClass = (ZWaveWakeUpCommandClass)node.getCommandClass(ZWaveCommandClass.CommandClass.WAKE_UP); if (wakeUpCommandClass != null && !wakeUpCommandClass.isAwake()) { wakeUpCommandClass.putInWakeUpQueue(serialMessage); //it's a battery operated device, place in wake-up queue. // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } } serialMessage.setTransmitOptions(TRANSMIT_OPTION_ACK | TRANSMIT_OPTION_AUTO_ROUTE | TRANSMIT_OPTION_EXPLORE); if (++sentDataPointer > 0xFF) sentDataPointer = 1; serialMessage.setCallbackId(sentDataPointer); logger.debug("Callback ID = {}", sentDataPointer); this.enqueue(serialMessage); } }
public class class_name {
    /**
     * Marshalls the given UpdatePipelineStatusRequest onto the request
     * protocol via the supplied marshaller.
     *
     * @param updatePipelineStatusRequest source request; must not be null
     * @param protocolMarshaller          target protocol marshaller
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(UpdatePipelineStatusRequest updatePipelineStatusRequest, ProtocolMarshaller protocolMarshaller) {
        if (updatePipelineStatusRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(updatePipelineStatusRequest.getId(), ID_BINDING);
            protocolMarshaller.marshall(updatePipelineStatusRequest.getStatus(), STATUS_BINDING);
        } catch (Exception e) {
            // Wrap any failure, preserving the cause for diagnostics.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class class_name { public void marshall(UpdatePipelineStatusRequest updatePipelineStatusRequest, ProtocolMarshaller protocolMarshaller) { if (updatePipelineStatusRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(updatePipelineStatusRequest.getId(), ID_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(updatePipelineStatusRequest.getStatus(), STATUS_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name {
    /**
     * Enables or disables drawing of the background contour, adjusting the
     * icon padding to account for the contour width and invalidating the
     * drawable when the flag actually changes.
     *
     * @param drawBackgroundContour whether to draw the background contour
     * @return this drawable, for chaining
     */
    @NonNull
    public IconicsDrawable drawBackgroundContour(boolean drawBackgroundContour) {
        if (mDrawBackgroundContour == drawBackgroundContour) {
            // No change: nothing to adjust or redraw.
            return this;
        }
        mDrawBackgroundContour = drawBackgroundContour;
        // Grow padding when enabling, shrink when disabling.
        int direction = drawBackgroundContour ? 1 : -1;
        mIconPadding += direction * mBackgroundContourWidth * 2;
        invalidateSelf();
        return this;
    }
}
public class class_name { @NonNull public IconicsDrawable drawBackgroundContour(boolean drawBackgroundContour) { if (mDrawBackgroundContour != drawBackgroundContour) { mDrawBackgroundContour = drawBackgroundContour; // depends on control dependency: [if], data = [none] mIconPadding += (mDrawBackgroundContour ? 1 : -1) * mBackgroundContourWidth * 2; // depends on control dependency: [if], data = [(mDrawBackgroundContour] invalidateSelf(); // depends on control dependency: [if], data = [none] } return this; } }
public class class_name { public void removeValue() { if (!isSimpleValue()) { m_hasValue = false; m_widgetHolder.clear(); generateLabel(); } else { // only deactivate the widget and restore the default value m_widget.setActive(false); m_widget.setValue(""); addActivationHandler(); } addStyleName(formCss().emptyValue()); removeValidationMessage(); } }
public class class_name { public void removeValue() { if (!isSimpleValue()) { m_hasValue = false; // depends on control dependency: [if], data = [none] m_widgetHolder.clear(); // depends on control dependency: [if], data = [none] generateLabel(); // depends on control dependency: [if], data = [none] } else { // only deactivate the widget and restore the default value m_widget.setActive(false); // depends on control dependency: [if], data = [none] m_widget.setValue(""); // depends on control dependency: [if], data = [none] addActivationHandler(); // depends on control dependency: [if], data = [none] } addStyleName(formCss().emptyValue()); removeValidationMessage(); } }
public class class_name {
    /**
     * Appends the given IPv6 addresses to this interface's address list,
     * creating the list on first use.
     *
     * @param ipv6Addresses addresses to append
     * @return this object, for chaining
     */
    public NetworkInterface withIpv6Addresses(String... ipv6Addresses) {
        if (this.ipv6Addresses == null) {
            // Lazily create the backing list, pre-sized for the new entries.
            setIpv6Addresses(new java.util.ArrayList<String>(ipv6Addresses.length));
        }
        java.util.Collections.addAll(this.ipv6Addresses, ipv6Addresses);
        return this;
    }
}
public class class_name { public NetworkInterface withIpv6Addresses(String... ipv6Addresses) { if (this.ipv6Addresses == null) { setIpv6Addresses(new java.util.ArrayList<String>(ipv6Addresses.length)); // depends on control dependency: [if], data = [none] } for (String ele : ipv6Addresses) { this.ipv6Addresses.add(ele); // depends on control dependency: [for], data = [ele] } return this; } }
public class class_name {
    /**
     * Computes the intrinsic-dimensionality outlier score for each object:
     * the object's own intrinsic dimensionality times the mean inverse
     * intrinsic dimensionality of up to k_r of its nearest neighbors
     * (excluding the object itself). Scores are stored per object and fed
     * into the min/max tracker for later normalization.
     *
     * @param ids        objects to score
     * @param knnQ       kNN query used to fetch each object's neighbors
     * @param intDims    precomputed intrinsic dimensionality per object
     * @param idosminmax receives the observed score range
     * @return per-object IDOS scores
     */
    protected DoubleDataStore computeIDOS(DBIDs ids, KNNQuery<O> knnQ, DoubleDataStore intDims, DoubleMinMax idosminmax) {
        WritableDoubleDataStore ldms = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_STATIC);
        FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("ID Outlier Scores for objects", ids.size(), LOG) : null;
        for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
            final KNNList neighbors = knnQ.getKNNForDBID(iter, k_r);
            double sum = 0.;
            int cnt = 0;
            for(DoubleDBIDListIter neighbor = neighbors.iter(); neighbor.valid(); neighbor.advance()) {
                // Skip the query object itself in its own neighbor list.
                if(DBIDUtil.equal(iter, neighbor)) {
                    continue;
                }
                final double id = intDims.doubleValue(neighbor);
                // Guard against non-positive dimensionality estimates.
                sum += id > 0 ? 1.0 / id : 0.;
                if(++cnt == k_r) {
                    // Always stop after at most k_r elements.
                    break;
                }
            }
            final double id_q = intDims.doubleValue(iter);
            // NOTE(review): if no neighbor distinct from the query was seen,
            // cnt stays 0 and id_q * sum / cnt divides by zero — confirm this
            // cannot occur for the configured k_r.
            final double idos = id_q > 0 ? id_q * sum / cnt : 0.;
            ldms.putDouble(iter, idos);
            idosminmax.put(idos);
            LOG.incrementProcessed(prog);
        }
        LOG.ensureCompleted(prog);
        return ldms;
    }
}
public class class_name { protected DoubleDataStore computeIDOS(DBIDs ids, KNNQuery<O> knnQ, DoubleDataStore intDims, DoubleMinMax idosminmax) { WritableDoubleDataStore ldms = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_STATIC); FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("ID Outlier Scores for objects", ids.size(), LOG) : null; for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) { final KNNList neighbors = knnQ.getKNNForDBID(iter, k_r); double sum = 0.; int cnt = 0; for(DoubleDBIDListIter neighbor = neighbors.iter(); neighbor.valid(); neighbor.advance()) { if(DBIDUtil.equal(iter, neighbor)) { continue; } final double id = intDims.doubleValue(neighbor); sum += id > 0 ? 1.0 / id : 0.; // depends on control dependency: [for], data = [none] if(++cnt == k_r) { // Always stop after at most k_r elements. break; } } final double id_q = intDims.doubleValue(iter); final double idos = id_q > 0 ? id_q * sum / cnt : 0.; ldms.putDouble(iter, idos); // depends on control dependency: [for], data = [iter] idosminmax.put(idos); // depends on control dependency: [for], data = [none] LOG.incrementProcessed(prog); // depends on control dependency: [for], data = [none] } LOG.ensureCompleted(prog); return ldms; } }
public class class_name { private KeyPersonCompensationDataType getCompensation( KeyPersonDto keyPerson, int budgetPeriod) { KeyPersonCompensationDataType keyPersonCompensation = KeyPersonCompensationDataType.Factory .newInstance(); ScaleTwoDecimal baseSalaryByPeriod; if (keyPerson != null) { if (keyPerson.getAcademicMonths() != null) { keyPersonCompensation.setAcademicMonths(keyPerson .getAcademicMonths().bigDecimalValue()); } if (keyPerson.getCalendarMonths() != null) { keyPersonCompensation.setCalendarMonths(keyPerson .getCalendarMonths().bigDecimalValue()); } if (keyPerson.getFringe() != null) { keyPersonCompensation.setFringeBenefits(keyPerson.getFringe() .bigDecimalValue()); } if (keyPerson.getSummerMonths() != null) { keyPersonCompensation.setSummerMonths(keyPerson .getSummerMonths().bigDecimalValue()); } if (keyPerson.getRequestedSalary() != null) { keyPersonCompensation.setRequestedSalary(keyPerson .getRequestedSalary().bigDecimalValue()); } if (keyPerson.getFundsRequested() != null) { keyPersonCompensation.setFundsRequested(keyPerson .getFundsRequested().bigDecimalValue()); } if (pdDoc.getDevelopmentProposal().getBudgets() != null) { baseSalaryByPeriod = s2sBudgetCalculatorService.getBaseSalaryByPeriod(pdDoc.getDevelopmentProposal().getBudgets().get(0) .getBudgetId(), budgetPeriod, keyPerson); if (baseSalaryByPeriod != null && baseSalaryByPeriod.isGreaterThan(ScaleTwoDecimal.ZERO)) { keyPersonCompensation.setBaseSalary(baseSalaryByPeriod.bigDecimalValue()); } else { if (keyPerson.getBaseSalary() != null) { keyPersonCompensation.setBaseSalary(keyPerson.getBaseSalary().bigDecimalValue()); } } } else { if (keyPerson.getBaseSalary() != null) { keyPersonCompensation.setBaseSalary(keyPerson.getBaseSalary().bigDecimalValue()); } } } return keyPersonCompensation; } }
public class class_name { private KeyPersonCompensationDataType getCompensation( KeyPersonDto keyPerson, int budgetPeriod) { KeyPersonCompensationDataType keyPersonCompensation = KeyPersonCompensationDataType.Factory .newInstance(); ScaleTwoDecimal baseSalaryByPeriod; if (keyPerson != null) { if (keyPerson.getAcademicMonths() != null) { keyPersonCompensation.setAcademicMonths(keyPerson .getAcademicMonths().bigDecimalValue()); // depends on control dependency: [if], data = [none] } if (keyPerson.getCalendarMonths() != null) { keyPersonCompensation.setCalendarMonths(keyPerson .getCalendarMonths().bigDecimalValue()); // depends on control dependency: [if], data = [none] } if (keyPerson.getFringe() != null) { keyPersonCompensation.setFringeBenefits(keyPerson.getFringe() .bigDecimalValue()); // depends on control dependency: [if], data = [none] } if (keyPerson.getSummerMonths() != null) { keyPersonCompensation.setSummerMonths(keyPerson .getSummerMonths().bigDecimalValue()); // depends on control dependency: [if], data = [none] } if (keyPerson.getRequestedSalary() != null) { keyPersonCompensation.setRequestedSalary(keyPerson .getRequestedSalary().bigDecimalValue()); // depends on control dependency: [if], data = [none] } if (keyPerson.getFundsRequested() != null) { keyPersonCompensation.setFundsRequested(keyPerson .getFundsRequested().bigDecimalValue()); // depends on control dependency: [if], data = [none] } if (pdDoc.getDevelopmentProposal().getBudgets() != null) { baseSalaryByPeriod = s2sBudgetCalculatorService.getBaseSalaryByPeriod(pdDoc.getDevelopmentProposal().getBudgets().get(0) .getBudgetId(), budgetPeriod, keyPerson); // depends on control dependency: [if], data = [none] if (baseSalaryByPeriod != null && baseSalaryByPeriod.isGreaterThan(ScaleTwoDecimal.ZERO)) { keyPersonCompensation.setBaseSalary(baseSalaryByPeriod.bigDecimalValue()); // depends on control dependency: [if], data = [(baseSalaryByPeriod] } else { if (keyPerson.getBaseSalary() != null) { 
keyPersonCompensation.setBaseSalary(keyPerson.getBaseSalary().bigDecimalValue()); // depends on control dependency: [if], data = [(keyPerson.getBaseSalary()] } } } else { if (keyPerson.getBaseSalary() != null) { keyPersonCompensation.setBaseSalary(keyPerson.getBaseSalary().bigDecimalValue()); // depends on control dependency: [if], data = [(keyPerson.getBaseSalary()] } } } return keyPersonCompensation; } }
public class class_name { private <Delegated> MvpPresenter<? super Delegated> getMvpPresenter(Delegated target, PresenterField<Delegated> presenterField, String delegateTag) { Class<? extends MvpPresenter<?>> presenterClass = presenterField.getPresenterClass(); PresenterStore presenterStore = MvpFacade.getInstance().getPresenterStore(); PresenterType type = presenterField.getPresenterType(); String tag; if (type == PresenterType.LOCAL) { tag = delegateTag + "$" + presenterField.getTag(target); } else { tag = presenterField.getTag(target); } //noinspection unchecked MvpPresenter<? super Delegated> presenter = presenterStore.get(tag); if (presenter != null) { return presenter; } //noinspection unchecked presenter = (MvpPresenter<? super Delegated>) presenterField.providePresenter(target); if (presenter == null) { return null; } presenter.setPresenterType(type); presenter.setTag(tag); presenter.setPresenterClass(presenterClass); presenterStore.add(tag, presenter); return presenter; } }
public class class_name { private <Delegated> MvpPresenter<? super Delegated> getMvpPresenter(Delegated target, PresenterField<Delegated> presenterField, String delegateTag) { Class<? extends MvpPresenter<?>> presenterClass = presenterField.getPresenterClass(); PresenterStore presenterStore = MvpFacade.getInstance().getPresenterStore(); PresenterType type = presenterField.getPresenterType(); String tag; if (type == PresenterType.LOCAL) { tag = delegateTag + "$" + presenterField.getTag(target); } else { tag = presenterField.getTag(target); } //noinspection unchecked MvpPresenter<? super Delegated> presenter = presenterStore.get(tag); if (presenter != null) { return presenter; // depends on control dependency: [if], data = [none] } //noinspection unchecked presenter = (MvpPresenter<? super Delegated>) presenterField.providePresenter(target); if (presenter == null) { return null; // depends on control dependency: [if], data = [none] } presenter.setPresenterType(type); presenter.setTag(tag); presenter.setPresenterClass(presenterClass); presenterStore.add(tag, presenter); return presenter; } }
public class class_name { private long grow(PrintStream out, List<ItemSet> list, TotalSupportTree ttree, HeaderTableItem header, int[] itemset, int[] localItemSupport, int[] prefixItemset) { long n = 1; int support = header.count; int item = header.id; itemset = insert(itemset, item); collect(out, list, ttree, itemset, support); if (header.node.next == null) { FPTree.Node node = header.node; n += grow(out, list, ttree, node.parent, itemset, support); } else { // Count singles in linked list if (getLocalItemSupport(header.node, localItemSupport)) { // Create local FP tree FPTree fptree = getLocalFPTree(header.node, localItemSupport, prefixItemset); // Mine new FP-tree n += grow(out, list, ttree, fptree, itemset, localItemSupport, prefixItemset); } } return n; } }
public class class_name { private long grow(PrintStream out, List<ItemSet> list, TotalSupportTree ttree, HeaderTableItem header, int[] itemset, int[] localItemSupport, int[] prefixItemset) { long n = 1; int support = header.count; int item = header.id; itemset = insert(itemset, item); collect(out, list, ttree, itemset, support); if (header.node.next == null) { FPTree.Node node = header.node; n += grow(out, list, ttree, node.parent, itemset, support); // depends on control dependency: [if], data = [none] } else { // Count singles in linked list if (getLocalItemSupport(header.node, localItemSupport)) { // Create local FP tree FPTree fptree = getLocalFPTree(header.node, localItemSupport, prefixItemset); // Mine new FP-tree n += grow(out, list, ttree, fptree, itemset, localItemSupport, prefixItemset); // depends on control dependency: [if], data = [none] } } return n; } }
public class class_name { @Nonnull public String getLine() { if (preLoaded) { if (bufferOffset >= 0) { int lineStart = bufferOffset; if (linePos > 0) { lineStart -= linePos - 1; } int lineEnd = bufferOffset; while (lineEnd < bufferLimit && buffer[lineEnd] != '\n') { ++lineEnd; } return new String(buffer, lineStart, lineEnd - lineStart); } } else if (bufferLimit > 0) { if (Math.abs((linePos - 1) - bufferOffset) < 2) { // only return the line if the line has not been consolidated before the // exception. This should avoid showing a bad exception line pointing to // the wrong content. This should never be the case in pretty-printed // JSON unless some really really long strings are causing the error. // // Since linePos does not exactly follow offset, we must accept +- 1. return new String(buffer, 0, bufferLimit - (bufferLineEnd ? 1 : 0)); } } // Otherwise we don't have the requested line, return empty string. return ""; } }
public class class_name { @Nonnull public String getLine() { if (preLoaded) { if (bufferOffset >= 0) { int lineStart = bufferOffset; if (linePos > 0) { lineStart -= linePos - 1; // depends on control dependency: [if], data = [none] } int lineEnd = bufferOffset; while (lineEnd < bufferLimit && buffer[lineEnd] != '\n') { ++lineEnd; // depends on control dependency: [while], data = [none] } return new String(buffer, lineStart, lineEnd - lineStart); // depends on control dependency: [if], data = [none] } } else if (bufferLimit > 0) { if (Math.abs((linePos - 1) - bufferOffset) < 2) { // only return the line if the line has not been consolidated before the // exception. This should avoid showing a bad exception line pointing to // the wrong content. This should never be the case in pretty-printed // JSON unless some really really long strings are causing the error. // // Since linePos does not exactly follow offset, we must accept +- 1. return new String(buffer, 0, bufferLimit - (bufferLineEnd ? 1 : 0)); // depends on control dependency: [if], data = [none] } } // Otherwise we don't have the requested line, return empty string. return ""; } }
public class class_name { public static int determineNumberOfWorkerThreads() { final int threads = Config.getInstance().getPropertyAsInt(Config.AlpineKey.WORKER_THREADS); if (threads > 0) { return threads; } else if (threads == 0) { final int cores = SystemUtil.getCpuCores(); final int multiplier = Config.getInstance().getPropertyAsInt(Config.AlpineKey.WORKER_THREAD_MULTIPLIER); if (multiplier > 0) { return cores * multiplier; } else { return cores; } } return 1; // We have to have a minimum of 1 thread } }
public class class_name { public static int determineNumberOfWorkerThreads() { final int threads = Config.getInstance().getPropertyAsInt(Config.AlpineKey.WORKER_THREADS); if (threads > 0) { return threads; // depends on control dependency: [if], data = [none] } else if (threads == 0) { final int cores = SystemUtil.getCpuCores(); final int multiplier = Config.getInstance().getPropertyAsInt(Config.AlpineKey.WORKER_THREAD_MULTIPLIER); if (multiplier > 0) { return cores * multiplier; // depends on control dependency: [if], data = [none] } else { return cores; // depends on control dependency: [if], data = [none] } } return 1; // We have to have a minimum of 1 thread } }
public class class_name {
    /**
     * Returns the portion of the id before the first '#', or null when the id
     * has no non-empty prefix before a '#'.
     */
    @Pure
    public String getInternalId() {
        final int separator = this.id.indexOf('#');
        return separator > 0 ? this.id.substring(0, separator) : null;
    }
}
public class class_name { @Pure public String getInternalId() { final int endIndex = this.id.indexOf('#'); if (endIndex <= 0) { return null; // depends on control dependency: [if], data = [none] } return this.id.substring(0, endIndex); } }
public class class_name { public GenericUtilities.TypeCategory getTypeCategory() { if (hasParameters() && variable == null && extension == null) { return GenericUtilities.TypeCategory.PARAMETERIZED; } else if (!hasParameters() && variable != null && extension == null) { if ("*".equals(variable)) { return GenericUtilities.TypeCategory.WILDCARD; } else { return GenericUtilities.TypeCategory.TYPE_VARIABLE; } } else if (!hasParameters() && variable != null && extension != null) { if ("+".equals(variable)) { return GenericUtilities.TypeCategory.WILDCARD_EXTENDS; } else if ("-".equals(variable)) { return GenericUtilities.TypeCategory.WILDCARD_SUPER; } } // this should never happen throw new IllegalStateException("The Generic Object Type is badly initialized"); } }
public class class_name { public GenericUtilities.TypeCategory getTypeCategory() { if (hasParameters() && variable == null && extension == null) { return GenericUtilities.TypeCategory.PARAMETERIZED; // depends on control dependency: [if], data = [none] } else if (!hasParameters() && variable != null && extension == null) { if ("*".equals(variable)) { return GenericUtilities.TypeCategory.WILDCARD; // depends on control dependency: [if], data = [none] } else { return GenericUtilities.TypeCategory.TYPE_VARIABLE; // depends on control dependency: [if], data = [none] } } else if (!hasParameters() && variable != null && extension != null) { if ("+".equals(variable)) { return GenericUtilities.TypeCategory.WILDCARD_EXTENDS; // depends on control dependency: [if], data = [none] } else if ("-".equals(variable)) { return GenericUtilities.TypeCategory.WILDCARD_SUPER; // depends on control dependency: [if], data = [none] } } // this should never happen throw new IllegalStateException("The Generic Object Type is badly initialized"); } }
public class class_name {
    /**
     * Returns true when any element of the list equals {@code match},
     * ignoring case.
     */
    private boolean containsIgnoreCase(List<String> stringlist, String match) {
        for (int i = 0; i < stringlist.size(); i++) {
            if (stringlist.get(i).equalsIgnoreCase(match)) {
                return true;
            }
        }
        return false;
    }
}
public class class_name { private boolean containsIgnoreCase(List<String> stringlist, String match) { for (String s : stringlist) { if (s.equalsIgnoreCase(match)) { return true; // depends on control dependency: [if], data = [none] } } return false; } }
public class class_name { public void mergeStatistics(Statistics stats) { if (stats.isEmpty()) return; // Merge stats only if they have the same type if (type.equals(stats.type)) { incrementNumNulls(stats.getNumNulls()); if (stats.hasNonNullValue()) { mergeStatisticsMinMax(stats); markAsNotEmpty(); } } else { throw StatisticsClassException.create(this, stats); } } }
public class class_name { public void mergeStatistics(Statistics stats) { if (stats.isEmpty()) return; // Merge stats only if they have the same type if (type.equals(stats.type)) { incrementNumNulls(stats.getNumNulls()); // depends on control dependency: [if], data = [none] if (stats.hasNonNullValue()) { mergeStatisticsMinMax(stats); // depends on control dependency: [if], data = [none] markAsNotEmpty(); // depends on control dependency: [if], data = [none] } } else { throw StatisticsClassException.create(this, stats); } } }
public class class_name {
    /**
     * Removes the application from monitoring, starts its uninstall, and
     * forwards the result to the completion monitor; any failure is reported
     * to the listener instead of being rethrown.
     */
    @Override
    public void execute(ExecutorService executor) {
        try {
            _appMonitor.removeApplication(_aii.getPid());
            @SuppressWarnings("rawtypes")
            ApplicationHandler handler = _aii.getHandler();
            @SuppressWarnings("unchecked")
            Future<Boolean> uninstallResult = handler.uninstall(_aii);
            _monitor.onCompletion(uninstallResult, _listener);
        } catch (Throwable failure) {
            _listener.failedCompletion(null, failure);
        }
    }
}
public class class_name { @Override public void execute(ExecutorService executor) { try { _appMonitor.removeApplication(_aii.getPid()); // depends on control dependency: [try], data = [none] @SuppressWarnings("rawtypes") ApplicationHandler handler = _aii.getHandler(); @SuppressWarnings("unchecked") Future<Boolean> result = handler.uninstall(_aii); _monitor.onCompletion(result, _listener); // depends on control dependency: [try], data = [none] } catch (Throwable t) { _listener.failedCompletion(null, t); } // depends on control dependency: [catch], data = [none] } }
public class class_name {
    /**
     * Offers a quickfix that adds the return type carried in the issue's
     * data payload; does nothing when the issue carries no data.
     */
    public static void accept(SARLQuickfixProvider provider, Issue issue, IssueResolutionAcceptor acceptor) {
        final String[] issueData = issue.getData();
        if (issueData == null || issueData.length == 0) {
            return;
        }
        final String expectedType = issueData[0];
        final ReturnTypeAddModification modification = new ReturnTypeAddModification(expectedType);
        modification.setIssue(issue);
        modification.setTools(provider);
        acceptor.accept(issue,
                MessageFormat.format(Messages.SARLQuickfixProvider_13, expectedType),
                Messages.SARLQuickfixProvider_14,
                JavaPluginImages.IMG_CORRECTION_ADD,
                modification,
                IProposalRelevance.CHANGE_RETURN_TYPE);
    }
}
public class class_name { public static void accept(SARLQuickfixProvider provider, Issue issue, IssueResolutionAcceptor acceptor) { final String[] data = issue.getData(); if (data != null && data.length > 0) { final String expectedType = data[0]; final ReturnTypeAddModification modification = new ReturnTypeAddModification(expectedType); modification.setIssue(issue); // depends on control dependency: [if], data = [none] modification.setTools(provider); // depends on control dependency: [if], data = [none] acceptor.accept(issue, MessageFormat.format(Messages.SARLQuickfixProvider_13, expectedType), Messages.SARLQuickfixProvider_14, JavaPluginImages.IMG_CORRECTION_ADD, modification, IProposalRelevance.CHANGE_RETURN_TYPE); // depends on control dependency: [if], data = [none] } } }
public class class_name {
    /**
     * Returns the IfcOutletTypeEnum EEnum, resolving and caching it from the
     * package registry on first access.
     */
    public EEnum getIfcOutletTypeEnum() {
        if (ifcOutletTypeEnumEEnum != null) {
            return ifcOutletTypeEnumEEnum;
        }
        ifcOutletTypeEnumEEnum = (EEnum) EPackage.Registry.INSTANCE
                .getEPackage(Ifc2x3tc1Package.eNS_URI)
                .getEClassifiers()
                .get(864);
        return ifcOutletTypeEnumEEnum;
    }
}
public class class_name { public EEnum getIfcOutletTypeEnum() { if (ifcOutletTypeEnumEEnum == null) { ifcOutletTypeEnumEEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI) .getEClassifiers().get(864); // depends on control dependency: [if], data = [none] } return ifcOutletTypeEnumEEnum; } }
public class class_name { private void fireTangoPipeEvent(TangoPipe tangoPipe, EventData eventData) { TangoPipeEvent tangoPipeEvent = new TangoPipeEvent(tangoPipe, eventData); // Notifying those that are interested in this event ArrayList<EventListener> listeners = event_listeners.getListeners(ITangoPipeListener.class); for (EventListener eventListener : listeners) { ((ITangoPipeListener) eventListener).pipe(tangoPipeEvent); } } }
public class class_name { private void fireTangoPipeEvent(TangoPipe tangoPipe, EventData eventData) { TangoPipeEvent tangoPipeEvent = new TangoPipeEvent(tangoPipe, eventData); // Notifying those that are interested in this event ArrayList<EventListener> listeners = event_listeners.getListeners(ITangoPipeListener.class); for (EventListener eventListener : listeners) { ((ITangoPipeListener) eventListener).pipe(tangoPipeEvent); // depends on control dependency: [for], data = [eventListener] } } }
public class class_name {
    /**
     * Converts a boolean vector into an Assignment, adding each variable (or
     * its negation) whose index is set (or unset); when {@code variables} is
     * non-null, only literals over those variables are included.
     */
    private Assignment createAssignment(final LNGBooleanVector vec, final Collection<Variable> variables) {
        final Assignment model = new Assignment();
        for (int i = 1; i < vec.size(); i++) {
            final Variable var = this.f.variable(this.idx2name.get(i));
            final boolean relevant = variables == null || variables.contains(var);
            if (relevant) {
                model.addLiteral(vec.get(i) ? var : var.negate());
            }
        }
        return model;
    }
}
public class class_name { private Assignment createAssignment(final LNGBooleanVector vec, final Collection<Variable> variables) { final Assignment model = new Assignment(); for (int i = 1; i < vec.size(); i++) { final Variable var = this.f.variable(this.idx2name.get(i)); if (vec.get(i)) { if (variables == null || variables.contains(var)) { model.addLiteral(var); } // depends on control dependency: [if], data = [none] } else if (variables == null || variables.contains(var)) { model.addLiteral(var.negate()); } // depends on control dependency: [if], data = [none] } return model; } }
public class class_name {
    /**
     * Fires a date box event at the given source; a no-op until the event
     * TYPE has been initialized.
     */
    public static void fire(I_CmsHasDateBoxEventHandlers source, Date date, boolean isTyping) {
        if (TYPE == null) {
            return;
        }
        source.fireEvent(new CmsDateBoxEvent(date, isTyping));
    }
}
public class class_name { public static void fire(I_CmsHasDateBoxEventHandlers source, Date date, boolean isTyping) { if (TYPE != null) { CmsDateBoxEvent event = new CmsDateBoxEvent(date, isTyping); source.fireEvent(event); // depends on control dependency: [if], data = [none] } } }
public class class_name { public final void mWS() throws RecognitionException { try { int _type = WS; int _channel = DEFAULT_TOKEN_CHANNEL; // src/riemann/Query.g:95:5: ( ( ' ' | '\\t' | '\\r' | '\\n' ) ) // src/riemann/Query.g:95:9: ( ' ' | '\\t' | '\\r' | '\\n' ) { if ( (input.LA(1)>='\t' && input.LA(1)<='\n')||input.LA(1)=='\r'||input.LA(1)==' ' ) { input.consume(); } else { MismatchedSetException mse = new MismatchedSetException(null,input); recover(mse); throw mse;} _channel=HIDDEN; } state.type = _type; state.channel = _channel; } finally { } } }
public class class_name { public final void mWS() throws RecognitionException { try { int _type = WS; int _channel = DEFAULT_TOKEN_CHANNEL; // src/riemann/Query.g:95:5: ( ( ' ' | '\\t' | '\\r' | '\\n' ) ) // src/riemann/Query.g:95:9: ( ' ' | '\\t' | '\\r' | '\\n' ) { if ( (input.LA(1)>='\t' && input.LA(1)<='\n')||input.LA(1)=='\r'||input.LA(1)==' ' ) { input.consume(); // depends on control dependency: [if], data = [none] } else { MismatchedSetException mse = new MismatchedSetException(null,input); recover(mse); // depends on control dependency: [if], data = [none] throw mse;} _channel=HIDDEN; } state.type = _type; state.channel = _channel; } finally { } } }
public class class_name {
    /**
     * Returns the destination's high message threshold, emitting entry/exit
     * trace records when entry tracing is enabled.
     */
    public long getDestHighMsgs() {
        final boolean traceOn = TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled();
        if (traceOn) {
            SibTr.entry(tc, "getDestHighMsgs");
            SibTr.exit(tc, "getDestHighMsgs", Long.valueOf(_destHighMsgs));
        }
        return _destHighMsgs;
    }
}
public class class_name { public long getDestHighMsgs() { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) { SibTr.entry(tc, "getDestHighMsgs"); // depends on control dependency: [if], data = [none] SibTr.exit(tc, "getDestHighMsgs", Long.valueOf(_destHighMsgs)); // depends on control dependency: [if], data = [none] } return _destHighMsgs; } }
public class class_name {
    /**
     * Returns a new Path formed by appending each non-empty extension to the
     * given path's string form, in order.
     */
    public static Path addExtension(Path path, String... extensions) {
        StringBuilder result = new StringBuilder(path.toString());
        for (String extension : extensions) {
            if (!Strings.isNullOrEmpty(extension)) {
                result.append(extension);
            }
        }
        return new Path(result.toString());
    }
}
public class class_name { public static Path addExtension(Path path, String... extensions) { StringBuilder pathStringBuilder = new StringBuilder(path.toString()); for (String extension : extensions) { if (!Strings.isNullOrEmpty(extension)) { pathStringBuilder.append(extension); // depends on control dependency: [if], data = [none] } } return new Path(pathStringBuilder.toString()); } }
public class class_name { private void copyChildren(int upToChildPosition) { // (ensureRoom isn't called here, as we should always be at/behind key additions) if (copyFromChildPosition >= upToChildPosition) return; int len = upToChildPosition - copyFromChildPosition; if (len > 0) { System.arraycopy(copyFrom, getKeyEnd(copyFrom) + copyFromChildPosition, buildChildren, buildChildPosition, len); copyFromChildPosition = upToChildPosition; buildChildPosition += len; } } }
public class class_name { private void copyChildren(int upToChildPosition) { // (ensureRoom isn't called here, as we should always be at/behind key additions) if (copyFromChildPosition >= upToChildPosition) return; int len = upToChildPosition - copyFromChildPosition; if (len > 0) { System.arraycopy(copyFrom, getKeyEnd(copyFrom) + copyFromChildPosition, buildChildren, buildChildPosition, len); // depends on control dependency: [if], data = [none] copyFromChildPosition = upToChildPosition; // depends on control dependency: [if], data = [none] buildChildPosition += len; // depends on control dependency: [if], data = [none] } } }
public class class_name {
    /**
     * Executes a POST-based search for the given entity type with the query
     * placed in the request body, returning the typed list wrapper; Candidate
     * searches additionally request the V2 endpoint.
     */
    protected <L extends ListWrapper<T>, T extends SearchEntity> L handleSearchForEntitiesWithPost(Class<T> type, String query, Set<String> fieldSet, SearchParams params) {
        Map<String, String> uriVariables = restUriVariablesFactory.getUriVariablesForSearchWithPost(
                BullhornEntityInfo.getTypesRestEntityName(type), fieldSet, params);
        String url = restUrlFactory.assembleSearchUrlWithPost(params);
        if (Candidate.class == type) {
            url = url + "&useV2=true";
        }
        JSONObject requestBody = new JSONObject();
        requestBody.put("query", query);
        return (L) this.performPostRequest(url, requestBody.toString(), BullhornEntityInfo.getTypesListWrapperType(type), uriVariables);
    }
}
public class class_name { protected <L extends ListWrapper<T>, T extends SearchEntity> L handleSearchForEntitiesWithPost(Class<T> type, String query, Set<String> fieldSet, SearchParams params) { Map<String, String> uriVariables = restUriVariablesFactory.getUriVariablesForSearchWithPost(BullhornEntityInfo.getTypesRestEntityName(type), fieldSet, params); String url = restUrlFactory.assembleSearchUrlWithPost(params); if (Candidate.class == type) { url = url + "&useV2=true"; // depends on control dependency: [if], data = [none] } JSONObject body = new JSONObject(); body.put("query", query); return (L) this.performPostRequest(url,body.toString(), BullhornEntityInfo.getTypesListWrapperType(type), uriVariables); } }
public class class_name {
    /**
     * Schedules an asynchronous client refresh that begins no earlier than
     * {@code delayMillis} from now; failures are logged, not propagated.
     */
    public void triggerAsyncRefreshWithDelay(int delayMillis) {
        final HollowClient refreshTarget = this.client;
        final long notBefore = System.currentTimeMillis() + delayMillis;
        refreshExecutor.execute(new Runnable() {
            public void run() {
                try {
                    long remaining = notBefore - System.currentTimeMillis();
                    if (remaining > 0) {
                        Thread.sleep(remaining);
                    }
                    refreshTarget.triggerRefresh();
                } catch (Throwable th) {
                    log.log(Level.SEVERE, "Async refresh failed", th);
                }
            }
        });
    }
}
public class class_name { public void triggerAsyncRefreshWithDelay(int delayMillis) { final HollowClient client = this.client; final long targetBeginTime = System.currentTimeMillis() + delayMillis; refreshExecutor.execute(new Runnable() { public void run() { try { long delay = targetBeginTime - System.currentTimeMillis(); if(delay > 0) Thread.sleep(delay); client.triggerRefresh(); // depends on control dependency: [try], data = [none] } catch(Throwable th) { log.log(Level.SEVERE, "Async refresh failed", th); } // depends on control dependency: [catch], data = [none] } }); } }
public class class_name { synchronized void listenerStarted(HttpEndpointImpl endpoint, VirtualHostConfig targetConfig, String resolvedHostName, int port, boolean isHttps) { if (!activated) return; // If allowed endpoints are specified for the host and this isn't one of them, don't add this endpoint Collection<String> allowedEndpoints = targetConfig.getAllowedEndpoints(); if (!allowedEndpoints.isEmpty() && !allowedEndpoints.contains(endpoint.getPid())) return; EndpointState oldState = myEndpoints.get(endpoint); if (oldState == null) { oldState = EndpointState.notStarted; } int newHttpPort = isHttps ? oldState.httpPort : port; int newHttpsPort = isHttps ? port : oldState.httpsPort; EndpointState newState = new EndpointState(resolvedHostName, newHttpPort, newHttpsPort); //Check if we are changing an already listening port.. boolean updatedPort = (oldState.httpPort > 0 && oldState.httpPort != newHttpPort) || (oldState.httpsPort > 0 && oldState.httpsPort != newHttpsPort); boolean addedPort = (oldState.httpPort == 0 && oldState.httpPort != newHttpPort) || (oldState.httpsPort == 0 && oldState.httpsPort != newHttpsPort); myEndpoints.put(endpoint, newState); int numPorts; if (addedPort) numPorts = listeningPorts.incrementAndGet(); else numPorts = listeningPorts.get(); if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { Tr.debug(this, tc, "listener started: " + listeningPorts.get(), oldState, newState, addedPort, updatedPort); } if (addedPort || updatedPort) { // Update registration if a port changed.. 
osgiService.updateRegistration(activated, targetConfig, true); } // Notify that the endpoint is available only if it is a first port, or a changed port for an endpoint if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) { Tr.event(this, tc, "listener started for " + this + " on host " + resolvedHostName + " on port " + port, endpoint); } for (HttpContainerContext ctx : httpContainers) { ctx.notifyExistingContexts(true, resolvedHostName, port, isHttps, numPorts); } } }
public class class_name { synchronized void listenerStarted(HttpEndpointImpl endpoint, VirtualHostConfig targetConfig, String resolvedHostName, int port, boolean isHttps) { if (!activated) return; // If allowed endpoints are specified for the host and this isn't one of them, don't add this endpoint Collection<String> allowedEndpoints = targetConfig.getAllowedEndpoints(); if (!allowedEndpoints.isEmpty() && !allowedEndpoints.contains(endpoint.getPid())) return; EndpointState oldState = myEndpoints.get(endpoint); if (oldState == null) { oldState = EndpointState.notStarted; // depends on control dependency: [if], data = [none] } int newHttpPort = isHttps ? oldState.httpPort : port; int newHttpsPort = isHttps ? port : oldState.httpsPort; EndpointState newState = new EndpointState(resolvedHostName, newHttpPort, newHttpsPort); //Check if we are changing an already listening port.. boolean updatedPort = (oldState.httpPort > 0 && oldState.httpPort != newHttpPort) || (oldState.httpsPort > 0 && oldState.httpsPort != newHttpsPort); boolean addedPort = (oldState.httpPort == 0 && oldState.httpPort != newHttpPort) || (oldState.httpsPort == 0 && oldState.httpsPort != newHttpsPort); myEndpoints.put(endpoint, newState); int numPorts; if (addedPort) numPorts = listeningPorts.incrementAndGet(); else numPorts = listeningPorts.get(); if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { Tr.debug(this, tc, "listener started: " + listeningPorts.get(), oldState, newState, addedPort, updatedPort); // depends on control dependency: [if], data = [none] } if (addedPort || updatedPort) { // Update registration if a port changed.. 
osgiService.updateRegistration(activated, targetConfig, true); // depends on control dependency: [if], data = [none] } // Notify that the endpoint is available only if it is a first port, or a changed port for an endpoint if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) { Tr.event(this, tc, "listener started for " + this + " on host " + resolvedHostName + " on port " + port, endpoint); } for (HttpContainerContext ctx : httpContainers) { ctx.notifyExistingContexts(true, resolvedHostName, port, isHttps, numPorts); } } }
public class class_name {
    /**
     * Marshals every field of the given maintenance window task through the
     * protocol marshaller; rejects a null task and wraps any marshalling
     * failure in an SdkClientException.
     */
    public void marshall(MaintenanceWindowTask maintenanceWindowTask, ProtocolMarshaller protocolMarshaller) {
        if (maintenanceWindowTask == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            final MaintenanceWindowTask task = maintenanceWindowTask;
            protocolMarshaller.marshall(task.getWindowId(), WINDOWID_BINDING);
            protocolMarshaller.marshall(task.getWindowTaskId(), WINDOWTASKID_BINDING);
            protocolMarshaller.marshall(task.getTaskArn(), TASKARN_BINDING);
            protocolMarshaller.marshall(task.getType(), TYPE_BINDING);
            protocolMarshaller.marshall(task.getTargets(), TARGETS_BINDING);
            protocolMarshaller.marshall(task.getTaskParameters(), TASKPARAMETERS_BINDING);
            protocolMarshaller.marshall(task.getPriority(), PRIORITY_BINDING);
            protocolMarshaller.marshall(task.getLoggingInfo(), LOGGINGINFO_BINDING);
            protocolMarshaller.marshall(task.getServiceRoleArn(), SERVICEROLEARN_BINDING);
            protocolMarshaller.marshall(task.getMaxConcurrency(), MAXCONCURRENCY_BINDING);
            protocolMarshaller.marshall(task.getMaxErrors(), MAXERRORS_BINDING);
            protocolMarshaller.marshall(task.getName(), NAME_BINDING);
            protocolMarshaller.marshall(task.getDescription(), DESCRIPTION_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class class_name { public void marshall(MaintenanceWindowTask maintenanceWindowTask, ProtocolMarshaller protocolMarshaller) { if (maintenanceWindowTask == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(maintenanceWindowTask.getWindowId(), WINDOWID_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(maintenanceWindowTask.getWindowTaskId(), WINDOWTASKID_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(maintenanceWindowTask.getTaskArn(), TASKARN_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(maintenanceWindowTask.getType(), TYPE_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(maintenanceWindowTask.getTargets(), TARGETS_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(maintenanceWindowTask.getTaskParameters(), TASKPARAMETERS_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(maintenanceWindowTask.getPriority(), PRIORITY_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(maintenanceWindowTask.getLoggingInfo(), LOGGINGINFO_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(maintenanceWindowTask.getServiceRoleArn(), SERVICEROLEARN_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(maintenanceWindowTask.getMaxConcurrency(), MAXCONCURRENCY_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(maintenanceWindowTask.getMaxErrors(), MAXERRORS_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(maintenanceWindowTask.getName(), NAME_BINDING); // depends on control dependency: [try], data = [none] 
protocolMarshaller.marshall(maintenanceWindowTask.getDescription(), DESCRIPTION_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { private synchronized long getUniqueTimeStamp() { //Create a default time stamp: long timeStamp = System.currentTimeMillis(); //Lock file for ensuring we create a unique time stamp for all //processes on the system: File lockFile = null; try { lockFile = new File(TEMPORARY_DIRECTORY, "guid.lock"); //Last modified time stamp for the lock file: long lastModified = fileLastModified(lockFile); //Maximum absolute time stamp to execute the lock file algorithm: //NOTE: This upper bound (1000 milliseconds or 1 second) provides an escape mechanism for the //scenario where a previous instance of the JVM crashed and a stray lock file persists on the //local file system. long maxWaitTimeStamp = (System.currentTimeMillis() + 1000); //Loop until we can create the lock file (e.g. exclusive rights) //since creating a file is atomic or the maximum absolute time //stamp to execute the lock file algorithm has expired: while (true) { try { //After the lock file has been created, we have exclusive //rights to the lock file and enter the process //synchronized block: if (lockFile.createNewFile()) { break; } //If we cannot create the lock file, only continue until //the maximum absolute time stamp to execute the lock //file algorithm has expired: else if (System.currentTimeMillis() > maxWaitTimeStamp) { //If the lock file has not been updated before the //maximum absolute time stamp to execute the lock //file algorithm has expired, delete the lock file //and attempt to execute the lock algorithm again: if (fileLastModified(lockFile) <= lastModified) { delete(lockFile); //Only permit this instance of the algorithm to //attempt to delete the lock file once: lastModified = -1; } //If a lock cannot be obtained before the maximum //absolute time stamp to execute the lock file //algorithm has expired, return the non-atomic //current time stamp: else { return timeStamp; } } } catch (IOException i) { //Return the non-atomic current time stamp since the //lock file could 
not be created. return timeStamp; } } //Wait 1/10 second (100 milliseconds) to ensure a unique time //stamp: try { Thread.sleep(100); } catch (InterruptedException i) { //Ignore since we are only sleeping. } //Create a unique time stamp as an atomic operation: timeStamp = System.currentTimeMillis(); } catch (SecurityException s) { //Ignore all security exceptions and exit gracefully. } finally { try { //After the lock file has been atomically deleted, we release //the exclusive rights to the lock file exit the process //synchronized block: if (lockFile != null) { delete(lockFile); } } catch (SecurityException s) { //Ignore all security exceptions and exit gracefully. } } return timeStamp; } }
public class class_name { private synchronized long getUniqueTimeStamp() { //Create a default time stamp: long timeStamp = System.currentTimeMillis(); //Lock file for ensuring we create a unique time stamp for all //processes on the system: File lockFile = null; try { lockFile = new File(TEMPORARY_DIRECTORY, "guid.lock"); // depends on control dependency: [try], data = [none] //Last modified time stamp for the lock file: long lastModified = fileLastModified(lockFile); //Maximum absolute time stamp to execute the lock file algorithm: //NOTE: This upper bound (1000 milliseconds or 1 second) provides an escape mechanism for the //scenario where a previous instance of the JVM crashed and a stray lock file persists on the //local file system. long maxWaitTimeStamp = (System.currentTimeMillis() + 1000); //Loop until we can create the lock file (e.g. exclusive rights) //since creating a file is atomic or the maximum absolute time //stamp to execute the lock file algorithm has expired: while (true) { try { //After the lock file has been created, we have exclusive //rights to the lock file and enter the process //synchronized block: if (lockFile.createNewFile()) { break; } //If we cannot create the lock file, only continue until //the maximum absolute time stamp to execute the lock //file algorithm has expired: else if (System.currentTimeMillis() > maxWaitTimeStamp) { //If the lock file has not been updated before the //maximum absolute time stamp to execute the lock //file algorithm has expired, delete the lock file //and attempt to execute the lock algorithm again: if (fileLastModified(lockFile) <= lastModified) { delete(lockFile); // depends on control dependency: [if], data = [none] //Only permit this instance of the algorithm to //attempt to delete the lock file once: lastModified = -1; // depends on control dependency: [if], data = [none] } //If a lock cannot be obtained before the maximum //absolute time stamp to execute the lock file //algorithm has expired, return 
the non-atomic //current time stamp: else { return timeStamp; // depends on control dependency: [if], data = [none] } } } catch (IOException i) { //Return the non-atomic current time stamp since the //lock file could not be created. return timeStamp; } // depends on control dependency: [catch], data = [none] } //Wait 1/10 second (100 milliseconds) to ensure a unique time //stamp: try { Thread.sleep(100); // depends on control dependency: [try], data = [none] } catch (InterruptedException i) { //Ignore since we are only sleeping. } // depends on control dependency: [catch], data = [none] //Create a unique time stamp as an atomic operation: timeStamp = System.currentTimeMillis(); // depends on control dependency: [try], data = [none] } catch (SecurityException s) { //Ignore all security exceptions and exit gracefully. } finally { // depends on control dependency: [catch], data = [none] try { //After the lock file has been atomically deleted, we release //the exclusive rights to the lock file exit the process //synchronized block: if (lockFile != null) { delete(lockFile); // depends on control dependency: [if], data = [(lockFile] } } catch (SecurityException s) { //Ignore all security exceptions and exit gracefully. } // depends on control dependency: [catch], data = [none] } return timeStamp; } }
public class class_name {
    /**
     * Routes a KD-tree node: interior nodes are wrapped in a (possibly
     * recycled) Helper and enqueued for later exploration; leaf nodes are
     * checked immediately against the best distance found so far.
     *
     * @param closestDistanceSq squared lower bound on the distance to anything under this node
     * @param node              the node being considered
     * @param target            the query point
     */
    protected void addToQueue(double closestDistanceSq, KdTree.Node node, P target) {
        // Leaves are terminal: evaluate them right away instead of queueing.
        if (node.isLeaf()) {
            checkBestDistance(node, target);
            return;
        }
        // Recycle a Helper from the free list when one is available.
        Helper entry = unused.isEmpty() ? new Helper() : unused.remove(unused.size() - 1);
        entry.closestPossibleSq = closestDistanceSq;
        entry.node = node;
        queue.add(entry);
    }
}
public class class_name { protected void addToQueue(double closestDistanceSq , KdTree.Node node , P target ) { if( !node.isLeaf() ) { Helper h; if( unused.isEmpty() ) { h = new Helper(); // depends on control dependency: [if], data = [none] } else { h = unused.remove( unused.size()-1 ); // depends on control dependency: [if], data = [none] } h.closestPossibleSq = closestDistanceSq; // depends on control dependency: [if], data = [none] h.node = node; // depends on control dependency: [if], data = [none] queue.add(h); // depends on control dependency: [if], data = [none] } else { checkBestDistance(node, target); // depends on control dependency: [if], data = [none] } } }
public class class_name {
    /**
     * Pushes a new annotation frame onto the stack, saving each element's
     * current annotation into that frame and then clearing the annotation on
     * the element itself.
     *
     * Improvement: the original called {@code stack.peek()} on every loop
     * iteration even though the peeked element is always the state pushed just
     * above; the reference is now hoisted out of the loop.
     *
     * @param stack           stack of saved annotation frames (new frame pushed on top)
     * @param haveAnnotations elements whose annotations are saved and cleared
     */
    private static void pushAnnotations(
            Deque<GraphAnnotationState> stack,
            Collection<? extends Annotatable> haveAnnotations) {
        // The frame pushed here is exactly what each iteration appends to.
        GraphAnnotationState state = new GraphAnnotationState(haveAnnotations.size());
        stack.push(state);
        for (Annotatable h : haveAnnotations) {
            // Save the element's annotation for later restoration, then clear it.
            state.add(new AnnotationState(h, h.getAnnotation()));
            h.setAnnotation(null);
        }
    }
}
public class class_name { private static void pushAnnotations( Deque<GraphAnnotationState> stack, Collection<? extends Annotatable> haveAnnotations) { stack.push(new GraphAnnotationState(haveAnnotations.size())); for (Annotatable h : haveAnnotations) { stack.peek().add(new AnnotationState(h, h.getAnnotation())); // depends on control dependency: [for], data = [h] h.setAnnotation(null); // depends on control dependency: [for], data = [h] } } }