code
stringlengths
130
281k
code_dependency
stringlengths
182
306k
public class class_name { public IStatus handleDrop(CommonDropAdapter aDropAdapter, DropTargetEvent aDropTargetEvent, Object aTarget) { // if (VdmUIPlugin.DEBUG) // System.out.println("Target Object for drop: " // + aTarget.getClass().toString()); if (Policy.DEBUG_DND) { System.out .println("ResourceDropAdapterAssistant.handleDrop (begin)"); //$NON-NLS-1$ } // alwaysOverwrite = false; if (aDropAdapter.getCurrentTarget() == null || aDropTargetEvent.data == null) { return Status.CANCEL_STATUS; } IStatus status = null; IResource[] resources = null; TransferData currentTransfer = aDropAdapter.getCurrentTransfer(); if (LocalSelectionTransfer.getTransfer().isSupportedType( currentTransfer)) { resources = getSelectedResources(); aDropTargetEvent.detail = DND.DROP_NONE; } else if (ResourceTransfer.getInstance().isSupportedType( currentTransfer)) { resources = (IResource[]) aDropTargetEvent.data; } if (FileTransfer.getInstance().isSupportedType(currentTransfer)) { status = performFileDrop(aDropAdapter, aDropTargetEvent.data); } else if (resources != null && resources.length > 0) { if (aDropAdapter.getCurrentOperation() == DND.DROP_COPY) { if (Policy.DEBUG_DND) { System.out .println("ResourceDropAdapterAssistant.handleDrop executing COPY."); //$NON-NLS-1$ } status = performResourceCopy(aDropAdapter, getShell(), resources); } else { if (Policy.DEBUG_DND) { System.out .println("ResourceDropAdapterAssistant.handleDrop executing MOVE."); //$NON-NLS-1$ } status = performResourceMove(aDropAdapter, resources); } } openError(status); IContainer target = null; if (aDropAdapter.getCurrentTarget() instanceof IVdmContainer) { target = ((IVdmContainer) aDropAdapter.getCurrentTarget()) .getContainer(); } else { target = getActualTarget((IResource) aDropAdapter.getCurrentTarget()); } if (target != null && target.isAccessible()) { try { target.refreshLocal(IResource.DEPTH_ONE, null); } catch (CoreException e) { } } return status; } }
public class class_name { public IStatus handleDrop(CommonDropAdapter aDropAdapter, DropTargetEvent aDropTargetEvent, Object aTarget) { // if (VdmUIPlugin.DEBUG) // System.out.println("Target Object for drop: " // + aTarget.getClass().toString()); if (Policy.DEBUG_DND) { System.out .println("ResourceDropAdapterAssistant.handleDrop (begin)"); //$NON-NLS-1$ // depends on control dependency: [if], data = [none] } // alwaysOverwrite = false; if (aDropAdapter.getCurrentTarget() == null || aDropTargetEvent.data == null) { return Status.CANCEL_STATUS; // depends on control dependency: [if], data = [none] } IStatus status = null; IResource[] resources = null; TransferData currentTransfer = aDropAdapter.getCurrentTransfer(); if (LocalSelectionTransfer.getTransfer().isSupportedType( currentTransfer)) { resources = getSelectedResources(); // depends on control dependency: [if], data = [none] aDropTargetEvent.detail = DND.DROP_NONE; // depends on control dependency: [if], data = [none] } else if (ResourceTransfer.getInstance().isSupportedType( currentTransfer)) { resources = (IResource[]) aDropTargetEvent.data; // depends on control dependency: [if], data = [none] } if (FileTransfer.getInstance().isSupportedType(currentTransfer)) { status = performFileDrop(aDropAdapter, aDropTargetEvent.data); // depends on control dependency: [if], data = [none] } else if (resources != null && resources.length > 0) { if (aDropAdapter.getCurrentOperation() == DND.DROP_COPY) { if (Policy.DEBUG_DND) { System.out .println("ResourceDropAdapterAssistant.handleDrop executing COPY."); //$NON-NLS-1$ // depends on control dependency: [if], data = [none] } status = performResourceCopy(aDropAdapter, getShell(), resources); // depends on control dependency: [if], data = [none] } else { if (Policy.DEBUG_DND) { System.out .println("ResourceDropAdapterAssistant.handleDrop executing MOVE."); //$NON-NLS-1$ // depends on control dependency: [if], data = [none] } status = performResourceMove(aDropAdapter, 
resources); // depends on control dependency: [if], data = [none] } } openError(status); IContainer target = null; if (aDropAdapter.getCurrentTarget() instanceof IVdmContainer) { target = ((IVdmContainer) aDropAdapter.getCurrentTarget()) .getContainer(); // depends on control dependency: [if], data = [none] } else { target = getActualTarget((IResource) aDropAdapter.getCurrentTarget()); // depends on control dependency: [if], data = [none] } if (target != null && target.isAccessible()) { try { target.refreshLocal(IResource.DEPTH_ONE, null); // depends on control dependency: [try], data = [none] } catch (CoreException e) { } // depends on control dependency: [catch], data = [none] } return status; } }
public class class_name { public boolean isHttpRedirect() { if (isRevisitDigest() && (getDuplicatePayload() != null)) { return getDuplicatePayload().isHttpRedirect(); } String httpCode = getHttpCode(); return (httpCode.startsWith("3")); } }
public class class_name { public boolean isHttpRedirect() { if (isRevisitDigest() && (getDuplicatePayload() != null)) { return getDuplicatePayload().isHttpRedirect(); // depends on control dependency: [if], data = [none] } String httpCode = getHttpCode(); return (httpCode.startsWith("3")); } }
public class class_name { public void addPlugins(String pluginTypeNames) { String[] pluginTypeNameList = pluginTypeNames.split(","); for (String pluginTypeName : pluginTypeNameList) { pluginTypeName = pluginTypeName.trim(); if (!pluginTypeName.equals("")) { addPlugin(pluginTypeName); } } } }
public class class_name { public void addPlugins(String pluginTypeNames) { String[] pluginTypeNameList = pluginTypeNames.split(","); for (String pluginTypeName : pluginTypeNameList) { pluginTypeName = pluginTypeName.trim(); // depends on control dependency: [for], data = [pluginTypeName] if (!pluginTypeName.equals("")) { addPlugin(pluginTypeName); // depends on control dependency: [if], data = [none] } } } }
public class class_name { private void updateVersionMessageRelayTxesBeforeFilter(VersionMessage ver) { // We will provide the remote node with a bloom filter (ie they shouldn't relay yet) // if chain == null || !chain.shouldVerifyTransactions() and a wallet is added and bloom filters are enabled // Note that the default here means that no tx invs will be received if no wallet is ever added lock.lock(); try { boolean spvMode = chain != null && !chain.shouldVerifyTransactions(); boolean willSendFilter = spvMode && peerFilterProviders.size() > 0 && vBloomFilteringEnabled; ver.relayTxesBeforeFilter = !willSendFilter; } finally { lock.unlock(); } } }
public class class_name { private void updateVersionMessageRelayTxesBeforeFilter(VersionMessage ver) { // We will provide the remote node with a bloom filter (ie they shouldn't relay yet) // if chain == null || !chain.shouldVerifyTransactions() and a wallet is added and bloom filters are enabled // Note that the default here means that no tx invs will be received if no wallet is ever added lock.lock(); try { boolean spvMode = chain != null && !chain.shouldVerifyTransactions(); boolean willSendFilter = spvMode && peerFilterProviders.size() > 0 && vBloomFilteringEnabled; ver.relayTxesBeforeFilter = !willSendFilter; // depends on control dependency: [try], data = [none] } finally { lock.unlock(); } } }
public class class_name { private void calcRmsd(Point3d[] x, Point3d[] y) { if (centered) { innerProduct(y, x); } else { // translate to origin xref = CalcPoint.clonePoint3dArray(x); xtrans = CalcPoint.centroid(xref); logger.debug("x centroid: " + xtrans); xtrans.negate(); CalcPoint.translate(new Vector3d(xtrans), xref); yref = CalcPoint.clonePoint3dArray(y); ytrans = CalcPoint.centroid(yref); logger.debug("y centroid: " + ytrans); ytrans.negate(); CalcPoint.translate(new Vector3d(ytrans), yref); innerProduct(yref, xref); } calcRmsd(wsum); } }
public class class_name { private void calcRmsd(Point3d[] x, Point3d[] y) { if (centered) { innerProduct(y, x); // depends on control dependency: [if], data = [none] } else { // translate to origin xref = CalcPoint.clonePoint3dArray(x); // depends on control dependency: [if], data = [none] xtrans = CalcPoint.centroid(xref); // depends on control dependency: [if], data = [none] logger.debug("x centroid: " + xtrans); // depends on control dependency: [if], data = [none] xtrans.negate(); // depends on control dependency: [if], data = [none] CalcPoint.translate(new Vector3d(xtrans), xref); // depends on control dependency: [if], data = [none] yref = CalcPoint.clonePoint3dArray(y); // depends on control dependency: [if], data = [none] ytrans = CalcPoint.centroid(yref); // depends on control dependency: [if], data = [none] logger.debug("y centroid: " + ytrans); // depends on control dependency: [if], data = [none] ytrans.negate(); // depends on control dependency: [if], data = [none] CalcPoint.translate(new Vector3d(ytrans), yref); // depends on control dependency: [if], data = [none] innerProduct(yref, xref); // depends on control dependency: [if], data = [none] } calcRmsd(wsum); } }
public class class_name { public void marshall(ServiceChange serviceChange, ProtocolMarshaller protocolMarshaller) { if (serviceChange == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(serviceChange.getDescription(), DESCRIPTION_BINDING); protocolMarshaller.marshall(serviceChange.getDnsConfig(), DNSCONFIG_BINDING); protocolMarshaller.marshall(serviceChange.getHealthCheckConfig(), HEALTHCHECKCONFIG_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(ServiceChange serviceChange, ProtocolMarshaller protocolMarshaller) { if (serviceChange == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(serviceChange.getDescription(), DESCRIPTION_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(serviceChange.getDnsConfig(), DNSCONFIG_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(serviceChange.getHealthCheckConfig(), HEALTHCHECKCONFIG_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public void credentialsMigration(T overrider, Class overriderClass) { try { deployerMigration(overrider, overriderClass); resolverMigration(overrider, overriderClass); } catch (NoSuchFieldException | IllegalAccessException | IOException e) { converterErrors.add(getConversionErrorMessage(overrider, e)); } } }
public class class_name { public void credentialsMigration(T overrider, Class overriderClass) { try { deployerMigration(overrider, overriderClass); // depends on control dependency: [try], data = [none] resolverMigration(overrider, overriderClass); // depends on control dependency: [try], data = [none] } catch (NoSuchFieldException | IllegalAccessException | IOException e) { converterErrors.add(getConversionErrorMessage(overrider, e)); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public Version withRequiredCapabilities(String... requiredCapabilities) { if (this.requiredCapabilities == null) { setRequiredCapabilities(new java.util.ArrayList<String>(requiredCapabilities.length)); } for (String ele : requiredCapabilities) { this.requiredCapabilities.add(ele); } return this; } }
public class class_name { public Version withRequiredCapabilities(String... requiredCapabilities) { if (this.requiredCapabilities == null) { setRequiredCapabilities(new java.util.ArrayList<String>(requiredCapabilities.length)); // depends on control dependency: [if], data = [none] } for (String ele : requiredCapabilities) { this.requiredCapabilities.add(ele); // depends on control dependency: [for], data = [ele] } return this; } }
public class class_name { public static String trimToNull(String text) { text = trim(text); if (text == null || text.isEmpty()) { return null; } return text; } }
public class class_name { public static String trimToNull(String text) { text = trim(text); if (text == null || text.isEmpty()) { return null; // depends on control dependency: [if], data = [none] } return text; } }
public class class_name { @Override protected File[] getReportList() { File file = new File(reportDirectory); if (!file.exists()) file = new File(FileReportsProvider.class.getResource(reportDirectory).getFile()); if (!file.exists()) { errors.add("Couldn't open report directory, doesn't exist."); return null; } return file.listFiles(); } }
public class class_name { @Override protected File[] getReportList() { File file = new File(reportDirectory); if (!file.exists()) file = new File(FileReportsProvider.class.getResource(reportDirectory).getFile()); if (!file.exists()) { errors.add("Couldn't open report directory, doesn't exist."); // depends on control dependency: [if], data = [none] return null; // depends on control dependency: [if], data = [none] } return file.listFiles(); } }
public class class_name { public void fireChangeEvent() { String result = ""; if (m_textbox.getText() != null) { if (!m_textbox.getText().equals(m_default)) { result = m_textbox.getText(); } } ValueChangeEvent.fire(this, result); } }
public class class_name { public void fireChangeEvent() { String result = ""; if (m_textbox.getText() != null) { if (!m_textbox.getText().equals(m_default)) { result = m_textbox.getText(); // depends on control dependency: [if], data = [none] } } ValueChangeEvent.fire(this, result); } }
public class class_name { private static AnnotationValue assertAnnotationValue( Declaration element, String annotationName, String valueName, boolean defaultIsNull ) { AnnotationInstance ann = getAnnotation( element, annotationName ); if ( ann == null ) { return null; } else { return getAnnotationValue( ann, valueName, defaultIsNull ); } } }
public class class_name { private static AnnotationValue assertAnnotationValue( Declaration element, String annotationName, String valueName, boolean defaultIsNull ) { AnnotationInstance ann = getAnnotation( element, annotationName ); if ( ann == null ) { return null; // depends on control dependency: [if], data = [none] } else { return getAnnotationValue( ann, valueName, defaultIsNull ); // depends on control dependency: [if], data = [( ann] } } }
public class class_name { public String[] getCodeBaseLocForPerm(Permission perm) { final Permission inPerm = perm; return AccessController.doPrivileged(new java.security.PrivilegedAction<String[]>() { @Override public String[] run() { Class<?>[] classes = getClassContext(); StringBuffer sb = new StringBuffer(classes.length * 100); sb.append(lineSep); // one for offending class and the other for code base // location String[] retMsg = new String[2]; ProtectionDomain pd2 = null; for (int i = 0; i < classes.length; i++) { Class<?> clazz = classes[i]; ProtectionDomain pd = clazz.getProtectionDomain(); // check for occurrence of checkPermission from stack if (classes[i].getName().indexOf("com.ibm.ws.kernel.launch.internal.MissingDoPrivDetectionSecurityManager") != -1) { // found SecurityManager, start to go through // the stack starting next class for (int j = i + 1; j < classes.length; j++) { pd2 = classes[j].getProtectionDomain(); if (isOffendingClass(classes, j, pd2, inPerm)) { retMsg[0] = lineSep + lineSep + " " + classes[j].getName() + " in " + "{" + getCodeSource(pd2) + "}" + lineSep + lineSep; StringBuffer sb2 = new StringBuffer(classes.length * 100); sb2.append(lineSep); sb2.append(classes[j].getName()).append(" : ").append(getCodeSource(pd2) + lineSep); sb2.append(" ").append(permissionToString(pd2.getCodeSource(), classes[j].getClassLoader(), pd2.getPermissions())) .append(lineSep); break; } } } java.security.CodeSource cs = pd.getCodeSource(); String csStr = getCodeSource(pd); // class name : location sb.append(classes[i].getName()).append(" : ").append(csStr + lineSep); sb.append(" ").append(permissionToString(cs, clazz.getClassLoader(), pd.getPermissions())) .append(lineSep); } Tr.info(tc, "java.security.permdenied.class.info", retMsg[0]); Tr.info(tc, "java.security.permdenied.codebaseloc.info", sb.toString()); retMsg[1] = getCodeSource(pd2).concat(lineSep); return retMsg; } }); } }
public class class_name { public String[] getCodeBaseLocForPerm(Permission perm) { final Permission inPerm = perm; return AccessController.doPrivileged(new java.security.PrivilegedAction<String[]>() { @Override public String[] run() { Class<?>[] classes = getClassContext(); StringBuffer sb = new StringBuffer(classes.length * 100); sb.append(lineSep); // one for offending class and the other for code base // location String[] retMsg = new String[2]; ProtectionDomain pd2 = null; for (int i = 0; i < classes.length; i++) { Class<?> clazz = classes[i]; ProtectionDomain pd = clazz.getProtectionDomain(); // check for occurrence of checkPermission from stack if (classes[i].getName().indexOf("com.ibm.ws.kernel.launch.internal.MissingDoPrivDetectionSecurityManager") != -1) { // found SecurityManager, start to go through // the stack starting next class for (int j = i + 1; j < classes.length; j++) { pd2 = classes[j].getProtectionDomain(); // depends on control dependency: [for], data = [j] if (isOffendingClass(classes, j, pd2, inPerm)) { retMsg[0] = lineSep + lineSep + " " + classes[j].getName() + " in " + "{" + getCodeSource(pd2) + "}" + lineSep + lineSep; // depends on control dependency: [if], data = [none] StringBuffer sb2 = new StringBuffer(classes.length * 100); sb2.append(lineSep); // depends on control dependency: [if], data = [none] sb2.append(classes[j].getName()).append(" : ").append(getCodeSource(pd2) + lineSep); // depends on control dependency: [if], data = [none] sb2.append(" ").append(permissionToString(pd2.getCodeSource(), classes[j].getClassLoader(), pd2.getPermissions())) .append(lineSep); // depends on control dependency: [if], data = [none] break; } } } java.security.CodeSource cs = pd.getCodeSource(); String csStr = getCodeSource(pd); // class name : location sb.append(classes[i].getName()).append(" : ").append(csStr + lineSep); sb.append(" ").append(permissionToString(cs, clazz.getClassLoader(), pd.getPermissions())) .append(lineSep); } Tr.info(tc, 
"java.security.permdenied.class.info", retMsg[0]); Tr.info(tc, "java.security.permdenied.codebaseloc.info", sb.toString()); retMsg[1] = getCodeSource(pd2).concat(lineSep); return retMsg; } }); } }
public class class_name { public Map<String, V> prefix(@NonNull String prefix) { final TrieNode<V> match = root.find(prefix); if (match == null) { return Collections.emptyMap(); } return new AbstractMap<String, V>() { @Override public Set<Entry<String, V>> entrySet() { return new AbstractSet<Entry<String, V>>() { @Override public Iterator<Entry<String, V>> iterator() { return Iterators.unmodifiableIterator(new EntryIterator<>(match)); } @Override public int size() { return match.size; } }; } }; } }
public class class_name { public Map<String, V> prefix(@NonNull String prefix) { final TrieNode<V> match = root.find(prefix); if (match == null) { return Collections.emptyMap(); // depends on control dependency: [if], data = [none] } return new AbstractMap<String, V>() { @Override public Set<Entry<String, V>> entrySet() { return new AbstractSet<Entry<String, V>>() { @Override public Iterator<Entry<String, V>> iterator() { return Iterators.unmodifiableIterator(new EntryIterator<>(match)); } @Override public int size() { return match.size; } }; } }; } }
public class class_name { public CmsUUID getFormatterId() { String value = getStringValue(null); if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(value)) { String[] parts = value.split(SEPARATOR); if (parts.length == 2) { return new CmsUUID(parts[1]); } } return null; } }
public class class_name { public CmsUUID getFormatterId() { String value = getStringValue(null); if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(value)) { String[] parts = value.split(SEPARATOR); if (parts.length == 2) { return new CmsUUID(parts[1]); // depends on control dependency: [if], data = [none] } } return null; } }
public class class_name { public void setResourceShareAssociations(java.util.Collection<ResourceShareAssociation> resourceShareAssociations) { if (resourceShareAssociations == null) { this.resourceShareAssociations = null; return; } this.resourceShareAssociations = new java.util.ArrayList<ResourceShareAssociation>(resourceShareAssociations); } }
public class class_name { public void setResourceShareAssociations(java.util.Collection<ResourceShareAssociation> resourceShareAssociations) { if (resourceShareAssociations == null) { this.resourceShareAssociations = null; // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } this.resourceShareAssociations = new java.util.ArrayList<ResourceShareAssociation>(resourceShareAssociations); } }
public class class_name { protected void postMessage (PresentsConnection conn, Message msg) { if (!isRunning()) { log.warning("Posting message to inactive connection manager", "msg", msg, new Exception()); } // sanity check if (conn == null || msg == null) { log.warning("postMessage() bogosity", "conn", conn, "msg", msg, new Exception()); return; } // more sanity check; messages must only be posted from the dobjmgr thread if (!_omgr.isDispatchThread()) { log.warning("Message posted on non-distributed object thread", "conn", conn, "msg", msg, "thread", Thread.currentThread(), new Exception()); // let it through though as we don't want to break things unnecessarily } try { // send it as a datagram if hinted and possible (pongs must be sent as part of the // negotation process) if (!msg.getTransport().isReliable() && (conn.getTransmitDatagrams() || msg instanceof PongResponse) && postDatagram(conn, msg)) { return; } // note the actual transport msg.noteActualTransport(Transport.RELIABLE_ORDERED); _framer.resetFrame(); // flatten this message using the connection's output stream ObjectOutputStream oout = conn.getObjectOutputStream(_framer); oout.writeObject(msg); oout.flush(); // now extract that data into a byte array ByteBuffer buffer = _framer.frameAndReturnBuffer(); byte[] data = new byte[buffer.limit()]; buffer.get(data); // log.info("Flattened " + msg + " into " + data.length + " bytes."); // and slap both on the queue _outq.append(Tuple.<Connection, byte[]>newTuple(conn, data)); } catch (Exception e) { log.warning("Failure flattening message", "conn", conn, "msg", msg, e); } } }
public class class_name { protected void postMessage (PresentsConnection conn, Message msg) { if (!isRunning()) { log.warning("Posting message to inactive connection manager", "msg", msg, new Exception()); // depends on control dependency: [if], data = [none] } // sanity check if (conn == null || msg == null) { log.warning("postMessage() bogosity", "conn", conn, "msg", msg, new Exception()); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } // more sanity check; messages must only be posted from the dobjmgr thread if (!_omgr.isDispatchThread()) { log.warning("Message posted on non-distributed object thread", "conn", conn, "msg", msg, "thread", Thread.currentThread(), new Exception()); // depends on control dependency: [if], data = [none] // let it through though as we don't want to break things unnecessarily } try { // send it as a datagram if hinted and possible (pongs must be sent as part of the // negotation process) if (!msg.getTransport().isReliable() && (conn.getTransmitDatagrams() || msg instanceof PongResponse) && postDatagram(conn, msg)) { return; // depends on control dependency: [if], data = [none] } // note the actual transport msg.noteActualTransport(Transport.RELIABLE_ORDERED); // depends on control dependency: [try], data = [none] _framer.resetFrame(); // depends on control dependency: [try], data = [none] // flatten this message using the connection's output stream ObjectOutputStream oout = conn.getObjectOutputStream(_framer); oout.writeObject(msg); // depends on control dependency: [try], data = [none] oout.flush(); // depends on control dependency: [try], data = [none] // now extract that data into a byte array ByteBuffer buffer = _framer.frameAndReturnBuffer(); byte[] data = new byte[buffer.limit()]; buffer.get(data); // depends on control dependency: [try], data = [none] // log.info("Flattened " + msg + " into " + data.length + " bytes."); // and slap both on the queue 
_outq.append(Tuple.<Connection, byte[]>newTuple(conn, data)); // depends on control dependency: [try], data = [none] } catch (Exception e) { log.warning("Failure flattening message", "conn", conn, "msg", msg, e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public ConfigValueImpl<List<T>> asList() { isList = true; ConfigValueImpl<List<T>> listTypedResolver = (ConfigValueImpl<List<T>>) this; if (defaultValue == null) { // the default for lists is an empty list instead of null return listTypedResolver.withDefault(Collections.<T>emptyList()); } return listTypedResolver; } }
public class class_name { public ConfigValueImpl<List<T>> asList() { isList = true; ConfigValueImpl<List<T>> listTypedResolver = (ConfigValueImpl<List<T>>) this; if (defaultValue == null) { // the default for lists is an empty list instead of null return listTypedResolver.withDefault(Collections.<T>emptyList()); // depends on control dependency: [if], data = [none] } return listTypedResolver; } }
public class class_name { protected void updateObject(final Object object) { if (logger.isDebugEnabled()) { logger.debug("merging " + object + "..."); } getCurrentSession().beginTransaction(); Object merged = getHibernateTemplate().merge(object); if (logger.isDebugEnabled()) { logger.debug("done, updating " + merged + "..."); } getHibernateTemplate().update(merged); getCurrentSession().getTransaction().commit(); if (logger.isDebugEnabled()) { logger.debug("done."); } } }
public class class_name { protected void updateObject(final Object object) { if (logger.isDebugEnabled()) { logger.debug("merging " + object + "..."); // depends on control dependency: [if], data = [none] } getCurrentSession().beginTransaction(); Object merged = getHibernateTemplate().merge(object); if (logger.isDebugEnabled()) { logger.debug("done, updating " + merged + "..."); // depends on control dependency: [if], data = [none] } getHibernateTemplate().update(merged); getCurrentSession().getTransaction().commit(); if (logger.isDebugEnabled()) { logger.debug("done."); // depends on control dependency: [if], data = [none] } } }
public class class_name { public void marshall(DescribeCachediSCSIVolumesRequest describeCachediSCSIVolumesRequest, ProtocolMarshaller protocolMarshaller) { if (describeCachediSCSIVolumesRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(describeCachediSCSIVolumesRequest.getVolumeARNs(), VOLUMEARNS_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(DescribeCachediSCSIVolumesRequest describeCachediSCSIVolumesRequest, ProtocolMarshaller protocolMarshaller) { if (describeCachediSCSIVolumesRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(describeCachediSCSIVolumesRequest.getVolumeARNs(), VOLUMEARNS_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { @Override public DataPoint[] getLastN(int n, int steps, DataPoint.Type type) { if (steps < 1) { steps = 1; } if (n < 1) { n = 1; } Calendar now = Calendar.getInstance(); long currentTimestamp = now.getTimeInMillis(); now.add(Calendar.SECOND, -(n - 1) * steps); now.set(Calendar.MILLISECOND, 0); if (steps >= 60) { now.set(Calendar.SECOND, 0); } if (steps >= 60 * 60) { now.set(Calendar.MINUTE, 0); } if (steps >= 24 * 60 * 60) { now.set(Calendar.HOUR_OF_DAY, 0); } long timestampStart = now.getTimeInMillis(); return getSeries(timestampStart, currentTimestamp + 1, steps, type); /* as of 0.5.0: timestampStart is no longer rounded */ // int blockSize = RESOLUTION_MS * steps; // long delta = currentTimestamp % blockSize; // long timestampStart = currentTimestamp - delta - (n - 1) * blockSize; // return getSeries(timestampStart, currentTimestamp + 1, steps, type); } }
public class class_name { @Override public DataPoint[] getLastN(int n, int steps, DataPoint.Type type) { if (steps < 1) { steps = 1; // depends on control dependency: [if], data = [none] } if (n < 1) { n = 1; // depends on control dependency: [if], data = [none] } Calendar now = Calendar.getInstance(); long currentTimestamp = now.getTimeInMillis(); now.add(Calendar.SECOND, -(n - 1) * steps); now.set(Calendar.MILLISECOND, 0); if (steps >= 60) { now.set(Calendar.SECOND, 0); // depends on control dependency: [if], data = [none] } if (steps >= 60 * 60) { now.set(Calendar.MINUTE, 0); // depends on control dependency: [if], data = [none] } if (steps >= 24 * 60 * 60) { now.set(Calendar.HOUR_OF_DAY, 0); // depends on control dependency: [if], data = [none] } long timestampStart = now.getTimeInMillis(); return getSeries(timestampStart, currentTimestamp + 1, steps, type); /* as of 0.5.0: timestampStart is no longer rounded */ // int blockSize = RESOLUTION_MS * steps; // long delta = currentTimestamp % blockSize; // long timestampStart = currentTimestamp - delta - (n - 1) * blockSize; // return getSeries(timestampStart, currentTimestamp + 1, steps, type); } }
public class class_name { @SuppressWarnings("resource") private boolean importModules() { boolean result = true; try { m_logStream.println("Checking module dependencies."); Multimap<String, String> dependencies = HashMultimap.create(); Set<String> unsortedModules = Sets.newHashSet(m_modulesToExport); for (String module : m_modulesToExport) { String manifestPath = CmsStringUtil.joinPaths( m_currentConfiguration.getModulesPath(), module, m_currentConfiguration.getResourcesSubFolder(), "manifest.xml"); Document doc = CmsXmlUtils.unmarshalHelper( CmsFileUtil.readFully(new FileInputStream(manifestPath)), new CmsXmlEntityResolver(null)); List<?> depNodes = doc.getRootElement().selectNodes("//dependencies/dependency/@name"); for (Object nodeObj : depNodes) { Node node = ((Node)nodeObj); String dependency = node.getText(); if (m_modulesToExport.contains(dependency)) { // we can only handle dependencies between selected modules // and just have to assume that other dependencies are fulfilled dependencies.put(module, dependency); } } } List<String> sortedModules = Lists.newArrayList(); // if there are no cycles, this loop will find one element on each iteration for (int i = 0; i < m_modulesToExport.size(); i++) { String nextModule = null; for (String key : unsortedModules) { if (dependencies.get(key).isEmpty()) { nextModule = key; break; } } if (nextModule != null) { sortedModules.add(nextModule); unsortedModules.remove(nextModule); for (String key : Sets.newHashSet(dependencies.keySet())) { // copy key set to avoid concurrent modification exception dependencies.get(key).remove(nextModule); } } } m_logStream.println("Modules sorted by dependencies: " + sortedModules); for (String moduleName : sortedModules) { String dir = CmsStringUtil.joinPaths( m_currentConfiguration.getModulesPath(), moduleName, m_currentConfiguration.getResourcesSubFolder()); File dirEntry = new File(dir); if (!dirEntry.exists()) { continue; } try { m_logStream.println("Creating temp file for module " + 
moduleName); File outputFile = File.createTempFile(moduleName + "-", ".zip"); FileOutputStream fos = new FileOutputStream(outputFile); m_logStream.println("Zipping module structure to " + outputFile.getAbsolutePath()); zipRfsFolder(dirEntry, fos); result &= importModule(outputFile); outputFile.delete(); } catch (Exception e) { LOG.error(e.getLocalizedMessage(), e); e.printStackTrace(m_logStream); result = false; } } } catch (Exception e) { LOG.error(e.getLocalizedMessage(), e); m_logStream.println("Unable to check dependencies for modules, giving up."); e.printStackTrace(m_logStream); result = false; } return result; } }
public class class_name { @SuppressWarnings("resource") private boolean importModules() { boolean result = true; try { m_logStream.println("Checking module dependencies."); Multimap<String, String> dependencies = HashMultimap.create(); Set<String> unsortedModules = Sets.newHashSet(m_modulesToExport); for (String module : m_modulesToExport) { String manifestPath = CmsStringUtil.joinPaths( m_currentConfiguration.getModulesPath(), module, m_currentConfiguration.getResourcesSubFolder(), "manifest.xml"); Document doc = CmsXmlUtils.unmarshalHelper( CmsFileUtil.readFully(new FileInputStream(manifestPath)), new CmsXmlEntityResolver(null)); List<?> depNodes = doc.getRootElement().selectNodes("//dependencies/dependency/@name"); for (Object nodeObj : depNodes) { Node node = ((Node)nodeObj); String dependency = node.getText(); if (m_modulesToExport.contains(dependency)) { // we can only handle dependencies between selected modules // and just have to assume that other dependencies are fulfilled dependencies.put(module, dependency); // depends on control dependency: [if], data = [none] } } } List<String> sortedModules = Lists.newArrayList(); // if there are no cycles, this loop will find one element on each iteration for (int i = 0; i < m_modulesToExport.size(); i++) { String nextModule = null; for (String key : unsortedModules) { if (dependencies.get(key).isEmpty()) { nextModule = key; // depends on control dependency: [if], data = [none] break; } } if (nextModule != null) { sortedModules.add(nextModule); // depends on control dependency: [if], data = [(nextModule] unsortedModules.remove(nextModule); // depends on control dependency: [if], data = [(nextModule] for (String key : Sets.newHashSet(dependencies.keySet())) { // copy key set to avoid concurrent modification exception dependencies.get(key).remove(nextModule); // depends on control dependency: [for], data = [key] } } } m_logStream.println("Modules sorted by dependencies: " + sortedModules); for (String moduleName : 
sortedModules) { String dir = CmsStringUtil.joinPaths( m_currentConfiguration.getModulesPath(), moduleName, m_currentConfiguration.getResourcesSubFolder()); File dirEntry = new File(dir); if (!dirEntry.exists()) { continue; } try { m_logStream.println("Creating temp file for module " + moduleName); // depends on control dependency: [try], data = [none] File outputFile = File.createTempFile(moduleName + "-", ".zip"); FileOutputStream fos = new FileOutputStream(outputFile); m_logStream.println("Zipping module structure to " + outputFile.getAbsolutePath()); // depends on control dependency: [try], data = [none] zipRfsFolder(dirEntry, fos); // depends on control dependency: [try], data = [none] result &= importModule(outputFile); // depends on control dependency: [try], data = [none] outputFile.delete(); // depends on control dependency: [try], data = [none] } catch (Exception e) { LOG.error(e.getLocalizedMessage(), e); e.printStackTrace(m_logStream); result = false; } // depends on control dependency: [catch], data = [none] } } catch (Exception e) { LOG.error(e.getLocalizedMessage(), e); m_logStream.println("Unable to check dependencies for modules, giving up."); e.printStackTrace(m_logStream); result = false; } return result; } }
public class class_name { public void removeRequestUrlAndParameters(HttpServletRequest request, HttpServletResponse response) { ReferrerURLCookieHandler referrerURLCookieHandler = getCookieHandler(); referrerURLCookieHandler.invalidateReferrerURLCookie(request, response, ReferrerURLCookieHandler.REFERRER_URL_COOKIENAME); WebAppSecurityConfig webAppSecConfig = getWebAppSecurityConfig(); if (isPostDataSavedInCookie(webAppSecConfig)) { deleteCookie(request, response, PostParameterHelper.POSTPARAM_COOKIE, webAppSecConfig); } else { removePostParameterSessionAttributes(request); } } }
public class class_name { public void removeRequestUrlAndParameters(HttpServletRequest request, HttpServletResponse response) { ReferrerURLCookieHandler referrerURLCookieHandler = getCookieHandler(); referrerURLCookieHandler.invalidateReferrerURLCookie(request, response, ReferrerURLCookieHandler.REFERRER_URL_COOKIENAME); WebAppSecurityConfig webAppSecConfig = getWebAppSecurityConfig(); if (isPostDataSavedInCookie(webAppSecConfig)) { deleteCookie(request, response, PostParameterHelper.POSTPARAM_COOKIE, webAppSecConfig); // depends on control dependency: [if], data = [none] } else { removePostParameterSessionAttributes(request); // depends on control dependency: [if], data = [none] } } }
public class class_name { private void reconstituteMQLink(int startMode) throws MessageStoreException, SIResourceException { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(tc, "reconstituteMQLink", new Object[] { Integer.valueOf(startMode), this }); int localisationCount = 0; MQLinkMessageItemStream mqlinkMessageItemStream = null; // There can be one or more localisations in the BaseDestinationHandler. NonLockingCursor cursor = _baseDestinationHandler.newNonLockingItemStreamCursor( new ClassEqualsFilter(MQLinkMessageItemStream.class)); do { mqlinkMessageItemStream = (MQLinkMessageItemStream) cursor.next(); if (mqlinkMessageItemStream != null) { localisationCount++; mqlinkMessageItemStream.reconstitute(_baseDestinationHandler); attachLocalPtoPLocalisation(mqlinkMessageItemStream); /* Feature 176658.3.2 */ //TODO: Check need all this for MQLinks assignQueuePointOutputHandler( mqlinkMessageItemStream.getOutputHandler(), mqlinkMessageItemStream.getLocalizingMEUuid()); ConsumerDispatcher consumerDispatcher = (ConsumerDispatcher) mqlinkMessageItemStream.getOutputHandler(); consumerDispatcher.setReadyForUse(); //If the local queue point is awaiting deletion ensure it is correctly configured if (mqlinkMessageItemStream.isToBeDeleted()) { dereferenceLocalisation(mqlinkMessageItemStream); } } } while (mqlinkMessageItemStream != null); cursor.finished(); // Sanity - There should never be more than one mediation itemstream if (localisationCount > 1) { SIErrorException e = new SIErrorException( nls.getFormattedMessage( "INTERNAL_MESSAGING_ERROR_CWSIP0005", new Object[] { "com.ibm.ws.sib.processor.impl.destination.JSPtoPRealization", "1:458:1.24.1.7", _baseDestinationHandler.getName() }, null)); FFDCFilter.processException( e, "com.ibm.ws.sib.processor.impl.destination.JSPtoPRealization.reconstituteMQLink", "1:454:1.24.1.25", this); SibTr.exception(tc, e); SibTr.error(tc, "INTERNAL_MESSAGING_ERROR_CWSIP0005", new Object[] { 
"com.ibm.ws.sib.processor.impl.destination.JSPtoPRealization", "1:461:1.24.1.25", _baseDestinationHandler.getName() }); if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "reconstituteMQLink", e); throw e; } if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "reconstituteMQLink"); } }
public class class_name { private void reconstituteMQLink(int startMode) throws MessageStoreException, SIResourceException { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(tc, "reconstituteMQLink", new Object[] { Integer.valueOf(startMode), this }); int localisationCount = 0; MQLinkMessageItemStream mqlinkMessageItemStream = null; // There can be one or more localisations in the BaseDestinationHandler. NonLockingCursor cursor = _baseDestinationHandler.newNonLockingItemStreamCursor( new ClassEqualsFilter(MQLinkMessageItemStream.class)); do { mqlinkMessageItemStream = (MQLinkMessageItemStream) cursor.next(); if (mqlinkMessageItemStream != null) { localisationCount++; // depends on control dependency: [if], data = [none] mqlinkMessageItemStream.reconstitute(_baseDestinationHandler); // depends on control dependency: [if], data = [none] attachLocalPtoPLocalisation(mqlinkMessageItemStream); // depends on control dependency: [if], data = [(mqlinkMessageItemStream] /* Feature 176658.3.2 */ //TODO: Check need all this for MQLinks assignQueuePointOutputHandler( mqlinkMessageItemStream.getOutputHandler(), mqlinkMessageItemStream.getLocalizingMEUuid()); // depends on control dependency: [if], data = [none] ConsumerDispatcher consumerDispatcher = (ConsumerDispatcher) mqlinkMessageItemStream.getOutputHandler(); consumerDispatcher.setReadyForUse(); // depends on control dependency: [if], data = [none] //If the local queue point is awaiting deletion ensure it is correctly configured if (mqlinkMessageItemStream.isToBeDeleted()) { dereferenceLocalisation(mqlinkMessageItemStream); // depends on control dependency: [if], data = [none] } } } while (mqlinkMessageItemStream != null); cursor.finished(); // Sanity - There should never be more than one mediation itemstream if (localisationCount > 1) { SIErrorException e = new SIErrorException( nls.getFormattedMessage( "INTERNAL_MESSAGING_ERROR_CWSIP0005", new Object[] { 
"com.ibm.ws.sib.processor.impl.destination.JSPtoPRealization", "1:458:1.24.1.7", _baseDestinationHandler.getName() }, null)); FFDCFilter.processException( e, "com.ibm.ws.sib.processor.impl.destination.JSPtoPRealization.reconstituteMQLink", "1:454:1.24.1.25", this); // depends on control dependency: [if], data = [none] SibTr.exception(tc, e); // depends on control dependency: [if], data = [none] SibTr.error(tc, "INTERNAL_MESSAGING_ERROR_CWSIP0005", new Object[] { "com.ibm.ws.sib.processor.impl.destination.JSPtoPRealization", "1:461:1.24.1.25", _baseDestinationHandler.getName() }); // depends on control dependency: [if], data = [none] if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "reconstituteMQLink", e); throw e; } if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "reconstituteMQLink"); } }
public class class_name { public boolean containsTagHandler(String ns, String localName) { for (int i = 0; i < this.libraries.length; i++) { if (this.libraries[i].containsTagHandler(ns, localName)) { return true; } } return false; } }
public class class_name { public boolean containsTagHandler(String ns, String localName) { for (int i = 0; i < this.libraries.length; i++) { if (this.libraries[i].containsTagHandler(ns, localName)) { return true; // depends on control dependency: [if], data = [none] } } return false; } }
public class class_name { static AbstractConfigValue makeReplacement(ResolveContext context, List<AbstractConfigValue> stack, int skipping) { List<AbstractConfigValue> subStack = stack.subList(skipping, stack.size()); if (subStack.isEmpty()) { if (ConfigImpl.traceSubstitutionsEnabled()) ConfigImpl.trace(context.depth(), "Nothing else in the merge stack, replacing with null"); return null; } else { // generate a new merge stack from only the remaining items AbstractConfigValue merged = null; for (AbstractConfigValue v : subStack) { if (merged == null) merged = v; else merged = merged.withFallback(v); } return merged; } } }
public class class_name { static AbstractConfigValue makeReplacement(ResolveContext context, List<AbstractConfigValue> stack, int skipping) { List<AbstractConfigValue> subStack = stack.subList(skipping, stack.size()); if (subStack.isEmpty()) { if (ConfigImpl.traceSubstitutionsEnabled()) ConfigImpl.trace(context.depth(), "Nothing else in the merge stack, replacing with null"); return null; // depends on control dependency: [if], data = [none] } else { // generate a new merge stack from only the remaining items AbstractConfigValue merged = null; for (AbstractConfigValue v : subStack) { if (merged == null) merged = v; else merged = merged.withFallback(v); } return merged; // depends on control dependency: [if], data = [none] } } }
public class class_name { private Status sendResponseHeaders(Status status) { // NOTE: Line 572-574 of CBL_Router.m is not in CBL Java Core // This check is in sendResponse(); connection.getResHeader().add("Server", String.format(Locale.ENGLISH, "Couchbase Lite %s", getVersionString())); // When response body is not null, we can assume that the body is JSON: boolean hasJSONBody = connection.getResponseBody() != null; String contentType = hasJSONBody ? CONTENT_TYPE_JSON : null; // Check for a mismatch between the Accept request header and the response type: String accept = getRequestHeaderValue("Accept"); if (accept != null && accept.indexOf("*/*") < 0) { String baseContentType = connection.getBaseContentType(); if (baseContentType == null) baseContentType = contentType; if (baseContentType != null && accept.indexOf(baseContentType) < 0) { Log.w(TAG, "Error 406: Can't satisfy request Accept: %s (Content-Type = %s)", accept, contentType); // Reset response body: connection.setResponseBody(null); status = new Status(Status.NOT_ACCEPTABLE); return status; } } if (contentType != null) { Header resHeader = connection.getResHeader(); if (resHeader != null && resHeader.get("Content-Type") == null) resHeader.add("Content-Type", contentType); else Log.d(TAG, "Cannot add Content-Type header because getResHeader() returned null"); } // NOTE: Line 596-607 of CBL_Router.m is not in CBL Java Core return status; } }
public class class_name { private Status sendResponseHeaders(Status status) { // NOTE: Line 572-574 of CBL_Router.m is not in CBL Java Core // This check is in sendResponse(); connection.getResHeader().add("Server", String.format(Locale.ENGLISH, "Couchbase Lite %s", getVersionString())); // When response body is not null, we can assume that the body is JSON: boolean hasJSONBody = connection.getResponseBody() != null; String contentType = hasJSONBody ? CONTENT_TYPE_JSON : null; // Check for a mismatch between the Accept request header and the response type: String accept = getRequestHeaderValue("Accept"); if (accept != null && accept.indexOf("*/*") < 0) { String baseContentType = connection.getBaseContentType(); if (baseContentType == null) baseContentType = contentType; if (baseContentType != null && accept.indexOf(baseContentType) < 0) { Log.w(TAG, "Error 406: Can't satisfy request Accept: %s (Content-Type = %s)", accept, contentType); // depends on control dependency: [if], data = [none] // Reset response body: connection.setResponseBody(null); // depends on control dependency: [if], data = [none] status = new Status(Status.NOT_ACCEPTABLE); // depends on control dependency: [if], data = [none] return status; // depends on control dependency: [if], data = [none] } } if (contentType != null) { Header resHeader = connection.getResHeader(); if (resHeader != null && resHeader.get("Content-Type") == null) resHeader.add("Content-Type", contentType); else Log.d(TAG, "Cannot add Content-Type header because getResHeader() returned null"); } // NOTE: Line 596-607 of CBL_Router.m is not in CBL Java Core return status; } }
public class class_name { public String getResource(String resource, Map<String, String> replacements) { String result = cache.get(resource); if (result == null) { try { InputStream is = MiniProfilerResourceLoader.class.getResourceAsStream(resource); try { result = new Scanner(is).useDelimiter("\\A").next(); } finally { is.close(); } if (replacements != null) { for (Map.Entry<String, String> e : replacements.entrySet()) { result = result.replace(e.getKey(), e.getValue()); } } cache.putIfAbsent(resource, result); } catch (Exception e) { result = null; } } return result; } }
public class class_name { public String getResource(String resource, Map<String, String> replacements) { String result = cache.get(resource); if (result == null) { try { InputStream is = MiniProfilerResourceLoader.class.getResourceAsStream(resource); try { result = new Scanner(is).useDelimiter("\\A").next(); // depends on control dependency: [try], data = [none] } finally { is.close(); } if (replacements != null) { for (Map.Entry<String, String> e : replacements.entrySet()) { result = result.replace(e.getKey(), e.getValue()); // depends on control dependency: [for], data = [e] } } cache.putIfAbsent(resource, result); // depends on control dependency: [try], data = [none] } catch (Exception e) { result = null; } // depends on control dependency: [catch], data = [none] } return result; } }
public class class_name { private void leafFieldtoDoc(UNode parentNode, String fieldName) { assert parentNode != null; Set<String> addSet = null; if (m_valueMap.containsKey(fieldName)) { addSet = new TreeSet<String>(m_valueMap.get(fieldName)); } List<String> removeSet = m_valueRemoveMap.get(fieldName); if (addSet != null && addSet.size() == 1 && removeSet == null) { parentNode.addValueNode(fieldName, addSet.iterator().next(), "field"); } else { UNode fieldNode = parentNode.addMapNode(fieldName, "field"); if (addSet != null && addSet.size() > 0) { UNode addNode = fieldNode.addArrayNode("add"); for (String value : addSet) { addNode.addValueNode("value", value); } } if (removeSet != null && removeSet.size() > 0) { UNode addNode = fieldNode.addArrayNode("remove"); for (String value : removeSet) { addNode.addValueNode("value", value); } } } } }
public class class_name { private void leafFieldtoDoc(UNode parentNode, String fieldName) { assert parentNode != null; Set<String> addSet = null; if (m_valueMap.containsKey(fieldName)) { addSet = new TreeSet<String>(m_valueMap.get(fieldName)); // depends on control dependency: [if], data = [none] } List<String> removeSet = m_valueRemoveMap.get(fieldName); if (addSet != null && addSet.size() == 1 && removeSet == null) { parentNode.addValueNode(fieldName, addSet.iterator().next(), "field"); // depends on control dependency: [if], data = [none] } else { UNode fieldNode = parentNode.addMapNode(fieldName, "field"); if (addSet != null && addSet.size() > 0) { UNode addNode = fieldNode.addArrayNode("add"); for (String value : addSet) { addNode.addValueNode("value", value); // depends on control dependency: [for], data = [value] } } if (removeSet != null && removeSet.size() > 0) { UNode addNode = fieldNode.addArrayNode("remove"); for (String value : removeSet) { addNode.addValueNode("value", value); // depends on control dependency: [for], data = [value] } } } } }
public class class_name { public Response put(URI uri, Object object, boolean newEntity, int writeQuorum) { assertNotEmpty(object, "object"); final JsonObject json = getGson().toJsonTree(object).getAsJsonObject(); String id = getAsString(json, "_id"); String rev = getAsString(json, "_rev"); if (newEntity) { // save assertNull(rev, "rev"); id = (id == null) ? generateUUID() : id; } else { // update assertNotEmpty(id, "id"); assertNotEmpty(rev, "rev"); } URI httpUri = null; if (writeQuorum > -1) { httpUri = new DatabaseURIHelper(uri).documentUri(id, "w", writeQuorum); } else { httpUri = new DatabaseURIHelper(uri).documentUri(id); } HttpConnection connection = Http.PUT(httpUri, "application/json"); connection.setRequestBody(json.toString()); return executeToResponse(connection); } }
public class class_name { public Response put(URI uri, Object object, boolean newEntity, int writeQuorum) { assertNotEmpty(object, "object"); final JsonObject json = getGson().toJsonTree(object).getAsJsonObject(); String id = getAsString(json, "_id"); String rev = getAsString(json, "_rev"); if (newEntity) { // save assertNull(rev, "rev"); // depends on control dependency: [if], data = [none] id = (id == null) ? generateUUID() : id; // depends on control dependency: [if], data = [none] } else { // update assertNotEmpty(id, "id"); // depends on control dependency: [if], data = [none] assertNotEmpty(rev, "rev"); // depends on control dependency: [if], data = [none] } URI httpUri = null; if (writeQuorum > -1) { httpUri = new DatabaseURIHelper(uri).documentUri(id, "w", writeQuorum); // depends on control dependency: [if], data = [none] } else { httpUri = new DatabaseURIHelper(uri).documentUri(id); // depends on control dependency: [if], data = [none] } HttpConnection connection = Http.PUT(httpUri, "application/json"); connection.setRequestBody(json.toString()); return executeToResponse(connection); } }
public class class_name { public boolean equalsTo(Object o) { if (o == null || !(o instanceof Document)) { return false; } Document docCompared = (Document)o; if(getName() == null || !getName().equals(docCompared.getName())) return false; if(getRepository() == null || !getRepository().equals(docCompared.getRepository())) return false; return true; } }
public class class_name { public boolean equalsTo(Object o) { if (o == null || !(o instanceof Document)) { return false; // depends on control dependency: [if], data = [none] } Document docCompared = (Document)o; if(getName() == null || !getName().equals(docCompared.getName())) return false; if(getRepository() == null || !getRepository().equals(docCompared.getRepository())) return false; return true; } }
public class class_name { public EClass getIfcInventory() { if (ifcInventoryEClass == null) { ifcInventoryEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI) .getEClassifiers().get(281); } return ifcInventoryEClass; } }
public class class_name { public EClass getIfcInventory() { if (ifcInventoryEClass == null) { ifcInventoryEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI) .getEClassifiers().get(281); // depends on control dependency: [if], data = [none] } return ifcInventoryEClass; } }
public class class_name { public void getResult (ResultListener<T> rl) { if (_error != null) { rl.requestFailed(_error); } else if (_list == null) { // _list == null when we have a result rl.requestCompleted(_result); } else { _list.add(rl); } } }
public class class_name { public void getResult (ResultListener<T> rl) { if (_error != null) { rl.requestFailed(_error); // depends on control dependency: [if], data = [(_error] } else if (_list == null) { // _list == null when we have a result rl.requestCompleted(_result); // depends on control dependency: [if], data = [none] } else { _list.add(rl); // depends on control dependency: [if], data = [none] } } }
public class class_name { @Internal @SuppressWarnings("WeakerAccess") @UsedByGeneratedCode protected final Object getBeanForMethodArgument(BeanResolutionContext resolutionContext, BeanContext context, int methodIndex, int argIndex) { MethodInjectionPoint injectionPoint = methodInjectionPoints.get(methodIndex); Argument argument = injectionPoint.getArguments()[argIndex]; if (argument instanceof DefaultArgument) { argument = new EnvironmentAwareArgument((DefaultArgument) argument); instrumentAnnotationMetadata(context, argument); } return getBeanForMethodArgument(resolutionContext, context, injectionPoint, argument); } }
public class class_name { @Internal @SuppressWarnings("WeakerAccess") @UsedByGeneratedCode protected final Object getBeanForMethodArgument(BeanResolutionContext resolutionContext, BeanContext context, int methodIndex, int argIndex) { MethodInjectionPoint injectionPoint = methodInjectionPoints.get(methodIndex); Argument argument = injectionPoint.getArguments()[argIndex]; if (argument instanceof DefaultArgument) { argument = new EnvironmentAwareArgument((DefaultArgument) argument); // depends on control dependency: [if], data = [none] instrumentAnnotationMetadata(context, argument); // depends on control dependency: [if], data = [none] } return getBeanForMethodArgument(resolutionContext, context, injectionPoint, argument); } }
public class class_name { public byte[] compHandshakeData(CommsConnection conn, int version, byte[] data) { try { return SchemaManager.receiveHandshake(conn, data); } catch (Exception ex) { FFDCFilter.processException(ex, "compData", "96", this); return null; } } }
public class class_name { public byte[] compHandshakeData(CommsConnection conn, int version, byte[] data) { try { return SchemaManager.receiveHandshake(conn, data); // depends on control dependency: [try], data = [none] } catch (Exception ex) { FFDCFilter.processException(ex, "compData", "96", this); return null; } // depends on control dependency: [catch], data = [none] } }
public class class_name { public static ClassLoader getRootLoader(ClassLoader self) { while (true) { if (self == null) return null; if (isRootLoaderClassOrSubClass(self)) return self; self = self.getParent(); } } }
public class class_name { public static ClassLoader getRootLoader(ClassLoader self) { while (true) { if (self == null) return null; if (isRootLoaderClassOrSubClass(self)) return self; self = self.getParent(); // depends on control dependency: [while], data = [none] } } }
public class class_name { private Content processParamTags(boolean isNonTypeParams, ParamTag[] paramTags, Map<String, String> rankMap, TagletWriter writer, Set<String> alreadyDocumented) { Content result = writer.getOutputInstance(); if (paramTags.length > 0) { for (int i = 0; i < paramTags.length; ++i) { ParamTag pt = paramTags[i]; String paramName = isNonTypeParams ? pt.parameterName() : "<" + pt.parameterName() + ">"; if (! rankMap.containsKey(pt.parameterName())) { writer.getMsgRetriever().warning(pt.position(), isNonTypeParams ? "doclet.Parameters_warn" : "doclet.Type_Parameters_warn", paramName); } String rank = rankMap.get(pt.parameterName()); if (rank != null && alreadyDocumented.contains(rank)) { writer.getMsgRetriever().warning(pt.position(), isNonTypeParams ? "doclet.Parameters_dup_warn" : "doclet.Type_Parameters_dup_warn", paramName); } result.addContent(processParamTag(isNonTypeParams, writer, pt, pt.parameterName(), alreadyDocumented.size() == 0)); alreadyDocumented.add(rank); } } return result; } }
public class class_name { private Content processParamTags(boolean isNonTypeParams, ParamTag[] paramTags, Map<String, String> rankMap, TagletWriter writer, Set<String> alreadyDocumented) { Content result = writer.getOutputInstance(); if (paramTags.length > 0) { for (int i = 0; i < paramTags.length; ++i) { ParamTag pt = paramTags[i]; String paramName = isNonTypeParams ? pt.parameterName() : "<" + pt.parameterName() + ">"; if (! rankMap.containsKey(pt.parameterName())) { writer.getMsgRetriever().warning(pt.position(), isNonTypeParams ? "doclet.Parameters_warn" : "doclet.Type_Parameters_warn", paramName); // depends on control dependency: [if], data = [none] } String rank = rankMap.get(pt.parameterName()); if (rank != null && alreadyDocumented.contains(rank)) { writer.getMsgRetriever().warning(pt.position(), isNonTypeParams ? "doclet.Parameters_dup_warn" : "doclet.Type_Parameters_dup_warn", paramName); // depends on control dependency: [if], data = [none] } result.addContent(processParamTag(isNonTypeParams, writer, pt, pt.parameterName(), alreadyDocumented.size() == 0)); // depends on control dependency: [for], data = [none] alreadyDocumented.add(rank); // depends on control dependency: [for], data = [none] } } return result; } }
public class class_name { protected void initializeNewEngineComponent(JsEngineComponent engineComponent) throws Exception { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) { SibTr.entry(tc, "initializeNewEngineComponent", engineComponent); } // Synchronize on stateChangeLock to manage the startup of the engine with // potential addition of new engine components via dynamic config. synchronized (stateChangeLock) { if (_state != STATE_UNINITIALIZED && _state != STATE_DESTROYED) { // Initialize engineComponent.initialize(this); // Set ME config engineComponent.setConfig(this); } if (_state == STATE_STARTING || _state == STATE_STARTED) { // Start engine component (in recovery mode if needed) int mode = JsConstants.ME_START_DEFAULT; if (_mainImpl.isServerInRecoveryMode() == true) { mode += JsConstants.ME_START_RECOVERY; } engineComponent.start(mode); } } // synchronized (stateChangeLock) if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) { SibTr.exit(tc, "initializeNewEngineComponent"); } } }
public class class_name { protected void initializeNewEngineComponent(JsEngineComponent engineComponent) throws Exception { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) { SibTr.entry(tc, "initializeNewEngineComponent", engineComponent); } // Synchronize on stateChangeLock to manage the startup of the engine with // potential addition of new engine components via dynamic config. synchronized (stateChangeLock) { if (_state != STATE_UNINITIALIZED && _state != STATE_DESTROYED) { // Initialize engineComponent.initialize(this); // depends on control dependency: [if], data = [none] // Set ME config engineComponent.setConfig(this); // depends on control dependency: [if], data = [none] } if (_state == STATE_STARTING || _state == STATE_STARTED) { // Start engine component (in recovery mode if needed) int mode = JsConstants.ME_START_DEFAULT; if (_mainImpl.isServerInRecoveryMode() == true) { mode += JsConstants.ME_START_RECOVERY; // depends on control dependency: [if], data = [none] } engineComponent.start(mode); // depends on control dependency: [if], data = [none] } } // synchronized (stateChangeLock) if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) { SibTr.exit(tc, "initializeNewEngineComponent"); } } }
public class class_name { private void doPreemption() { long now = ClusterManager.clock.getTime(); if (now - lastPreemptionTime > PREEMPTION_PERIOD) { lastPreemptionTime = now; doPreemptionNow(); } } }
public class class_name { private void doPreemption() { long now = ClusterManager.clock.getTime(); if (now - lastPreemptionTime > PREEMPTION_PERIOD) { lastPreemptionTime = now; // depends on control dependency: [if], data = [none] doPreemptionNow(); // depends on control dependency: [if], data = [none] } } }
public class class_name { public static String getPrettyPath(List<Puzzle> path, int size) { // Print each row of all states StringBuffer output = new StringBuffer(); for (int y = 0; y < size; y++) { String row = ""; for (Puzzle state : path) { int[][] board = state.getMatrixBoard(); row += "| "; for (int x = 0; x < size; x++) { row += board[y][x] + " "; } row += "| "; } row += "\n"; output.append(row); } return output.toString(); } }
public class class_name { public static String getPrettyPath(List<Puzzle> path, int size) { // Print each row of all states StringBuffer output = new StringBuffer(); for (int y = 0; y < size; y++) { String row = ""; for (Puzzle state : path) { int[][] board = state.getMatrixBoard(); row += "| "; // depends on control dependency: [for], data = [none] for (int x = 0; x < size; x++) { row += board[y][x] + " "; // depends on control dependency: [for], data = [x] } row += "| "; // depends on control dependency: [for], data = [none] } row += "\n"; // depends on control dependency: [for], data = [none] output.append(row); // depends on control dependency: [for], data = [none] } return output.toString(); } }
public class class_name { private BigInteger evalLogicalOrExpression(AstNode exprAst) { AstNode operand = exprAst.getFirstChild(); boolean result = evalToBoolean(operand); while (!result && ((operand = getNextOperand(operand)) != null)) { result = evalToBoolean(operand); } return result ? BigInteger.ONE : BigInteger.ZERO; } }
public class class_name { private BigInteger evalLogicalOrExpression(AstNode exprAst) { AstNode operand = exprAst.getFirstChild(); boolean result = evalToBoolean(operand); while (!result && ((operand = getNextOperand(operand)) != null)) { result = evalToBoolean(operand); // depends on control dependency: [while], data = [none] } return result ? BigInteger.ONE : BigInteger.ZERO; } }
public class class_name { public static CompleteRestoreTaskParameters deserialize(String taskParameters) { JaxbJsonSerializer<CompleteRestoreTaskParameters> serializer = new JaxbJsonSerializer<>(CompleteRestoreTaskParameters.class); try { CompleteRestoreTaskParameters params = serializer.deserialize(taskParameters); // Verify expected parameters if (null == params.getSpaceId() || params.getSpaceId().isEmpty()) { throw new SnapshotDataException( "Task parameter values may not be empty"); } if (params.getDaysToExpire() < 0) { throw new SnapshotDataException( "Task parameter value must be a positive integer"); } return params; } catch (IOException e) { throw new SnapshotDataException( "Unable to parse task parameters due to: " + e.getMessage()); } } }
public class class_name { public static CompleteRestoreTaskParameters deserialize(String taskParameters) { JaxbJsonSerializer<CompleteRestoreTaskParameters> serializer = new JaxbJsonSerializer<>(CompleteRestoreTaskParameters.class); try { CompleteRestoreTaskParameters params = serializer.deserialize(taskParameters); // Verify expected parameters if (null == params.getSpaceId() || params.getSpaceId().isEmpty()) { throw new SnapshotDataException( "Task parameter values may not be empty"); } if (params.getDaysToExpire() < 0) { throw new SnapshotDataException( "Task parameter value must be a positive integer"); } return params; // depends on control dependency: [try], data = [none] } catch (IOException e) { throw new SnapshotDataException( "Unable to parse task parameters due to: " + e.getMessage()); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public MultiPolylineOptions toPolylines(MultiLineString multiLineString) { MultiPolylineOptions polylines = new MultiPolylineOptions(); for (LineString lineString : multiLineString.getLineStrings()) { PolylineOptions polyline = toPolyline(lineString); polylines.add(polyline); } return polylines; } }
public class class_name { public MultiPolylineOptions toPolylines(MultiLineString multiLineString) { MultiPolylineOptions polylines = new MultiPolylineOptions(); for (LineString lineString : multiLineString.getLineStrings()) { PolylineOptions polyline = toPolyline(lineString); polylines.add(polyline); // depends on control dependency: [for], data = [none] } return polylines; } }
public class class_name { protected void replaceEntry(T oldEntry, T newEntry) { T prev = oldEntry.getPrev(); T next = newEntry.getNext(); if (prev != null) { prev.setNext(newEntry); } else { head = newEntry; } if (next != null) { next.setPrev(newEntry); } else { last = newEntry; } } }
public class class_name { protected void replaceEntry(T oldEntry, T newEntry) { T prev = oldEntry.getPrev(); T next = newEntry.getNext(); if (prev != null) { prev.setNext(newEntry); // depends on control dependency: [if], data = [none] } else { head = newEntry; // depends on control dependency: [if], data = [none] } if (next != null) { next.setPrev(newEntry); // depends on control dependency: [if], data = [none] } else { last = newEntry; // depends on control dependency: [if], data = [none] } } }
public class class_name { public java.util.List<String> getImportTablesInProgress() { if (importTablesInProgress == null) { importTablesInProgress = new com.amazonaws.internal.SdkInternalList<String>(); } return importTablesInProgress; } }
public class class_name { public java.util.List<String> getImportTablesInProgress() { if (importTablesInProgress == null) { importTablesInProgress = new com.amazonaws.internal.SdkInternalList<String>(); // depends on control dependency: [if], data = [none] } return importTablesInProgress; } }
public class class_name { private Set<String> getLinkTargetObjIDs(FieldDefinition linkDef, DBObjectBatch dbObjBatch) { Set<String> targObjIDs = new HashSet<>(); for (DBObject dbObj : dbObjBatch.getObjects()) { List<String> objIDs = dbObj.getFieldValues(linkDef.getName()); if (objIDs != null) { targObjIDs.addAll(objIDs); } Set<String> removeIDs = dbObj.getRemoveValues(linkDef.getName()); if (removeIDs != null) { targObjIDs.addAll(removeIDs); } } return targObjIDs; } }
public class class_name { private Set<String> getLinkTargetObjIDs(FieldDefinition linkDef, DBObjectBatch dbObjBatch) { Set<String> targObjIDs = new HashSet<>(); for (DBObject dbObj : dbObjBatch.getObjects()) { List<String> objIDs = dbObj.getFieldValues(linkDef.getName()); if (objIDs != null) { targObjIDs.addAll(objIDs); // depends on control dependency: [if], data = [(objIDs] } Set<String> removeIDs = dbObj.getRemoveValues(linkDef.getName()); if (removeIDs != null) { targObjIDs.addAll(removeIDs); // depends on control dependency: [if], data = [(removeIDs] } } return targObjIDs; } }
public class class_name { public void reset() { if (getListViewWrapper() == null) { throw new IllegalStateException("Call setAbsListView() on this AnimationAdapter first!"); } assert mViewAnimator != null; mViewAnimator.reset(); mGridViewPossiblyMeasuring = true; mGridViewMeasuringPosition = -1; if (getDecoratedBaseAdapter() instanceof AnimationAdapter) { ((AnimationAdapter) getDecoratedBaseAdapter()).reset(); } } }
public class class_name { public void reset() { if (getListViewWrapper() == null) { throw new IllegalStateException("Call setAbsListView() on this AnimationAdapter first!"); } assert mViewAnimator != null; mViewAnimator.reset(); mGridViewPossiblyMeasuring = true; mGridViewMeasuringPosition = -1; if (getDecoratedBaseAdapter() instanceof AnimationAdapter) { ((AnimationAdapter) getDecoratedBaseAdapter()).reset(); // depends on control dependency: [if], data = [none] } } }
public class class_name { public static Date nextMonth(Date date, int months) { Calendar cal = Calendar.getInstance(); if (date != null) { cal.setTime(date); } cal.add(Calendar.MONTH, months); return cal.getTime(); } }
public class class_name { public static Date nextMonth(Date date, int months) { Calendar cal = Calendar.getInstance(); if (date != null) { cal.setTime(date); // depends on control dependency: [if], data = [(date] } cal.add(Calendar.MONTH, months); return cal.getTime(); } }
public class class_name { int delete(Session session, Table table, RowSetNavigator oldRows) { if (table.fkMainConstraints.length == 0) { deleteRows(session, table, oldRows); oldRows.beforeFirst(); if (table.hasTrigger(Trigger.DELETE_AFTER)) { table.fireAfterTriggers(session, Trigger.DELETE_AFTER, oldRows); } return oldRows.getSize(); } HashSet path = session.sessionContext.getConstraintPath(); HashMappedList tableUpdateList = session.sessionContext.getTableUpdateList(); if (session.database.isReferentialIntegrity()) { oldRows.beforeFirst(); while (oldRows.hasNext()) { oldRows.next(); Row row = oldRows.getCurrentRow(); path.clear(); checkCascadeDelete(session, table, tableUpdateList, row, false, path); } } if (session.database.isReferentialIntegrity()) { oldRows.beforeFirst(); while (oldRows.hasNext()) { oldRows.next(); Row row = oldRows.getCurrentRow(); path.clear(); checkCascadeDelete(session, table, tableUpdateList, row, true, path); } } oldRows.beforeFirst(); while (oldRows.hasNext()) { oldRows.next(); Row row = oldRows.getCurrentRow(); if (!row.isDeleted(session)) { table.deleteNoRefCheck(session, row); } } for (int i = 0; i < tableUpdateList.size(); i++) { Table targetTable = (Table) tableUpdateList.getKey(i); HashMappedList updateList = (HashMappedList) tableUpdateList.get(i); if (updateList.size() > 0) { targetTable.updateRowSet(session, updateList, null, true); updateList.clear(); } } oldRows.beforeFirst(); if (table.hasTrigger(Trigger.DELETE_AFTER)) { table.fireAfterTriggers(session, Trigger.DELETE_AFTER, oldRows); } path.clear(); return oldRows.getSize(); } }
public class class_name { int delete(Session session, Table table, RowSetNavigator oldRows) { if (table.fkMainConstraints.length == 0) { deleteRows(session, table, oldRows); // depends on control dependency: [if], data = [none] oldRows.beforeFirst(); // depends on control dependency: [if], data = [none] if (table.hasTrigger(Trigger.DELETE_AFTER)) { table.fireAfterTriggers(session, Trigger.DELETE_AFTER, oldRows); // depends on control dependency: [if], data = [none] } return oldRows.getSize(); // depends on control dependency: [if], data = [none] } HashSet path = session.sessionContext.getConstraintPath(); HashMappedList tableUpdateList = session.sessionContext.getTableUpdateList(); if (session.database.isReferentialIntegrity()) { oldRows.beforeFirst(); // depends on control dependency: [if], data = [none] while (oldRows.hasNext()) { oldRows.next(); // depends on control dependency: [while], data = [none] Row row = oldRows.getCurrentRow(); path.clear(); // depends on control dependency: [while], data = [none] checkCascadeDelete(session, table, tableUpdateList, row, false, path); // depends on control dependency: [while], data = [none] } } if (session.database.isReferentialIntegrity()) { oldRows.beforeFirst(); // depends on control dependency: [if], data = [none] while (oldRows.hasNext()) { oldRows.next(); // depends on control dependency: [while], data = [none] Row row = oldRows.getCurrentRow(); path.clear(); // depends on control dependency: [while], data = [none] checkCascadeDelete(session, table, tableUpdateList, row, true, path); // depends on control dependency: [while], data = [none] } } oldRows.beforeFirst(); while (oldRows.hasNext()) { oldRows.next(); // depends on control dependency: [while], data = [none] Row row = oldRows.getCurrentRow(); if (!row.isDeleted(session)) { table.deleteNoRefCheck(session, row); // depends on control dependency: [if], data = [none] } } for (int i = 0; i < tableUpdateList.size(); i++) { Table targetTable = (Table) 
tableUpdateList.getKey(i); HashMappedList updateList = (HashMappedList) tableUpdateList.get(i); if (updateList.size() > 0) { targetTable.updateRowSet(session, updateList, null, true); // depends on control dependency: [if], data = [none] updateList.clear(); // depends on control dependency: [if], data = [none] } } oldRows.beforeFirst(); if (table.hasTrigger(Trigger.DELETE_AFTER)) { table.fireAfterTriggers(session, Trigger.DELETE_AFTER, oldRows); // depends on control dependency: [if], data = [none] } path.clear(); return oldRows.getSize(); } }
public class class_name { public static ObjectId valueOf( String uuid ) { int p = uuid.indexOf("/"); if (p < 0) { return new ObjectId(Type.OBJECT, uuid); } int p1 = p; while (p > 0) { p1 = p; p = uuid.indexOf("/", p + 1); } p = p1; String ident = uuid.substring(0, p); String type = uuid.substring(p + 1); return new ObjectId(Type.valueOf(type.toUpperCase()), ident); } }
public class class_name { public static ObjectId valueOf( String uuid ) { int p = uuid.indexOf("/"); if (p < 0) { return new ObjectId(Type.OBJECT, uuid); // depends on control dependency: [if], data = [none] } int p1 = p; while (p > 0) { p1 = p; // depends on control dependency: [while], data = [none] p = uuid.indexOf("/", p + 1); // depends on control dependency: [while], data = [none] } p = p1; String ident = uuid.substring(0, p); String type = uuid.substring(p + 1); return new ObjectId(Type.valueOf(type.toUpperCase()), ident); } }
public class class_name { @Override public Hashtable<String, Object> getDataAsHashtable() { logger.entering(); if (null == resource.getCls()) { resource.setCls(KeyValueMap.class); } Hashtable<String, Object> dataHashTable = new Hashtable<>(); try { JAXBContext context = JAXBContext.newInstance(resource.getCls()); Unmarshaller unmarshaller = context.createUnmarshaller(); StreamSource xmlStreamSource = new StreamSource(resource.getInputStream()); Map<String, KeyValuePair> keyValueItems = unmarshaller .unmarshal(xmlStreamSource, KeyValueMap.class).getValue().getMap(); for (Entry<?, ?> entry : keyValueItems.entrySet()) { dataHashTable.put((String) entry.getKey(), entry.getValue()); } } catch (JAXBException excp) { logger.exiting(excp.getMessage()); throw new DataProviderException("Error unmarshalling XML file.", excp); } logger.exiting(); return dataHashTable; } }
public class class_name { @Override public Hashtable<String, Object> getDataAsHashtable() { logger.entering(); if (null == resource.getCls()) { resource.setCls(KeyValueMap.class); // depends on control dependency: [if], data = [none] } Hashtable<String, Object> dataHashTable = new Hashtable<>(); try { JAXBContext context = JAXBContext.newInstance(resource.getCls()); Unmarshaller unmarshaller = context.createUnmarshaller(); StreamSource xmlStreamSource = new StreamSource(resource.getInputStream()); Map<String, KeyValuePair> keyValueItems = unmarshaller .unmarshal(xmlStreamSource, KeyValueMap.class).getValue().getMap(); for (Entry<?, ?> entry : keyValueItems.entrySet()) { dataHashTable.put((String) entry.getKey(), entry.getValue()); } } catch (JAXBException excp) { logger.exiting(excp.getMessage()); throw new DataProviderException("Error unmarshalling XML file.", excp); } logger.exiting(); return dataHashTable; } }
public class class_name { public String curCSS(Element elem, String name, boolean force) { if (elem == null) { return ""; } name = fixPropertyName(name); // value defined in the element style String ret = elem.getStyle().getProperty(name); if (force) { Element toDetach = null; if (JsUtils.isDetached(elem)) { // If the element is detached to the DOM we attach temporary to it toDetach = attachTemporary(elem); } if (sizeRegex.test(name)) { ret = getVisibleSize(elem, name) + "px"; } else if ("opacity".equalsIgnoreCase(name)) { ret = String.valueOf(getOpacity(elem)); } else { ret = getComputedStyle(elem, JsUtils.hyphenize(name), name, null); } // If the element was previously attached, detached it. if (toDetach != null) { toDetach.removeFromParent(); } } return ret == null ? "" : ret; } }
public class class_name { public String curCSS(Element elem, String name, boolean force) { if (elem == null) { return ""; // depends on control dependency: [if], data = [none] } name = fixPropertyName(name); // value defined in the element style String ret = elem.getStyle().getProperty(name); if (force) { Element toDetach = null; if (JsUtils.isDetached(elem)) { // If the element is detached to the DOM we attach temporary to it toDetach = attachTemporary(elem); // depends on control dependency: [if], data = [none] } if (sizeRegex.test(name)) { ret = getVisibleSize(elem, name) + "px"; // depends on control dependency: [if], data = [none] } else if ("opacity".equalsIgnoreCase(name)) { ret = String.valueOf(getOpacity(elem)); // depends on control dependency: [if], data = [none] } else { ret = getComputedStyle(elem, JsUtils.hyphenize(name), name, null); // depends on control dependency: [if], data = [none] } // If the element was previously attached, detached it. if (toDetach != null) { toDetach.removeFromParent(); // depends on control dependency: [if], data = [none] } } return ret == null ? "" : ret; } }
public class class_name { @Deprecated public HashMap<Long, Long> getPartitions(long previousWatermark) { HashMap<Long, Long> defaultPartition = Maps.newHashMap(); if (!isWatermarkExists()) { defaultPartition.put(ConfigurationKeys.DEFAULT_WATERMARK_VALUE, ConfigurationKeys.DEFAULT_WATERMARK_VALUE); LOG.info("Watermark column or type not found - Default partition with low watermark and high watermark as " + ConfigurationKeys.DEFAULT_WATERMARK_VALUE); return defaultPartition; } ExtractType extractType = ExtractType.valueOf(this.state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_EXTRACT_TYPE).toUpperCase()); WatermarkType watermarkType = WatermarkType.valueOf( this.state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_WATERMARK_TYPE, ConfigurationKeys.DEFAULT_WATERMARK_TYPE) .toUpperCase()); int interval = getUpdatedInterval(this.state.getPropAsInt(ConfigurationKeys.SOURCE_QUERYBASED_PARTITION_INTERVAL, 0), extractType, watermarkType); int sourceMaxAllowedPartitions = this.state.getPropAsInt(ConfigurationKeys.SOURCE_MAX_NUMBER_OF_PARTITIONS, 0); int maxPartitions = (sourceMaxAllowedPartitions != 0 ? sourceMaxAllowedPartitions : ConfigurationKeys.DEFAULT_MAX_NUMBER_OF_PARTITIONS); WatermarkPredicate watermark = new WatermarkPredicate(null, watermarkType); int deltaForNextWatermark = watermark.getDeltaNumForNextWatermark(); LOG.info("is watermark override: " + this.isWatermarkOverride()); LOG.info("is full extract: " + this.isFullDump()); long lowWatermark = this.getLowWatermark(extractType, watermarkType, previousWatermark, deltaForNextWatermark); long highWatermark = this.getHighWatermark(extractType, watermarkType); if (lowWatermark == ConfigurationKeys.DEFAULT_WATERMARK_VALUE || highWatermark == ConfigurationKeys.DEFAULT_WATERMARK_VALUE) { LOG.info( "Low watermark or high water mark is not found. 
Hence cannot generate partitions - Default partition with low watermark: " + lowWatermark + " and high watermark: " + highWatermark); defaultPartition.put(lowWatermark, highWatermark); return defaultPartition; } LOG.info("Generate partitions with low watermark: " + lowWatermark + "; high watermark: " + highWatermark + "; partition interval in hours: " + interval + "; Maximum number of allowed partitions: " + maxPartitions); return watermark.getPartitions(lowWatermark, highWatermark, interval, maxPartitions); } }
public class class_name { @Deprecated public HashMap<Long, Long> getPartitions(long previousWatermark) { HashMap<Long, Long> defaultPartition = Maps.newHashMap(); if (!isWatermarkExists()) { defaultPartition.put(ConfigurationKeys.DEFAULT_WATERMARK_VALUE, ConfigurationKeys.DEFAULT_WATERMARK_VALUE); // depends on control dependency: [if], data = [none] LOG.info("Watermark column or type not found - Default partition with low watermark and high watermark as " + ConfigurationKeys.DEFAULT_WATERMARK_VALUE); // depends on control dependency: [if], data = [none] return defaultPartition; // depends on control dependency: [if], data = [none] } ExtractType extractType = ExtractType.valueOf(this.state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_EXTRACT_TYPE).toUpperCase()); WatermarkType watermarkType = WatermarkType.valueOf( this.state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_WATERMARK_TYPE, ConfigurationKeys.DEFAULT_WATERMARK_TYPE) .toUpperCase()); int interval = getUpdatedInterval(this.state.getPropAsInt(ConfigurationKeys.SOURCE_QUERYBASED_PARTITION_INTERVAL, 0), extractType, watermarkType); int sourceMaxAllowedPartitions = this.state.getPropAsInt(ConfigurationKeys.SOURCE_MAX_NUMBER_OF_PARTITIONS, 0); int maxPartitions = (sourceMaxAllowedPartitions != 0 ? sourceMaxAllowedPartitions : ConfigurationKeys.DEFAULT_MAX_NUMBER_OF_PARTITIONS); WatermarkPredicate watermark = new WatermarkPredicate(null, watermarkType); int deltaForNextWatermark = watermark.getDeltaNumForNextWatermark(); LOG.info("is watermark override: " + this.isWatermarkOverride()); LOG.info("is full extract: " + this.isFullDump()); long lowWatermark = this.getLowWatermark(extractType, watermarkType, previousWatermark, deltaForNextWatermark); long highWatermark = this.getHighWatermark(extractType, watermarkType); if (lowWatermark == ConfigurationKeys.DEFAULT_WATERMARK_VALUE || highWatermark == ConfigurationKeys.DEFAULT_WATERMARK_VALUE) { LOG.info( "Low watermark or high water mark is not found. 
Hence cannot generate partitions - Default partition with low watermark: " + lowWatermark + " and high watermark: " + highWatermark); // depends on control dependency: [if], data = [none] defaultPartition.put(lowWatermark, highWatermark); // depends on control dependency: [if], data = [(lowWatermark] return defaultPartition; // depends on control dependency: [if], data = [none] } LOG.info("Generate partitions with low watermark: " + lowWatermark + "; high watermark: " + highWatermark + "; partition interval in hours: " + interval + "; Maximum number of allowed partitions: " + maxPartitions); return watermark.getPartitions(lowWatermark, highWatermark, interval, maxPartitions); } }
public class class_name { void closeHandle(HelloWorldConnection handle) { connections.remove((HelloWorldConnectionImpl)handle); ConnectionEvent event = new ConnectionEvent(this, ConnectionEvent.CONNECTION_CLOSED); event.setConnectionHandle(handle); for (ConnectionEventListener cel : listeners) { cel.connectionClosed(event); } } }
public class class_name { void closeHandle(HelloWorldConnection handle) { connections.remove((HelloWorldConnectionImpl)handle); ConnectionEvent event = new ConnectionEvent(this, ConnectionEvent.CONNECTION_CLOSED); event.setConnectionHandle(handle); for (ConnectionEventListener cel : listeners) { cel.connectionClosed(event); // depends on control dependency: [for], data = [cel] } } }
public class class_name { @SuppressWarnings("unchecked") static <T> void subscribe(CoreSubscriber<? super T> s, Iterator<? extends T> it, @Nullable Runnable onClose) { //noinspection ConstantConditions if (it == null) { Operators.error(s, new NullPointerException("The iterator is null")); return; } boolean b; try { b = it.hasNext(); } catch (Throwable e) { Operators.error(s, Operators.onOperatorError(e, s.currentContext())); if (onClose != null) { try { onClose.run(); } catch (Throwable t) { Operators.onErrorDropped(t, s.currentContext()); } } return; } if (!b) { Operators.complete(s); if (onClose != null) { try { onClose.run(); } catch (Throwable t) { Operators.onErrorDropped(t, s.currentContext()); } } return; } if (s instanceof ConditionalSubscriber) { s.onSubscribe(new IterableSubscriptionConditional<>((ConditionalSubscriber<? super T>) s, it, onClose)); } else { s.onSubscribe(new IterableSubscription<>(s, it, onClose)); } } }
public class class_name { @SuppressWarnings("unchecked") static <T> void subscribe(CoreSubscriber<? super T> s, Iterator<? extends T> it, @Nullable Runnable onClose) { //noinspection ConstantConditions if (it == null) { Operators.error(s, new NullPointerException("The iterator is null")); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } boolean b; try { b = it.hasNext(); // depends on control dependency: [try], data = [none] } catch (Throwable e) { Operators.error(s, Operators.onOperatorError(e, s.currentContext())); if (onClose != null) { try { onClose.run(); // depends on control dependency: [try], data = [none] } catch (Throwable t) { Operators.onErrorDropped(t, s.currentContext()); } // depends on control dependency: [catch], data = [none] } return; } // depends on control dependency: [catch], data = [none] if (!b) { Operators.complete(s); // depends on control dependency: [if], data = [none] if (onClose != null) { try { onClose.run(); // depends on control dependency: [try], data = [none] } catch (Throwable t) { Operators.onErrorDropped(t, s.currentContext()); } // depends on control dependency: [catch], data = [none] } return; // depends on control dependency: [if], data = [none] } if (s instanceof ConditionalSubscriber) { s.onSubscribe(new IterableSubscriptionConditional<>((ConditionalSubscriber<? super T>) s, it, onClose)); // depends on control dependency: [if], data = [none] } else { s.onSubscribe(new IterableSubscription<>(s, it, onClose)); // depends on control dependency: [if], data = [none] } } }
public class class_name { public static String getWorkManager(DistributableWork work) { if (work != null && work instanceof WorkContextProvider) { List<WorkContext> contexts = ((WorkContextProvider)work).getWorkContexts(); if (contexts != null) { for (WorkContext wc : contexts) { if (wc instanceof DistributableContext) { DistributableContext dc = (DistributableContext)wc; return dc.getWorkManager(); } else if (wc instanceof HintsContext) { HintsContext hc = (HintsContext)wc; if (hc.getHints().keySet().contains(DistributableContext.WORKMANAGER)) { Serializable value = hc.getHints().get(DistributableContext.WORKMANAGER); if (value != null && value instanceof String) { return (String)value; } } } } } } return null; } }
public class class_name { public static String getWorkManager(DistributableWork work) { if (work != null && work instanceof WorkContextProvider) { List<WorkContext> contexts = ((WorkContextProvider)work).getWorkContexts(); if (contexts != null) { for (WorkContext wc : contexts) { if (wc instanceof DistributableContext) { DistributableContext dc = (DistributableContext)wc; return dc.getWorkManager(); // depends on control dependency: [if], data = [none] } else if (wc instanceof HintsContext) { HintsContext hc = (HintsContext)wc; if (hc.getHints().keySet().contains(DistributableContext.WORKMANAGER)) { Serializable value = hc.getHints().get(DistributableContext.WORKMANAGER); if (value != null && value instanceof String) { return (String)value; // depends on control dependency: [if], data = [none] } } } } } } return null; } }
public class class_name { Item completeBinop(JCTree lhs, JCTree rhs, OperatorSymbol operator) { MethodType optype = (MethodType)operator.type; int opcode = operator.opcode; if (opcode >= if_icmpeq && opcode <= if_icmple && rhs.type.constValue() instanceof Number && ((Number) rhs.type.constValue()).intValue() == 0) { opcode = opcode + (ifeq - if_icmpeq); } else if (opcode >= if_acmpeq && opcode <= if_acmpne && TreeInfo.isNull(rhs)) { opcode = opcode + (if_acmp_null - if_acmpeq); } else { // The expected type of the right operand is // the second parameter type of the operator, except for // shifts with long shiftcount, where we convert the opcode // to a short shift and the expected type to int. Type rtype = operator.erasure(types).getParameterTypes().tail.head; if (opcode >= ishll && opcode <= lushrl) { opcode = opcode + (ishl - ishll); rtype = syms.intType; } // Generate code for right operand and load. genExpr(rhs, rtype).load(); // If there are two consecutive opcode instructions, // emit the first now. if (opcode >= (1 << preShift)) { code.emitop0(opcode >> preShift); opcode = opcode & 0xFF; } } if (opcode >= ifeq && opcode <= if_acmpne || opcode == if_acmp_null || opcode == if_acmp_nonnull) { return items.makeCondItem(opcode); } else { code.emitop0(opcode); return items.makeStackItem(optype.restype); } } }
public class class_name { Item completeBinop(JCTree lhs, JCTree rhs, OperatorSymbol operator) { MethodType optype = (MethodType)operator.type; int opcode = operator.opcode; if (opcode >= if_icmpeq && opcode <= if_icmple && rhs.type.constValue() instanceof Number && ((Number) rhs.type.constValue()).intValue() == 0) { opcode = opcode + (ifeq - if_icmpeq); // depends on control dependency: [if], data = [none] } else if (opcode >= if_acmpeq && opcode <= if_acmpne && TreeInfo.isNull(rhs)) { opcode = opcode + (if_acmp_null - if_acmpeq); // depends on control dependency: [if], data = [none] } else { // The expected type of the right operand is // the second parameter type of the operator, except for // shifts with long shiftcount, where we convert the opcode // to a short shift and the expected type to int. Type rtype = operator.erasure(types).getParameterTypes().tail.head; if (opcode >= ishll && opcode <= lushrl) { opcode = opcode + (ishl - ishll); // depends on control dependency: [if], data = [none] rtype = syms.intType; // depends on control dependency: [if], data = [none] } // Generate code for right operand and load. genExpr(rhs, rtype).load(); // depends on control dependency: [if], data = [none] // If there are two consecutive opcode instructions, // emit the first now. if (opcode >= (1 << preShift)) { code.emitop0(opcode >> preShift); // depends on control dependency: [if], data = [(opcode] opcode = opcode & 0xFF; // depends on control dependency: [if], data = [none] } } if (opcode >= ifeq && opcode <= if_acmpne || opcode == if_acmp_null || opcode == if_acmp_nonnull) { return items.makeCondItem(opcode); // depends on control dependency: [if], data = [(opcode] } else { code.emitop0(opcode); // depends on control dependency: [if], data = [(opcode] return items.makeStackItem(optype.restype); // depends on control dependency: [if], data = [none] } } }
public class class_name { private void closeAllOperators() throws Exception { // We need to close them first to last, since upstream operators in the chain might emit // elements in their close methods. StreamOperator<?>[] allOperators = operatorChain.getAllOperators(); for (int i = allOperators.length - 1; i >= 0; i--) { StreamOperator<?> operator = allOperators[i]; if (operator != null) { operator.close(); } } } }
public class class_name { private void closeAllOperators() throws Exception { // We need to close them first to last, since upstream operators in the chain might emit // elements in their close methods. StreamOperator<?>[] allOperators = operatorChain.getAllOperators(); for (int i = allOperators.length - 1; i >= 0; i--) { StreamOperator<?> operator = allOperators[i]; if (operator != null) { operator.close(); // depends on control dependency: [if], data = [none] } } } }
public class class_name { private void addLinkConstraints(ReconfigurationProblem rp) { // Links limitation List<Task> tasksListUp = new ArrayList<>(); List<Task> tasksListDown = new ArrayList<>(); List<IntVar> heightsListUp = new ArrayList<>(); List<IntVar> heightsListDown = new ArrayList<>(); for (Link l : net.getLinks()) { for (VM vm : rp.getVMs()) { VMTransition a = rp.getVMAction(vm); if (a instanceof RelocatableVM && !a.getDSlice().getHoster().isInstantiatedTo(a.getCSlice().getHoster().getValue())) { Node src = source.getMapping().getVMLocation(vm); Node dst = rp.getNode(a.getDSlice().getHoster().getValue()); List<Link> path = net.getRouting().getPath(src, dst); // Check first if the link is on migration path if (path.contains(l)) { // Get link direction LinkDirection linkDirection = net.getRouting().getLinkDirection(src, dst, l); // UpLink if (linkDirection == LinkDirection.UPLINK) { tasksListUp.add(((RelocatableVM) a).getMigrationTask()); heightsListUp.add(((RelocatableVM) a).getBandwidth()); } // DownLink else { tasksListDown.add(((RelocatableVM) a).getMigrationTask()); heightsListDown.add(((RelocatableVM) a).getBandwidth()); } } } } if (!tasksListUp.isEmpty()) { // Post the cumulative constraint for the current UpLink csp.post(csp.cumulative( tasksListUp.toArray(new Task[tasksListUp.size()]), heightsListUp.toArray(new IntVar[heightsListUp.size()]), csp.intVar(l.getCapacity()), true )); tasksListUp.clear(); heightsListUp.clear(); } if (!tasksListDown.isEmpty()) { // Post the cumulative constraint for the current DownLink csp.post(csp.cumulative( tasksListDown.toArray(new Task[tasksListDown.size()]), heightsListDown.toArray(new IntVar[heightsListDown.size()]), csp.intVar(l.getCapacity()), true )); tasksListDown.clear(); heightsListDown.clear(); } } } }
public class class_name { private void addLinkConstraints(ReconfigurationProblem rp) { // Links limitation List<Task> tasksListUp = new ArrayList<>(); List<Task> tasksListDown = new ArrayList<>(); List<IntVar> heightsListUp = new ArrayList<>(); List<IntVar> heightsListDown = new ArrayList<>(); for (Link l : net.getLinks()) { for (VM vm : rp.getVMs()) { VMTransition a = rp.getVMAction(vm); if (a instanceof RelocatableVM && !a.getDSlice().getHoster().isInstantiatedTo(a.getCSlice().getHoster().getValue())) { Node src = source.getMapping().getVMLocation(vm); Node dst = rp.getNode(a.getDSlice().getHoster().getValue()); List<Link> path = net.getRouting().getPath(src, dst); // Check first if the link is on migration path if (path.contains(l)) { // Get link direction LinkDirection linkDirection = net.getRouting().getLinkDirection(src, dst, l); // UpLink if (linkDirection == LinkDirection.UPLINK) { tasksListUp.add(((RelocatableVM) a).getMigrationTask()); // depends on control dependency: [if], data = [none] heightsListUp.add(((RelocatableVM) a).getBandwidth()); // depends on control dependency: [if], data = [none] } // DownLink else { tasksListDown.add(((RelocatableVM) a).getMigrationTask()); // depends on control dependency: [if], data = [none] heightsListDown.add(((RelocatableVM) a).getBandwidth()); // depends on control dependency: [if], data = [none] } } } } if (!tasksListUp.isEmpty()) { // Post the cumulative constraint for the current UpLink csp.post(csp.cumulative( tasksListUp.toArray(new Task[tasksListUp.size()]), heightsListUp.toArray(new IntVar[heightsListUp.size()]), csp.intVar(l.getCapacity()), true )); // depends on control dependency: [if], data = [none] tasksListUp.clear(); // depends on control dependency: [if], data = [none] heightsListUp.clear(); // depends on control dependency: [if], data = [none] } if (!tasksListDown.isEmpty()) { // Post the cumulative constraint for the current DownLink csp.post(csp.cumulative( tasksListDown.toArray(new 
Task[tasksListDown.size()]), heightsListDown.toArray(new IntVar[heightsListDown.size()]), csp.intVar(l.getCapacity()), true )); // depends on control dependency: [if], data = [none] tasksListDown.clear(); // depends on control dependency: [if], data = [none] heightsListDown.clear(); // depends on control dependency: [if], data = [none] } } } }
public class class_name { @Override @SuppressWarnings("checkstyle:diamondoperatorforvariabledefinition") public synchronized void onNext(final RemoteEvent<byte[]> value) { LOG.log(Level.FINER, "RemoteManager: {0} value: {1}", new Object[] {this.name, value}); final T decodedEvent = this.codec.decode(value.getEvent()); final Class<? extends T> clazz = (Class<? extends T>) decodedEvent.getClass(); LOG.log(Level.FINEST, "RemoteManager: {0} decoded event {1} :: {2}", new Object[] {this.name, clazz.getCanonicalName(), decodedEvent}); // check remote identifier and message type final SocketRemoteIdentifier id = new SocketRemoteIdentifier((InetSocketAddress)value.remoteAddress()); final Tuple2<RemoteIdentifier, Class<? extends T>> tuple = new Tuple2<RemoteIdentifier, Class<? extends T>>(id, clazz); final EventHandler<? super T> tupleHandler = this.tupleToHandlerMap.get(tuple); if (tupleHandler != null) { LOG.log(Level.FINER, "Tuple handler: {0},{1}", new Object[] {tuple.getT1(), tuple.getT2().getCanonicalName()}); tupleHandler.onNext(decodedEvent); } else { final EventHandler<RemoteMessage<? extends T>> messageHandler = this.msgTypeToHandlerMap.get(clazz); if (messageHandler == null) { final RuntimeException ex = new RemoteRuntimeException( "Unknown message type in dispatch: " + clazz.getCanonicalName() + " from " + id); LOG.log(Level.WARNING, "Unknown message type in dispatch.", ex); throw ex; } LOG.log(Level.FINER, "Message handler: {0}", clazz.getCanonicalName()); messageHandler.onNext(new DefaultRemoteMessage<>(id, decodedEvent)); } } }
public class class_name { @Override @SuppressWarnings("checkstyle:diamondoperatorforvariabledefinition") public synchronized void onNext(final RemoteEvent<byte[]> value) { LOG.log(Level.FINER, "RemoteManager: {0} value: {1}", new Object[] {this.name, value}); final T decodedEvent = this.codec.decode(value.getEvent()); final Class<? extends T> clazz = (Class<? extends T>) decodedEvent.getClass(); LOG.log(Level.FINEST, "RemoteManager: {0} decoded event {1} :: {2}", new Object[] {this.name, clazz.getCanonicalName(), decodedEvent}); // check remote identifier and message type final SocketRemoteIdentifier id = new SocketRemoteIdentifier((InetSocketAddress)value.remoteAddress()); final Tuple2<RemoteIdentifier, Class<? extends T>> tuple = new Tuple2<RemoteIdentifier, Class<? extends T>>(id, clazz); final EventHandler<? super T> tupleHandler = this.tupleToHandlerMap.get(tuple); if (tupleHandler != null) { LOG.log(Level.FINER, "Tuple handler: {0},{1}", new Object[] {tuple.getT1(), tuple.getT2().getCanonicalName()}); // depends on control dependency: [if], data = [none] tupleHandler.onNext(decodedEvent); // depends on control dependency: [if], data = [none] } else { final EventHandler<RemoteMessage<? extends T>> messageHandler = this.msgTypeToHandlerMap.get(clazz); if (messageHandler == null) { final RuntimeException ex = new RemoteRuntimeException( "Unknown message type in dispatch: " + clazz.getCanonicalName() + " from " + id); LOG.log(Level.WARNING, "Unknown message type in dispatch.", ex); // depends on control dependency: [if], data = [none] throw ex; } LOG.log(Level.FINER, "Message handler: {0}", clazz.getCanonicalName()); // depends on control dependency: [if], data = [none] messageHandler.onNext(new DefaultRemoteMessage<>(id, decodedEvent)); // depends on control dependency: [if], data = [none] } } }
public class class_name { protected String internalGenerate(final JWTClaimsSet claimsSet) { JWT jwt; // signature? if (signatureConfiguration == null) { jwt = new PlainJWT(claimsSet); } else { jwt = signatureConfiguration.sign(claimsSet); } // encryption? if (encryptionConfiguration != null) { return encryptionConfiguration.encrypt(jwt); } else { return jwt.serialize(); } } }
public class class_name { protected String internalGenerate(final JWTClaimsSet claimsSet) { JWT jwt; // signature? if (signatureConfiguration == null) { jwt = new PlainJWT(claimsSet); // depends on control dependency: [if], data = [none] } else { jwt = signatureConfiguration.sign(claimsSet); // depends on control dependency: [if], data = [none] } // encryption? if (encryptionConfiguration != null) { return encryptionConfiguration.encrypt(jwt); // depends on control dependency: [if], data = [none] } else { return jwt.serialize(); // depends on control dependency: [if], data = [none] } } }
public class class_name { public boolean isOK(PublicKey key) { try { final var digester = MessageDigest.getInstance(get(DIGEST_KEY).getString()); final var ser = unsigned(); final var digestValue = digester.digest(ser); final var cipher = Cipher.getInstance(key.getAlgorithm()); cipher.init(Cipher.DECRYPT_MODE, key); final var sigDigest = cipher.doFinal(getSignature()); return Arrays.equals(digestValue, sigDigest); } catch (Exception e) { return false; } } }
public class class_name { public boolean isOK(PublicKey key) { try { final var digester = MessageDigest.getInstance(get(DIGEST_KEY).getString()); final var ser = unsigned(); final var digestValue = digester.digest(ser); final var cipher = Cipher.getInstance(key.getAlgorithm()); cipher.init(Cipher.DECRYPT_MODE, key); // depends on control dependency: [try], data = [none] final var sigDigest = cipher.doFinal(getSignature()); return Arrays.equals(digestValue, sigDigest); // depends on control dependency: [try], data = [none] } catch (Exception e) { return false; } // depends on control dependency: [catch], data = [none] } }
public class class_name { @SuppressWarnings("unchecked") protected boolean executeOneCompleteStmt(final String _complStmt, final List<OneSelect> _oneSelects) throws EFapsException { boolean ret = false; ConnectionResource con = null; try { AbstractPrintQuery.LOG.debug("Executing SQL: {}", _complStmt); List<Object[]> rows = null; boolean cached = false; if (isCacheEnabled()) { final QueryKey querykey = QueryKey.get(getKey(), _complStmt); final Cache<QueryKey, Object> cache = QueryCache.getSqlCache(); if (cache.containsKey(querykey)) { final Object object = cache.get(querykey); if (object instanceof List) { rows = (List<Object[]>) object; } cached = true; } } if (!cached) { con = Context.getThreadContext().getConnectionResource(); final Statement stmt = con.createStatement(); final ResultSet rs = stmt.executeQuery(_complStmt); final ArrayListHandler handler = new ArrayListHandler(Context.getDbType().getRowProcessor()); rows = handler.handle(rs); rs.close(); stmt.close(); if (isCacheEnabled()) { QueryCache.put((ICacheDefinition) this, QueryKey.get(getKey(), _complStmt), rows); } } for (final Object[] row : rows) { for (final OneSelect onesel : _oneSelects) { onesel.addObject(row); } ret = true; } final List<Instance> tmpList = new ArrayList<>(); final Map<Instance, Integer> sortMap = new HashMap<>(); int i = 0; for (final Object[] row : rows) { final Instance instance; if (getMainType().getMainTable().getSqlColType() != null) { instance = Instance.get(Type.get((Long) row[this.typeColumnIndex - 1]), (Long) row[0]); } else { instance = Instance.get(getMainType(), (Long) row[0]); } sortMap.put(instance, i); tmpList.add(instance); i++; } if (this.enforceSorted) { for (final OneSelect onesel : _oneSelects) { onesel.sortByInstanceList(getInstanceList(), sortMap); } } else { getInstanceList().clear(); getInstanceList().addAll(tmpList); } } catch (final SQLException e) { throw new EFapsException(InstanceQuery.class, "executeOneCompleteStmt", e); } return ret; } }
public class class_name { @SuppressWarnings("unchecked") protected boolean executeOneCompleteStmt(final String _complStmt, final List<OneSelect> _oneSelects) throws EFapsException { boolean ret = false; ConnectionResource con = null; try { AbstractPrintQuery.LOG.debug("Executing SQL: {}", _complStmt); List<Object[]> rows = null; boolean cached = false; if (isCacheEnabled()) { final QueryKey querykey = QueryKey.get(getKey(), _complStmt); final Cache<QueryKey, Object> cache = QueryCache.getSqlCache(); if (cache.containsKey(querykey)) { final Object object = cache.get(querykey); if (object instanceof List) { rows = (List<Object[]>) object; // depends on control dependency: [if], data = [none] } cached = true; // depends on control dependency: [if], data = [none] } } if (!cached) { con = Context.getThreadContext().getConnectionResource(); // depends on control dependency: [if], data = [none] final Statement stmt = con.createStatement(); final ResultSet rs = stmt.executeQuery(_complStmt); final ArrayListHandler handler = new ArrayListHandler(Context.getDbType().getRowProcessor()); rows = handler.handle(rs); // depends on control dependency: [if], data = [none] rs.close(); // depends on control dependency: [if], data = [none] stmt.close(); // depends on control dependency: [if], data = [none] if (isCacheEnabled()) { QueryCache.put((ICacheDefinition) this, QueryKey.get(getKey(), _complStmt), rows); // depends on control dependency: [if], data = [none] } } for (final Object[] row : rows) { for (final OneSelect onesel : _oneSelects) { onesel.addObject(row); // depends on control dependency: [for], data = [onesel] } ret = true; // depends on control dependency: [for], data = [none] } final List<Instance> tmpList = new ArrayList<>(); final Map<Instance, Integer> sortMap = new HashMap<>(); int i = 0; for (final Object[] row : rows) { final Instance instance; if (getMainType().getMainTable().getSqlColType() != null) { instance = Instance.get(Type.get((Long) 
row[this.typeColumnIndex - 1]), (Long) row[0]); // depends on control dependency: [if], data = [none] } else { instance = Instance.get(getMainType(), (Long) row[0]); // depends on control dependency: [if], data = [none] } sortMap.put(instance, i); // depends on control dependency: [for], data = [none] tmpList.add(instance); // depends on control dependency: [for], data = [none] i++; // depends on control dependency: [for], data = [none] } if (this.enforceSorted) { for (final OneSelect onesel : _oneSelects) { onesel.sortByInstanceList(getInstanceList(), sortMap); // depends on control dependency: [for], data = [onesel] } } else { getInstanceList().clear(); // depends on control dependency: [if], data = [none] getInstanceList().addAll(tmpList); // depends on control dependency: [if], data = [none] } } catch (final SQLException e) { throw new EFapsException(InstanceQuery.class, "executeOneCompleteStmt", e); } return ret; } }
public class class_name { protected boolean inPeriod(AlarmRule alarmRule) { String rule = alarmRule.getMatchValue(); if (StringUtils.isEmpty(rule)) { log.info("rule is empty " + alarmRule); return false; } String periods = StringUtils.substringAfterLast(rule, "@"); if (StringUtils.isEmpty(periods)) { // 没有时间要求,则任务在报警时间段内 return isInPeriodWhenNoPeriod(); } Calendar calendar = currentCalendar(); periods = StringUtils.trim(periods); for (String period : StringUtils.split(periods, ",")) { String[] startAndEnd = StringUtils.split(period, "-"); if (startAndEnd == null || startAndEnd.length != 2) { log.error("error period time format in rule : " + alarmRule); return isInPeriodWhenErrorFormat(); } String start = startAndEnd[0]; String end = startAndEnd[1]; if (checkInPeriod(calendar, start, end)) { log.info("rule is in period : " + alarmRule); return true; } } log.info("rule is not in period : " + alarmRule); return false; } }
public class class_name { protected boolean inPeriod(AlarmRule alarmRule) { String rule = alarmRule.getMatchValue(); if (StringUtils.isEmpty(rule)) { log.info("rule is empty " + alarmRule); // depends on control dependency: [if], data = [none] return false; // depends on control dependency: [if], data = [none] } String periods = StringUtils.substringAfterLast(rule, "@"); if (StringUtils.isEmpty(periods)) { // 没有时间要求,则任务在报警时间段内 return isInPeriodWhenNoPeriod(); // depends on control dependency: [if], data = [none] } Calendar calendar = currentCalendar(); periods = StringUtils.trim(periods); for (String period : StringUtils.split(periods, ",")) { String[] startAndEnd = StringUtils.split(period, "-"); if (startAndEnd == null || startAndEnd.length != 2) { log.error("error period time format in rule : " + alarmRule); // depends on control dependency: [if], data = [none] return isInPeriodWhenErrorFormat(); // depends on control dependency: [if], data = [none] } String start = startAndEnd[0]; String end = startAndEnd[1]; if (checkInPeriod(calendar, start, end)) { log.info("rule is in period : " + alarmRule); // depends on control dependency: [if], data = [none] return true; // depends on control dependency: [if], data = [none] } } log.info("rule is not in period : " + alarmRule); return false; } }
public class class_name { public UnitCellBoundingBox getTranslatedBbs(Vector3d translation) { UnitCellBoundingBox translatedBbs = new UnitCellBoundingBox(numOperatorsSg, numPolyChainsAu); for (int i=0; i<numOperatorsSg; i++) { for (int j = 0;j<numPolyChainsAu; j++) { translatedBbs.chainBbs[i][j] = new BoundingBox(this.chainBbs[i][j]); translatedBbs.chainBbs[i][j].translate(translation); } translatedBbs.auBbs[i] = new BoundingBox(translatedBbs.chainBbs[i]); } return translatedBbs; } }
public class class_name { public UnitCellBoundingBox getTranslatedBbs(Vector3d translation) { UnitCellBoundingBox translatedBbs = new UnitCellBoundingBox(numOperatorsSg, numPolyChainsAu); for (int i=0; i<numOperatorsSg; i++) { for (int j = 0;j<numPolyChainsAu; j++) { translatedBbs.chainBbs[i][j] = new BoundingBox(this.chainBbs[i][j]); // depends on control dependency: [for], data = [j] translatedBbs.chainBbs[i][j].translate(translation); // depends on control dependency: [for], data = [j] } translatedBbs.auBbs[i] = new BoundingBox(translatedBbs.chainBbs[i]); // depends on control dependency: [for], data = [i] } return translatedBbs; } }
public class class_name { public SerIterable createIterable(SerIterable iterable) { List<Class<?>> valueTypeTypes = iterable.valueTypeTypes(); if (valueTypeTypes.size() > 0) { Class<?> valueType = iterable.valueType(); if (NavigableSet.class.isAssignableFrom(valueType)) { return navigableSet(valueTypeTypes.get(0), EMPTY_VALUE_TYPES); } if (SortedSet.class.isAssignableFrom(valueType)) { return sortedSet(valueTypeTypes.get(0), EMPTY_VALUE_TYPES); } if (Set.class.isAssignableFrom(valueType)) { return set(valueTypeTypes.get(0), EMPTY_VALUE_TYPES); } if (Collection.class.isAssignableFrom(valueType)) { // includes List return list(valueTypeTypes.get(0), EMPTY_VALUE_TYPES); } if (NavigableMap.class.isAssignableFrom(valueType)) { if (valueTypeTypes.size() == 2) { return navigableMap(valueTypeTypes.get(0), valueTypeTypes.get(1), EMPTY_VALUE_TYPES); } return navigableMap(Object.class, Object.class, EMPTY_VALUE_TYPES); } if (SortedMap.class.isAssignableFrom(valueType)) { if (valueTypeTypes.size() == 2) { return sortedMap(valueTypeTypes.get(0), valueTypeTypes.get(1), EMPTY_VALUE_TYPES); } return sortedMap(Object.class, Object.class, EMPTY_VALUE_TYPES); } if (Map.class.isAssignableFrom(valueType)) { if (valueTypeTypes.size() == 2) { return map(valueTypeTypes.get(0), valueTypeTypes.get(1), EMPTY_VALUE_TYPES); } return map(Object.class, Object.class, EMPTY_VALUE_TYPES); } if (valueType.isArray()) { if (valueType.getComponentType().isPrimitive()) { return arrayPrimitive(valueType.getComponentType()); } else { return array(valueType.getComponentType()); } } } return null; } }
public class class_name { public SerIterable createIterable(SerIterable iterable) { List<Class<?>> valueTypeTypes = iterable.valueTypeTypes(); if (valueTypeTypes.size() > 0) { Class<?> valueType = iterable.valueType(); if (NavigableSet.class.isAssignableFrom(valueType)) { return navigableSet(valueTypeTypes.get(0), EMPTY_VALUE_TYPES); // depends on control dependency: [if], data = [none] } if (SortedSet.class.isAssignableFrom(valueType)) { return sortedSet(valueTypeTypes.get(0), EMPTY_VALUE_TYPES); // depends on control dependency: [if], data = [none] } if (Set.class.isAssignableFrom(valueType)) { return set(valueTypeTypes.get(0), EMPTY_VALUE_TYPES); // depends on control dependency: [if], data = [none] } if (Collection.class.isAssignableFrom(valueType)) { // includes List return list(valueTypeTypes.get(0), EMPTY_VALUE_TYPES); // depends on control dependency: [if], data = [none] } if (NavigableMap.class.isAssignableFrom(valueType)) { if (valueTypeTypes.size() == 2) { return navigableMap(valueTypeTypes.get(0), valueTypeTypes.get(1), EMPTY_VALUE_TYPES); // depends on control dependency: [if], data = [none] } return navigableMap(Object.class, Object.class, EMPTY_VALUE_TYPES); // depends on control dependency: [if], data = [none] } if (SortedMap.class.isAssignableFrom(valueType)) { if (valueTypeTypes.size() == 2) { return sortedMap(valueTypeTypes.get(0), valueTypeTypes.get(1), EMPTY_VALUE_TYPES); // depends on control dependency: [if], data = [none] } return sortedMap(Object.class, Object.class, EMPTY_VALUE_TYPES); // depends on control dependency: [if], data = [none] } if (Map.class.isAssignableFrom(valueType)) { if (valueTypeTypes.size() == 2) { return map(valueTypeTypes.get(0), valueTypeTypes.get(1), EMPTY_VALUE_TYPES); // depends on control dependency: [if], data = [none] } return map(Object.class, Object.class, EMPTY_VALUE_TYPES); // depends on control dependency: [if], data = [none] } if (valueType.isArray()) { if (valueType.getComponentType().isPrimitive()) { 
return arrayPrimitive(valueType.getComponentType()); // depends on control dependency: [if], data = [none] } else { return array(valueType.getComponentType()); // depends on control dependency: [if], data = [none] } } } return null; } }
public class class_name { public void loadModule(final Class<? extends JsiiModule> moduleClass) { if (!JsiiModule.class.isAssignableFrom(moduleClass)) { throw new JsiiException("Invalid module class " + moduleClass.getName() + ". It must be derived from JsiiModule"); } JsiiModule module; try { module = moduleClass.newInstance(); } catch (IllegalAccessException | InstantiationException e) { throw new JsiiException(e); } if (this.loadedModules.containsKey(module.getModuleName())) { return; } // Load dependencies for (Class<? extends JsiiModule> dep: module.getDependencies()) { loadModule(dep); } this.getClient().loadModule(module); // indicate that it was loaded this.loadedModules.put(module.getModuleName(), module); } }
public class class_name { public void loadModule(final Class<? extends JsiiModule> moduleClass) { if (!JsiiModule.class.isAssignableFrom(moduleClass)) { throw new JsiiException("Invalid module class " + moduleClass.getName() + ". It must be derived from JsiiModule"); } JsiiModule module; try { module = moduleClass.newInstance(); // depends on control dependency: [try], data = [none] } catch (IllegalAccessException | InstantiationException e) { throw new JsiiException(e); } // depends on control dependency: [catch], data = [none] if (this.loadedModules.containsKey(module.getModuleName())) { return; // depends on control dependency: [if], data = [none] } // Load dependencies for (Class<? extends JsiiModule> dep: module.getDependencies()) { loadModule(dep); } this.getClient().loadModule(module); // indicate that it was loaded this.loadedModules.put(module.getModuleName(), module); } }
public class class_name { @CanIgnoreReturnValue public final Ordered containsAtLeastElementsIn(Iterable<?> expectedIterable) { List<?> actual = Lists.newLinkedList(actual()); final Collection<?> expected = iterableToCollection(expectedIterable); List<Object> missing = newArrayList(); List<Object> actualNotInOrder = newArrayList(); boolean ordered = true; // step through the expected elements... for (Object e : expected) { int index = actual.indexOf(e); if (index != -1) { // if we find the element in the actual list... // drain all the elements that come before that element into actualNotInOrder moveElements(actual, actualNotInOrder, index); // and remove the element from the actual list actual.remove(0); } else { // otherwise try removing it from actualNotInOrder... if (actualNotInOrder.remove(e)) { // if it was in actualNotInOrder, we're not in order ordered = false; } else { // if it's not in actualNotInOrder, we're missing an expected element missing.add(e); } } } // if we have any missing expected elements, fail if (!missing.isEmpty()) { return failAtLeast(expected, missing); } /* * TODO(cpovirk): In the NotInOrder case, also include a Fact that shows _only_ the required * elements (that is, without any extras) but in the order they were actually found. That should * make it easier for users to compare the actual order of the required elements to the expected * order. Or, if that's too much trouble, at least try to find a better title for the full * actual iterable than the default of "but was," which may _sound_ like it should show only the * required elements, rather than the full actual iterable. */ return ordered ? IN_ORDER : new Ordered() { @Override public void inOrder() { failWithActual( simpleFact("required elements were all found, but order was wrong"), fact("expected order for required elements", expected)); } }; } }
public class class_name { @CanIgnoreReturnValue public final Ordered containsAtLeastElementsIn(Iterable<?> expectedIterable) { List<?> actual = Lists.newLinkedList(actual()); final Collection<?> expected = iterableToCollection(expectedIterable); List<Object> missing = newArrayList(); List<Object> actualNotInOrder = newArrayList(); boolean ordered = true; // step through the expected elements... for (Object e : expected) { int index = actual.indexOf(e); if (index != -1) { // if we find the element in the actual list... // drain all the elements that come before that element into actualNotInOrder moveElements(actual, actualNotInOrder, index); // depends on control dependency: [if], data = [none] // and remove the element from the actual list actual.remove(0); // depends on control dependency: [if], data = [none] } else { // otherwise try removing it from actualNotInOrder... if (actualNotInOrder.remove(e)) { // if it was in actualNotInOrder, we're not in order ordered = false; // depends on control dependency: [if], data = [none] } else { // if it's not in actualNotInOrder, we're missing an expected element missing.add(e); // depends on control dependency: [if], data = [none] } } } // if we have any missing expected elements, fail if (!missing.isEmpty()) { return failAtLeast(expected, missing); // depends on control dependency: [if], data = [none] } /* * TODO(cpovirk): In the NotInOrder case, also include a Fact that shows _only_ the required * elements (that is, without any extras) but in the order they were actually found. That should * make it easier for users to compare the actual order of the required elements to the expected * order. Or, if that's too much trouble, at least try to find a better title for the full * actual iterable than the default of "but was," which may _sound_ like it should show only the * required elements, rather than the full actual iterable. */ return ordered ? 
IN_ORDER : new Ordered() { @Override public void inOrder() { failWithActual( simpleFact("required elements were all found, but order was wrong"), fact("expected order for required elements", expected)); } }; } }
public class class_name { public static Channel openChannel(Session session, ChannelType channelType) { final Channel channel = createChannel(session, channelType); try { channel.connect(); } catch (JSchException e) { throw new JschRuntimeException(e); } return channel; } }
public class class_name { public static Channel openChannel(Session session, ChannelType channelType) { final Channel channel = createChannel(session, channelType); try { channel.connect(); // depends on control dependency: [try], data = [none] } catch (JSchException e) { throw new JschRuntimeException(e); } // depends on control dependency: [catch], data = [none] return channel; } }
public class class_name { @Override public synchronized void stop() { if (started) { try { lock.release(); // Closing RandomAccessFile so that files gets deleted on windows and // org.ehcache.internal.persistence.DefaultLocalPersistenceServiceTest.testLocksDirectoryAndUnlocks() // passes on windows rw.close(); if (!lockFile.delete()) { LOGGER.debug("Lock file was not deleted {}.", lockFile.getPath()); } } catch (IOException e) { throw new RuntimeException("Couldn't unlock rootDir: " + rootDirectory.getAbsolutePath(), e); } started = false; LOGGER.debug("RootDirectory Unlocked"); } } }
public class class_name { @Override public synchronized void stop() { if (started) { try { lock.release(); // depends on control dependency: [try], data = [none] // Closing RandomAccessFile so that files gets deleted on windows and // org.ehcache.internal.persistence.DefaultLocalPersistenceServiceTest.testLocksDirectoryAndUnlocks() // passes on windows rw.close(); // depends on control dependency: [try], data = [none] if (!lockFile.delete()) { LOGGER.debug("Lock file was not deleted {}.", lockFile.getPath()); // depends on control dependency: [if], data = [none] } } catch (IOException e) { throw new RuntimeException("Couldn't unlock rootDir: " + rootDirectory.getAbsolutePath(), e); } // depends on control dependency: [catch], data = [none] started = false; // depends on control dependency: [if], data = [none] LOGGER.debug("RootDirectory Unlocked"); // depends on control dependency: [if], data = [none] } } }
public class class_name { public void constructFault(FaultException faultException, Object argument) { //check argument if(argument != null && faultException.getArgument() == null) faultException.setArgument(argument); StackTraceElement stackTraceElement = null; if(faultException.getModule() == null) { if(this.defaultModule != null && this.defaultModule.length() > 0) faultException.setModule(defaultModule); else { stackTraceElement = determineCauseStackTraceElementsn(faultException); if(stackTraceElement != null) faultException.setModule(stackTraceElement.getFileName()); } } if(faultException.getOperation() == null) { if(this.defaultOperation != null && this.defaultOperation.length() > 0) faultException.setOperation(this.defaultOperation); else { if(stackTraceElement == null) stackTraceElement = determineCauseStackTraceElementsn(faultException); if(stackTraceElement != null) faultException.setOperation(stackTraceElement.getMethodName()); //add line number if((faultException.getNotes() == null || faultException.getNotes().length() == 0) && stackTraceElement != null) { faultException.setNotes(lineNumberNotePrefix+stackTraceElement.getLineNumber()); } } } } }
public class class_name { public void constructFault(FaultException faultException, Object argument) { //check argument if(argument != null && faultException.getArgument() == null) faultException.setArgument(argument); StackTraceElement stackTraceElement = null; if(faultException.getModule() == null) { if(this.defaultModule != null && this.defaultModule.length() > 0) faultException.setModule(defaultModule); else { stackTraceElement = determineCauseStackTraceElementsn(faultException); // depends on control dependency: [if], data = [none] if(stackTraceElement != null) faultException.setModule(stackTraceElement.getFileName()); } } if(faultException.getOperation() == null) { if(this.defaultOperation != null && this.defaultOperation.length() > 0) faultException.setOperation(this.defaultOperation); else { if(stackTraceElement == null) stackTraceElement = determineCauseStackTraceElementsn(faultException); if(stackTraceElement != null) faultException.setOperation(stackTraceElement.getMethodName()); //add line number if((faultException.getNotes() == null || faultException.getNotes().length() == 0) && stackTraceElement != null) { faultException.setNotes(lineNumberNotePrefix+stackTraceElement.getLineNumber()); // depends on control dependency: [if], data = [none] } } } } }
public class class_name { public final void retractStatement() throws RecognitionException { int retractStatement_StartIndex = input.index(); Token s=null; Token c=null; ParserRuleReturnScope expression9 =null; try { if ( state.backtracking>0 && alreadyParsedRule(input, 91) ) { return; } // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:951:5: (s= 'retract' '(' expression c= ')' ) // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:951:7: s= 'retract' '(' expression c= ')' { s=(Token)match(input,103,FOLLOW_103_in_retractStatement4097); if (state.failed) return; match(input,36,FOLLOW_36_in_retractStatement4099); if (state.failed) return; pushFollow(FOLLOW_expression_in_retractStatement4105); expression9=expression(); state._fsp--; if (state.failed) return; c=(Token)match(input,37,FOLLOW_37_in_retractStatement4115); if (state.failed) return; if ( state.backtracking==0 ) { JavaStatementBlockDescr d = new JavaStatementBlockDescr( (expression9!=null?input.toString(expression9.start,expression9.stop):null), JavaBlockDescr.BlockType.DELETE ); d.setStart( ((CommonToken)s).getStartIndex() ); this.addBlockDescr( d ); d.setEnd( ((CommonToken)c).getStopIndex() ); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { // do for sure before leaving if ( state.backtracking>0 ) { memoize(input, 91, retractStatement_StartIndex); } } } }
public class class_name { public final void retractStatement() throws RecognitionException { int retractStatement_StartIndex = input.index(); Token s=null; Token c=null; ParserRuleReturnScope expression9 =null; try { if ( state.backtracking>0 && alreadyParsedRule(input, 91) ) { return; } // depends on control dependency: [if], data = [none] // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:951:5: (s= 'retract' '(' expression c= ')' ) // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:951:7: s= 'retract' '(' expression c= ')' { s=(Token)match(input,103,FOLLOW_103_in_retractStatement4097); if (state.failed) return; match(input,36,FOLLOW_36_in_retractStatement4099); if (state.failed) return; pushFollow(FOLLOW_expression_in_retractStatement4105); expression9=expression(); state._fsp--; if (state.failed) return; c=(Token)match(input,37,FOLLOW_37_in_retractStatement4115); if (state.failed) return; if ( state.backtracking==0 ) { JavaStatementBlockDescr d = new JavaStatementBlockDescr( (expression9!=null?input.toString(expression9.start,expression9.stop):null), JavaBlockDescr.BlockType.DELETE ); d.setStart( ((CommonToken)s).getStartIndex() ); // depends on control dependency: [if], data = [none] this.addBlockDescr( d ); // depends on control dependency: [if], data = [none] d.setEnd( ((CommonToken)c).getStopIndex() ); // depends on control dependency: [if], data = [none] } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { // do for sure before leaving if ( state.backtracking>0 ) { memoize(input, 91, retractStatement_StartIndex); } // depends on control dependency: [if], data = [none] } } }
public class class_name {
    /**
     * Builds a {@code CsvWriterDSL} for the given target type using the default
     * cell writer. Column headers are derived from the type's metadata; if the
     * type does not support default headers ({@code defaultHeaders} throws
     * {@link UnsupportedOperationException}), a column-less DSL is returned
     * instead so callers can add columns explicitly.
     *
     * @param type the target type to map rows from
     * @param <T>  the mapped element type
     * @return a writer DSL pre-configured with default columns, or a
     *         {@code NoColumnCsvWriterDSL} when no default headers exist
     */
    public static <T> CsvWriterDSL<T> from(Type type) {
        final ClassMeta<T> meta = ReflectionService.newInstance().getClassMeta(type);
        final CellWriter writer = CsvCellWriter.DEFAULT_WRITER;
        final CsvWriterBuilder<T> columnBuilder = CsvWriterBuilder.newBuilder(meta, writer);
        final MapperConfig<CsvColumnKey, ?> config =
                MapperConfig.<CsvColumnKey, CsvRow>fieldMapperConfig();
        try {
            final String[] columnNames = defaultHeaders(meta);
            for (final String columnName : columnNames) {
                columnBuilder.addColumn(columnName);
            }
            final ContextualSourceFieldMapperImpl<T, Appendable> rowMapper = columnBuilder.mapper();
            return new DefaultCsvWriterDSL<T>(
                    CsvWriter.<T>toColumnDefinitions(columnNames),
                    writer,
                    rowMapper,
                    meta,
                    config, false);
        } catch (UnsupportedOperationException e) {
            // No default headers for this type: fall back to a DSL with no columns.
            return new NoColumnCsvWriterDSL<T>(writer, meta, config, false);
        }
    }
}
public class class_name {
    /**
     * Control-dependency-annotated copy of {@code from(Type)}: inline
     * "// depends on control dependency" markers (left untouched) tag each
     * statement with its guarding construct. Executable tokens are identical
     * to the plain variant.
     */
    public static <T> CsvWriterDSL<T> from(Type type) {
        ClassMeta<T> classMeta = ReflectionService.newInstance().getClassMeta(type);
        CellWriter cellWriter = CsvCellWriter.DEFAULT_WRITER;
        CsvWriterBuilder<T> builder = CsvWriterBuilder.newBuilder(classMeta, cellWriter);
        MapperConfig<CsvColumnKey, ?> mapperConfig = MapperConfig.<CsvColumnKey, CsvRow>fieldMapperConfig();
        try {
            String[] headers = defaultHeaders(classMeta);
            for (String header : headers) {
                builder.addColumn(header); // depends on control dependency: [for], data = [header]
            }
            ContextualSourceFieldMapperImpl<T, Appendable> mapper = builder.mapper();
            return new DefaultCsvWriterDSL<T>(
                    CsvWriter.<T>toColumnDefinitions(headers),
                    cellWriter,
                    mapper,
                    classMeta,
                    mapperConfig, false); // depends on control dependency: [try], data = [none]
        } catch (UnsupportedOperationException e) {
            return new NoColumnCsvWriterDSL<T>(cellWriter, classMeta, mapperConfig, false);
        } // depends on control dependency: [catch], data = [none]
    }
}
public class class_name {
    /**
     * Replaces every occurrence of {@code search} in {@code value} with
     * {@code newValue}, treating {@code search} as a literal string in both
     * the case-sensitive and case-insensitive paths.
     *
     * Bug fix: the case-insensitive path previously compiled the raw
     * {@code search} string as a regular expression, so metacharacters such
     * as "." or "(" were interpreted (or threw PatternSyntaxException) —
     * inconsistent with the literal String.replace used when
     * {@code caseSensitive} is true. The search term is now quoted with
     * {@link Pattern#quote} so both paths match literally.
     *
     * @param value         the string to search in (must be non-null per validate)
     * @param search        the literal substring to replace (must be non-null per validate)
     * @param newValue      the replacement text (inserted literally)
     * @param caseSensitive whether matching is case-sensitive
     * @return the string with all occurrences replaced
     */
    public static String replace(final String value, final String search,
                                 final String newValue, final boolean caseSensitive) {
        validate(value, NULL_STRING_PREDICATE, NULL_STRING_MSG_SUPPLIER);
        validate(search, NULL_STRING_PREDICATE, NULL_STRING_MSG_SUPPLIER);
        if (caseSensitive) {
            return value.replace(search, newValue);
        }
        // Quote so the search term is matched literally, mirroring the
        // case-sensitive branch; quoteReplacement keeps '$' and '\' literal too.
        return Pattern.compile(Pattern.quote(search), Pattern.CASE_INSENSITIVE)
                .matcher(value)
                .replaceAll(Matcher.quoteReplacement(newValue));
    }
}
public class class_name {
    /**
     * Control-dependency-annotated copy of {@code replace}: the inline marker
     * (left untouched) tags the early return with its guarding construct.
     * Executable tokens are identical to the plain variant.
     *
     * NOTE(review): the case-insensitive branch compiles the raw search string
     * as a regex — metacharacters are interpreted; confirm this is intended.
     */
    public static String replace(final String value, final String search,
                                 final String newValue, final boolean caseSensitive) {
        validate(value, NULL_STRING_PREDICATE, NULL_STRING_MSG_SUPPLIER);
        validate(search, NULL_STRING_PREDICATE, NULL_STRING_MSG_SUPPLIER);
        if (caseSensitive) {
            return value.replace(search, newValue); // depends on control dependency: [if], data = [none]
        }
        return Pattern.compile(search, Pattern.CASE_INSENSITIVE).matcher(value)
                .replaceAll(Matcher.quoteReplacement(newValue));
    }
}
public class class_name {
    /**
     * Writes the contents of {@code data} to the log file channel.
     *
     * The caller does not expect to release buffers handed to the logger, so
     * this method always releases {@code data} before returning (either on the
     * stopped-state early exit or in the finally block after writing).
     *
     * @param data buffer to write; a {@code null} buffer is rejected
     * @return {@code true} if the full buffer was written, {@code false} on a
     *         null buffer, a non-RUNNING logger, or a write failure
     */
    public synchronized boolean log(WsByteBuffer data) {
        if (null == data) {
            // return failure
            return false;
        }
        // if we've stopped then there is no worker to hand this to; however, the
        // caller does not expect to have to release buffers handed to the
        // logger so do that here first
        if (State.RUNNING != this.state) {
            data.release();
            return false;
        }
        int length = data.remaining();
        // Rotate to a fresh file first if this write would exceed the size limit.
        if (isOverFileLimit(length)) {
            rotate();
        }
        boolean rc = true;
        int bytesWritten = 0;
        try {
            ByteBuffer buffer = data.getWrappedByteBuffer();
            // Loop until the whole buffer is written; channel writes may be partial.
            // NOTE(review): if write() repeatedly returns 0 this busy-loops — confirm
            // the channel is blocking.
            while (bytesWritten < length) {
                bytesWritten += this.myChannel.write(buffer);
            }
        } catch (Exception e) {
            FFDCFilter.processException(e, getClass().getName() + ".log", "166", this);
            if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) {
                Tr.event(tc, getFileName() + ": error writing to log; " + e);
            }
            rc = false;
        } finally {
            // Always release the buffer — ownership passed to the logger.
            data.release();
        }
        return rc;
    }
}
public class class_name {
    /**
     * Control-dependency-annotated copy of {@code log(WsByteBuffer)}: inline
     * "// depends on control dependency" markers (left untouched) tag each
     * statement with its guarding construct. Executable tokens are identical
     * to the plain variant.
     */
    public synchronized boolean log(WsByteBuffer data) {
        if (null == data) {
            // return failure
            return false; // depends on control dependency: [if], data = [none]
        }
        // if we've stopped then there is no worker to hand this to; however, the
        // caller does not expect to have to release buffers handed to the
        // logger so do that here first
        if (State.RUNNING != this.state) {
            data.release(); // depends on control dependency: [if], data = [none]
            return false; // depends on control dependency: [if], data = [none]
        }
        int length = data.remaining();
        if (isOverFileLimit(length)) {
            rotate(); // depends on control dependency: [if], data = [none]
        }
        boolean rc = true;
        int bytesWritten = 0;
        try {
            ByteBuffer buffer = data.getWrappedByteBuffer();
            while (bytesWritten < length) {
                bytesWritten += this.myChannel.write(buffer); // depends on control dependency: [while], data = [none]
            }
        } catch (Exception e) {
            FFDCFilter.processException(e, getClass().getName() + ".log", "166", this);
            if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) {
                Tr.event(tc, getFileName() + ": error writing to log; " + e); // depends on control dependency: [if], data = [none]
            }
            rc = false;
        } finally { // depends on control dependency: [catch], data = [none]
            data.release();
        }
        return rc;
    }
}
public class class_name {
    /**
     * Recursively translates an intermediate-query tree node into a list of
     * Datalog body atoms, dispatching on the root node's concrete type:
     * construction, filter, data, inner-join, left-join, union, or true node.
     *
     * Side effects: appends new sub-query heads to {@code heads}, records
     * projection atoms per construction node in {@code subQueryProjectionAtoms},
     * and (for nested TrueNodes) increments {@code dummyPredCounter}.
     *
     * @param tree                    the IQ subtree to translate
     * @param heads                   accumulator of rule heads for sub-queries (mutated)
     * @param subQueryProjectionAtoms projection atom per construction node (mutated)
     * @param isNested                true when this call is translating a nested
     *                                position (join/filter operand)
     * @return the body atoms for this subtree
     * @throws UnsupportedOperationException on an unexpected node type
     */
    private List<Function> getAtomFrom(IQTree tree, Queue<RuleHead> heads,
                                       Map<QueryNode, DataAtom> subQueryProjectionAtoms,
                                       boolean isNested) {
        List<Function> body = new ArrayList<>();
        /*
         * Basic Atoms
         */
        final QueryNode node = tree.getRootNode();
        if (node instanceof ConstructionNode) {
            ConstructionNode constructionNode = (ConstructionNode) node;
            // Reuse the projection atom already assigned to this node, else mint one.
            DataAtom projectionAtom = Optional.ofNullable(
                    subQueryProjectionAtoms.get(constructionNode))
                    .orElseGet(() -> generateProjectionAtom(constructionNode.getVariables()));
            heads.add(new RuleHead(constructionNode.getSubstitution(), projectionAtom,
                    Optional.of(((UnaryIQTree) tree).getChild())));
            subQueryProjectionAtoms.put(constructionNode, projectionAtom);
            Function mutAt = immutabilityTools.convertToMutableFunction(projectionAtom);
            body.add(mutAt);
            return body;
        }
        else if (node instanceof FilterNode) {
            ImmutableExpression filter = ((FilterNode) node).getFilterCondition();
            List<IQTree> children = tree.getChildren();
            // Translate the single child, then append each conjunct of the filter.
            body.addAll(getAtomFrom(children.get(0), heads, subQueryProjectionAtoms, true));
            filter.flattenAND().stream()
                    .map(immutabilityTools::convertToMutableBooleanExpression)
                    .forEach(body::add);
            return body;
        }
        else if (node instanceof DataNode) {
            DataAtom atom = ((DataNode) node).getProjectionAtom();
            Function mutAt = immutabilityTools.convertToMutableFunction(atom);
            body.add(mutAt);
            return body;
        /**
         * Nested Atoms
         */
        }
        else if (node instanceof InnerJoinNode) {
            return getAtomsFromJoinNode((InnerJoinNode) node, tree, heads,
                    subQueryProjectionAtoms, isNested);
        }
        else if (node instanceof LeftJoinNode) {
            Optional<ImmutableExpression> filter = ((LeftJoinNode) node).getOptionalFilterCondition();
            BinaryNonCommutativeIQTree ljTree = (BinaryNonCommutativeIQTree) tree;
            List<Function> atomsListLeft = getAtomFrom(ljTree.getLeftChild(), heads,
                    subQueryProjectionAtoms, true);
            List<Function> atomsListRight = getAtomFrom(ljTree.getRightChild(), heads,
                    subQueryProjectionAtoms, true);
            // Build a single SPARQL LeftJoin atom, with or without the join condition.
            if (filter.isPresent()) {
                ImmutableExpression filter2 = filter.get();
                Expression mutFilter = immutabilityTools.convertToMutableBooleanExpression(filter2);
                Function newLJAtom = datalogFactory.getSPARQLLeftJoin(atomsListLeft, atomsListRight,
                        Optional.of(mutFilter));
                body.add(newLJAtom);
                return body;
            } else {
                Function newLJAtom = datalogFactory.getSPARQLLeftJoin(atomsListLeft, atomsListRight,
                        Optional.empty());
                body.add(newLJAtom);
                return body;
            }
        }
        else if (node instanceof UnionNode) {
//            Optional<ConstructionNode> parentNode = te.getParent(node)
//                    .filter(p -> p instanceof ConstructionNode)
//                    .map(p -> (ConstructionNode) p);
//
//            DistinctVariableOnlyDataAtom freshHeadAtom;
//            if(parentNode.isPresent()) {
//                freshHeadAtom = generateProjectionAtom(parentNode.get().getChildVariables());
//            }
//            else{
//                freshHeadAtom = generateProjectionAtom(((UnionNode) node).getVariables());
//            }
            // All union branches share one fresh head atom so they define one predicate.
            DistinctVariableOnlyDataAtom freshHeadAtom = generateProjectionAtom(((UnionNode) node).getVariables());
            for (IQTree child : tree.getChildren()) {
                QueryNode childRoot = child.getRootNode();
                if (childRoot instanceof ConstructionNode) {
                    ConstructionNode cn = (ConstructionNode) childRoot;
                    Optional<IQTree> grandChild = Optional.of(((UnaryIQTree) child).getChild());
                    subQueryProjectionAtoms.put(cn, freshHeadAtom);
                    heads.add(new RuleHead(cn.getSubstitution(), freshHeadAtom, grandChild));
                } else {
                    // Wrap a branch without a construction node in a fresh identity one.
                    ConstructionNode cn = iqFactory.createConstructionNode(((UnionNode) node).getVariables());
                    subQueryProjectionAtoms.put(cn, freshHeadAtom);
                    heads.add(new RuleHead(cn.getSubstitution(), freshHeadAtom, Optional.of(child)));
                }
            } //end for
            Function bodyAtom = immutabilityTools.convertToMutableFunction(freshHeadAtom);
            body.add(bodyAtom);
            return body;
        }
        else if (node instanceof TrueNode) {
            /**
             *
             * TODO: what should we do when it is the left child of a LJ?
             *
             * Add a 0-ary atom
             */
            //DataAtom projectionAtom = generateProjectionAtom(ImmutableSet.of());
            //heads.add(new RuleHead(new ImmutableSubstitutionImpl<>(ImmutableMap.of()), projectionAtom,Optional.empty()));
            //return body;
            if (isNested) {
                body.add(termFactory.getFunction(
                        datalogFactory.getDummyPredicate(++dummyPredCounter),
                        new ArrayList<>()));
            }
            // Otherwise, ignores it
            return body;
        }
        else {
            throw new UnsupportedOperationException("Unexpected type of node in the intermediate tree: " + node);
        }
    }
}
public class class_name {
    /**
     * Control-dependency-annotated copy of {@code getAtomFrom}: inline
     * "// depends on control dependency" markers (left untouched) tag each
     * statement with its guarding construct. Executable tokens are identical
     * to the plain variant.
     */
    private List<Function> getAtomFrom(IQTree tree, Queue<RuleHead> heads,
                                       Map<QueryNode, DataAtom> subQueryProjectionAtoms,
                                       boolean isNested) {
        List<Function> body = new ArrayList<>();
        /*
         * Basic Atoms
         */
        final QueryNode node = tree.getRootNode();
        if (node instanceof ConstructionNode) {
            ConstructionNode constructionNode = (ConstructionNode) node;
            DataAtom projectionAtom = Optional.ofNullable(
                    subQueryProjectionAtoms.get(constructionNode))
                    .orElseGet(() -> generateProjectionAtom(constructionNode.getVariables()));
            heads.add(new RuleHead(constructionNode.getSubstitution(), projectionAtom,
                    Optional.of(((UnaryIQTree) tree).getChild()))); // depends on control dependency: [if], data = [none]
            subQueryProjectionAtoms.put(constructionNode, projectionAtom); // depends on control dependency: [if], data = [none]
            Function mutAt = immutabilityTools.convertToMutableFunction(projectionAtom);
            body.add(mutAt); // depends on control dependency: [if], data = [none]
            return body; // depends on control dependency: [if], data = [none]
        }
        else if (node instanceof FilterNode) {
            ImmutableExpression filter = ((FilterNode) node).getFilterCondition();
            List<IQTree> children = tree.getChildren();
            body.addAll(getAtomFrom(children.get(0), heads, subQueryProjectionAtoms, true)); // depends on control dependency: [if], data = [none]
            filter.flattenAND().stream()
                    .map(immutabilityTools::convertToMutableBooleanExpression)
                    .forEach(body::add); // depends on control dependency: [if], data = [none]
            return body; // depends on control dependency: [if], data = [none]
        }
        else if (node instanceof DataNode) {
            DataAtom atom = ((DataNode) node).getProjectionAtom();
            Function mutAt = immutabilityTools.convertToMutableFunction(atom);
            body.add(mutAt); // depends on control dependency: [if], data = [none]
            return body; // depends on control dependency: [if], data = [none]
        /**
         * Nested Atoms
         */
        }
        else if (node instanceof InnerJoinNode) {
            return getAtomsFromJoinNode((InnerJoinNode) node, tree, heads,
                    subQueryProjectionAtoms, isNested); // depends on control dependency: [if], data = [none]
        }
        else if (node instanceof LeftJoinNode) {
            Optional<ImmutableExpression> filter = ((LeftJoinNode) node).getOptionalFilterCondition();
            BinaryNonCommutativeIQTree ljTree = (BinaryNonCommutativeIQTree) tree;
            List<Function> atomsListLeft = getAtomFrom(ljTree.getLeftChild(), heads,
                    subQueryProjectionAtoms, true);
            List<Function> atomsListRight = getAtomFrom(ljTree.getRightChild(), heads,
                    subQueryProjectionAtoms, true);
            if (filter.isPresent()) {
                ImmutableExpression filter2 = filter.get();
                Expression mutFilter = immutabilityTools.convertToMutableBooleanExpression(filter2);
                Function newLJAtom = datalogFactory.getSPARQLLeftJoin(atomsListLeft, atomsListRight,
                        Optional.of(mutFilter));
                body.add(newLJAtom); // depends on control dependency: [if], data = [none]
                return body; // depends on control dependency: [if], data = [none]
            } else {
                Function newLJAtom = datalogFactory.getSPARQLLeftJoin(atomsListLeft, atomsListRight,
                        Optional.empty());
                body.add(newLJAtom); // depends on control dependency: [if], data = [none]
                return body; // depends on control dependency: [if], data = [none]
            }
        }
        else if (node instanceof UnionNode) {
//            Optional<ConstructionNode> parentNode = te.getParent(node)
//                    .filter(p -> p instanceof ConstructionNode)
//                    .map(p -> (ConstructionNode) p);
//
//            DistinctVariableOnlyDataAtom freshHeadAtom;
//            if(parentNode.isPresent()) {
//                freshHeadAtom = generateProjectionAtom(parentNode.get().getChildVariables());
//            }
//            else{
//                freshHeadAtom = generateProjectionAtom(((UnionNode) node).getVariables());
//            }
            DistinctVariableOnlyDataAtom freshHeadAtom = generateProjectionAtom(((UnionNode) node).getVariables());
            for (IQTree child : tree.getChildren()) {
                QueryNode childRoot = child.getRootNode();
                if (childRoot instanceof ConstructionNode) {
                    ConstructionNode cn = (ConstructionNode) childRoot;
                    Optional<IQTree> grandChild = Optional.of(((UnaryIQTree) child).getChild());
                    subQueryProjectionAtoms.put(cn, freshHeadAtom); // depends on control dependency: [if], data = [none]
                    heads.add(new RuleHead(cn.getSubstitution(), freshHeadAtom, grandChild)); // depends on control dependency: [if], data = [none]
                } else {
                    ConstructionNode cn = iqFactory.createConstructionNode(((UnionNode) node).getVariables());
                    subQueryProjectionAtoms.put(cn, freshHeadAtom); // depends on control dependency: [if], data = [none]
                    heads.add(new RuleHead(cn.getSubstitution(), freshHeadAtom, Optional.of(child))); // depends on control dependency: [if], data = [none]
                }
            } //end for
            Function bodyAtom = immutabilityTools.convertToMutableFunction(freshHeadAtom);
            body.add(bodyAtom); // depends on control dependency: [if], data = [none]
            return body; // depends on control dependency: [if], data = [none]
        }
        else if (node instanceof TrueNode) {
            /**
             *
             * TODO: what should we do when it is the left child of a LJ?
             *
             * Add a 0-ary atom
             */
            //DataAtom projectionAtom = generateProjectionAtom(ImmutableSet.of());
            //heads.add(new RuleHead(new ImmutableSubstitutionImpl<>(ImmutableMap.of()), projectionAtom,Optional.empty()));
            //return body;
            if (isNested) {
                body.add(termFactory.getFunction(
                        datalogFactory.getDummyPredicate(++dummyPredCounter),
                        new ArrayList<>())); // depends on control dependency: [if], data = [none]
            }
            // Otherwise, ignores it
            return body; // depends on control dependency: [if], data = [none]
        }
        else {
            throw new UnsupportedOperationException("Unexpected type of node in the intermediate tree: " + node);
        }
    }
}
public class class_name {
    /**
     * Delivers {@code notification} to every registered notification target and
     * then forwards it to the parent context, timing the whole operation with
     * {@code notificationTimer}. A {@code RuntimeException} thrown by one target
     * is logged and skipped so the remaining targets still run.
     *
     * Bug fix: {@code timer.stop()} was previously skipped if forwarding to the
     * parent (or anything outside the per-target catch) threw, leaking the
     * timing context; it is now closed in a finally block.
     *
     * @param notification the notification to fan out (passed through unchanged)
     */
    public void sendNotification(final Notification notification) {
        ContextAwareTimer.Context timer = this.notificationTimer.time();
        try {
            if (!this.notificationTargets.isEmpty()) {
                for (final Map.Entry<UUID, Function<Notification, Void>> entry
                        : this.notificationTargets.entrySet()) {
                    try {
                        entry.getValue().apply(notification);
                    } catch (RuntimeException exception) {
                        // Skip a failing target; the rest must still be notified.
                        LOG.warn("RuntimeException when running notification target. Skipping.", exception);
                    }
                }
            }
            if (getParent().isPresent()) {
                getParent().get().sendNotification(notification);
            }
        } finally {
            // Always close the timing context, even if forwarding throws.
            timer.stop();
        }
    }
}
public class class_name {
    /**
     * Control-dependency-annotated copy of {@code sendNotification}: inline
     * "// depends on control dependency" markers (left untouched) tag each
     * statement with its guarding construct. Executable tokens are identical
     * to the plain variant.
     */
    public void sendNotification(final Notification notification) {
        ContextAwareTimer.Context timer = this.notificationTimer.time();
        if (!this.notificationTargets.isEmpty()) {
            for (final Map.Entry<UUID, Function<Notification, Void>> entry
                    : this.notificationTargets.entrySet()) {
                try {
                    entry.getValue().apply(notification); // depends on control dependency: [try], data = [none]
                } catch (RuntimeException exception) {
                    LOG.warn("RuntimeException when running notification target. Skipping.", exception);
                } // depends on control dependency: [catch], data = [none]
            }
        }
        if (getParent().isPresent()) {
            getParent().get().sendNotification(notification); // depends on control dependency: [if], data = [none]
        }
        timer.stop();
    }
}
public class class_name {
    /**
     * Creates an "internal" URL whose connections resolve the given path via
     * {@code ServletContext.getResourceAsStream} from the supplied external
     * context. The stream handler is evaluated lazily: the servlet context is
     * only consulted when {@code getInputStream()} is called on a connection.
     *
     * @param externalContext the JSF external context whose underlying context
     *                        must be a {@link ServletContext} at read time
     * @param path            the resource path to open
     * @return a URL with protocol "internal" backed by the custom handler
     * @throws MalformedURLException if the URL cannot be constructed
     */
    private static URL getUrlForResourceAsStream(final ExternalContext externalContext,
                                                 String path) throws MalformedURLException {
        URLStreamHandler handler = new URLStreamHandler() {
            protected URLConnection openConnection(URL u) throws IOException {
                final String file = u.getFile();
                return new URLConnection(u) {
                    public void connect() throws IOException {
                        // No-op: the resource is opened on demand in getInputStream().
                    }

                    public InputStream getInputStream() throws IOException {
                        if (log.isLoggable(Level.FINE)) {
                            log.fine("Opening internal url to " + file);
                        }
                        Object ctx = externalContext.getContext();
                        // Or maybe fetch the external context afresh ?
                        // Object ctx =
                        // FacesContext.getCurrentInstance().getExternalContext().getContext();
                        if (ctx instanceof ServletContext) {
                            ServletContext servletContext = (ServletContext) ctx;
                            InputStream stream = servletContext.getResourceAsStream(file);
                            if (stream == null) {
                                throw new FileNotFoundException("Cannot open resource " + file);
                            }
                            return stream;
                        } else {
                            throw new IOException("Cannot open resource for an context of "
                                    + (ctx != null ? ctx.getClass() : null));
                        }
                    }
                };
            }
        };
        return new URL("internal", null, 0, path, handler);
    }
}
public class class_name {
    /**
     * Control-dependency-annotated copy of {@code getUrlForResourceAsStream}:
     * inline "// depends on control dependency" markers (left untouched) tag
     * each statement with its guarding construct. Executable tokens are
     * identical to the plain variant.
     */
    private static URL getUrlForResourceAsStream(final ExternalContext externalContext,
                                                 String path) throws MalformedURLException {
        URLStreamHandler handler = new URLStreamHandler() {
            protected URLConnection openConnection(URL u) throws IOException {
                final String file = u.getFile();
                return new URLConnection(u) {
                    public void connect() throws IOException {
                    }

                    public InputStream getInputStream() throws IOException {
                        if (log.isLoggable(Level.FINE)) {
                            log.fine("Opening internal url to " + file); // depends on control dependency: [if], data = [none]
                        }
                        Object ctx = externalContext.getContext();
                        // Or maybe fetch the external context afresh ?
                        // Object ctx =
                        // FacesContext.getCurrentInstance().getExternalContext().getContext();
                        if (ctx instanceof ServletContext) {
                            ServletContext servletContext = (ServletContext) ctx;
                            InputStream stream = servletContext.getResourceAsStream(file);
                            if (stream == null) {
                                throw new FileNotFoundException("Cannot open resource " + file);
                            }
                            return stream; // depends on control dependency: [if], data = [none]
                        } else {
                            throw new IOException("Cannot open resource for an context of "
                                    + (ctx != null ? ctx.getClass() : null));
                        }
                    }
                };
            }
        };
        return new URL("internal", null, 0, path, handler);
    }
}
public class class_name {
    /**
     * Asynchronously deletes all objects matching {@code query} inside a fresh
     * write transaction and reports the result through {@code completionHandler}.
     * The query is defensively copied so later caller mutations do not affect
     * the in-flight operation.
     *
     * @param query             the query selecting objects to delete (copied)
     * @param completionHandler callback invoked with the JSON result or an error
     * @return a cancellable request handle for the started async task
     */
    public Request deleteByQueryAsync(@NonNull Query query, CompletionHandler completionHandler) {
        final WriteTransaction transaction = newTransaction();
        final Query queryCopy = new Query(query);
        return getClient().new AsyncTaskRequest(completionHandler, getClient().transactionExecutorService) {
            @NonNull
            @Override
            protected JSONObject run() throws AlgoliaException {
                try {
                    Collection<String> deletedObjectIDs = deleteByQuerySync(queryCopy, transaction);
                    transaction.commitSync();
                    // Result mirrors the server-side deleteByQuery response shape.
                    return new JSONObject()
                            .put("objectIDs", new JSONArray(deletedObjectIDs))
                            .put("updatedAt", DateUtils.iso8601String(new Date()))
                            .put("taskID", transaction.id);
                } catch (JSONException e) {
                    throw new RuntimeException(e); // should never happen
                }
            }
        }.start();
    }
}
public class class_name {
    /**
     * Control-dependency-annotated copy of {@code deleteByQueryAsync}: inline
     * "// depends on control dependency" markers (left untouched) tag each
     * statement with its guarding construct. Executable tokens are identical
     * to the plain variant.
     */
    public Request deleteByQueryAsync(@NonNull Query query, CompletionHandler completionHandler) {
        final WriteTransaction transaction = newTransaction();
        final Query queryCopy = new Query(query);
        return getClient().new AsyncTaskRequest(completionHandler, getClient().transactionExecutorService) {
            @NonNull
            @Override
            protected JSONObject run() throws AlgoliaException {
                try {
                    Collection<String> deletedObjectIDs = deleteByQuerySync(queryCopy, transaction);
                    transaction.commitSync(); // depends on control dependency: [try], data = [none]
                    return new JSONObject()
                            .put("objectIDs", new JSONArray(deletedObjectIDs))
                            .put("updatedAt", DateUtils.iso8601String(new Date()))
                            .put("taskID", transaction.id); // depends on control dependency: [try], data = [none]
                } catch (JSONException e) {
                    throw new RuntimeException(e); // should never happen
                } // depends on control dependency: [catch], data = [none]
            }
        }.start();
    }
}
public class class_name { private void populateAliasDestinations(Map<String, Object> properties, HashMap<String, BaseDestination> destinationList, ConfigurationAdmin configAdmin) { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) { SibTr.entry(tc, "populateAliasDestinations", new Object[] { properties, destinationList, configAdmin }); } String[] aliasDestinations = (String[]) properties.get(JsAdminConstants.ALIAS); if (aliasDestinations != null) { for (String aliasDestinationPid : aliasDestinations) { pids.add(aliasDestinationPid); Configuration config = null; try { config = configAdmin.getConfiguration(aliasDestinationPid, bundleLocation); } catch (IOException e) { SibTr.exception(tc, e); FFDCFilter.processException(e, this.getClass().getName(), "561", this); } Dictionary aliasDestinationProperties = config.getProperties(); AliasDestination aliasDest = new AliasDestinationImpl(); String aliasDestinationName = (String) aliasDestinationProperties.get(JsAdminConstants.ID); String targetDestinationName = (String) aliasDestinationProperties.get(JsAdminConstants.TARGETDESTINATION); if (destinationList.containsKey(aliasDestinationName)) { SibTr.error(tc, "ALIAS_SAME_DEST_ID_SIAS0125", new Object[] { aliasDestinationName }); continue; } if (aliasDestinationName != null && !aliasDestinationName.toString().trim() .isEmpty()) { if (targetDestinationName == null || targetDestinationName.toString().trim() .isEmpty()) { SibTr.error(tc, "INVALID_TARGET_DEST_SIAS0110", new Object[] { aliasDestinationProperties.get(JsAdminConstants.ID) }); continue; } if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) { SibTr.debug(this, tc, "Destination ID : " + aliasDestinationProperties.get(JsAdminConstants.ID)); } // set the name of the queue.Here ID is considered as the name aliasDest.setName(aliasDestinationName); // set the target destination aliasDest.setTargetDestination(targetDestinationName); //here local is false and alias is true as we are negotiating the 
destination //of type Alias and not Queue or Topic aliasDest.setLocal(false); aliasDest.setAlias(true); // set overrideOfQOSByProducerAllowed String forceReliablility = (String) aliasDestinationProperties.get(JsAdminConstants.FORCERELIABILITY); aliasDest.setDefaultReliability(forceReliablility); aliasDest.setMaximumReliability(forceReliablility); String sendAllowed = "false"; String receiveAllowed = "false"; if (destinationList.get(targetDestinationName) instanceof SIBDestination) { SIBDestination targetDestination = (SIBDestination) destinationList.get(targetDestinationName); if (targetDestination.isSendAllowed()) { sendAllowed = ((String) aliasDestinationProperties .get(JsAdminConstants.SENDALLOWED)); } receiveAllowed = String.valueOf(targetDestination.isReceiveAllowed()); } aliasDest.setSendAllowed(sendAllowed); aliasDest.setReceiveAllowed(receiveAllowed); } else { SibTr.error(tc, "NO_ID_PROVIDED_SIAS0102", new Object[] { JsAdminConstants.ALIAS }); continue; } destinationList.put(aliasDest.getName(), aliasDest); } } if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) { SibTr.exit(tc, "populateAliasDestinations", new Object[] { destinationList }); } } }
public class class_name {
    /**
     * Control-dependency-annotated copy of {@code populateAliasDestinations}:
     * inline "// depends on control dependency" markers (left untouched) tag
     * each statement with its guarding construct. Executable tokens are
     * identical to the plain variant.
     *
     * NOTE(review): if getConfiguration throws IOException, {@code config}
     * stays null and {@code config.getProperties()} below NPEs — confirm.
     */
    private void populateAliasDestinations(Map<String, Object> properties,
                                           HashMap<String, BaseDestination> destinationList,
                                           ConfigurationAdmin configAdmin) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            SibTr.entry(tc, "populateAliasDestinations",
                    new Object[] { properties, destinationList, configAdmin }); // depends on control dependency: [if], data = [none]
        }
        String[] aliasDestinations = (String[]) properties.get(JsAdminConstants.ALIAS);
        if (aliasDestinations != null) {
            for (String aliasDestinationPid : aliasDestinations) {
                pids.add(aliasDestinationPid); // depends on control dependency: [for], data = [aliasDestinationPid]
                Configuration config = null;
                try {
                    config = configAdmin.getConfiguration(aliasDestinationPid, bundleLocation); // depends on control dependency: [try], data = [none]
                } catch (IOException e) {
                    SibTr.exception(tc, e);
                    FFDCFilter.processException(e, this.getClass().getName(), "561", this);
                } // depends on control dependency: [catch], data = [none]
                Dictionary aliasDestinationProperties = config.getProperties();
                AliasDestination aliasDest = new AliasDestinationImpl();
                String aliasDestinationName = (String) aliasDestinationProperties.get(JsAdminConstants.ID);
                String targetDestinationName = (String) aliasDestinationProperties.get(JsAdminConstants.TARGETDESTINATION);
                if (destinationList.containsKey(aliasDestinationName)) {
                    SibTr.error(tc, "ALIAS_SAME_DEST_ID_SIAS0125", new Object[] { aliasDestinationName }); // depends on control dependency: [if], data = [none]
                    continue;
                }
                if (aliasDestinationName != null && !aliasDestinationName.toString().trim().isEmpty()) {
                    if (targetDestinationName == null || targetDestinationName.toString().trim().isEmpty()) {
                        SibTr.error(tc, "INVALID_TARGET_DEST_SIAS0110",
                                new Object[] { aliasDestinationProperties.get(JsAdminConstants.ID) }); // depends on control dependency: [if], data = [none]
                        continue;
                    }
                    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
                        SibTr.debug(this, tc, "Destination ID : " + aliasDestinationProperties.get(JsAdminConstants.ID)); // depends on control dependency: [if], data = [none]
                    }
                    // set the name of the queue.Here ID is considered as the name
                    aliasDest.setName(aliasDestinationName); // depends on control dependency: [if], data = [(aliasDestinationName]
                    // set the target destination
                    aliasDest.setTargetDestination(targetDestinationName); // depends on control dependency: [if], data = [none]
                    //here local is false and alias is true as we are negotiating the destination
                    //of type Alias and not Queue or Topic
                    aliasDest.setLocal(false); // depends on control dependency: [if], data = [none]
                    aliasDest.setAlias(true); // depends on control dependency: [if], data = [none]
                    // set overrideOfQOSByProducerAllowed
                    String forceReliablility = (String) aliasDestinationProperties.get(JsAdminConstants.FORCERELIABILITY);
                    aliasDest.setDefaultReliability(forceReliablility); // depends on control dependency: [if], data = [none]
                    aliasDest.setMaximumReliability(forceReliablility); // depends on control dependency: [if], data = [none]
                    String sendAllowed = "false";
                    String receiveAllowed = "false";
                    if (destinationList.get(targetDestinationName) instanceof SIBDestination) {
                        SIBDestination targetDestination = (SIBDestination) destinationList.get(targetDestinationName);
                        if (targetDestination.isSendAllowed()) {
                            sendAllowed = ((String) aliasDestinationProperties.get(JsAdminConstants.SENDALLOWED)); // depends on control dependency: [if], data = [none]
                        }
                        receiveAllowed = String.valueOf(targetDestination.isReceiveAllowed()); // depends on control dependency: [if], data = [none]
                    }
                    aliasDest.setSendAllowed(sendAllowed); // depends on control dependency: [if], data = [none]
                    aliasDest.setReceiveAllowed(receiveAllowed); // depends on control dependency: [if], data = [none]
                } else {
                    SibTr.error(tc, "NO_ID_PROVIDED_SIAS0102", new Object[] { JsAdminConstants.ALIAS }); // depends on control dependency: [if], data = [none]
                    continue;
                }
                destinationList.put(aliasDest.getName(), aliasDest); // depends on control dependency: [for], data = [none]
            }
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            SibTr.exit(tc, "populateAliasDestinations", new Object[] { destinationList }); // depends on control dependency: [if], data = [none]
        }
    }
}
public class class_name { private static boolean checkForMisplacedBindingAnnotations(Member member, Errors errors) { Annotation misplacedBindingAnnotation = Annotations.findBindingAnnotation( errors, member, ((AnnotatedElement) member).getAnnotations()); if (misplacedBindingAnnotation == null) { return false; } // don't warn about misplaced binding annotations on methods when there's a field with the same // name. In Scala, fields always get accessor methods (that we need to ignore). See bug 242. if (member instanceof Method) { try { if (member.getDeclaringClass().getDeclaredField(member.getName()) != null) { return false; } } catch (NoSuchFieldException ignore) { } } errors.misplacedBindingAnnotation(member, misplacedBindingAnnotation); return true; } }
public class class_name {
    /**
     * Control-dependency-annotated copy of
     * {@code checkForMisplacedBindingAnnotations}: inline "// depends on
     * control dependency" markers (left untouched) tag each statement with its
     * guarding construct. Executable tokens are identical to the plain variant.
     */
    private static boolean checkForMisplacedBindingAnnotations(Member member, Errors errors) {
        Annotation misplacedBindingAnnotation = Annotations.findBindingAnnotation(
                errors, member, ((AnnotatedElement) member).getAnnotations());
        if (misplacedBindingAnnotation == null) {
            return false; // depends on control dependency: [if], data = [none]
        }
        // don't warn about misplaced binding annotations on methods when there's a field with the same
        // name. In Scala, fields always get accessor methods (that we need to ignore). See bug 242.
        if (member instanceof Method) {
            try {
                if (member.getDeclaringClass().getDeclaredField(member.getName()) != null) {
                    return false; // depends on control dependency: [if], data = [none]
                }
            } catch (NoSuchFieldException ignore) {
            } // depends on control dependency: [catch], data = [none]
        }
        errors.misplacedBindingAnnotation(member, misplacedBindingAnnotation);
        return true;
    }
}
public class class_name {
    /**
     * Detaches the view (retaining presenter state via {@code detachView(true)})
     * and then drops the weak view reference, if one is still held, so the view
     * can be garbage-collected.
     */
    @Override
    public void detachView() {
        detachView(true);
        if (viewRef == null) {
            return; // nothing to release
        }
        viewRef.clear();
        viewRef = null;
    }
}
public class class_name {
    /**
     * Control-dependency-annotated copy of {@code detachView}: inline
     * "// depends on control dependency" markers (left untouched) tag each
     * statement with its guarding construct. Executable tokens are identical
     * to the plain variant.
     */
    @Override
    public void detachView() {
        detachView(true);
        if (viewRef != null) {
            viewRef.clear(); // depends on control dependency: [if], data = [none]
            viewRef = null; // depends on control dependency: [if], data = [none]
        }
    }
}
public class class_name {
    /**
     * Lazily creates the PMI statistics instance for this servlet cache
     * ({@code _statsInstance}) and, when disk offload is enabled, the disk
     * stats group and module. Does nothing if the stats instance already
     * exists. A {@code StatsFactoryException} is FFDC'd and traced, not
     * rethrown — stats enablement is best-effort.
     */
    public void enableServletCacheStats() {
        final String methodName = "enableServletCacheStats";
        try {
            // Only initialize once; subsequent calls are no-ops.
            if (null == _statsInstance) {
                _iCacheType = TYPE_SERVLET;
                if (tc.isDebugEnabled())
                    Tr.debug(tc, methodName + " Creating statistic for " + _sCacheName + " instance (servlet cache)");
                _statsInstance = StatsFactory.createStatsInstance(WSDynamicCacheStats.SERVLET_CACHE_TYPE_PREFIX + _sCacheName,
                                                                  _cacheRootStatsGroup, null, this);
                if (_diskOffloadEnabled) {
                    // Create the disk stats group once, then the per-cache disk module.
                    if (_cacheDiskStatsGroup == null) {
                        if (tc.isDebugEnabled())
                            Tr.debug(tc, methodName + " Creating disk group" + " for cacheName=" + _sCacheName);
                        _cacheDiskStatsGroup = StatsFactory.createStatsGroup(WSDynamicCacheStats.DISK_GROUP,
                                                                             templateCount13, _statsInstance, null, this);
                    }
                    _csmDisk = new CacheStatsModule(_sCacheName, WSDynamicCacheStats.DISK_OFFLOAD_ENABLED,
                                                    _cacheDiskStatsGroup, this);
                }
            }
        } catch (StatsFactoryException e) {
            com.ibm.ws.ffdc.FFDCFilter.processException(e, "com.ibm.ws.cache.stat.CacheStatsModule", "198", this);
            if (tc.isDebugEnabled())
                Tr.debug(tc, methodName + " Exception while enabling servlet cache stats template - cacheName="
                             + _sCacheName + ": " + ExceptionUtility.getStackTrace(e));
        }
    }
}
public class class_name {
    /**
     * Control-dependency-annotated copy of {@code enableServletCacheStats}:
     * inline "// depends on control dependency" markers (left untouched) tag
     * each statement with its guarding construct. Executable tokens are
     * identical to the plain variant.
     */
    public void enableServletCacheStats() {
        final String methodName = "enableServletCacheStats";
        try {
            if (null == _statsInstance) {
                _iCacheType = TYPE_SERVLET; // depends on control dependency: [if], data = [none]
                if (tc.isDebugEnabled())
                    Tr.debug(tc, methodName + " Creating statistic for " + _sCacheName + " instance (servlet cache)");
                _statsInstance = StatsFactory.createStatsInstance(WSDynamicCacheStats.SERVLET_CACHE_TYPE_PREFIX + _sCacheName,
                                                                  _cacheRootStatsGroup, null, this); // depends on control dependency: [if], data = [none]
                if (_diskOffloadEnabled) {
                    if (_cacheDiskStatsGroup == null) {
                        if (tc.isDebugEnabled())
                            Tr.debug(tc, methodName + " Creating disk group" + " for cacheName=" + _sCacheName);
                        _cacheDiskStatsGroup = StatsFactory.createStatsGroup(WSDynamicCacheStats.DISK_GROUP,
                                                                             templateCount13, _statsInstance, null, this); // depends on control dependency: [if], data = [none]
                    }
                    _csmDisk = new CacheStatsModule(_sCacheName, WSDynamicCacheStats.DISK_OFFLOAD_ENABLED,
                                                    _cacheDiskStatsGroup, this); // depends on control dependency: [if], data = [none]
                }
            }
        } catch (StatsFactoryException e) {
            com.ibm.ws.ffdc.FFDCFilter.processException(e, "com.ibm.ws.cache.stat.CacheStatsModule", "198", this);
            if (tc.isDebugEnabled())
                Tr.debug(tc, methodName + " Exception while enabling servlet cache stats template - cacheName="
                             + _sCacheName + ": " + ExceptionUtility.getStackTrace(e));
        } // depends on control dependency: [catch], data = [none]
    }
}
public class class_name { @SuppressWarnings("unchecked") protected <T> T map(Config config, String path, String name, Class<T> clazz) { T instance = newInstance(config, name, clazz); // Map config property names to bean properties. Map<String, String> propertyNames = new HashMap<>(); for (Map.Entry<String, ConfigValue> configProp : config.root().entrySet()) { String originalName = configProp.getKey(); String camelName = toCamelCase(originalName); // if a setting is in there both as some hyphen name and the camel name, // the camel one wins if (!propertyNames.containsKey(camelName) || originalName.equals(camelName)) { propertyNames.put(camelName, originalName); } } // First use setters and then fall back to fields. mapSetters(instance, clazz, path, name, propertyNames, config); mapFields(instance, clazz, path, name, propertyNames, config); // If any properties present in the configuration were not found on config beans, throw an exception. if (!propertyNames.isEmpty()) { checkRemainingProperties(propertyNames.keySet(), describeProperties(instance), toPath(path, name), clazz); } return instance; } }
public class class_name {
    /**
     * Maps the given configuration onto a newly created bean of the
     * requested class: hyphenated config keys are translated to camelCase
     * (camelCase spellings win on collision), setters are tried before
     * fields, and any unmatched property triggers an exception.
     */
    @SuppressWarnings("unchecked")
    protected <T> T map(Config config, String path, String name, Class<T> clazz) {
        T instance = newInstance(config, name, clazz);

        // Map config property names to bean properties.
        Map<String, String> propertyNames = new HashMap<>();
        for (Map.Entry<String, ConfigValue> configProp : config.root().entrySet()) {
            String originalName = configProp.getKey();
            String camelName = toCamelCase(originalName);
            // if a setting is in there both as some hyphen name and the camel name,
            // the camel one wins
            if (!propertyNames.containsKey(camelName) || originalName.equals(camelName)) {
                propertyNames.put(camelName, originalName); // depends on control dependency: [if], data = [none]
            }
        }

        // First use setters and then fall back to fields.
        mapSetters(instance, clazz, path, name, propertyNames, config);
        mapFields(instance, clazz, path, name, propertyNames, config);

        // If any properties present in the configuration were not found on config beans, throw an exception.
        if (!propertyNames.isEmpty()) {
            checkRemainingProperties(propertyNames.keySet(), describeProperties(instance),
                    toPath(path, name), clazz); // depends on control dependency: [if], data = [none]
        }
        return instance;
    }
}
public class class_name { public static void checkQuery(Query<?> query, Set<String> whitelist, Set<String> blacklist, InjectionManager manager) { ResourceInfo resource = manager.getInstance(ResourceInfo.class); Class<?> rc = resource.getResourceClass(); Set<String> wl = null, bl = null; if (rc != null) { Filter filter = rc.getAnnotation(Filter.class); if (filter != null) { if (filter.whitelist().length > 0) { wl = Sets.newLinkedHashSet(); Collections.addAll(wl, filter.whitelist()); } if (filter.blacklist().length > 0) { bl = Sets.newLinkedHashSet(); Collections.addAll(bl, filter.blacklist()); } } } if (whitelist != null) { if (wl == null) { wl = Sets.newLinkedHashSet(); } wl.addAll(whitelist); } if (blacklist != null) { if (bl == null) { bl = Sets.newLinkedHashSet(); } bl.addAll(blacklist); } checkQuery((SpiQuery) query, wl, bl, manager.getInstance(Application.Mode.class).isProd()); } }
public class class_name {
    /**
     * Applies path white/black lists to a query before execution: lists
     * from a {@code @Filter} annotation on the resource class are merged
     * with the explicitly supplied ones, then forwarded to the SpiQuery
     * check along with the application mode.
     */
    public static void checkQuery(Query<?> query, Set<String> whitelist, Set<String> blacklist,
            InjectionManager manager) {
        ResourceInfo resource = manager.getInstance(ResourceInfo.class);
        Class<?> rc = resource.getResourceClass();
        Set<String> wl = null, bl = null;
        // Seed the lists from the resource class annotation, if present.
        if (rc != null) {
            Filter filter = rc.getAnnotation(Filter.class);
            if (filter != null) {
                if (filter.whitelist().length > 0) {
                    wl = Sets.newLinkedHashSet(); // depends on control dependency: [if], data = [none]
                    Collections.addAll(wl, filter.whitelist()); // depends on control dependency: [if], data = [none]
                }
                if (filter.blacklist().length > 0) {
                    bl = Sets.newLinkedHashSet(); // depends on control dependency: [if], data = [none]
                    Collections.addAll(bl, filter.blacklist()); // depends on control dependency: [if], data = [none]
                }
            }
        }
        // Merge in the explicitly supplied lists.
        if (whitelist != null) {
            if (wl == null) {
                wl = Sets.newLinkedHashSet(); // depends on control dependency: [if], data = [none]
            }
            wl.addAll(whitelist); // depends on control dependency: [if], data = [(whitelist]
        }
        if (blacklist != null) {
            if (bl == null) {
                bl = Sets.newLinkedHashSet(); // depends on control dependency: [if], data = [none]
            }
            bl.addAll(blacklist); // depends on control dependency: [if], data = [(blacklist]
        }
        checkQuery((SpiQuery) query, wl, bl, manager.getInstance(Application.Mode.class).isProd());
    }
}
public class class_name {
    /**
     * Registers a prefix name / prefix pair on the underlying prefix
     * manager. A non-default short form provider is replaced first —
     * presumably the reset installs a DefaultPrefixManager so the cast
     * below succeeds (TODO confirm against resetShortFormProvider).
     */
    public void setPrefix(@Nonnull String prefixName, @Nonnull String prefix) {
        if (!isUsingDefaultShortFormProvider()) {
            resetShortFormProvider();
        }
        DefaultPrefixManager prefixManager = (DefaultPrefixManager) shortFormProvider;
        prefixManager.setPrefix(prefixName, prefix);
    }
}
public class class_name {
    /**
     * Registers a prefix name / prefix pair on the underlying prefix
     * manager, first replacing any non-default short form provider
     * (presumably the reset installs a DefaultPrefixManager so the cast
     * below succeeds - TODO confirm).
     */
    public void setPrefix(@Nonnull String prefixName, @Nonnull String prefix) {
        if (!isUsingDefaultShortFormProvider()) {
            resetShortFormProvider(); // depends on control dependency: [if], data = [none]
        }
        ((DefaultPrefixManager) shortFormProvider).setPrefix(prefixName, prefix);
    }
}
public class class_name {
    /**
     * Returns the oriented-bounding-box operation metadata when this
     * oneof currently holds that case (case tag 14); otherwise returns
     * the message's default instance, never null.
     */
    public com.google.cloud.datalabeling.v1beta1
            .LabelImageOrientedBoundingBoxOperationMetadataOrBuilder
            getImageOrientedBoundingBoxDetailsOrBuilder() {
        return detailsCase_ == 14
                ? (com.google.cloud.datalabeling.v1beta1.LabelImageOrientedBoundingBoxOperationMetadata) details_
                : com.google.cloud.datalabeling.v1beta1.LabelImageOrientedBoundingBoxOperationMetadata
                        .getDefaultInstance();
    }
}
public class class_name {
    /**
     * Returns the oriented-bounding-box operation metadata when this
     * oneof currently holds that case (case tag 14); otherwise returns
     * the message's default instance, never null.
     */
    public com.google.cloud.datalabeling.v1beta1
            .LabelImageOrientedBoundingBoxOperationMetadataOrBuilder
            getImageOrientedBoundingBoxDetailsOrBuilder() {
        if (detailsCase_ == 14) {
            return (com.google.cloud.datalabeling.v1beta1.LabelImageOrientedBoundingBoxOperationMetadata) details_; // depends on control dependency: [if], data = [none]
        }
        return com.google.cloud.datalabeling.v1beta1.LabelImageOrientedBoundingBoxOperationMetadata
                .getDefaultInstance();
    }
}
public class class_name {
    /**
     * Fetches entity documents for the given page titles on one site,
     * batching requests so that at most {@code maxListSize} titles are
     * sent per API call.
     *
     * @param siteKey site identifier of the site the titles belong to
     * @param titles page titles to resolve
     * @return map from title to the corresponding entity document
     * @throws MediaWikiApiErrorException if the API reports an error
     * @throws IOException on communication problems
     */
    public Map<String, EntityDocument> getEntityDocumentsByTitle(
            String siteKey, List<String> titles)
            throws MediaWikiApiErrorException, IOException {
        // Private mutable copy (idiomatic copy constructor instead of
        // create-then-addAll); batches are removed from it as they are sent.
        List<String> newTitles = new ArrayList<>(titles);
        Map<String, EntityDocument> result = new HashMap<>();
        boolean moreItems = !newTitles.isEmpty();
        while (moreItems) {
            List<String> subListOfTitles;
            if (newTitles.size() <= maxListSize) {
                // Last batch: use the remaining titles directly.
                subListOfTitles = newTitles;
                moreItems = false;
            } else {
                // subList is a VIEW of newTitles; clearing it below
                // removes the just-sent batch from newTitles.
                subListOfTitles = newTitles.subList(0, maxListSize);
            }
            WbGetEntitiesActionData properties = new WbGetEntitiesActionData();
            properties.titles = ApiConnection.implodeObjects(subListOfTitles);
            properties.sites = siteKey;
            result.putAll(getEntityDocumentMap(subListOfTitles.size(), properties));
            subListOfTitles.clear();
        }
        return result;
    }
}
public class class_name {
    /**
     * Fetches entity documents for the given page titles on one site,
     * batching at most maxListSize titles per API request.
     *
     * @param siteKey site identifier of the site the titles belong to
     * @param titles page titles to resolve
     * @return map from title to the corresponding entity document
     * @throws MediaWikiApiErrorException if the API reports an error
     * @throws IOException on communication problems
     */
    public Map<String, EntityDocument> getEntityDocumentsByTitle(
            String siteKey, List<String> titles)
            throws MediaWikiApiErrorException, IOException {
        // Private mutable copy; batches are removed from it as they are sent.
        List<String> newTitles = new ArrayList<>();
        newTitles.addAll(titles);
        Map<String, EntityDocument> result = new HashMap<>();
        boolean moreItems = !newTitles.isEmpty();
        while (moreItems) {
            List<String> subListOfTitles;
            if (newTitles.size() <= maxListSize) {
                subListOfTitles = newTitles; // depends on control dependency: [if], data = [none]
                moreItems = false; // depends on control dependency: [if], data = [none]
            } else {
                // View into newTitles; clearing it below drops the sent batch.
                subListOfTitles = newTitles.subList(0, maxListSize); // depends on control dependency: [if], data = [none]
            }
            WbGetEntitiesActionData properties = new WbGetEntitiesActionData();
            properties.titles = ApiConnection.implodeObjects(subListOfTitles);
            properties.sites = siteKey;
            result.putAll(getEntityDocumentMap(subListOfTitles.size(), properties));
            subListOfTitles.clear();
        }
        return result;
    }
}
public class class_name {
    /**
     * Parses an XML options document from the given stream into an
     * OptionElement tree. The document's root element must match the
     * configured outer tag.
     *
     * NOTE(review): the parser is created with default factory settings,
     * so DTD / external entity processing is not disabled - avoid feeding
     * it untrusted XML, or enable secure processing. TODO confirm inputs
     * are trusted.
     */
    public OptionElement parseOptions(final InputStream is) throws OptionsException {
        Reader reader = null;
        try {
            reader = new InputStreamReader(is);
            DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
            factory.setNamespaceAware(false);
            Document doc = factory.newDocumentBuilder().parse(new InputSource(reader));

            /* We expect a root element named as specified */
            Element root = doc.getDocumentElement();
            if (!root.getNodeName().equals(outerTag.getLocalPart())) {
                throw new OptionsException("org.bedework.bad.options");
            }

            OptionElement optionElement = new OptionElement();
            optionElement.name = "root";
            doChildren(optionElement, root, new Stack<Object>());
            return optionElement;
        } catch (OptionsException oe) {
            // Already the expected type - rethrow untouched.
            throw oe;
        } catch (Throwable t) {
            throw new OptionsException(t);
        } finally {
            if (reader != null) {
                try {
                    reader.close();
                } catch (Throwable ignored) {
                    // best-effort close
                }
            }
        }
    }
}
public class class_name {
    /**
     * Parses an XML options document from the stream into an OptionElement
     * tree rooted at an element matching the configured outer tag.
     *
     * NOTE(review): the parser uses default factory settings, so DTD /
     * external entity processing is not disabled - avoid untrusted XML, or
     * enable secure processing. TODO confirm inputs are trusted.
     */
    public OptionElement parseOptions(final InputStream is) throws OptionsException {
        Reader rdr = null;
        try {
            rdr = new InputStreamReader(is);
            DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
            factory.setNamespaceAware(false);
            DocumentBuilder builder = factory.newDocumentBuilder();
            Document doc = builder.parse(new InputSource(rdr));

            /* We expect a root element named as specified */
            Element root = doc.getDocumentElement();
            if (!root.getNodeName().equals(outerTag.getLocalPart())) {
                throw new OptionsException("org.bedework.bad.options");
            }

            OptionElement oel = new OptionElement();
            oel.name = "root";
            doChildren(oel, root, new Stack<Object>());
            return oel;
        } catch (OptionsException ce) {
            // Already the expected type - rethrow untouched.
            throw ce;
        } catch (Throwable t) {
            throw new OptionsException(t);
        } finally {
            if (rdr != null) {
                try {
                    rdr.close(); // depends on control dependency: [try], data = [none]
                } catch (Throwable t) {} // depends on control dependency: [catch], data = [none]
            }
        }
    }
}
public class class_name { private void processPropertyPlaceHolders() { Map<String, PropertyResourceConfigurer> prcs = applicationContext.getBeansOfType(PropertyResourceConfigurer.class); if (!prcs.isEmpty() && applicationContext instanceof ConfigurableApplicationContext) { BeanDefinition mapperScannerBean = ((ConfigurableApplicationContext) applicationContext) .getBeanFactory().getBeanDefinition(beanName); // PropertyResourceConfigurer does not expose any methods to explicitly perform // property placeholder substitution. Instead, create a BeanFactory that just // contains this mapper scanner and post process the factory. DefaultListableBeanFactory factory = new DefaultListableBeanFactory(); factory.registerBeanDefinition(beanName, mapperScannerBean); for (PropertyResourceConfigurer prc : prcs.values()) { prc.postProcessBeanFactory(factory); } PropertyValues values = mapperScannerBean.getPropertyValues(); this.basePackage = updatePropertyValue("basePackage", values); this.sqlSessionFactoryBeanName = updatePropertyValue("sqlSessionFactoryBeanName", values); this.sqlSessionTemplateBeanName = updatePropertyValue("sqlSessionTemplateBeanName", values); } } }
public class class_name {
    /**
     * Resolves ${...} placeholders in this scanner's own bean definition
     * by running each registered PropertyResourceConfigurer against a
     * throw-away bean factory containing just that definition, then copies
     * the substituted values back onto this instance.
     */
    private void processPropertyPlaceHolders() {
        Map<String, PropertyResourceConfigurer> prcs = applicationContext.getBeansOfType(PropertyResourceConfigurer.class);

        if (!prcs.isEmpty() && applicationContext instanceof ConfigurableApplicationContext) {
            BeanDefinition mapperScannerBean = ((ConfigurableApplicationContext) applicationContext)
                    .getBeanFactory().getBeanDefinition(beanName);

            // PropertyResourceConfigurer does not expose any methods to explicitly perform
            // property placeholder substitution. Instead, create a BeanFactory that just
            // contains this mapper scanner and post process the factory.
            DefaultListableBeanFactory factory = new DefaultListableBeanFactory();
            factory.registerBeanDefinition(beanName, mapperScannerBean); // depends on control dependency: [if], data = [none]

            for (PropertyResourceConfigurer prc : prcs.values()) {
                prc.postProcessBeanFactory(factory); // depends on control dependency: [for], data = [prc]
            }

            // Copy the (now substituted) property values back onto this instance.
            PropertyValues values = mapperScannerBean.getPropertyValues();

            this.basePackage = updatePropertyValue("basePackage", values); // depends on control dependency: [if], data = [none]
            this.sqlSessionFactoryBeanName = updatePropertyValue("sqlSessionFactoryBeanName", values); // depends on control dependency: [if], data = [none]
            this.sqlSessionTemplateBeanName = updatePropertyValue("sqlSessionTemplateBeanName", values); // depends on control dependency: [if], data = [none]
        }
    }
}
public class class_name {
    /**
     * Returns the boost configured by the indexing rule applicable to the
     * given node, or DEFAULT_BOOST when no rule applies.
     */
    public float getNodeBoost(NodeData state) {
        IndexingRule rule = getApplicableIndexingRule(state);
        return (rule == null) ? DEFAULT_BOOST : rule.getNodeBoost();
    }
}
public class class_name {
    /**
     * Returns the boost configured by the indexing rule applicable to the
     * given node, or DEFAULT_BOOST when no rule applies.
     */
    public float getNodeBoost(NodeData state) {
        IndexingRule rule = getApplicableIndexingRule(state);
        if (rule != null) {
            return rule.getNodeBoost(); // depends on control dependency: [if], data = [none]
        }
        return DEFAULT_BOOST;
    }
}
public class class_name {
    /**
     * Handles skin events on top of the superclass handling: RECALC
     * updates the bar to the gauge's current value, SECTION reloads the
     * gauge's sections, VISIBILITY toggles the value text. Each path
     * redraws (and resizes where needed).
     */
    @Override
    protected void handleEvents(final String EVENT_TYPE) {
        super.handleEvents(EVENT_TYPE);
        if ("RECALC".equals(EVENT_TYPE)) {
            setBar(gauge.getCurrentValue());
            resize();
            redraw();
        } else if ("SECTION".equals(EVENT_TYPE)) {
            sections = gauge.getSections();
            resize();
            redraw();
        } else if ("VISIBILITY".equals(EVENT_TYPE)) {
            // Query visibility once instead of twice (call was duplicated).
            final boolean valueVisible = gauge.isValueVisible();
            valueText.setVisible(valueVisible);
            valueText.setManaged(valueVisible);
            redraw();
        }
    }
}
public class class_name {
    /**
     * Handles skin events on top of the superclass handling: RECALC
     * updates the bar to the gauge's current value, SECTION reloads the
     * gauge's sections, VISIBILITY toggles the value text; each path
     * resizes/redraws as appropriate.
     */
    @Override
    protected void handleEvents(final String EVENT_TYPE) {
        super.handleEvents(EVENT_TYPE);
        if ("RECALC".equals(EVENT_TYPE)) {
            setBar(gauge.getCurrentValue()); // depends on control dependency: [if], data = [none]
            resize(); // depends on control dependency: [if], data = [none]
            redraw(); // depends on control dependency: [if], data = [none]
        } else if ("SECTION".equals(EVENT_TYPE)) {
            sections = gauge.getSections(); // depends on control dependency: [if], data = [none]
            resize(); // depends on control dependency: [if], data = [none]
            redraw(); // depends on control dependency: [if], data = [none]
        } else if ("VISIBILITY".equals(EVENT_TYPE)) {
            valueText.setVisible(gauge.isValueVisible()); // depends on control dependency: [if], data = [none]
            valueText.setManaged(gauge.isValueVisible()); // depends on control dependency: [if], data = [none]
            redraw(); // depends on control dependency: [if], data = [none]
        }
    }
}
public class class_name {
    /**
     * Reads and parses JSON from the reader into an instance of rootType.
     * Runtime exceptions propagate unchanged; checked exceptions are
     * wrapped in RuntimeException.
     */
    public <T> T read(Reader input, Class<T> rootType) {
        JodaBeanUtils.notNull(input, "input");
        JodaBeanUtils.notNull(rootType, "rootType");
        try {
            return parseRoot(new JsonInput(input), rootType);
        } catch (RuntimeException ex) {
            // Already unchecked - rethrow as-is.
            throw ex;
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }
}
public class class_name {
    /**
     * Reads and parses JSON from the reader into an instance of rootType.
     * Runtime exceptions propagate unchanged; checked exceptions are
     * wrapped in RuntimeException.
     */
    public <T> T read(Reader input, Class<T> rootType) {
        JodaBeanUtils.notNull(input, "input");
        JodaBeanUtils.notNull(rootType, "rootType");
        try {
            JsonInput jsonInput = new JsonInput(input);
            return parseRoot(jsonInput, rootType); // depends on control dependency: [try], data = [none]
        } catch (RuntimeException ex) {
            // Already unchecked - rethrow as-is.
            throw ex;
        } catch (Exception ex) { // depends on control dependency: [catch], data = [none]
            throw new RuntimeException(ex);
        } // depends on control dependency: [catch], data = [none]
    }
}
public class class_name { @SuppressWarnings("unchecked") public static void addIQProvider(String elementName, String namespace, Object provider) { validate(elementName, namespace); // First remove existing providers String key = removeIQProvider(elementName, namespace); if (provider instanceof IQProvider) { iqProviders.put(key, (IQProvider<IQ>) provider); } else { throw new IllegalArgumentException("Provider must be an IQProvider"); } } }
public class class_name {
    /**
     * Registers an IQ provider for the element/namespace pair, replacing
     * any previously registered provider.
     *
     * NOTE(review): the old provider is removed BEFORE the type check, so
     * a rejected provider still unregisters the existing one - confirm
     * whether this is intended.
     *
     * @throws IllegalArgumentException if provider is not an IQProvider
     */
    @SuppressWarnings("unchecked")
    public static void addIQProvider(String elementName, String namespace, Object provider) {
        validate(elementName, namespace);
        // First remove existing providers
        String key = removeIQProvider(elementName, namespace);

        if (provider instanceof IQProvider) {
            iqProviders.put(key, (IQProvider<IQ>) provider); // depends on control dependency: [if], data = [none]
        } else {
            throw new IllegalArgumentException("Provider must be an IQProvider");
        }
    }
}
public class class_name {
    /**
     * Returns the name of the first parameter of the given method whose
     * type equals the supplied TypeName, or null when no parameter
     * matches.
     */
    public static String getNameParameterOfType(ModelMethod method, TypeName parameter) {
        for (Pair<String, TypeName> entry : method.getParameters()) {
            if (entry.value1.equals(parameter)) {
                return entry.value0;
            }
        }
        return null; // no parameter of the requested type
    }
}
public class class_name {
    /**
     * Returns the name of the first parameter of the given method whose
     * type equals the supplied TypeName, or null when none matches.
     */
    public static String getNameParameterOfType(ModelMethod method, TypeName parameter) {
        for (Pair<String, TypeName> item : method.getParameters()) {
            if (item.value1.equals(parameter)) {
                return item.value0; // depends on control dependency: [if], data = [none]
            }
        }
        return null;
    }
}
public class class_name {
    /**
     * Obtains a YearWeek from a week-based year and week number. A week 53
     * that the given year does not have (presumably weekRange returns the
     * number of weeks in that year - confirm) is rolled over to week 1 of
     * the following year, which is then re-validated.
     *
     * @param weekBasedYear the week-based year
     * @param week the week of the week-based year
     * @return the resulting YearWeek
     */
    public static YearWeek of(int weekBasedYear, int week) {
        WEEK_BASED_YEAR.range().checkValidValue(weekBasedYear, WEEK_BASED_YEAR);
        WEEK_OF_WEEK_BASED_YEAR.range().checkValidValue(week, WEEK_OF_WEEK_BASED_YEAR);
        if (week == 53 && weekRange(weekBasedYear) < 53) {
            // Roll over into week 1 of the next year.
            week = 1;
            weekBasedYear++;
            // Re-validate: the increment may have left the supported range.
            WEEK_BASED_YEAR.range().checkValidValue(weekBasedYear, WEEK_BASED_YEAR);
        }
        return new YearWeek(weekBasedYear, week);
    }
}
public class class_name {
    /**
     * Obtains a YearWeek from a week-based year and week number, rolling a
     * week 53 that the given year does not have (per weekRange) over to
     * week 1 of the following year, which is then re-validated.
     */
    public static YearWeek of(int weekBasedYear, int week) {
        WEEK_BASED_YEAR.range().checkValidValue(weekBasedYear, WEEK_BASED_YEAR);
        WEEK_OF_WEEK_BASED_YEAR.range().checkValidValue(week, WEEK_OF_WEEK_BASED_YEAR);
        if (week == 53 && weekRange(weekBasedYear) < 53) {
            week = 1; // depends on control dependency: [if], data = [none]
            weekBasedYear++; // depends on control dependency: [if], data = [none]
            WEEK_BASED_YEAR.range().checkValidValue(weekBasedYear, WEEK_BASED_YEAR); // depends on control dependency: [if], data = [(week]
        }
        return new YearWeek(weekBasedYear, week);
    }
}
public class class_name { public void onTimerEvent(TimerEvent event, ActivityContextInterface aci) { // time to refresh :) if (getSubscribeRequestTypeCMP() != null) { return; } try { DialogActivity da = (DialogActivity) aci.getActivity(); Request refreshSubscribe = createRefresh(da, getSubscriptionData()); setSubscribeRequestTypeCMP(SubscribeRequestType.REFRESH); da.sendRequest(refreshSubscribe); } catch (Exception e) { if (tracer.isSevereEnabled()) { tracer.severe("Failed to send unSUBSCRIBE for forked dialog.", e); } } } }
public class class_name {
    /**
     * Timer callback: sends a refresh SUBSCRIBE on the dialog bound to
     * the activity context, unless a SUBSCRIBE is already pending (CMP
     * field non-null). NOTE(review): the severe log message mentions
     * "unSUBSCRIBE" although a refresh SUBSCRIBE is sent - looks wrong;
     * confirm before changing.
     */
    public void onTimerEvent(TimerEvent event, ActivityContextInterface aci) {
        // time to refresh :)
        if (getSubscribeRequestTypeCMP() != null) {
            return; // depends on control dependency: [if], data = [none]
        }
        try {
            DialogActivity da = (DialogActivity) aci.getActivity();
            Request refreshSubscribe = createRefresh(da, getSubscriptionData());
            setSubscribeRequestTypeCMP(SubscribeRequestType.REFRESH); // depends on control dependency: [try], data = [none]
            da.sendRequest(refreshSubscribe); // depends on control dependency: [try], data = [none]
        } catch (Exception e) {
            if (tracer.isSevereEnabled()) {
                tracer.severe("Failed to send unSUBSCRIBE for forked dialog.", e); // depends on control dependency: [if], data = [none]
            }
        } // depends on control dependency: [catch], data = [none]
    }
}
public class class_name {
    /**
     * Sets the arc's start angle and extent from two points measured
     * relative to the arc's center: the start angle points at (x1,y1) and
     * the extent spans to the direction of (x2,y2), normalised into
     * (0, 360].
     */
    public void setAngles (float x1, float y1, float x2, float y2) {
        float cx = centerX();
        float cy = centerY();
        float startAngle = normAngle(-FloatMath.toDegrees(FloatMath.atan2(y1 - cy, x1 - cx)));
        float endAngle = normAngle(-FloatMath.toDegrees(FloatMath.atan2(y2 - cy, x2 - cx)));
        float extent = endAngle - startAngle;
        if (extent <= 0f) {
            extent += 360f; // wrap so the extent is always positive
        }
        setAngleStart(startAngle);
        setAngleExtent(extent);
    }
}
public class class_name {
    /**
     * Sets the arc's start angle and extent from two points measured
     * relative to the arc's center; the extent is normalised into
     * (0, 360].
     */
    public void setAngles (float x1, float y1, float x2, float y2) {
        float cx = centerX();
        float cy = centerY();
        float a1 = normAngle(-FloatMath.toDegrees(FloatMath.atan2(y1 - cy, x1 - cx)));
        float a2 = normAngle(-FloatMath.toDegrees(FloatMath.atan2(y2 - cy, x2 - cx)));
        a2 -= a1;
        if (a2 <= 0f) {
            a2 += 360f; // depends on control dependency: [if], data = [none]
        }
        setAngleStart(a1);
        setAngleExtent(a2);
    }
}
public class class_name { private static DataSet<Tuple2<String, String>> getDocumentsDataSet(ExecutionEnvironment env, ParameterTool params) { // Create DataSet for documents relation (URL, Doc-Text) if (params.has("documents")) { return env.readCsvFile(params.get("documents")) .fieldDelimiter("|") .types(String.class, String.class); } else { System.out.println("Executing WebLogAnalysis example with default documents data set."); System.out.println("Use --documents to specify file input."); return WebLogData.getDocumentDataSet(env); } } }
public class class_name {
    /**
     * Builds the documents DataSet (URL, document text): reads a
     * '|'-delimited CSV file when --documents is given, otherwise falls
     * back to the bundled WebLogData sample and prints a hint.
     */
    private static DataSet<Tuple2<String, String>> getDocumentsDataSet(ExecutionEnvironment env, ParameterTool params) {
        // Create DataSet for documents relation (URL, Doc-Text)
        if (params.has("documents")) {
            return env.readCsvFile(params.get("documents"))
                    .fieldDelimiter("|")
                    .types(String.class, String.class); // depends on control dependency: [if], data = [none]
        } else {
            System.out.println("Executing WebLogAnalysis example with default documents data set."); // depends on control dependency: [if], data = [none]
            System.out.println("Use --documents to specify file input."); // depends on control dependency: [if], data = [none]
            return WebLogData.getDocumentDataSet(env); // depends on control dependency: [if], data = [none]
        }
    }
}
public class class_name {
    /**
     * Pauses playback via the PlaybackService, but only when currently in
     * the PLAYING state; updates the local state to PAUSED afterwards.
     */
    public void pause() {
        checkState();
        if (mState != STATE_PLAYING) {
            return; // not playing - nothing to pause
        }
        PlaybackService.pause(getContext(), mClientKey);
        mState = STATE_PAUSED;
    }
}
public class class_name {
    /**
     * Pauses playback via the PlaybackService, but only when currently in
     * the PLAYING state; updates the local state to PAUSED afterwards.
     */
    public void pause() {
        checkState();
        if (mState == STATE_PLAYING) {
            PlaybackService.pause(getContext(), mClientKey); // depends on control dependency: [if], data = [none]
            mState = STATE_PAUSED; // depends on control dependency: [if], data = [none]
        }
    }
}